//===-- SafeStack.cpp - Safe Stack Insertion ------------------------------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This pass splits the stack into the safe stack (kept as-is for LLVM backend)
// and the unsafe stack (explicitly allocated and managed through the runtime
// support library).
//
// http://clang.llvm.org/docs/SafeStack.html
//
//===----------------------------------------------------------------------===//

#include "llvm/Transforms/Instrumentation.h"
#include "llvm/ADT/Statistic.h"
#include "llvm/ADT/Triple.h"
#include "llvm/Analysis/ScalarEvolution.h"
#include "llvm/Analysis/ScalarEvolutionExpressions.h"
#include "llvm/CodeGen/Passes.h"
#include "llvm/IR/Constants.h"
#include "llvm/IR/DataLayout.h"
#include "llvm/IR/DerivedTypes.h"
#include "llvm/IR/DIBuilder.h"
#include "llvm/IR/Function.h"
#include "llvm/IR/InstIterator.h"
#include "llvm/IR/Instructions.h"
#include "llvm/IR/IntrinsicInst.h"
#include "llvm/IR/Intrinsics.h"
#include "llvm/IR/IRBuilder.h"
#include "llvm/IR/Module.h"
#include "llvm/Pass.h"
#include "llvm/Support/CommandLine.h"
#include "llvm/Support/Debug.h"
#include "llvm/Support/Format.h"
#include "llvm/Support/MathExtras.h"
#include "llvm/Support/raw_os_ostream.h"
#include "llvm/Target/TargetLowering.h"
#include "llvm/Target/TargetSubtargetInfo.h"
#include "llvm/Transforms/Utils/Local.h"
#include "llvm/Transforms/Utils/ModuleUtils.h"

using namespace llvm;

#define DEBUG_TYPE "safestack"

enum UnsafeStackPtrStorageVal { ThreadLocalUSP, SingleThreadUSP };

static cl::opt<UnsafeStackPtrStorageVal> USPStorage("safe-stack-usp-storage",
    cl::Hidden, cl::init(ThreadLocalUSP),
    cl::desc("Type of storage for the unsafe stack pointer"),
    cl::values(clEnumValN(ThreadLocalUSP, "thread-local",
                          "Thread-local storage"),
               clEnumValN(SingleThreadUSP, "single-thread",
                          "Non-thread-local storage"),
               clEnumValEnd));

namespace llvm {

STATISTIC(NumFunctions, "Total number of functions");
STATISTIC(NumUnsafeStackFunctions, "Number of functions with unsafe stack");
STATISTIC(NumUnsafeStackRestorePointsFunctions,
          "Number of functions that use setjmp or exceptions");

STATISTIC(NumAllocas, "Total number of allocas");
STATISTIC(NumUnsafeStaticAllocas, "Number of unsafe static allocas");
STATISTIC(NumUnsafeDynamicAllocas, "Number of unsafe dynamic allocas");
STATISTIC(NumUnsafeByValArguments, "Number of unsafe byval arguments");
STATISTIC(NumUnsafeStackRestorePoints, "Number of setjmps and landingpads");

} // namespace llvm

namespace {

/// Rewrite an SCEV expression for a memory access address to an expression that
/// represents offset from the given alloca.
///
/// The implementation simply replaces all mentions of the alloca with zero.
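///
/// For example (illustrative): if a GEP computes `%a + %i` for an alloca
/// `%a`, the rewritten SCEV is `0 + %i`, i.e. the pure offset expression,
/// whose value range can then be compared against the alloca size.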
class AllocaOffsetRewriter : public SCEVRewriteVisitor<AllocaOffsetRewriter> {
  const Value *AllocaPtr;

public:
  AllocaOffsetRewriter(ScalarEvolution &SE, const Value *AllocaPtr)
      : SCEVRewriteVisitor(SE), AllocaPtr(AllocaPtr) {}

  const SCEV *visitUnknown(const SCEVUnknown *Expr) {
    if (Expr->getValue() == AllocaPtr)
      return SE.getZero(Expr->getType());
    return Expr;
  }
};

/// The SafeStack pass splits the stack of each function into the safe
/// stack, which is only accessed through memory safe dereferences (as
/// determined statically), and the unsafe stack, which contains all
/// local variables that are accessed in ways that we can't prove to
/// be safe.
class SafeStack : public FunctionPass {
  const TargetMachine *TM;
  const TargetLoweringBase *TL;
  const DataLayout *DL;
  ScalarEvolution *SE;

  Type *StackPtrTy;
  Type *IntPtrTy;
  Type *Int32Ty;
  Type *Int8Ty;

  Value *UnsafeStackPtr = nullptr;

  /// Unsafe stack alignment. Each stack frame must ensure that the stack is
  /// aligned to this value. We need to re-align the unsafe stack if the
  /// alignment of any object on the stack exceeds this value.
  ///
  /// 16 seems like a reasonable upper bound on the alignment of objects that we
  /// might expect to appear on the stack on most common targets.
  enum { StackAlignment = 16 };

  /// \brief Build a value representing a pointer to the unsafe stack pointer.
  Value *getOrCreateUnsafeStackPtr(IRBuilder<> &IRB, Function &F);

  /// \brief Find all static allocas, dynamic allocas, return instructions and
  /// stack restore points (exception unwind blocks and setjmp calls) in the
  /// given function and append them to the respective vectors.
  void findInsts(Function &F, SmallVectorImpl<AllocaInst *> &StaticAllocas,
                 SmallVectorImpl<AllocaInst *> &DynamicAllocas,
                 SmallVectorImpl<Argument *> &ByValArguments,
                 SmallVectorImpl<ReturnInst *> &Returns,
                 SmallVectorImpl<Instruction *> &StackRestorePoints);

  /// \brief Calculate the allocation size of a given alloca. Returns 0 if the
  /// size cannot be statically determined.
  uint64_t getStaticAllocaAllocationSize(const AllocaInst* AI);

  /// \brief Allocate space for all static allocas in \p StaticAllocas,
  /// replace allocas with pointers into the unsafe stack and generate code to
  /// restore the stack pointer before all return instructions in \p Returns.
  ///
  /// \returns A pointer to the top of the unsafe stack after all unsafe static
  /// allocas are allocated.
  Value *moveStaticAllocasToUnsafeStack(IRBuilder<> &IRB, Function &F,
                                        ArrayRef<AllocaInst *> StaticAllocas,
                                        ArrayRef<Argument *> ByValArguments,
                                        ArrayRef<ReturnInst *> Returns);

  /// \brief Generate code to restore the stack after all stack restore points
  /// in \p StackRestorePoints.
  ///
  /// \returns A local variable in which to maintain the dynamic top of the
  /// unsafe stack if needed.
  AllocaInst *
  createStackRestorePoints(IRBuilder<> &IRB, Function &F,
                           ArrayRef<Instruction *> StackRestorePoints,
                           Value *StaticTop, bool NeedDynamicTop);

  /// \brief Replace all allocas in \p DynamicAllocas with code to allocate
  /// space dynamically on the unsafe stack and store the dynamic unsafe stack
  /// top to \p DynamicTop if non-null.
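  ///
  /// (Illustrative) each dynamic alloca is rewritten to: load the unsafe
  /// stack pointer, subtract ArraySize * TypeSize, align the result down,
  /// and store it back as the new unsafe stack top.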
  void moveDynamicAllocasToUnsafeStack(Function &F, Value *UnsafeStackPtr,
                                       AllocaInst *DynamicTop,
                                       ArrayRef<AllocaInst *> DynamicAllocas);

  bool IsSafeStackAlloca(const Value *AllocaPtr, uint64_t AllocaSize);

  bool IsMemIntrinsicSafe(const MemIntrinsic *MI, const Use &U,
                          const Value *AllocaPtr, uint64_t AllocaSize);
  bool IsAccessSafe(Value *Addr, uint64_t Size, const Value *AllocaPtr,
                    uint64_t AllocaSize);

public:
  static char ID; // Pass identification, replacement for typeid.
  SafeStack(const TargetMachine *TM)
      : FunctionPass(ID), TM(TM), TL(nullptr), DL(nullptr) {
    initializeSafeStackPass(*PassRegistry::getPassRegistry());
  }
  SafeStack() : SafeStack(nullptr) {}

  void getAnalysisUsage(AnalysisUsage &AU) const override {
    AU.addRequired<ScalarEvolutionWrapperPass>();
  }

  bool doInitialization(Module &M) override {
    DL = &M.getDataLayout();

    StackPtrTy = Type::getInt8PtrTy(M.getContext());
    IntPtrTy = DL->getIntPtrType(M.getContext());
    Int32Ty = Type::getInt32Ty(M.getContext());
    Int8Ty = Type::getInt8Ty(M.getContext());

    return false;
  }

  bool runOnFunction(Function &F) override;
}; // class SafeStack

uint64_t SafeStack::getStaticAllocaAllocationSize(const AllocaInst* AI) {
  uint64_t Size = DL->getTypeAllocSize(AI->getAllocatedType());
  if (AI->isArrayAllocation()) {
    auto C = dyn_cast<ConstantInt>(AI->getArraySize());
    if (!C)
      return 0;
    Size *= C->getZExtValue();
  }
  return Size;
}

bool SafeStack::IsAccessSafe(Value *Addr, uint64_t AccessSize,
                             const Value *AllocaPtr, uint64_t AllocaSize) {
  AllocaOffsetRewriter Rewriter(*SE, AllocaPtr);
  const SCEV *Expr = Rewriter.visit(SE->getSCEV(Addr));

  uint64_t BitWidth = SE->getTypeSizeInBits(Expr->getType());
  ConstantRange AccessStartRange = SE->getUnsignedRange(Expr);
  ConstantRange SizeRange =
      ConstantRange(APInt(BitWidth, 0), APInt(BitWidth, AccessSize));
  ConstantRange AccessRange = AccessStartRange.add(SizeRange);
  ConstantRange AllocaRange =
      ConstantRange(APInt(BitWidth, 0), APInt(BitWidth, AllocaSize));
  bool Safe = AllocaRange.contains(AccessRange);

  DEBUG(dbgs() << "[SafeStack] "
               << (isa<AllocaInst>(AllocaPtr) ? "Alloca " : "ByValArgument ")
               << *AllocaPtr << "\n"
               << " Access " << *Addr << "\n"
               << " SCEV " << *Expr
               << " U: " << SE->getUnsignedRange(Expr)
               << ", S: " << SE->getSignedRange(Expr) << "\n"
               << " Range " << AccessRange << "\n"
               << " AllocaRange " << AllocaRange << "\n"
               << " " << (Safe ? "safe" : "unsafe") << "\n");

  return Safe;
}

bool SafeStack::IsMemIntrinsicSafe(const MemIntrinsic *MI, const Use &U,
                                   const Value *AllocaPtr,
                                   uint64_t AllocaSize) {
  // All MemIntrinsics have destination address in Arg0 and size in Arg2.
  if (MI->getRawDest() != U) return true;
  const auto *Len = dyn_cast<ConstantInt>(MI->getLength());
  // Non-constant size => unsafe. FIXME: try SCEV getRange.
  if (!Len) return false;
  return IsAccessSafe(U, Len->getZExtValue(), AllocaPtr, AllocaSize);
}

/// Check whether a given allocation must be put on the safe
/// stack or not. The function analyzes all uses of AI and checks whether it is
/// only accessed in a memory safe way (as decided statically).
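///
/// Worked example (illustrative): for `char buf[16]` accessed as `buf[i]`
/// with `i` proven to lie in [0, 16), the access range [0, 16) is contained
/// in the alloca range [0, 16), so the access is safe; if `i` may reach 16
/// or more, the alloca is moved to the unsafe stack.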
bool SafeStack::IsSafeStackAlloca(const Value *AllocaPtr, uint64_t AllocaSize) {
  // Go through all uses of this alloca and check whether all accesses to the
  // allocated object are statically known to be memory safe and, hence, the
  // object can be placed on the safe stack.
  SmallPtrSet<const Value *, 16> Visited;
  SmallVector<const Value *, 8> WorkList;
  WorkList.push_back(AllocaPtr);

  // A DFS search through all uses of the alloca in bitcasts/PHI/GEPs/etc.
  while (!WorkList.empty()) {
    const Value *V = WorkList.pop_back_val();
    for (const Use &UI : V->uses()) {
      auto I = cast<const Instruction>(UI.getUser());
      assert(V == UI.get());

      switch (I->getOpcode()) {
      case Instruction::Load: {
        if (!IsAccessSafe(UI, DL->getTypeStoreSize(I->getType()), AllocaPtr,
                          AllocaSize))
          return false;
        break;
      }
      case Instruction::VAArg:
        // "va-arg" from a pointer is safe.
        break;
      case Instruction::Store: {
        if (V == I->getOperand(0)) {
          // Stored the pointer - conservatively assume it may be unsafe.
          DEBUG(dbgs() << "[SafeStack] Unsafe alloca: " << *AllocaPtr
                       << "\n store of address: " << *I << "\n");
          return false;
        }

        if (!IsAccessSafe(UI, DL->getTypeStoreSize(I->getOperand(0)->getType()),
                          AllocaPtr, AllocaSize))
          return false;
        break;
      }
      case Instruction::Ret: {
        // Information leak.
        return false;
      }

      case Instruction::Call:
      case Instruction::Invoke: {
        ImmutableCallSite CS(I);

        if (const IntrinsicInst *II = dyn_cast<IntrinsicInst>(I)) {
          if (II->getIntrinsicID() == Intrinsic::lifetime_start ||
              II->getIntrinsicID() == Intrinsic::lifetime_end)
            continue;
        }

        if (const MemIntrinsic *MI = dyn_cast<MemIntrinsic>(I)) {
          if (!IsMemIntrinsicSafe(MI, UI, AllocaPtr, AllocaSize)) {
            DEBUG(dbgs() << "[SafeStack] Unsafe alloca: " << *AllocaPtr
                         << "\n unsafe memintrinsic: " << *I
                         << "\n");
            return false;
          }
          continue;
        }

        // LLVM 'nocapture' attribute is only set for arguments whose address
        // is not stored, passed around, or used in any other non-trivial way.
        // We assume that passing a pointer to an object as a 'nocapture
        // readnone' argument is safe.
        // FIXME: a more precise solution would require an interprocedural
        // analysis here, which would look at all uses of an argument inside
        // the function being called.
        ImmutableCallSite::arg_iterator B = CS.arg_begin(), E = CS.arg_end();
        for (ImmutableCallSite::arg_iterator A = B; A != E; ++A)
          if (A->get() == V)
            if (!(CS.doesNotCapture(A - B) && (CS.doesNotAccessMemory(A - B) ||
                                               CS.doesNotAccessMemory()))) {
              DEBUG(dbgs() << "[SafeStack] Unsafe alloca: " << *AllocaPtr
                           << "\n unsafe call: " << *I << "\n");
              return false;
            }
        continue;
      }

      default:
        if (Visited.insert(I).second)
          WorkList.push_back(cast<const Instruction>(I));
      }
    }
  }

  // All uses of the alloca are safe, we can place it on the safe stack.
  return true;
}

Value *SafeStack::getOrCreateUnsafeStackPtr(IRBuilder<> &IRB, Function &F) {
  // Check if there is a target-specific location for the unsafe stack pointer.
  if (TL)
    if (Value *V = TL->getSafeStackPointerLocation(IRB))
      return V;

  // Otherwise, assume the target links with compiler-rt, which provides a
  // thread-local variable with a magic name.
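  // (Illustrative, not the exact declaration) the runtime side of this
  // contract is roughly:
  //   extern "C" __thread void *__safestack_unsafe_stack_ptr;
  // defined and initialized by the compiler-rt safestack runtime.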
  Module &M = *F.getParent();
  const char *UnsafeStackPtrVar = "__safestack_unsafe_stack_ptr";
  auto UnsafeStackPtr =
      dyn_cast_or_null<GlobalVariable>(M.getNamedValue(UnsafeStackPtrVar));

  bool UseTLS = USPStorage == ThreadLocalUSP;

  if (!UnsafeStackPtr) {
    auto TLSModel = UseTLS ?
        GlobalValue::InitialExecTLSModel :
        GlobalValue::NotThreadLocal;
    // The global variable is not defined yet, define it ourselves.
    // We use the initial-exec TLS model because we do not support the
    // variable living anywhere other than in the main executable.
    UnsafeStackPtr = new GlobalVariable(
        M, StackPtrTy, false, GlobalValue::ExternalLinkage, nullptr,
        UnsafeStackPtrVar, nullptr, TLSModel);
  } else {
    // The variable exists, check its type and attributes.
    if (UnsafeStackPtr->getValueType() != StackPtrTy)
      report_fatal_error(Twine(UnsafeStackPtrVar) + " must have void* type");
    if (UseTLS != UnsafeStackPtr->isThreadLocal())
      report_fatal_error(Twine(UnsafeStackPtrVar) + " must " +
                         (UseTLS ? "" : "not ") + "be thread-local");
  }
  return UnsafeStackPtr;
}

void SafeStack::findInsts(Function &F,
                          SmallVectorImpl<AllocaInst *> &StaticAllocas,
                          SmallVectorImpl<AllocaInst *> &DynamicAllocas,
                          SmallVectorImpl<Argument *> &ByValArguments,
                          SmallVectorImpl<ReturnInst *> &Returns,
                          SmallVectorImpl<Instruction *> &StackRestorePoints) {
  for (Instruction &I : instructions(&F)) {
    if (auto AI = dyn_cast<AllocaInst>(&I)) {
      ++NumAllocas;

      uint64_t Size = getStaticAllocaAllocationSize(AI);
      if (IsSafeStackAlloca(AI, Size))
        continue;

      if (AI->isStaticAlloca()) {
        ++NumUnsafeStaticAllocas;
        StaticAllocas.push_back(AI);
      } else {
        ++NumUnsafeDynamicAllocas;
        DynamicAllocas.push_back(AI);
      }
    } else if (auto RI = dyn_cast<ReturnInst>(&I)) {
      Returns.push_back(RI);
    } else if (auto CI = dyn_cast<CallInst>(&I)) {
      // setjmps require stack restore.
      if (CI->getCalledFunction() && CI->canReturnTwice())
        StackRestorePoints.push_back(CI);
    } else if (auto LP = dyn_cast<LandingPadInst>(&I)) {
      // Exception landing pads require stack restore.
      StackRestorePoints.push_back(LP);
    } else if (auto II = dyn_cast<IntrinsicInst>(&I)) {
      if (II->getIntrinsicID() == Intrinsic::gcroot)
        llvm::report_fatal_error(
            "gcroot intrinsic not compatible with safestack attribute");
    }
  }
  for (Argument &Arg : F.args()) {
    if (!Arg.hasByValAttr())
      continue;
    uint64_t Size =
        DL->getTypeStoreSize(Arg.getType()->getPointerElementType());
    if (IsSafeStackAlloca(&Arg, Size))
      continue;

    ++NumUnsafeByValArguments;
    ByValArguments.push_back(&Arg);
  }
}

AllocaInst *
SafeStack::createStackRestorePoints(IRBuilder<> &IRB, Function &F,
                                    ArrayRef<Instruction *> StackRestorePoints,
                                    Value *StaticTop, bool NeedDynamicTop) {
  if (StackRestorePoints.empty())
    return nullptr;

  // We need the current value of the shadow stack pointer to restore
  // after longjmp or exception catching.

  // FIXME: On some platforms this could be handled by the longjmp/exception
  // runtime itself.

  AllocaInst *DynamicTop = nullptr;
  if (NeedDynamicTop)
    // If we also have dynamic allocas, the stack pointer value changes
    // throughout the function. For now we store it in an alloca.
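    // (The alloca created below is itself left on the safe stack: it is
    // created after findInsts runs, so this pass never considers moving it.)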
    DynamicTop = IRB.CreateAlloca(StackPtrTy, /*ArraySize=*/nullptr,
                                  "unsafe_stack_dynamic_ptr");

  if (!StaticTop)
    // We need the original unsafe stack pointer value, even if there are
    // no unsafe static allocas.
    StaticTop = IRB.CreateLoad(UnsafeStackPtr, false, "unsafe_stack_ptr");

  if (NeedDynamicTop)
    IRB.CreateStore(StaticTop, DynamicTop);

  // Restore current stack pointer after longjmp/exception catch.
  for (Instruction *I : StackRestorePoints) {
    ++NumUnsafeStackRestorePoints;

    IRB.SetInsertPoint(I->getNextNode());
    Value *CurrentTop = DynamicTop ? IRB.CreateLoad(DynamicTop) : StaticTop;
    IRB.CreateStore(CurrentTop, UnsafeStackPtr);
  }

  return DynamicTop;
}

Value *SafeStack::moveStaticAllocasToUnsafeStack(
    IRBuilder<> &IRB, Function &F, ArrayRef<AllocaInst *> StaticAllocas,
    ArrayRef<Argument *> ByValArguments, ArrayRef<ReturnInst *> Returns) {
  if (StaticAllocas.empty() && ByValArguments.empty())
    return nullptr;

  DIBuilder DIB(*F.getParent());

  // We explicitly compute and set the unsafe stack layout for all unsafe
  // static alloca instructions. We save the unsafe "base pointer" in the
  // prologue into a local variable and restore it in the epilogue.

  // Load the current stack pointer (we'll also use it as a base pointer).
  // FIXME: use a dedicated register for it?
  Instruction *BasePointer =
      IRB.CreateLoad(UnsafeStackPtr, false, "unsafe_stack_ptr");
  assert(BasePointer->getType() == StackPtrTy);

  for (ReturnInst *RI : Returns) {
    IRB.SetInsertPoint(RI);
    IRB.CreateStore(BasePointer, UnsafeStackPtr);
  }

  // Compute maximum alignment among static objects on the unsafe stack.
  unsigned MaxAlignment = 0;
  for (Argument *Arg : ByValArguments) {
    Type *Ty = Arg->getType()->getPointerElementType();
    unsigned Align = std::max((unsigned)DL->getPrefTypeAlignment(Ty),
                              Arg->getParamAlignment());
    if (Align > MaxAlignment)
      MaxAlignment = Align;
  }
  for (AllocaInst *AI : StaticAllocas) {
    Type *Ty = AI->getAllocatedType();
    unsigned Align =
        std::max((unsigned)DL->getPrefTypeAlignment(Ty), AI->getAlignment());
    if (Align > MaxAlignment)
      MaxAlignment = Align;
  }

  if (MaxAlignment > StackAlignment) {
    // Re-align the base pointer according to the max requested alignment.
    assert(isPowerOf2_32(MaxAlignment));
    IRB.SetInsertPoint(BasePointer->getNextNode());
    BasePointer = cast<Instruction>(IRB.CreateIntToPtr(
        IRB.CreateAnd(IRB.CreatePtrToInt(BasePointer, IntPtrTy),
                      ConstantInt::get(IntPtrTy, ~uint64_t(MaxAlignment - 1))),
        StackPtrTy));
  }

  int64_t StaticOffset = 0; // Current stack top.
  IRB.SetInsertPoint(BasePointer->getNextNode());

  for (Argument *Arg : ByValArguments) {
    Type *Ty = Arg->getType()->getPointerElementType();

    uint64_t Size = DL->getTypeStoreSize(Ty);
    if (Size == 0)
      Size = 1; // Don't create zero-sized stack objects.

    // Ensure the object is properly aligned.
    unsigned Align = std::max((unsigned)DL->getPrefTypeAlignment(Ty),
                              Arg->getParamAlignment());

    // Add alignment.
    // NOTE: we ensure that BasePointer itself is aligned to >= Align.
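    // (Illustrative) with Size = 20 and Align = 8 at StaticOffset = 0, the
    // offset becomes RoundUpToAlignment(20, 8) = 24, so the object occupies
    // [BasePointer - 24, BasePointer - 4) and stays 8-byte aligned because
    // BasePointer itself is aligned to >= Align.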
    StaticOffset += Size;
    StaticOffset = RoundUpToAlignment(StaticOffset, Align);

    Value *Off = IRB.CreateGEP(BasePointer, // BasePointer is i8*
                               ConstantInt::get(Int32Ty, -StaticOffset));
    Value *NewArg = IRB.CreateBitCast(Off, Arg->getType(),
                                      Arg->getName() + ".unsafe-byval");

    // Replace alloca with the new location.
    replaceDbgDeclare(Arg, BasePointer, BasePointer->getNextNode(), DIB,
                      /*Deref=*/true, -StaticOffset);
    Arg->replaceAllUsesWith(NewArg);
    IRB.SetInsertPoint(cast<Instruction>(NewArg)->getNextNode());
    IRB.CreateMemCpy(Off, Arg, Size, Arg->getParamAlignment());
  }

  // Allocate space for every unsafe static AllocaInst on the unsafe stack.
  for (AllocaInst *AI : StaticAllocas) {
    IRB.SetInsertPoint(AI);

    Type *Ty = AI->getAllocatedType();
    uint64_t Size = getStaticAllocaAllocationSize(AI);
    if (Size == 0)
      Size = 1; // Don't create zero-sized stack objects.

    // Ensure the object is properly aligned.
    unsigned Align =
        std::max((unsigned)DL->getPrefTypeAlignment(Ty), AI->getAlignment());

    // Add alignment.
    // NOTE: we ensure that BasePointer itself is aligned to >= Align.
    StaticOffset += Size;
    StaticOffset = RoundUpToAlignment(StaticOffset, Align);

    Value *Off = IRB.CreateGEP(BasePointer, // BasePointer is i8*
                               ConstantInt::get(Int32Ty, -StaticOffset));
    Value *NewAI = IRB.CreateBitCast(Off, AI->getType(), AI->getName());
    if (AI->hasName() && isa<Instruction>(NewAI))
      cast<Instruction>(NewAI)->takeName(AI);

    // Replace alloca with the new location.
    replaceDbgDeclareForAlloca(AI, BasePointer, DIB, /*Deref=*/true,
                               -StaticOffset);
    AI->replaceAllUsesWith(NewAI);
    AI->eraseFromParent();
  }

  // Re-align BasePointer so that our callees would see it aligned as
  // expected.
  // FIXME: no need to update BasePointer in leaf functions.
  StaticOffset = RoundUpToAlignment(StaticOffset, StackAlignment);

  // Update shadow stack pointer in the function epilogue.
  IRB.SetInsertPoint(BasePointer->getNextNode());

  Value *StaticTop =
      IRB.CreateGEP(BasePointer, ConstantInt::get(Int32Ty, -StaticOffset),
                    "unsafe_stack_static_top");
  IRB.CreateStore(StaticTop, UnsafeStackPtr);
  return StaticTop;
}

void SafeStack::moveDynamicAllocasToUnsafeStack(
    Function &F, Value *UnsafeStackPtr, AllocaInst *DynamicTop,
    ArrayRef<AllocaInst *> DynamicAllocas) {
  DIBuilder DIB(*F.getParent());

  for (AllocaInst *AI : DynamicAllocas) {
    IRBuilder<> IRB(AI);

    // Compute the new SP value (after AI).
    Value *ArraySize = AI->getArraySize();
    if (ArraySize->getType() != IntPtrTy)
      ArraySize = IRB.CreateIntCast(ArraySize, IntPtrTy, false);

    Type *Ty = AI->getAllocatedType();
    uint64_t TySize = DL->getTypeAllocSize(Ty);
    Value *Size = IRB.CreateMul(ArraySize, ConstantInt::get(IntPtrTy, TySize));

    Value *SP = IRB.CreatePtrToInt(IRB.CreateLoad(UnsafeStackPtr), IntPtrTy);
    SP = IRB.CreateSub(SP, Size);

    // Align the SP value to satisfy the AllocaInst, type and stack alignments.
    unsigned Align = std::max(
        std::max((unsigned)DL->getPrefTypeAlignment(Ty), AI->getAlignment()),
        (unsigned)StackAlignment);

    assert(isPowerOf2_32(Align));
    Value *NewTop = IRB.CreateIntToPtr(
        IRB.CreateAnd(SP, ConstantInt::get(IntPtrTy, ~uint64_t(Align - 1))),
        StackPtrTy);

    // Save the stack pointer.
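    // (Both stores below are needed: UnsafeStackPtr is the live unsafe stack
    // top, while DynamicTop, if present, is what the stack restore points
    // reload after longjmp or an exception.)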
    IRB.CreateStore(NewTop, UnsafeStackPtr);
    if (DynamicTop)
      IRB.CreateStore(NewTop, DynamicTop);

    Value *NewAI = IRB.CreatePointerCast(NewTop, AI->getType());
    if (AI->hasName() && isa<Instruction>(NewAI))
      NewAI->takeName(AI);

    replaceDbgDeclareForAlloca(AI, NewAI, DIB, /*Deref=*/true);
    AI->replaceAllUsesWith(NewAI);
    AI->eraseFromParent();
  }

  if (!DynamicAllocas.empty()) {
    // Now go through the instructions again, replacing stacksave/stackrestore.
    for (inst_iterator It = inst_begin(&F), Ie = inst_end(&F); It != Ie;) {
      Instruction *I = &*(It++);
      auto II = dyn_cast<IntrinsicInst>(I);
      if (!II)
        continue;

      if (II->getIntrinsicID() == Intrinsic::stacksave) {
        IRBuilder<> IRB(II);
        Instruction *LI = IRB.CreateLoad(UnsafeStackPtr);
        LI->takeName(II);
        II->replaceAllUsesWith(LI);
        II->eraseFromParent();
      } else if (II->getIntrinsicID() == Intrinsic::stackrestore) {
        IRBuilder<> IRB(II);
        Instruction *SI = IRB.CreateStore(II->getArgOperand(0), UnsafeStackPtr);
        SI->takeName(II);
        assert(II->use_empty());
        II->eraseFromParent();
      }
    }
  }
}

bool SafeStack::runOnFunction(Function &F) {
  DEBUG(dbgs() << "[SafeStack] Function: " << F.getName() << "\n");

  if (!F.hasFnAttribute(Attribute::SafeStack)) {
    DEBUG(dbgs() << "[SafeStack] safestack is not requested"
                    " for this function\n");
    return false;
  }

  if (F.isDeclaration()) {
    DEBUG(dbgs() << "[SafeStack] function definition"
                    " is not available\n");
    return false;
  }

  TL = TM ? TM->getSubtargetImpl(F)->getTargetLowering() : nullptr;
  SE = &getAnalysis<ScalarEvolutionWrapperPass>().getSE();

  {
    // Make sure the regular stack protector won't run on this function
    // (safestack attribute takes precedence).
    AttrBuilder B;
    B.addAttribute(Attribute::StackProtect)
        .addAttribute(Attribute::StackProtectReq)
        .addAttribute(Attribute::StackProtectStrong);
    F.removeAttributes(
        AttributeSet::FunctionIndex,
        AttributeSet::get(F.getContext(), AttributeSet::FunctionIndex, B));
  }

  ++NumFunctions;

  SmallVector<AllocaInst *, 16> StaticAllocas;
  SmallVector<AllocaInst *, 4> DynamicAllocas;
  SmallVector<Argument *, 4> ByValArguments;
  SmallVector<ReturnInst *, 4> Returns;

  // Collect all points where the stack gets unwound and needs to be restored.
  // This is only necessary because the runtime (setjmp and unwind code) is
  // not aware of the unsafe stack and won't unwind/restore it properly.
  // To work around this problem without changing the runtime, we insert
  // instrumentation to restore the unsafe stack pointer when necessary.
  SmallVector<Instruction *, 4> StackRestorePoints;

  // Find all static and dynamic alloca instructions that must be moved to the
  // unsafe stack, all return instructions and stack restore points.
  findInsts(F, StaticAllocas, DynamicAllocas, ByValArguments, Returns,
            StackRestorePoints);

  if (StaticAllocas.empty() && DynamicAllocas.empty() &&
      ByValArguments.empty() && StackRestorePoints.empty())
    return false; // Nothing to do in this function.

  if (!StaticAllocas.empty() || !DynamicAllocas.empty() ||
      !ByValArguments.empty())
    ++NumUnsafeStackFunctions; // This function has the unsafe stack.

  if (!StackRestorePoints.empty())
    ++NumUnsafeStackRestorePointsFunctions;

  IRBuilder<> IRB(&F.front(), F.begin()->getFirstInsertionPt());
  UnsafeStackPtr = getOrCreateUnsafeStackPtr(IRB, F);

  // The top of the unsafe stack after all unsafe static allocas are allocated.
  Value *StaticTop = moveStaticAllocasToUnsafeStack(IRB, F, StaticAllocas,
                                                    ByValArguments, Returns);

  // Safe stack object that stores the current unsafe stack top. It is updated
  // as unsafe dynamic (non-constant-sized) allocas are allocated and freed.
  // This is only needed if we need to restore the stack pointer after longjmp
  // or exceptions, and we have dynamic allocations.
  // FIXME: a better alternative might be to store the unsafe stack pointer
  // before setjmp / invoke instructions.
  AllocaInst *DynamicTop = createStackRestorePoints(
      IRB, F, StackRestorePoints, StaticTop, !DynamicAllocas.empty());

  // Handle dynamic allocas.
  moveDynamicAllocasToUnsafeStack(F, UnsafeStackPtr, DynamicTop,
                                  DynamicAllocas);

  DEBUG(dbgs() << "[SafeStack] safestack applied\n");
  return true;
}

} // anonymous namespace

char SafeStack::ID = 0;
INITIALIZE_TM_PASS_BEGIN(SafeStack, "safe-stack",
                         "Safe Stack instrumentation pass", false, false)
INITIALIZE_TM_PASS_END(SafeStack, "safe-stack",
                       "Safe Stack instrumentation pass", false, false)

FunctionPass *llvm::createSafeStackPass(const llvm::TargetMachine *TM) {
  return new SafeStack(TM);
}