//===- SafeStack.cpp - Safe Stack Insertion -------------------------------===//
//
// The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This pass splits the stack into the safe stack (kept as-is for LLVM backend)
// and the unsafe stack (explicitly allocated and managed through the runtime
// support library).
//
// http://clang.llvm.org/docs/SafeStack.html
//
//===----------------------------------------------------------------------===//
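
// Illustration only (not part of the pass logic): in a function such as
//
//   void f(void (*use)(char *)) {
//     int x = 0;     // accessed only directly -> stays on the safe stack
//     char buf[16];  // passed to an unknown callee -> moved to the unsafe
//     use(buf);      // stack, since in-bounds access cannot be proven
//   }
//
// every alloca whose uses cannot all be shown statically to be memory safe
// is rewritten to live on the separate, runtime-managed unsafe stack.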

#include "SafeStackColoring.h"
#include "SafeStackLayout.h"
#include "llvm/ADT/APInt.h"
#include "llvm/ADT/ArrayRef.h"
#include "llvm/ADT/SmallPtrSet.h"
#include "llvm/ADT/SmallVector.h"
#include "llvm/ADT/Statistic.h"
#include "llvm/Analysis/AssumptionCache.h"
#include "llvm/Analysis/BranchProbabilityInfo.h"
#include "llvm/Analysis/LoopInfo.h"
#include "llvm/Analysis/ScalarEvolution.h"
#include "llvm/Analysis/ScalarEvolutionExpressions.h"
#include "llvm/Analysis/TargetLibraryInfo.h"
#include "llvm/CodeGen/TargetLowering.h"
#include "llvm/CodeGen/TargetPassConfig.h"
#include "llvm/CodeGen/TargetSubtargetInfo.h"
#include "llvm/IR/Argument.h"
#include "llvm/IR/Attributes.h"
#include "llvm/IR/CallSite.h"
#include "llvm/IR/ConstantRange.h"
#include "llvm/IR/Constants.h"
#include "llvm/IR/DIBuilder.h"
#include "llvm/IR/DataLayout.h"
#include "llvm/IR/DerivedTypes.h"
#include "llvm/IR/Dominators.h"
#include "llvm/IR/Function.h"
#include "llvm/IR/IRBuilder.h"
#include "llvm/IR/InstIterator.h"
#include "llvm/IR/Instruction.h"
#include "llvm/IR/Instructions.h"
#include "llvm/IR/IntrinsicInst.h"
#include "llvm/IR/Intrinsics.h"
#include "llvm/IR/MDBuilder.h"
#include "llvm/IR/Module.h"
#include "llvm/IR/Type.h"
#include "llvm/IR/Use.h"
#include "llvm/IR/User.h"
#include "llvm/IR/Value.h"
#include "llvm/Pass.h"
#include "llvm/Support/Casting.h"
#include "llvm/Support/Debug.h"
#include "llvm/Support/ErrorHandling.h"
#include "llvm/Support/MathExtras.h"
#include "llvm/Support/raw_ostream.h"
#include "llvm/Target/TargetMachine.h"
#include "llvm/Transforms/Utils/BasicBlockUtils.h"
#include "llvm/Transforms/Utils/Local.h"
#include <algorithm>
#include <cassert>
#include <cstdint>
#include <string>
#include <utility>

using namespace llvm;
using namespace llvm::safestack;

#define DEBUG_TYPE "safe-stack"

namespace llvm {

STATISTIC(NumFunctions, "Total number of functions");
STATISTIC(NumUnsafeStackFunctions, "Number of functions with unsafe stack");
STATISTIC(NumUnsafeStackRestorePointsFunctions,
          "Number of functions that use setjmp or exceptions");
STATISTIC(NumAllocas, "Total number of allocas");
STATISTIC(NumUnsafeStaticAllocas, "Number of unsafe static allocas");
STATISTIC(NumUnsafeDynamicAllocas, "Number of unsafe dynamic allocas");
STATISTIC(NumUnsafeByValArguments, "Number of unsafe byval arguments");
STATISTIC(NumUnsafeStackRestorePoints, "Number of setjmps and landingpads");

} // namespace llvm

namespace {

/// Rewrite an SCEV expression for a memory access address to an expression
/// that represents offset from the given alloca.
///
/// The implementation simply replaces all mentions of the alloca with zero.
class AllocaOffsetRewriter : public SCEVRewriteVisitor<AllocaOffsetRewriter> {
  const Value *AllocaPtr;

public:
  AllocaOffsetRewriter(ScalarEvolution &SE, const Value *AllocaPtr)
      : SCEVRewriteVisitor(SE), AllocaPtr(AllocaPtr) {}

  const SCEV *visitUnknown(const SCEVUnknown *Expr) {
    if (Expr->getValue() == AllocaPtr)
      return SE.getZero(Expr->getType());
    return Expr;
  }
};
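
// Illustrative example (hypothetical IR): for an address whose SCEV is
// `%buf + 4 * %i`, replacing the mention of %buf with zero leaves `4 * %i`,
// i.e. the pure offset from the start of the alloca, whose unsigned range
// IsAccessSafe can then compare against the allocation size.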

/// The SafeStack pass splits the stack of each function into the safe
/// stack, which is only accessed through memory safe dereferences (as
/// determined statically), and the unsafe stack, which contains all
/// local variables that are accessed in ways that we can't prove to
/// be safe.
class SafeStack {
  Function &F;
  const TargetLoweringBase &TL;
  const DataLayout &DL;
  ScalarEvolution &SE;

  Type *StackPtrTy;
  Type *IntPtrTy;
  Type *Int32Ty;
  Type *Int8Ty;

  Value *UnsafeStackPtr = nullptr;

  /// Unsafe stack alignment. Each stack frame must ensure that the stack is
  /// aligned to this value. We need to re-align the unsafe stack if the
  /// alignment of any object on the stack exceeds this value.
  ///
  /// 16 seems like a reasonable upper bound on the alignment of objects that
  /// we might expect to appear on the stack on most common targets.
  enum { StackAlignment = 16 };

  /// \brief Return the value of the stack canary.
  Value *getStackGuard(IRBuilder<> &IRB, Function &F);

  /// \brief Load stack guard from the frame and check if it has changed.
  void checkStackGuard(IRBuilder<> &IRB, Function &F, ReturnInst &RI,
                       AllocaInst *StackGuardSlot, Value *StackGuard);

  /// \brief Find all static allocas, dynamic allocas, return instructions and
  /// stack restore points (exception unwind blocks and setjmp calls) in the
  /// given function and append them to the respective vectors.
  void findInsts(Function &F, SmallVectorImpl<AllocaInst *> &StaticAllocas,
                 SmallVectorImpl<AllocaInst *> &DynamicAllocas,
                 SmallVectorImpl<Argument *> &ByValArguments,
                 SmallVectorImpl<ReturnInst *> &Returns,
                 SmallVectorImpl<Instruction *> &StackRestorePoints);

  /// \brief Calculate the allocation size of a given alloca. Returns 0 if the
  /// size cannot be statically determined.
  uint64_t getStaticAllocaAllocationSize(const AllocaInst* AI);

  /// \brief Allocate space for all static allocas in \p StaticAllocas,
  /// replace allocas with pointers into the unsafe stack and generate code to
  /// restore the stack pointer before all return instructions in \p Returns.
  ///
  /// \returns A pointer to the top of the unsafe stack after all unsafe static
  /// allocas are allocated.
  Value *moveStaticAllocasToUnsafeStack(IRBuilder<> &IRB, Function &F,
                                        ArrayRef<AllocaInst *> StaticAllocas,
                                        ArrayRef<Argument *> ByValArguments,
                                        ArrayRef<ReturnInst *> Returns,
                                        Instruction *BasePointer,
                                        AllocaInst *StackGuardSlot);

  /// \brief Generate code to restore the stack after all stack restore points
  /// in \p StackRestorePoints.
  ///
  /// \returns A local variable in which to maintain the dynamic top of the
  /// unsafe stack if needed.
  AllocaInst *
  createStackRestorePoints(IRBuilder<> &IRB, Function &F,
                           ArrayRef<Instruction *> StackRestorePoints,
                           Value *StaticTop, bool NeedDynamicTop);

  /// \brief Replace all allocas in \p DynamicAllocas with code to allocate
  /// space dynamically on the unsafe stack and store the dynamic unsafe stack
  /// top to \p DynamicTop if non-null.
  void moveDynamicAllocasToUnsafeStack(Function &F, Value *UnsafeStackPtr,
                                       AllocaInst *DynamicTop,
                                       ArrayRef<AllocaInst *> DynamicAllocas);

  bool IsSafeStackAlloca(const Value *AllocaPtr, uint64_t AllocaSize);

  bool IsMemIntrinsicSafe(const MemIntrinsic *MI, const Use &U,
                          const Value *AllocaPtr, uint64_t AllocaSize);

  bool IsAccessSafe(Value *Addr, uint64_t Size, const Value *AllocaPtr,
                    uint64_t AllocaSize);

public:
  SafeStack(Function &F, const TargetLoweringBase &TL, const DataLayout &DL,
            ScalarEvolution &SE)
      : F(F), TL(TL), DL(DL), SE(SE),
        StackPtrTy(Type::getInt8PtrTy(F.getContext())),
        IntPtrTy(DL.getIntPtrType(F.getContext())),
        Int32Ty(Type::getInt32Ty(F.getContext())),
        Int8Ty(Type::getInt8Ty(F.getContext())) {}

  // Run the transformation on the associated function.
  // Returns whether the function was changed.
  bool run();
};

uint64_t SafeStack::getStaticAllocaAllocationSize(const AllocaInst* AI) {
  uint64_t Size = DL.getTypeAllocSize(AI->getAllocatedType());
  if (AI->isArrayAllocation()) {
    auto C = dyn_cast<ConstantInt>(AI->getArraySize());
    if (!C)
      return 0;
    Size *= C->getZExtValue();
  }
  return Size;
}
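
// For example, `%a = alloca i64, i32 4` has a static size of 8 * 4 = 32
// bytes, while `%a = alloca i64, i32 %n` is not statically sized and yields 0.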

bool SafeStack::IsAccessSafe(Value *Addr, uint64_t AccessSize,
                             const Value *AllocaPtr, uint64_t AllocaSize) {
  AllocaOffsetRewriter Rewriter(SE, AllocaPtr);
  const SCEV *Expr = Rewriter.visit(SE.getSCEV(Addr));
  uint64_t BitWidth = SE.getTypeSizeInBits(Expr->getType());
  ConstantRange AccessStartRange = SE.getUnsignedRange(Expr);
  ConstantRange SizeRange =
      ConstantRange(APInt(BitWidth, 0), APInt(BitWidth, AccessSize));
  ConstantRange AccessRange = AccessStartRange.add(SizeRange);
  ConstantRange AllocaRange =
      ConstantRange(APInt(BitWidth, 0), APInt(BitWidth, AllocaSize));
  bool Safe = AllocaRange.contains(AccessRange);

  DEBUG(dbgs() << "[SafeStack] "
               << (isa<AllocaInst>(AllocaPtr) ? "Alloca " : "ByValArgument ")
               << *AllocaPtr << "\n"
               << " Access " << *Addr << "\n"
               << " SCEV " << *Expr
               << " U: " << SE.getUnsignedRange(Expr)
               << ", S: " << SE.getSignedRange(Expr) << "\n"
               << " Range " << AccessRange << "\n"
               << " AllocaRange " << AllocaRange << "\n"
               << " " << (Safe ? "safe" : "unsafe") << "\n");

  return Safe;
}
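
// Worked example (illustrative numbers): a 4-byte access whose rewritten
// offset has unsigned range [0, 12) into a 16-byte alloca gives AccessRange =
// [0, 12) + [0, 4) = [0, 15), which AllocaRange = [0, 16) contains -> safe.
// If the offset range were [0, 16) instead, AccessRange = [0, 19) would no
// longer be contained -> unsafe.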

bool SafeStack::IsMemIntrinsicSafe(const MemIntrinsic *MI, const Use &U,
                                   const Value *AllocaPtr,
                                   uint64_t AllocaSize) {
  // All MemIntrinsics have destination address in Arg0 and size in Arg2.
  if (MI->getRawDest() != U) return true;
  const auto *Len = dyn_cast<ConstantInt>(MI->getLength());
  // Non-constant size => unsafe. FIXME: try SCEV getRange.
  if (!Len) return false;
  return IsAccessSafe(U, Len->getZExtValue(), AllocaPtr, AllocaSize);
}

/// Check whether a given allocation must be put on the unsafe
/// stack or not. The function analyzes all uses of AI and checks whether it is
/// only accessed in a memory safe way (as decided statically).
bool SafeStack::IsSafeStackAlloca(const Value *AllocaPtr, uint64_t AllocaSize) {
  // Go through all uses of this alloca and check whether all accesses to the
  // allocated object are statically known to be memory safe and, hence, the
  // object can be placed on the safe stack.
  SmallPtrSet<const Value *, 16> Visited;
  SmallVector<const Value *, 8> WorkList;
  WorkList.push_back(AllocaPtr);

  // A DFS search through all uses of the alloca in bitcasts/PHI/GEPs/etc.
  while (!WorkList.empty()) {
    const Value *V = WorkList.pop_back_val();
    for (const Use &UI : V->uses()) {
      auto I = cast<const Instruction>(UI.getUser());
      assert(V == UI.get());

      switch (I->getOpcode()) {
      case Instruction::Load:
        if (!IsAccessSafe(UI, DL.getTypeStoreSize(I->getType()), AllocaPtr,
                          AllocaSize))
          return false;
        break;

      case Instruction::VAArg:
        // "va-arg" from a pointer is safe.
        break;
      case Instruction::Store:
        if (V == I->getOperand(0)) {
          // Stored the pointer - conservatively assume it may be unsafe.
          DEBUG(dbgs() << "[SafeStack] Unsafe alloca: " << *AllocaPtr
                       << "\n store of address: " << *I << "\n");
          return false;
        }

        if (!IsAccessSafe(UI, DL.getTypeStoreSize(I->getOperand(0)->getType()),
                          AllocaPtr, AllocaSize))
          return false;
        break;

      case Instruction::Ret:
        // Information leak.
        return false;

      case Instruction::Call:
      case Instruction::Invoke: {
        ImmutableCallSite CS(I);

        if (const IntrinsicInst *II = dyn_cast<IntrinsicInst>(I)) {
          if (II->getIntrinsicID() == Intrinsic::lifetime_start ||
              II->getIntrinsicID() == Intrinsic::lifetime_end)
            continue;
        }

        if (const MemIntrinsic *MI = dyn_cast<MemIntrinsic>(I)) {
          if (!IsMemIntrinsicSafe(MI, UI, AllocaPtr, AllocaSize)) {
            DEBUG(dbgs() << "[SafeStack] Unsafe alloca: " << *AllocaPtr
                         << "\n unsafe memintrinsic: " << *I
                         << "\n");
            return false;
          }
          continue;
        }

        // LLVM 'nocapture' attribute is only set for arguments whose address
        // is not stored, passed around, or used in any other non-trivial way.
        // We assume that passing a pointer to an object as a 'nocapture
        // readnone' argument is safe.
        // FIXME: a more precise solution would require an interprocedural
        // analysis here, which would look at all uses of an argument inside
        // the function being called.
        ImmutableCallSite::arg_iterator B = CS.arg_begin(), E = CS.arg_end();
        for (ImmutableCallSite::arg_iterator A = B; A != E; ++A)
          if (A->get() == V)
            if (!(CS.doesNotCapture(A - B) && (CS.doesNotAccessMemory(A - B) ||
                                               CS.doesNotAccessMemory()))) {
              DEBUG(dbgs() << "[SafeStack] Unsafe alloca: " << *AllocaPtr
                           << "\n unsafe call: " << *I << "\n");
              return false;
            }
        continue;
      }

      default:
        if (Visited.insert(I).second)
          WorkList.push_back(cast<const Instruction>(I));
      }
    }
  }

  // All uses of the alloca are safe, we can place it on the safe stack.
  return true;
}
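
// Illustration (hypothetical IR): `%v = load i32, i32* %buf` with a provably
// in-bounds address keeps %buf eligible for the safe stack, while
// `store i32* %buf, ...`, `ret i32* %buf`, or `call void @g(i32* %buf)`
// without nocapture + readnone all force %buf onto the unsafe stack.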

Value *SafeStack::getStackGuard(IRBuilder<> &IRB, Function &F) {
  Value *StackGuardVar = TL.getIRStackGuard(IRB);
  if (!StackGuardVar)
    StackGuardVar =
        F.getParent()->getOrInsertGlobal("__stack_chk_guard", StackPtrTy);
  return IRB.CreateLoad(StackGuardVar, "StackGuard");
}

void SafeStack::findInsts(Function &F,
                          SmallVectorImpl<AllocaInst *> &StaticAllocas,
                          SmallVectorImpl<AllocaInst *> &DynamicAllocas,
                          SmallVectorImpl<Argument *> &ByValArguments,
                          SmallVectorImpl<ReturnInst *> &Returns,
                          SmallVectorImpl<Instruction *> &StackRestorePoints) {
  for (Instruction &I : instructions(&F)) {
    if (auto AI = dyn_cast<AllocaInst>(&I)) {
      ++NumAllocas;

      uint64_t Size = getStaticAllocaAllocationSize(AI);
      if (IsSafeStackAlloca(AI, Size))
        continue;

      if (AI->isStaticAlloca()) {
        ++NumUnsafeStaticAllocas;
        StaticAllocas.push_back(AI);
      } else {
        ++NumUnsafeDynamicAllocas;
        DynamicAllocas.push_back(AI);
      }
    } else if (auto RI = dyn_cast<ReturnInst>(&I)) {
      Returns.push_back(RI);
    } else if (auto CI = dyn_cast<CallInst>(&I)) {
      // setjmps require stack restore.
      if (CI->getCalledFunction() && CI->canReturnTwice())
        StackRestorePoints.push_back(CI);
    } else if (auto LP = dyn_cast<LandingPadInst>(&I)) {
      // Exception landing pads require stack restore.
      StackRestorePoints.push_back(LP);
    } else if (auto II = dyn_cast<IntrinsicInst>(&I)) {
      if (II->getIntrinsicID() == Intrinsic::gcroot)
        report_fatal_error(
            "gcroot intrinsic not compatible with safestack attribute");
    }
  }
  for (Argument &Arg : F.args()) {
    if (!Arg.hasByValAttr())
      continue;
    uint64_t Size =
        DL.getTypeStoreSize(Arg.getType()->getPointerElementType());
    if (IsSafeStackAlloca(&Arg, Size))
      continue;

    ++NumUnsafeByValArguments;
    ByValArguments.push_back(&Arg);
  }
}

AllocaInst *
SafeStack::createStackRestorePoints(IRBuilder<> &IRB, Function &F,
                                    ArrayRef<Instruction *> StackRestorePoints,
                                    Value *StaticTop, bool NeedDynamicTop) {
  assert(StaticTop && "The stack top isn't set.");

  if (StackRestorePoints.empty())
    return nullptr;

  // We need the current value of the shadow stack pointer to restore
  // after longjmp or exception catching.

  // FIXME: On some platforms this could be handled by the longjmp/exception
  // runtime itself.

  AllocaInst *DynamicTop = nullptr;
  if (NeedDynamicTop) {
    // If we also have dynamic alloca's, the stack pointer value changes
    // throughout the function. For now we store it in an alloca.
    DynamicTop = IRB.CreateAlloca(StackPtrTy, /*ArraySize=*/nullptr,
                                  "unsafe_stack_dynamic_ptr");
    IRB.CreateStore(StaticTop, DynamicTop);
  }

  // Restore current stack pointer after longjmp/exception catch.
  for (Instruction *I : StackRestorePoints) {
    ++NumUnsafeStackRestorePoints;

    IRB.SetInsertPoint(I->getNextNode());
    Value *CurrentTop = DynamicTop ? IRB.CreateLoad(DynamicTop) : StaticTop;
    IRB.CreateStore(CurrentTop, UnsafeStackPtr);
  }

  return DynamicTop;
}

void SafeStack::checkStackGuard(IRBuilder<> &IRB, Function &F, ReturnInst &RI,
                                AllocaInst *StackGuardSlot, Value *StackGuard) {
  Value *V = IRB.CreateLoad(StackGuardSlot);
  Value *Cmp = IRB.CreateICmpNE(StackGuard, V);
  auto SuccessProb = BranchProbabilityInfo::getBranchProbStackProtector(true);
  auto FailureProb = BranchProbabilityInfo::getBranchProbStackProtector(false);
  MDNode *Weights = MDBuilder(F.getContext())
                        .createBranchWeights(SuccessProb.getNumerator(),
                                             FailureProb.getNumerator());
  Instruction *CheckTerm =
      SplitBlockAndInsertIfThen(Cmp, &RI,
                                /* Unreachable */ true, Weights);
  IRBuilder<> IRBFail(CheckTerm);
  // FIXME: respect -fsanitize-trap / -ftrap-function here?
  Constant *StackChkFail = F.getParent()->getOrInsertFunction(
      "__stack_chk_fail", IRB.getVoidTy());
  IRBFail.CreateCall(StackChkFail, {});
}
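
// Sketch of the emitted guard check (illustrative): the block containing the
// return is split into
//   %v = load the guard slot; %cmp = icmp ne %StackGuard, %v
//   br i1 %cmp, label %fail, label %cont   ; branch weights bias toward %cont
// where %fail calls @__stack_chk_fail() and terminates in unreachable.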

/// We explicitly compute and set the unsafe stack layout for all unsafe
/// static alloca instructions. We save the unsafe "base pointer" in the
/// prologue into a local variable and restore it in the epilogue.
Value *SafeStack::moveStaticAllocasToUnsafeStack(
    IRBuilder<> &IRB, Function &F, ArrayRef<AllocaInst *> StaticAllocas,
    ArrayRef<Argument *> ByValArguments, ArrayRef<ReturnInst *> Returns,
    Instruction *BasePointer, AllocaInst *StackGuardSlot) {
  if (StaticAllocas.empty() && ByValArguments.empty())
    return BasePointer;

  DIBuilder DIB(*F.getParent());

  StackColoring SSC(F, StaticAllocas);
  SSC.run();
  SSC.removeAllMarkers();

  // Unsafe stack always grows down.
  StackLayout SSL(StackAlignment);
  if (StackGuardSlot) {
    Type *Ty = StackGuardSlot->getAllocatedType();
    unsigned Align =
        std::max(DL.getPrefTypeAlignment(Ty), StackGuardSlot->getAlignment());
    SSL.addObject(StackGuardSlot, getStaticAllocaAllocationSize(StackGuardSlot),
                  Align, SSC.getFullLiveRange());
  }

  for (Argument *Arg : ByValArguments) {
    Type *Ty = Arg->getType()->getPointerElementType();
    uint64_t Size = DL.getTypeStoreSize(Ty);
    if (Size == 0)
      Size = 1; // Don't create zero-sized stack objects.

    // Ensure the object is properly aligned.
    unsigned Align = std::max((unsigned)DL.getPrefTypeAlignment(Ty),
                              Arg->getParamAlignment());
    SSL.addObject(Arg, Size, Align, SSC.getFullLiveRange());
  }

  for (AllocaInst *AI : StaticAllocas) {
    Type *Ty = AI->getAllocatedType();
    uint64_t Size = getStaticAllocaAllocationSize(AI);
    if (Size == 0)
      Size = 1; // Don't create zero-sized stack objects.

    // Ensure the object is properly aligned.
    unsigned Align =
        std::max((unsigned)DL.getPrefTypeAlignment(Ty), AI->getAlignment());

    SSL.addObject(AI, Size, Align, SSC.getLiveRange(AI));
  }

  SSL.computeLayout();
  unsigned FrameAlignment = SSL.getFrameAlignment();

  // FIXME: tell SSL that we start at a less-than-MaxAlignment aligned location
  // (AlignmentSkew).
  if (FrameAlignment > StackAlignment) {
    // Re-align the base pointer according to the max requested alignment.
    assert(isPowerOf2_32(FrameAlignment));
    IRB.SetInsertPoint(BasePointer->getNextNode());
    BasePointer = cast<Instruction>(IRB.CreateIntToPtr(
        IRB.CreateAnd(IRB.CreatePtrToInt(BasePointer, IntPtrTy),
                      ConstantInt::get(IntPtrTy, ~uint64_t(FrameAlignment - 1))),
        StackPtrTy));
  }

  IRB.SetInsertPoint(BasePointer->getNextNode());

  if (StackGuardSlot) {
    unsigned Offset = SSL.getObjectOffset(StackGuardSlot);
    Value *Off = IRB.CreateGEP(BasePointer, // BasePointer is i8*
                               ConstantInt::get(Int32Ty, -Offset));
    Value *NewAI =
        IRB.CreateBitCast(Off, StackGuardSlot->getType(), "StackGuardSlot");

    // Replace alloc with the new location.
    StackGuardSlot->replaceAllUsesWith(NewAI);
    StackGuardSlot->eraseFromParent();
  }

  for (Argument *Arg : ByValArguments) {
    unsigned Offset = SSL.getObjectOffset(Arg);
    Type *Ty = Arg->getType()->getPointerElementType();

    uint64_t Size = DL.getTypeStoreSize(Ty);
    if (Size == 0)
      Size = 1; // Don't create zero-sized stack objects.

    Value *Off = IRB.CreateGEP(BasePointer, // BasePointer is i8*
                               ConstantInt::get(Int32Ty, -Offset));
    Value *NewArg = IRB.CreateBitCast(Off, Arg->getType(),
                                      Arg->getName() + ".unsafe-byval");

    // Replace alloc with the new location.
    replaceDbgDeclare(Arg, BasePointer, BasePointer->getNextNode(), DIB,
                      DIExpression::NoDeref, -Offset, DIExpression::NoDeref);
    Arg->replaceAllUsesWith(NewArg);
    IRB.SetInsertPoint(cast<Instruction>(NewArg)->getNextNode());
    IRB.CreateMemCpy(Off, Arg, Size, Arg->getParamAlignment());
  }

  // Allocate space for every unsafe static AllocaInst on the unsafe stack.
  for (AllocaInst *AI : StaticAllocas) {
    IRB.SetInsertPoint(AI);
    unsigned Offset = SSL.getObjectOffset(AI);

    uint64_t Size = getStaticAllocaAllocationSize(AI);
    if (Size == 0)
      Size = 1; // Don't create zero-sized stack objects.

    replaceDbgDeclareForAlloca(AI, BasePointer, DIB, DIExpression::NoDeref,
                               -Offset, DIExpression::NoDeref);
    replaceDbgValueForAlloca(AI, BasePointer, DIB, -Offset);

    // Replace uses of the alloca with the new location.
    // Insert address calculation close to each use to work around PR27844.
    std::string Name = std::string(AI->getName()) + ".unsafe";
    while (!AI->use_empty()) {
      Use &U = *AI->use_begin();
      Instruction *User = cast<Instruction>(U.getUser());

      Instruction *InsertBefore;
      if (auto *PHI = dyn_cast<PHINode>(User))
        InsertBefore = PHI->getIncomingBlock(U)->getTerminator();
      else
        InsertBefore = User;

      IRBuilder<> IRBUser(InsertBefore);
      Value *Off = IRBUser.CreateGEP(BasePointer, // BasePointer is i8*
                                     ConstantInt::get(Int32Ty, -Offset));
      Value *Replacement = IRBUser.CreateBitCast(Off, AI->getType(), Name);

      if (auto *PHI = dyn_cast<PHINode>(User)) {
        // PHI nodes may have multiple incoming edges from the same BB (why??),
        // all must be updated at once with the same incoming value.
        auto *BB = PHI->getIncomingBlock(U);
        for (unsigned I = 0; I < PHI->getNumIncomingValues(); ++I)
          if (PHI->getIncomingBlock(I) == BB)
            PHI->setIncomingValue(I, Replacement);
      } else {
        U.set(Replacement);
      }
    }

    AI->eraseFromParent();
  }

  // Re-align BasePointer so that our callees would see it aligned as
  // expected.
  // FIXME: no need to update BasePointer in leaf functions.
  unsigned FrameSize = alignTo(SSL.getFrameSize(), StackAlignment);

  // Update shadow stack pointer in the function epilogue.
  IRB.SetInsertPoint(BasePointer->getNextNode());

  Value *StaticTop =
      IRB.CreateGEP(BasePointer, ConstantInt::get(Int32Ty, -FrameSize),
                    "unsafe_stack_static_top");
  IRB.CreateStore(StaticTop, UnsafeStackPtr);
  return StaticTop;
}
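
// Illustrative layout (assumed sizes): with StackAlignment = 16, an 8-byte
// guard slot at offset 8 and a 40-byte, 16-byte-aligned buffer at offset 48
// end up at BasePointer-8 and BasePointer-48; the frame size rounds up to 48,
// so StaticTop = BasePointer-48 becomes the new unsafe stack top.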

void SafeStack::moveDynamicAllocasToUnsafeStack(
    Function &F, Value *UnsafeStackPtr, AllocaInst *DynamicTop,
    ArrayRef<AllocaInst *> DynamicAllocas) {
  DIBuilder DIB(*F.getParent());

  for (AllocaInst *AI : DynamicAllocas) {
    IRBuilder<> IRB(AI);

    // Compute the new SP value (after AI).
    Value *ArraySize = AI->getArraySize();
    if (ArraySize->getType() != IntPtrTy)
      ArraySize = IRB.CreateIntCast(ArraySize, IntPtrTy, false);

    Type *Ty = AI->getAllocatedType();
    uint64_t TySize = DL.getTypeAllocSize(Ty);
    Value *Size = IRB.CreateMul(ArraySize, ConstantInt::get(IntPtrTy, TySize));

    Value *SP = IRB.CreatePtrToInt(IRB.CreateLoad(UnsafeStackPtr), IntPtrTy);
    SP = IRB.CreateSub(SP, Size);

    // Align the SP value to satisfy the AllocaInst, type and stack alignments.
    unsigned Align = std::max(
        std::max((unsigned)DL.getPrefTypeAlignment(Ty), AI->getAlignment()),
        (unsigned)StackAlignment);

    assert(isPowerOf2_32(Align));
    Value *NewTop = IRB.CreateIntToPtr(
        IRB.CreateAnd(SP, ConstantInt::get(IntPtrTy, ~uint64_t(Align - 1))),
        StackPtrTy);

    // Save the stack pointer.
    IRB.CreateStore(NewTop, UnsafeStackPtr);
    if (DynamicTop)
      IRB.CreateStore(NewTop, DynamicTop);

    Value *NewAI = IRB.CreatePointerCast(NewTop, AI->getType());
    if (AI->hasName() && isa<Instruction>(NewAI))
      NewAI->takeName(AI);

    replaceDbgDeclareForAlloca(AI, NewAI, DIB, DIExpression::NoDeref, 0,
                               DIExpression::NoDeref);
    AI->replaceAllUsesWith(NewAI);
    AI->eraseFromParent();
  }

  if (!DynamicAllocas.empty()) {
    // Now go through the instructions again, replacing stacksave/stackrestore.
    for (inst_iterator It = inst_begin(&F), Ie = inst_end(&F); It != Ie;) {
      Instruction *I = &*(It++);
      auto II = dyn_cast<IntrinsicInst>(I);
      if (!II)
        continue;

      if (II->getIntrinsicID() == Intrinsic::stacksave) {
        IRBuilder<> IRB(II);
        Instruction *LI = IRB.CreateLoad(UnsafeStackPtr);
        LI->takeName(II);
        II->replaceAllUsesWith(LI);
        II->eraseFromParent();
      } else if (II->getIntrinsicID() == Intrinsic::stackrestore) {
        IRBuilder<> IRB(II);
        Instruction *SI = IRB.CreateStore(II->getArgOperand(0), UnsafeStackPtr);
        SI->takeName(II);
        assert(II->use_empty());
        II->eraseFromParent();
      }
    }
  }
}
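
// Illustration (pseudo-IR, hypothetical names): `%p = alloca i32, i32 %n`
// becomes roughly:
//   %sp  = ptrtoint of the loaded unsafe stack pointer
//   %sp2 = sub %sp, mul(4, %n)                 ; unsafe stack grows down
//   %top = inttoptr(and %sp2, ~(Align - 1))    ; satisfy alignment
//   store %top to UnsafeStackPtr (and to DynamicTop, if present)
//   %p   = pointer-cast of %top to i32*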

bool SafeStack::run() {
  assert(F.hasFnAttribute(Attribute::SafeStack) &&
         "Can't run SafeStack on a function without the attribute");
  assert(!F.isDeclaration() && "Can't run SafeStack on a function declaration");

  ++NumFunctions;

  SmallVector<AllocaInst *, 16> StaticAllocas;
  SmallVector<AllocaInst *, 4> DynamicAllocas;
  SmallVector<Argument *, 4> ByValArguments;
  SmallVector<ReturnInst *, 4> Returns;

  // Collect all points where the stack gets unwound and needs to be restored.
  // This is only necessary because the runtime (setjmp and unwind code) is
  // not aware of the unsafe stack and won't unwind/restore it properly.
  // To work around this problem without changing the runtime, we insert
  // instrumentation to restore the unsafe stack pointer when necessary.
  SmallVector<Instruction *, 4> StackRestorePoints;

  // Find all static and dynamic alloca instructions that must be moved to the
  // unsafe stack, all return instructions and stack restore points.
  findInsts(F, StaticAllocas, DynamicAllocas, ByValArguments, Returns,
            StackRestorePoints);

  if (StaticAllocas.empty() && DynamicAllocas.empty() &&
      ByValArguments.empty() && StackRestorePoints.empty())
    return false; // Nothing to do in this function.

  if (!StaticAllocas.empty() || !DynamicAllocas.empty() ||
      !ByValArguments.empty())
    ++NumUnsafeStackFunctions; // This function has the unsafe stack.

  if (!StackRestorePoints.empty())
    ++NumUnsafeStackRestorePointsFunctions;

  IRBuilder<> IRB(&F.front(), F.begin()->getFirstInsertionPt());
  UnsafeStackPtr = TL.getSafeStackPointerLocation(IRB);

  // Load the current stack pointer (we'll also use it as a base pointer).
  // FIXME: use a dedicated register for it?
  Instruction *BasePointer =
      IRB.CreateLoad(UnsafeStackPtr, false, "unsafe_stack_ptr");
  assert(BasePointer->getType() == StackPtrTy);

  AllocaInst *StackGuardSlot = nullptr;
  // FIXME: implement weaker forms of stack protector.
  if (F.hasFnAttribute(Attribute::StackProtect) ||
      F.hasFnAttribute(Attribute::StackProtectStrong) ||
      F.hasFnAttribute(Attribute::StackProtectReq)) {
    Value *StackGuard = getStackGuard(IRB, F);
    StackGuardSlot = IRB.CreateAlloca(StackPtrTy, nullptr);
    IRB.CreateStore(StackGuard, StackGuardSlot);

    for (ReturnInst *RI : Returns) {
      IRBuilder<> IRBRet(RI);
      checkStackGuard(IRBRet, F, *RI, StackGuardSlot, StackGuard);
    }
  }

  // The top of the unsafe stack after all unsafe static allocas are
  // allocated.
  Value *StaticTop =
      moveStaticAllocasToUnsafeStack(IRB, F, StaticAllocas, ByValArguments,
                                     Returns, BasePointer, StackGuardSlot);

  // Safe stack object that stores the current unsafe stack top. It is updated
  // as unsafe dynamic (non-constant-sized) allocas are allocated and freed.
  // This is only needed if we need to restore stack pointer after longjmp
  // or exceptions, and we have dynamic allocations.
  // FIXME: a better alternative might be to store the unsafe stack pointer
  // before setjmp / invoke instructions.
  AllocaInst *DynamicTop = createStackRestorePoints(
      IRB, F, StackRestorePoints, StaticTop, !DynamicAllocas.empty());

  // Handle dynamic allocas.
  moveDynamicAllocasToUnsafeStack(F, UnsafeStackPtr, DynamicTop,
                                  DynamicAllocas);

  // Restore the unsafe stack pointer before each return.
  for (ReturnInst *RI : Returns) {
    IRB.SetInsertPoint(RI);
    IRB.CreateStore(BasePointer, UnsafeStackPtr);
  }

  DEBUG(dbgs() << "[SafeStack] safestack applied\n");
  return true;
}
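
// Net effect per function (informal summary): the prologue loads the
// thread's unsafe stack pointer as BasePointer and stores BasePointer minus
// the static frame size back as the new top; every return stores BasePointer
// again, popping the entire unsafe frame at once.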

class SafeStackLegacyPass : public FunctionPass {
  const TargetMachine *TM = nullptr;

public:
  static char ID; // Pass identification, replacement for typeid.

  SafeStackLegacyPass() : FunctionPass(ID) {
    initializeSafeStackLegacyPassPass(*PassRegistry::getPassRegistry());
  }

  void getAnalysisUsage(AnalysisUsage &AU) const override {
    AU.addRequired<TargetPassConfig>();
    AU.addRequired<TargetLibraryInfoWrapperPass>();
    AU.addRequired<AssumptionCacheTracker>();
  }

  bool runOnFunction(Function &F) override {
    DEBUG(dbgs() << "[SafeStack] Function: " << F.getName() << "\n");

    if (!F.hasFnAttribute(Attribute::SafeStack)) {
      DEBUG(dbgs() << "[SafeStack] safestack is not requested"
                      " for this function\n");
      return false;
    }

    if (F.isDeclaration()) {
      DEBUG(dbgs() << "[SafeStack] function definition"
                      " is not available\n");
      return false;
    }

    TM = &getAnalysis<TargetPassConfig>().getTM<TargetMachine>();
    auto *TL = TM->getSubtargetImpl(F)->getTargetLowering();
    if (!TL)
      report_fatal_error("TargetLowering instance is required");

    auto *DL = &F.getParent()->getDataLayout();
    auto &TLI = getAnalysis<TargetLibraryInfoWrapperPass>().getTLI();
    auto &ACT = getAnalysis<AssumptionCacheTracker>().getAssumptionCache(F);

    // Compute DT and LI only for functions that have the attribute.
    // This is only useful because the legacy pass manager doesn't let us
    // compute analyses lazily.
    // In the backend pipeline, nothing preserves DT before SafeStack, so we
    // would otherwise always compute it wastefully, even if there is no
    // function with the safestack attribute.
    DominatorTree DT(F);
    LoopInfo LI(DT);

    ScalarEvolution SE(F, TLI, ACT, DT, LI);

    return SafeStack(F, *TL, *DL, SE).run();
  }
};

} // end anonymous namespace

char SafeStackLegacyPass::ID = 0;

INITIALIZE_PASS_BEGIN(SafeStackLegacyPass, DEBUG_TYPE,
                      "Safe Stack instrumentation pass", false, false)
INITIALIZE_PASS_DEPENDENCY(TargetPassConfig)
INITIALIZE_PASS_END(SafeStackLegacyPass, DEBUG_TYPE,
                    "Safe Stack instrumentation pass", false, false)

FunctionPass *llvm::createSafeStackPass() { return new SafeStackLegacyPass(); }