// StackAddrEscapeChecker.cpp
  1. //=== StackAddrEscapeChecker.cpp ----------------------------------*- C++ -*--//
  2. //
  3. // The LLVM Compiler Infrastructure
  4. //
  5. // This file is distributed under the University of Illinois Open Source
  6. // License. See LICENSE.TXT for details.
  7. //
  8. //===----------------------------------------------------------------------===//
  9. //
  10. // This file defines stack address leak checker, which checks if an invalid
  11. // stack address is stored into a global or heap location. See CERT DCL30-C.
  12. //
  13. //===----------------------------------------------------------------------===//
  14. #include "ClangSACheckers.h"
  15. #include "clang/AST/ExprCXX.h"
  16. #include "clang/Basic/SourceManager.h"
  17. #include "clang/StaticAnalyzer/Core/BugReporter/BugType.h"
  18. #include "clang/StaticAnalyzer/Core/Checker.h"
  19. #include "clang/StaticAnalyzer/Core/CheckerManager.h"
  20. #include "clang/StaticAnalyzer/Core/PathSensitive/CallEvent.h"
  21. #include "clang/StaticAnalyzer/Core/PathSensitive/CheckerContext.h"
  22. #include "clang/StaticAnalyzer/Core/PathSensitive/ProgramState.h"
  23. #include "llvm/ADT/SmallString.h"
  24. #include "llvm/Support/raw_ostream.h"
  25. using namespace clang;
  26. using namespace ento;
  27. namespace {
  28. class StackAddrEscapeChecker
  29. : public Checker<check::PreCall, check::PreStmt<ReturnStmt>,
  30. check::EndFunction> {
  31. mutable IdentifierInfo *dispatch_semaphore_tII;
  32. mutable std::unique_ptr<BuiltinBug> BT_stackleak;
  33. mutable std::unique_ptr<BuiltinBug> BT_returnstack;
  34. mutable std::unique_ptr<BuiltinBug> BT_capturedstackasync;
  35. mutable std::unique_ptr<BuiltinBug> BT_capturedstackret;
  36. public:
  37. enum CheckKind {
  38. CK_StackAddrEscapeChecker,
  39. CK_StackAddrAsyncEscapeChecker,
  40. CK_NumCheckKinds
  41. };
  42. DefaultBool ChecksEnabled[CK_NumCheckKinds];
  43. void checkPreCall(const CallEvent &Call, CheckerContext &C) const;
  44. void checkPreStmt(const ReturnStmt *RS, CheckerContext &C) const;
  45. void checkEndFunction(CheckerContext &Ctx) const;
  46. private:
  47. void checkReturnedBlockCaptures(const BlockDataRegion &B,
  48. CheckerContext &C) const;
  49. void checkAsyncExecutedBlockCaptures(const BlockDataRegion &B,
  50. CheckerContext &C) const;
  51. void EmitStackError(CheckerContext &C, const MemRegion *R,
  52. const Expr *RetE) const;
  53. bool isSemaphoreCaptured(const BlockDecl &B) const;
  54. static SourceRange genName(raw_ostream &os, const MemRegion *R,
  55. ASTContext &Ctx);
  56. static SmallVector<const MemRegion *, 4>
  57. getCapturedStackRegions(const BlockDataRegion &B, CheckerContext &C);
  58. static bool isArcManagedBlock(const MemRegion *R, CheckerContext &C);
  59. static bool isNotInCurrentFrame(const MemRegion *R, CheckerContext &C);
  60. };
  61. } // namespace
  62. SourceRange StackAddrEscapeChecker::genName(raw_ostream &os, const MemRegion *R,
  63. ASTContext &Ctx) {
  64. // Get the base region, stripping away fields and elements.
  65. R = R->getBaseRegion();
  66. SourceManager &SM = Ctx.getSourceManager();
  67. SourceRange range;
  68. os << "Address of ";
  69. // Check if the region is a compound literal.
  70. if (const auto *CR = dyn_cast<CompoundLiteralRegion>(R)) {
  71. const CompoundLiteralExpr *CL = CR->getLiteralExpr();
  72. os << "stack memory associated with a compound literal "
  73. "declared on line "
  74. << SM.getExpansionLineNumber(CL->getLocStart()) << " returned to caller";
  75. range = CL->getSourceRange();
  76. } else if (const auto *AR = dyn_cast<AllocaRegion>(R)) {
  77. const Expr *ARE = AR->getExpr();
  78. SourceLocation L = ARE->getLocStart();
  79. range = ARE->getSourceRange();
  80. os << "stack memory allocated by call to alloca() on line "
  81. << SM.getExpansionLineNumber(L);
  82. } else if (const auto *BR = dyn_cast<BlockDataRegion>(R)) {
  83. const BlockDecl *BD = BR->getCodeRegion()->getDecl();
  84. SourceLocation L = BD->getLocStart();
  85. range = BD->getSourceRange();
  86. os << "stack-allocated block declared on line "
  87. << SM.getExpansionLineNumber(L);
  88. } else if (const auto *VR = dyn_cast<VarRegion>(R)) {
  89. os << "stack memory associated with local variable '" << VR->getString()
  90. << '\'';
  91. range = VR->getDecl()->getSourceRange();
  92. } else if (const auto *TOR = dyn_cast<CXXTempObjectRegion>(R)) {
  93. QualType Ty = TOR->getValueType().getLocalUnqualifiedType();
  94. os << "stack memory associated with temporary object of type '";
  95. Ty.print(os, Ctx.getPrintingPolicy());
  96. os << "'";
  97. range = TOR->getExpr()->getSourceRange();
  98. } else {
  99. llvm_unreachable("Invalid region in ReturnStackAddressChecker.");
  100. }
  101. return range;
  102. }
  103. bool StackAddrEscapeChecker::isArcManagedBlock(const MemRegion *R,
  104. CheckerContext &C) {
  105. assert(R && "MemRegion should not be null");
  106. return C.getASTContext().getLangOpts().ObjCAutoRefCount &&
  107. isa<BlockDataRegion>(R);
  108. }
  109. bool StackAddrEscapeChecker::isNotInCurrentFrame(const MemRegion *R,
  110. CheckerContext &C) {
  111. const StackSpaceRegion *S = cast<StackSpaceRegion>(R->getMemorySpace());
  112. return S->getStackFrame() != C.getLocationContext()->getCurrentStackFrame();
  113. }
  114. bool StackAddrEscapeChecker::isSemaphoreCaptured(const BlockDecl &B) const {
  115. if (!dispatch_semaphore_tII)
  116. dispatch_semaphore_tII = &B.getASTContext().Idents.get("dispatch_semaphore_t");
  117. for (const auto &C : B.captures()) {
  118. const auto *T = C.getVariable()->getType()->getAs<TypedefType>();
  119. if (T && T->getDecl()->getIdentifier() == dispatch_semaphore_tII)
  120. return true;
  121. }
  122. return false;
  123. }
  124. SmallVector<const MemRegion *, 4>
  125. StackAddrEscapeChecker::getCapturedStackRegions(const BlockDataRegion &B,
  126. CheckerContext &C) {
  127. SmallVector<const MemRegion *, 4> Regions;
  128. BlockDataRegion::referenced_vars_iterator I = B.referenced_vars_begin();
  129. BlockDataRegion::referenced_vars_iterator E = B.referenced_vars_end();
  130. for (; I != E; ++I) {
  131. SVal Val = C.getState()->getSVal(I.getCapturedRegion());
  132. const MemRegion *Region = Val.getAsRegion();
  133. if (Region && isa<StackSpaceRegion>(Region->getMemorySpace()))
  134. Regions.push_back(Region);
  135. }
  136. return Regions;
  137. }
  138. void StackAddrEscapeChecker::EmitStackError(CheckerContext &C,
  139. const MemRegion *R,
  140. const Expr *RetE) const {
  141. ExplodedNode *N = C.generateNonFatalErrorNode();
  142. if (!N)
  143. return;
  144. if (!BT_returnstack)
  145. BT_returnstack = llvm::make_unique<BuiltinBug>(
  146. this, "Return of address to stack-allocated memory");
  147. // Generate a report for this bug.
  148. SmallString<128> buf;
  149. llvm::raw_svector_ostream os(buf);
  150. SourceRange range = genName(os, R, C.getASTContext());
  151. os << " returned to caller";
  152. auto report = llvm::make_unique<BugReport>(*BT_returnstack, os.str(), N);
  153. report->addRange(RetE->getSourceRange());
  154. if (range.isValid())
  155. report->addRange(range);
  156. C.emitReport(std::move(report));
  157. }
  158. void StackAddrEscapeChecker::checkAsyncExecutedBlockCaptures(
  159. const BlockDataRegion &B, CheckerContext &C) const {
  160. // There is a not-too-uncommon idiom
  161. // where a block passed to dispatch_async captures a semaphore
  162. // and then the thread (which called dispatch_async) is blocked on waiting
  163. // for the completion of the execution of the block
  164. // via dispatch_semaphore_wait. To avoid false-positives (for now)
  165. // we ignore all the blocks which have captured
  166. // a variable of the type "dispatch_semaphore_t".
  167. if (isSemaphoreCaptured(*B.getDecl()))
  168. return;
  169. for (const MemRegion *Region : getCapturedStackRegions(B, C)) {
  170. // The block passed to dispatch_async may capture another block
  171. // created on the stack. However, there is no leak in this situaton,
  172. // no matter if ARC or no ARC is enabled:
  173. // dispatch_async copies the passed "outer" block (via Block_copy)
  174. // and if the block has captured another "inner" block,
  175. // the "inner" block will be copied as well.
  176. if (isa<BlockDataRegion>(Region))
  177. continue;
  178. ExplodedNode *N = C.generateNonFatalErrorNode();
  179. if (!N)
  180. continue;
  181. if (!BT_capturedstackasync)
  182. BT_capturedstackasync = llvm::make_unique<BuiltinBug>(
  183. this, "Address of stack-allocated memory is captured");
  184. SmallString<128> Buf;
  185. llvm::raw_svector_ostream Out(Buf);
  186. SourceRange Range = genName(Out, Region, C.getASTContext());
  187. Out << " is captured by an asynchronously-executed block";
  188. auto Report =
  189. llvm::make_unique<BugReport>(*BT_capturedstackasync, Out.str(), N);
  190. if (Range.isValid())
  191. Report->addRange(Range);
  192. C.emitReport(std::move(Report));
  193. }
  194. }
  195. void StackAddrEscapeChecker::checkReturnedBlockCaptures(
  196. const BlockDataRegion &B, CheckerContext &C) const {
  197. for (const MemRegion *Region : getCapturedStackRegions(B, C)) {
  198. if (isArcManagedBlock(Region, C) || isNotInCurrentFrame(Region, C))
  199. continue;
  200. ExplodedNode *N = C.generateNonFatalErrorNode();
  201. if (!N)
  202. continue;
  203. if (!BT_capturedstackret)
  204. BT_capturedstackret = llvm::make_unique<BuiltinBug>(
  205. this, "Address of stack-allocated memory is captured");
  206. SmallString<128> Buf;
  207. llvm::raw_svector_ostream Out(Buf);
  208. SourceRange Range = genName(Out, Region, C.getASTContext());
  209. Out << " is captured by a returned block";
  210. auto Report =
  211. llvm::make_unique<BugReport>(*BT_capturedstackret, Out.str(), N);
  212. if (Range.isValid())
  213. Report->addRange(Range);
  214. C.emitReport(std::move(Report));
  215. }
  216. }
  217. void StackAddrEscapeChecker::checkPreCall(const CallEvent &Call,
  218. CheckerContext &C) const {
  219. if (!ChecksEnabled[CK_StackAddrAsyncEscapeChecker])
  220. return;
  221. if (!Call.isGlobalCFunction("dispatch_after") &&
  222. !Call.isGlobalCFunction("dispatch_async"))
  223. return;
  224. for (unsigned Idx = 0, NumArgs = Call.getNumArgs(); Idx < NumArgs; ++Idx) {
  225. if (const BlockDataRegion *B = dyn_cast_or_null<BlockDataRegion>(
  226. Call.getArgSVal(Idx).getAsRegion()))
  227. checkAsyncExecutedBlockCaptures(*B, C);
  228. }
  229. }
/// Diagnose return statements whose value refers to stack memory of the
/// current frame (local variable, alloca, stack block, temporary, compound
/// literal). Several suppressions below are order-dependent.
void StackAddrEscapeChecker::checkPreStmt(const ReturnStmt *RS,
                                          CheckerContext &C) const {
  if (!ChecksEnabled[CK_StackAddrEscapeChecker])
    return;
  const Expr *RetE = RS->getRetValue();
  if (!RetE)
    return;
  RetE = RetE->IgnoreParens();
  SVal V = C.getSVal(RetE);
  const MemRegion *R = V.getAsRegion();
  if (!R)
    return;
  // A returned block escapes together with everything it captured, so check
  // its captures separately (may emit its own reports) before the filters
  // below can bail out.
  if (const BlockDataRegion *B = dyn_cast<BlockDataRegion>(R))
    checkReturnedBlockCaptures(*B, C);
  // Only complain about stack memory owned by the frame that is returning;
  // ARC-managed blocks are copied and do not dangle.
  if (!isa<StackSpaceRegion>(R->getMemorySpace()) ||
      isNotInCurrentFrame(R, C) || isArcManagedBlock(R, C))
    return;
  // Returning a record by value is fine. (In this case, the returned
  // expression will be a copy-constructor, possibly wrapped in an
  // ExprWithCleanups node.)
  if (const ExprWithCleanups *Cleanup = dyn_cast<ExprWithCleanups>(RetE))
    RetE = Cleanup->getSubExpr();
  if (isa<CXXConstructExpr>(RetE) && RetE->getType()->isRecordType())
    return;
  // The CK_CopyAndAutoreleaseBlockObject cast causes the block to be copied
  // so the stack address is not escaping here.
  if (auto *ICE = dyn_cast<ImplicitCastExpr>(RetE)) {
    if (isa<BlockDataRegion>(R) &&
        ICE->getCastKind() == CK_CopyAndAutoreleaseBlockObject) {
      return;
    }
  }
  EmitStackError(C, R, RetE);
}
/// At the end of a function, scan the store for global variables that still
/// bind a region from the current frame's stack and report each one as a
/// dangling reference.
void StackAddrEscapeChecker::checkEndFunction(CheckerContext &Ctx) const {
  if (!ChecksEnabled[CK_StackAddrEscapeChecker])
    return;
  ProgramStateRef State = Ctx.getState();
  // Iterate over all bindings to global variables and see if it contains
  // a memory region in the stack space.
  class CallBack : public StoreManager::BindingsHandler {
  private:
    CheckerContext &Ctx;
    const StackFrameContext *CurSFC;

  public:
    // Pairs of (global region, stack region currently bound to it).
    SmallVector<std::pair<const MemRegion *, const MemRegion *>, 10> V;
    CallBack(CheckerContext &CC)
        : Ctx(CC), CurSFC(CC.getLocationContext()->getCurrentStackFrame()) {}
    // Invoked once per store binding; always returns true so iteration
    // visits every binding.
    bool HandleBinding(StoreManager &SMgr, Store S, const MemRegion *Region,
                       SVal Val) override {
      // Only bindings whose target lives in global storage are relevant.
      if (!isa<GlobalsSpaceRegion>(Region->getMemorySpace()))
        return true;
      const MemRegion *VR = Val.getAsRegion();
      // Record the pair if the bound value is stack memory of the current
      // frame (ARC-managed blocks are copied, so they are exempt).
      if (VR && isa<StackSpaceRegion>(VR->getMemorySpace()) &&
          !isArcManagedBlock(VR, Ctx) && !isNotInCurrentFrame(VR, Ctx))
        V.emplace_back(Region, VR);
      return true;
    }
  };
  CallBack Cb(Ctx);
  State->getStateManager().getStoreManager().iterBindings(State->getStore(),
                                                          Cb);
  if (Cb.V.empty())
    return;
  // Generate an error node. All leaks found below share this node.
  ExplodedNode *N = Ctx.generateNonFatalErrorNode(State);
  if (!N)
    return;
  if (!BT_stackleak)
    BT_stackleak = llvm::make_unique<BuiltinBug>(
        this, "Stack address stored into global variable",
        "Stack address was saved into a global variable. "
        "This is dangerous because the address will become "
        "invalid after returning from the function");
  for (const auto &P : Cb.V) {
    // Generate a report for this bug.
    SmallString<128> Buf;
    llvm::raw_svector_ostream Out(Buf);
    SourceRange Range = genName(Out, P.second, Ctx.getASTContext());
    Out << " is still referred to by the ";
    if (isa<StaticGlobalSpaceRegion>(P.first->getMemorySpace()))
      Out << "static";
    else
      Out << "global";
    Out << " variable '";
    const VarRegion *VR = cast<VarRegion>(P.first->getBaseRegion());
    Out << *VR->getDecl()
        << "' upon returning to the caller. This will be a dangling reference";
    auto Report = llvm::make_unique<BugReport>(*BT_stackleak, Out.str(), N);
    if (Range.isValid())
      Report->addRange(Range);
    Ctx.emitReport(std::move(Report));
  }
}
// Registration boilerplate: both sub-checkers share one
// StackAddrEscapeChecker instance and simply flip their own enable flag.
#define REGISTER_CHECKER(name)                                                 \
  void ento::register##name(CheckerManager &Mgr) {                             \
    StackAddrEscapeChecker *Chk =                                              \
        Mgr.registerChecker<StackAddrEscapeChecker>();                         \
    Chk->ChecksEnabled[StackAddrEscapeChecker::CK_##name] = true;              \
  }

REGISTER_CHECKER(StackAddrEscapeChecker)
REGISTER_CHECKER(StackAddrAsyncEscapeChecker)