//===-- Lint.cpp - Check for common errors in LLVM IR ---------------------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This pass statically checks for common and easily-identified constructs
// which produce undefined or likely unintended behavior in LLVM IR.
//
// It is not a guarantee of correctness, in two ways. First, it isn't
// comprehensive. There are checks which could be done statically which are
// not yet implemented. Some of these are indicated by TODO comments, but
// those aren't comprehensive either. Second, many conditions cannot be
// checked statically. This pass does no dynamic instrumentation, so it
// can't check for all possible problems.
//
// Another limitation is that it assumes all code will be executed. A store
// through a null pointer in a basic block which is never reached is harmless,
// but this pass will warn about it anyway. This is the main reason why most
// of these checks live here instead of in the Verifier pass.
//
// Optimization passes may make conditions that this pass checks for more or
// less obvious. If an optimization pass appears to be introducing a warning,
// it may be that the optimization pass is merely exposing an existing
// condition in the code.
//
// This code may be run before instcombine. In many cases, instcombine checks
// for the same kinds of things and turns instructions with undefined behavior
// into unreachable (or equivalent). Because of this, this pass makes some
// effort to look through bitcasts and so on.
//
//===----------------------------------------------------------------------===//
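
// For illustration only (not part of the checks themselves): given IR such as
//
//   store i32 0, i32* null
//   %q = sdiv i32 %x, 0
//
// this pass reports "Undefined behavior: Null pointer dereference" and
// "Undefined behavior: Division by zero" (see visitMemoryReference and
// visitSDiv below). It only prints diagnostics; it never modifies the IR.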

#include "llvm/Analysis/Lint.h"
#include "llvm/ADT/STLExtras.h"
#include "llvm/Analysis/AliasAnalysis.h"
#include "llvm/Analysis/ConstantFolding.h"
#include "llvm/Analysis/InstructionSimplify.h"
#include "llvm/Analysis/Loads.h"
#include "llvm/Analysis/Passes.h"
#include "llvm/Analysis/ValueTracking.h"
#include "llvm/IR/DataLayout.h"
#include "llvm/IR/Dominators.h"
#include "llvm/IR/Function.h"
#include "llvm/IR/IntrinsicInst.h"
#include "llvm/InstVisitor.h"
#include "llvm/Pass.h"
#include "llvm/PassManager.h"
#include "llvm/Support/CallSite.h"
#include "llvm/Support/Debug.h"
#include "llvm/Support/raw_ostream.h"
#include "llvm/Target/TargetLibraryInfo.h"

using namespace llvm;

namespace {
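  // Flags describing how a pointer operand is used by an instruction. They are
  // OR'd together and passed to visitMemoryReference below.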
  namespace MemRef {
    static unsigned Read     = 1;
    static unsigned Write    = 2;
    static unsigned Callee   = 4;
    static unsigned Branchee = 8;
  }

  class Lint : public FunctionPass, public InstVisitor<Lint> {
    friend class InstVisitor<Lint>;

    void visitFunction(Function &F);

    void visitCallSite(CallSite CS);
    void visitMemoryReference(Instruction &I, Value *Ptr,
                              uint64_t Size, unsigned Align,
                              Type *Ty, unsigned Flags);
    void visitCallInst(CallInst &I);
    void visitInvokeInst(InvokeInst &I);
    void visitReturnInst(ReturnInst &I);
    void visitLoadInst(LoadInst &I);
    void visitStoreInst(StoreInst &I);
    void visitXor(BinaryOperator &I);
    void visitSub(BinaryOperator &I);
    void visitLShr(BinaryOperator &I);
    void visitAShr(BinaryOperator &I);
    void visitShl(BinaryOperator &I);
    void visitSDiv(BinaryOperator &I);
    void visitUDiv(BinaryOperator &I);
    void visitSRem(BinaryOperator &I);
    void visitURem(BinaryOperator &I);
    void visitAllocaInst(AllocaInst &I);
    void visitVAArgInst(VAArgInst &I);
    void visitIndirectBrInst(IndirectBrInst &I);
    void visitExtractElementInst(ExtractElementInst &I);
    void visitInsertElementInst(InsertElementInst &I);
    void visitUnreachableInst(UnreachableInst &I);

    Value *findValue(Value *V, bool OffsetOk) const;
    Value *findValueImpl(Value *V, bool OffsetOk,
                         SmallPtrSet<Value *, 4> &Visited) const;

  public:
    Module *Mod;
    AliasAnalysis *AA;
    DominatorTree *DT;
    const DataLayout *DL;
    TargetLibraryInfo *TLI;

    std::string Messages;
    raw_string_ostream MessagesStr;

    static char ID; // Pass identification, replacement for typeid
    Lint() : FunctionPass(ID), MessagesStr(Messages) {
      initializeLintPass(*PassRegistry::getPassRegistry());
    }

    virtual bool runOnFunction(Function &F);

    virtual void getAnalysisUsage(AnalysisUsage &AU) const {
      AU.setPreservesAll();
      AU.addRequired<AliasAnalysis>();
      AU.addRequired<TargetLibraryInfo>();
      AU.addRequired<DominatorTreeWrapperPass>();
    }
    virtual void print(raw_ostream &O, const Module *M) const {}

    void WriteValue(const Value *V) {
      if (!V) return;
      if (isa<Instruction>(V)) {
        MessagesStr << *V << '\n';
      } else {
        V->printAsOperand(MessagesStr, true, Mod);
        MessagesStr << '\n';
      }
    }

    // CheckFailed - A check failed, so print out the condition and the message
    // that failed. This provides a nice place to put a breakpoint if you want
    // to see why something is not correct.
    void CheckFailed(const Twine &Message,
                     const Value *V1 = 0, const Value *V2 = 0,
                     const Value *V3 = 0, const Value *V4 = 0) {
      MessagesStr << Message.str() << "\n";
      WriteValue(V1);
      WriteValue(V2);
      WriteValue(V3);
      WriteValue(V4);
    }
  };
}

char Lint::ID = 0;
INITIALIZE_PASS_BEGIN(Lint, "lint", "Statically lint-checks LLVM IR",
                      false, true)
INITIALIZE_PASS_DEPENDENCY(TargetLibraryInfo)
INITIALIZE_PASS_DEPENDENCY(DominatorTreeWrapperPass)
INITIALIZE_AG_DEPENDENCY(AliasAnalysis)
INITIALIZE_PASS_END(Lint, "lint", "Statically lint-checks LLVM IR",
                    false, true)

// Assert - We know that cond should be true, if not print an error message.
#define Assert(C, M) \
    do { if (!(C)) { CheckFailed(M); return; } } while (0)
#define Assert1(C, M, V1) \
    do { if (!(C)) { CheckFailed(M, V1); return; } } while (0)
#define Assert2(C, M, V1, V2) \
    do { if (!(C)) { CheckFailed(M, V1, V2); return; } } while (0)
#define Assert3(C, M, V1, V2, V3) \
    do { if (!(C)) { CheckFailed(M, V1, V2, V3); return; } } while (0)
#define Assert4(C, M, V1, V2, V3, V4) \
    do { if (!(C)) { CheckFailed(M, V1, V2, V3, V4); return; } } while (0)

// Lint::run - This is the main Analysis entry point for a
// function.
//
bool Lint::runOnFunction(Function &F) {
  Mod = F.getParent();
  AA = &getAnalysis<AliasAnalysis>();
  DT = &getAnalysis<DominatorTreeWrapperPass>().getDomTree();
  DataLayoutPass *DLP = getAnalysisIfAvailable<DataLayoutPass>();
  DL = DLP ? &DLP->getDataLayout() : 0;
  TLI = &getAnalysis<TargetLibraryInfo>();
  visit(F);
  dbgs() << MessagesStr.str();
  Messages.clear();
  return false;
}

void Lint::visitFunction(Function &F) {
  // This isn't undefined behavior, it's just a little unusual, and it's a
  // fairly common mistake to neglect to name a function.
  Assert1(F.hasName() || F.hasLocalLinkage(),
          "Unusual: Unnamed function with non-local linkage", &F);

  // TODO: Check for irreducible control flow.
}
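
// visitCallSite - Check a call or invoke: verify that the call's calling
// convention, argument count, and argument types match the callee, check
// noalias and sret arguments, check that tail calls don't reference allocas,
// and check the memory behavior of several common intrinsics.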
void Lint::visitCallSite(CallSite CS) {
  Instruction &I = *CS.getInstruction();
  Value *Callee = CS.getCalledValue();

  visitMemoryReference(I, Callee, AliasAnalysis::UnknownSize,
                       0, 0, MemRef::Callee);

  if (Function *F = dyn_cast<Function>(findValue(Callee, /*OffsetOk=*/false))) {
    Assert1(CS.getCallingConv() == F->getCallingConv(),
            "Undefined behavior: Caller and callee calling convention differ",
            &I);

    FunctionType *FT = F->getFunctionType();
    unsigned NumActualArgs = CS.arg_size();

    Assert1(FT->isVarArg() ?
              FT->getNumParams() <= NumActualArgs :
              FT->getNumParams() == NumActualArgs,
            "Undefined behavior: Call argument count mismatches callee "
            "argument count", &I);

    Assert1(FT->getReturnType() == I.getType(),
            "Undefined behavior: Call return type mismatches "
            "callee return type", &I);

    // Check argument types (in case the callee was casted) and attributes.
    // TODO: Verify that caller and callee attributes are compatible.
    Function::arg_iterator PI = F->arg_begin(), PE = F->arg_end();
    CallSite::arg_iterator AI = CS.arg_begin(), AE = CS.arg_end();
    for (; AI != AE; ++AI) {
      Value *Actual = *AI;
      if (PI != PE) {
        Argument *Formal = PI++;
        Assert1(Formal->getType() == Actual->getType(),
                "Undefined behavior: Call argument type mismatches "
                "callee parameter type", &I);

        // Check that noalias arguments don't alias other arguments. This is
        // not fully precise because we don't know the sizes of the dereferenced
        // memory regions.
        if (Formal->hasNoAliasAttr() && Actual->getType()->isPointerTy())
          for (CallSite::arg_iterator BI = CS.arg_begin(); BI != AE; ++BI)
            if (AI != BI && (*BI)->getType()->isPointerTy()) {
              AliasAnalysis::AliasResult Result = AA->alias(*AI, *BI);
              Assert1(Result != AliasAnalysis::MustAlias &&
                      Result != AliasAnalysis::PartialAlias,
                      "Unusual: noalias argument aliases another argument", &I);
            }

        // Check that an sret argument points to valid memory.
        if (Formal->hasStructRetAttr() && Actual->getType()->isPointerTy()) {
          Type *Ty =
            cast<PointerType>(Formal->getType())->getElementType();
          visitMemoryReference(I, Actual, AA->getTypeStoreSize(Ty),
                               DL ? DL->getABITypeAlignment(Ty) : 0,
                               Ty, MemRef::Read | MemRef::Write);
        }
      }
    }
  }

  if (CS.isCall() && cast<CallInst>(CS.getInstruction())->isTailCall())
    for (CallSite::arg_iterator AI = CS.arg_begin(), AE = CS.arg_end();
         AI != AE; ++AI) {
      Value *Obj = findValue(*AI, /*OffsetOk=*/true);
      Assert1(!isa<AllocaInst>(Obj),
              "Undefined behavior: Call with \"tail\" keyword references "
              "alloca", &I);
    }

  if (IntrinsicInst *II = dyn_cast<IntrinsicInst>(&I))
    switch (II->getIntrinsicID()) {
    default: break;

    // TODO: Check more intrinsics

    case Intrinsic::memcpy: {
      MemCpyInst *MCI = cast<MemCpyInst>(&I);
      // TODO: If the size is known, use it.
      visitMemoryReference(I, MCI->getDest(), AliasAnalysis::UnknownSize,
                           MCI->getAlignment(), 0,
                           MemRef::Write);
      visitMemoryReference(I, MCI->getSource(), AliasAnalysis::UnknownSize,
                           MCI->getAlignment(), 0,
                           MemRef::Read);

      // Check that the memcpy arguments don't overlap. The AliasAnalysis API
      // isn't expressive enough for what we really want to do. Known partial
      // overlap is not distinguished from the case where nothing is known.
      uint64_t Size = 0;
      if (const ConstantInt *Len =
            dyn_cast<ConstantInt>(findValue(MCI->getLength(),
                                            /*OffsetOk=*/false)))
        if (Len->getValue().isIntN(32))
          Size = Len->getValue().getZExtValue();
      Assert1(AA->alias(MCI->getSource(), Size, MCI->getDest(), Size) !=
              AliasAnalysis::MustAlias,
              "Undefined behavior: memcpy source and destination overlap", &I);
      break;
    }
    case Intrinsic::memmove: {
      MemMoveInst *MMI = cast<MemMoveInst>(&I);
      // TODO: If the size is known, use it.
      visitMemoryReference(I, MMI->getDest(), AliasAnalysis::UnknownSize,
                           MMI->getAlignment(), 0,
                           MemRef::Write);
      visitMemoryReference(I, MMI->getSource(), AliasAnalysis::UnknownSize,
                           MMI->getAlignment(), 0,
                           MemRef::Read);
      break;
    }
    case Intrinsic::memset: {
      MemSetInst *MSI = cast<MemSetInst>(&I);
      // TODO: If the size is known, use it.
      visitMemoryReference(I, MSI->getDest(), AliasAnalysis::UnknownSize,
                           MSI->getAlignment(), 0,
                           MemRef::Write);
      break;
    }

    case Intrinsic::vastart:
      Assert1(I.getParent()->getParent()->isVarArg(),
              "Undefined behavior: va_start called in a non-varargs function",
              &I);

      visitMemoryReference(I, CS.getArgument(0), AliasAnalysis::UnknownSize,
                           0, 0, MemRef::Read | MemRef::Write);
      break;
    case Intrinsic::vacopy:
      visitMemoryReference(I, CS.getArgument(0), AliasAnalysis::UnknownSize,
                           0, 0, MemRef::Write);
      visitMemoryReference(I, CS.getArgument(1), AliasAnalysis::UnknownSize,
                           0, 0, MemRef::Read);
      break;
    case Intrinsic::vaend:
      visitMemoryReference(I, CS.getArgument(0), AliasAnalysis::UnknownSize,
                           0, 0, MemRef::Read | MemRef::Write);
      break;

    case Intrinsic::stackrestore:
      // Stackrestore doesn't read or write memory, but it sets the
      // stack pointer, which the compiler may read from or write to
      // at any time, so check it for both readability and writeability.
      visitMemoryReference(I, CS.getArgument(0), AliasAnalysis::UnknownSize,
                           0, 0, MemRef::Read | MemRef::Write);
      break;
    }
}

void Lint::visitCallInst(CallInst &I) {
  return visitCallSite(&I);
}

void Lint::visitInvokeInst(InvokeInst &I) {
  return visitCallSite(&I);
}

void Lint::visitReturnInst(ReturnInst &I) {
  Function *F = I.getParent()->getParent();
  Assert1(!F->doesNotReturn(),
          "Unusual: Return statement in function with noreturn attribute",
          &I);

  if (Value *V = I.getReturnValue()) {
    Value *Obj = findValue(V, /*OffsetOk=*/true);
    Assert1(!isa<AllocaInst>(Obj),
            "Unusual: Returning alloca value", &I);
  }
}

// TODO: Check that the reference is in bounds.
// TODO: Check readnone/readonly function attributes.
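// visitMemoryReference - Check a memory access: Ptr is the pointer operand,
// Size is the number of bytes accessed (AliasAnalysis::UnknownSize if
// unknown), Align is the claimed alignment (0 if unspecified), Ty is the
// accessed type if known, and Flags is a bitwise OR of the MemRef::* values
// defined above.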
void Lint::visitMemoryReference(Instruction &I,
                                Value *Ptr, uint64_t Size, unsigned Align,
                                Type *Ty, unsigned Flags) {
  // If no memory is being referenced, it doesn't matter if the pointer
  // is valid.
  if (Size == 0)
    return;

  Value *UnderlyingObject = findValue(Ptr, /*OffsetOk=*/true);
  Assert1(!isa<ConstantPointerNull>(UnderlyingObject),
          "Undefined behavior: Null pointer dereference", &I);
  Assert1(!isa<UndefValue>(UnderlyingObject),
          "Undefined behavior: Undef pointer dereference", &I);
  Assert1(!isa<ConstantInt>(UnderlyingObject) ||
          !cast<ConstantInt>(UnderlyingObject)->isAllOnesValue(),
          "Unusual: All-ones pointer dereference", &I);
  Assert1(!isa<ConstantInt>(UnderlyingObject) ||
          !cast<ConstantInt>(UnderlyingObject)->isOne(),
          "Unusual: Address one pointer dereference", &I);

  if (Flags & MemRef::Write) {
    if (const GlobalVariable *GV = dyn_cast<GlobalVariable>(UnderlyingObject))
      Assert1(!GV->isConstant(),
              "Undefined behavior: Write to read-only memory", &I);
    Assert1(!isa<Function>(UnderlyingObject) &&
            !isa<BlockAddress>(UnderlyingObject),
            "Undefined behavior: Write to text section", &I);
  }
  if (Flags & MemRef::Read) {
    Assert1(!isa<Function>(UnderlyingObject),
            "Unusual: Load from function body", &I);
    Assert1(!isa<BlockAddress>(UnderlyingObject),
            "Undefined behavior: Load from block address", &I);
  }
  if (Flags & MemRef::Callee) {
    Assert1(!isa<BlockAddress>(UnderlyingObject),
            "Undefined behavior: Call to block address", &I);
  }
  if (Flags & MemRef::Branchee) {
    Assert1(!isa<Constant>(UnderlyingObject) ||
            isa<BlockAddress>(UnderlyingObject),
            "Undefined behavior: Branch to non-blockaddress", &I);
  }

  // Check for buffer overflows and misalignment.
  // Only handles memory references that read/write something simple like an
  // alloca instruction or a global variable.
  int64_t Offset = 0;
  if (Value *Base = GetPointerBaseWithConstantOffset(Ptr, Offset, DL)) {
    // OK, so the access is to a constant offset from Ptr. Check that Ptr is
    // something we can handle and if so extract the size of this base object
    // along with its alignment.
    uint64_t BaseSize = AliasAnalysis::UnknownSize;
    unsigned BaseAlign = 0;

    if (AllocaInst *AI = dyn_cast<AllocaInst>(Base)) {
      Type *ATy = AI->getAllocatedType();
      if (DL && !AI->isArrayAllocation() && ATy->isSized())
        BaseSize = DL->getTypeAllocSize(ATy);
      BaseAlign = AI->getAlignment();
      if (DL && BaseAlign == 0 && ATy->isSized())
        BaseAlign = DL->getABITypeAlignment(ATy);
    } else if (GlobalVariable *GV = dyn_cast<GlobalVariable>(Base)) {
      // If the global may be defined differently in another compilation unit
      // then don't warn about funky memory accesses.
      if (GV->hasDefinitiveInitializer()) {
        Type *GTy = GV->getType()->getElementType();
        if (DL && GTy->isSized())
          BaseSize = DL->getTypeAllocSize(GTy);
        BaseAlign = GV->getAlignment();
        if (DL && BaseAlign == 0 && GTy->isSized())
          BaseAlign = DL->getABITypeAlignment(GTy);
      }
    }

    // Accesses from before the start or after the end of the object are not
    // defined.
    Assert1(Size == AliasAnalysis::UnknownSize ||
            BaseSize == AliasAnalysis::UnknownSize ||
            (Offset >= 0 && Offset + Size <= BaseSize),
            "Undefined behavior: Buffer overflow", &I);

    // Accesses that say that the memory is more aligned than it is are not
    // defined.
    if (DL && Align == 0 && Ty && Ty->isSized())
      Align = DL->getABITypeAlignment(Ty);
    Assert1(!BaseAlign || Align <= MinAlign(BaseAlign, Offset),
            "Undefined behavior: Memory reference address is misaligned", &I);
  }
}

void Lint::visitLoadInst(LoadInst &I) {
  visitMemoryReference(I, I.getPointerOperand(),
                       AA->getTypeStoreSize(I.getType()), I.getAlignment(),
                       I.getType(), MemRef::Read);
}

void Lint::visitStoreInst(StoreInst &I) {
  visitMemoryReference(I, I.getPointerOperand(),
                       AA->getTypeStoreSize(I.getOperand(0)->getType()),
                       I.getAlignment(),
                       I.getOperand(0)->getType(), MemRef::Write);
}

void Lint::visitXor(BinaryOperator &I) {
  Assert1(!isa<UndefValue>(I.getOperand(0)) ||
          !isa<UndefValue>(I.getOperand(1)),
          "Undefined result: xor(undef, undef)", &I);
}

void Lint::visitSub(BinaryOperator &I) {
  Assert1(!isa<UndefValue>(I.getOperand(0)) ||
          !isa<UndefValue>(I.getOperand(1)),
          "Undefined result: sub(undef, undef)", &I);
}

void Lint::visitLShr(BinaryOperator &I) {
  if (ConstantInt *CI =
        dyn_cast<ConstantInt>(findValue(I.getOperand(1), /*OffsetOk=*/false)))
    Assert1(CI->getValue().ult(cast<IntegerType>(I.getType())->getBitWidth()),
            "Undefined result: Shift count out of range", &I);
}

void Lint::visitAShr(BinaryOperator &I) {
  if (ConstantInt *CI =
        dyn_cast<ConstantInt>(findValue(I.getOperand(1), /*OffsetOk=*/false)))
    Assert1(CI->getValue().ult(cast<IntegerType>(I.getType())->getBitWidth()),
            "Undefined result: Shift count out of range", &I);
}

void Lint::visitShl(BinaryOperator &I) {
  if (ConstantInt *CI =
        dyn_cast<ConstantInt>(findValue(I.getOperand(1), /*OffsetOk=*/false)))
    Assert1(CI->getValue().ult(cast<IntegerType>(I.getType())->getBitWidth()),
            "Undefined result: Shift count out of range", &I);
}
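
// isZero - Check whether the divisor is known to be zero, treating undef (or
// any undef element of a vector) as possibly zero. Vectors are checked element
// by element, since KnownZero is only all-ones for a vector operand when every
// element is known to be zero.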
static bool isZero(Value *V, const DataLayout *DL) {
  // Assume undef could be zero.
  if (isa<UndefValue>(V))
    return true;

  VectorType *VecTy = dyn_cast<VectorType>(V->getType());
  if (!VecTy) {
    unsigned BitWidth = V->getType()->getIntegerBitWidth();
    APInt KnownZero(BitWidth, 0), KnownOne(BitWidth, 0);
    ComputeMaskedBits(V, KnownZero, KnownOne, DL);
    return KnownZero.isAllOnesValue();
  }

  // Per-component check doesn't work with zeroinitializer
  Constant *C = dyn_cast<Constant>(V);
  if (!C)
    return false;

  if (C->isZeroValue())
    return true;

  // For a vector, KnownZero will only be true if all values are zero, so check
  // this per component
  unsigned BitWidth = VecTy->getElementType()->getIntegerBitWidth();
  for (unsigned I = 0, N = VecTy->getNumElements(); I != N; ++I) {
    Constant *Elem = C->getAggregateElement(I);
    if (isa<UndefValue>(Elem))
      return true;

    APInt KnownZero(BitWidth, 0), KnownOne(BitWidth, 0);
    ComputeMaskedBits(Elem, KnownZero, KnownOne, DL);
    if (KnownZero.isAllOnesValue())
      return true;
  }

  return false;
}

void Lint::visitSDiv(BinaryOperator &I) {
  Assert1(!isZero(I.getOperand(1), DL),
          "Undefined behavior: Division by zero", &I);
}

void Lint::visitUDiv(BinaryOperator &I) {
  Assert1(!isZero(I.getOperand(1), DL),
          "Undefined behavior: Division by zero", &I);
}

void Lint::visitSRem(BinaryOperator &I) {
  Assert1(!isZero(I.getOperand(1), DL),
          "Undefined behavior: Division by zero", &I);
}

void Lint::visitURem(BinaryOperator &I) {
  Assert1(!isZero(I.getOperand(1), DL),
          "Undefined behavior: Division by zero", &I);
}

void Lint::visitAllocaInst(AllocaInst &I) {
  if (isa<ConstantInt>(I.getArraySize()))
    // This isn't undefined behavior, it's just an obvious pessimization.
    Assert1(&I.getParent()->getParent()->getEntryBlock() == I.getParent(),
            "Pessimization: Static alloca outside of entry block", &I);

  // TODO: Check for an unusual size (MSB set?)
}

void Lint::visitVAArgInst(VAArgInst &I) {
  visitMemoryReference(I, I.getOperand(0), AliasAnalysis::UnknownSize, 0, 0,
                       MemRef::Read | MemRef::Write);
}

void Lint::visitIndirectBrInst(IndirectBrInst &I) {
  visitMemoryReference(I, I.getAddress(), AliasAnalysis::UnknownSize, 0, 0,
                       MemRef::Branchee);

  Assert1(I.getNumDestinations() != 0,
          "Undefined behavior: indirectbr with no destinations", &I);
}

void Lint::visitExtractElementInst(ExtractElementInst &I) {
  if (ConstantInt *CI =
        dyn_cast<ConstantInt>(findValue(I.getIndexOperand(),
                                        /*OffsetOk=*/false)))
    Assert1(CI->getValue().ult(I.getVectorOperandType()->getNumElements()),
            "Undefined result: extractelement index out of range", &I);
}

void Lint::visitInsertElementInst(InsertElementInst &I) {
  if (ConstantInt *CI =
        dyn_cast<ConstantInt>(findValue(I.getOperand(2),
                                        /*OffsetOk=*/false)))
    Assert1(CI->getValue().ult(I.getType()->getNumElements()),
            "Undefined result: insertelement index out of range", &I);
}

void Lint::visitUnreachableInst(UnreachableInst &I) {
  // This isn't undefined behavior, it's merely suspicious.
  Assert1(&I == I.getParent()->begin() ||
          std::prev(BasicBlock::iterator(&I))->mayHaveSideEffects(),
          "Unusual: unreachable immediately preceded by instruction without "
          "side effects", &I);
}

/// findValue - Look through bitcasts and simple memory reference patterns
/// to identify an equivalent, but more informative, value. If OffsetOk
/// is true, look through getelementptrs with non-zero offsets too.
///
/// Most analysis passes don't require this logic, because instcombine
/// will simplify most of these kinds of things away. But it's a goal of
/// this Lint pass to be useful even on non-optimized IR.
Value *Lint::findValue(Value *V, bool OffsetOk) const {
  SmallPtrSet<Value *, 4> Visited;
  return findValueImpl(V, OffsetOk, Visited);
}

/// findValueImpl - Implementation helper for findValue.
Value *Lint::findValueImpl(Value *V, bool OffsetOk,
                           SmallPtrSet<Value *, 4> &Visited) const {
  // Detect self-referential values.
  if (!Visited.insert(V))
    return UndefValue::get(V->getType());

  // TODO: Look through sext or zext cast, when the result is known to
  // be interpreted as signed or unsigned, respectively.
  // TODO: Look through eliminable cast pairs.
  // TODO: Look through calls with unique return values.
  // TODO: Look through vector insert/extract/shuffle.
  V = OffsetOk ? GetUnderlyingObject(V, DL) : V->stripPointerCasts();
  if (LoadInst *L = dyn_cast<LoadInst>(V)) {
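    // Walk backwards within this block, and then through unique predecessors,
    // looking for an earlier store or load that makes the loaded value
    // available.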
    BasicBlock::iterator BBI = L;
    BasicBlock *BB = L->getParent();
    SmallPtrSet<BasicBlock *, 4> VisitedBlocks;
    for (;;) {
      if (!VisitedBlocks.insert(BB)) break;
      if (Value *U = FindAvailableLoadedValue(L->getPointerOperand(),
                                              BB, BBI, 6, AA))
        return findValueImpl(U, OffsetOk, Visited);
      if (BBI != BB->begin()) break;
      BB = BB->getUniquePredecessor();
      if (!BB) break;
      BBI = BB->end();
    }
  } else if (PHINode *PN = dyn_cast<PHINode>(V)) {
    if (Value *W = PN->hasConstantValue())
      if (W != V)
        return findValueImpl(W, OffsetOk, Visited);
  } else if (CastInst *CI = dyn_cast<CastInst>(V)) {
    if (CI->isNoopCast(DL ? DL->getIntPtrType(V->getContext()) :
                            Type::getInt64Ty(V->getContext())))
      return findValueImpl(CI->getOperand(0), OffsetOk, Visited);
  } else if (ExtractValueInst *Ex = dyn_cast<ExtractValueInst>(V)) {
    if (Value *W = FindInsertedValue(Ex->getAggregateOperand(),
                                     Ex->getIndices()))
      if (W != V)
        return findValueImpl(W, OffsetOk, Visited);
  } else if (ConstantExpr *CE = dyn_cast<ConstantExpr>(V)) {
    // Same as above, but for ConstantExpr instead of Instruction.
    if (Instruction::isCast(CE->getOpcode())) {
      if (CastInst::isNoopCast(Instruction::CastOps(CE->getOpcode()),
                               CE->getOperand(0)->getType(),
                               CE->getType(),
                               DL ? DL->getIntPtrType(V->getContext()) :
                                    Type::getInt64Ty(V->getContext())))
        return findValueImpl(CE->getOperand(0), OffsetOk, Visited);
    } else if (CE->getOpcode() == Instruction::ExtractValue) {
      ArrayRef<unsigned> Indices = CE->getIndices();
      if (Value *W = FindInsertedValue(CE->getOperand(0), Indices))
        if (W != V)
          return findValueImpl(W, OffsetOk, Visited);
    }
  }

  // As a last resort, try SimplifyInstruction or constant folding.
  if (Instruction *Inst = dyn_cast<Instruction>(V)) {
    if (Value *W = SimplifyInstruction(Inst, DL, TLI, DT))
      return findValueImpl(W, OffsetOk, Visited);
  } else if (ConstantExpr *CE = dyn_cast<ConstantExpr>(V)) {
    if (Value *W = ConstantFoldConstantExpression(CE, DL, TLI))
      if (W != V)
        return findValueImpl(W, OffsetOk, Visited);
  }

  return V;
}

//===----------------------------------------------------------------------===//
//  Implement the public interfaces to this file...
//===----------------------------------------------------------------------===//

FunctionPass *llvm::createLintPass() {
  return new Lint();
}

/// lintFunction - Check a function for errors, printing messages on stderr.
///
void llvm::lintFunction(const Function &f) {
  Function &F = const_cast<Function&>(f);
  assert(!F.isDeclaration() && "Cannot lint external functions");

  FunctionPassManager FPM(F.getParent());
  Lint *V = new Lint();
  FPM.add(V);
  FPM.run(F);
}

/// lintModule - Check a module for errors, printing messages on stderr.
///
void llvm::lintModule(const Module &M) {
  PassManager PM;
  Lint *V = new Lint();
  PM.add(V);
  PM.run(const_cast<Module&>(M));
}
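
// A minimal usage sketch (assuming an already-constructed Module *M):
//
//   for (Function &F : *M)
//     if (!F.isDeclaration())
//       lintFunction(F);
//
// or simply lintModule(*M) to lint every function in the module.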