//===- BasicAliasAnalysis.cpp - Local Alias Analysis Impl -----------------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file defines the default implementation of the Alias Analysis interface
// that simply implements a few identities (two different globals cannot alias,
// etc), but otherwise does no analysis.
//
//===----------------------------------------------------------------------===//

#include "llvm/Analysis/AliasAnalysis.h"
#include "llvm/Analysis/CaptureTracking.h"
#include "llvm/Analysis/MallocHelper.h"
#include "llvm/Analysis/Passes.h"
#include "llvm/Constants.h"
#include "llvm/DerivedTypes.h"
#include "llvm/Function.h"
#include "llvm/GlobalVariable.h"
#include "llvm/Instructions.h"
#include "llvm/IntrinsicInst.h"
#include "llvm/LLVMContext.h"
#include "llvm/Operator.h"
#include "llvm/Pass.h"
#include "llvm/Target/TargetData.h"
#include "llvm/ADT/SmallSet.h"
#include "llvm/ADT/SmallVector.h"
#include "llvm/ADT/STLExtras.h"
#include "llvm/Support/ErrorHandling.h"
#include "llvm/Support/GetElementPtrTypeIterator.h"
#include <algorithm>
using namespace llvm;

//===----------------------------------------------------------------------===//
// Useful predicates
//===----------------------------------------------------------------------===//

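/// GetGEPOperands - Collect the index operands of the GEP expression rooted at
/// V (folding through chained GEPs while the accumulated leading index is a
/// constant zero) into GEPOps, and return the resulting base pointer.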
static const Value *GetGEPOperands(const Value *V,
                                   SmallVector<Value*, 16> &GEPOps) {
  assert(GEPOps.empty() && "Expect empty list to populate!");
  GEPOps.insert(GEPOps.end(), cast<User>(V)->op_begin()+1,
                cast<User>(V)->op_end());

  // Accumulate all of the chained indexes into the operand array
  V = cast<User>(V)->getOperand(0);

  while (const GEPOperator *G = dyn_cast<GEPOperator>(V)) {
    if (!isa<Constant>(GEPOps[0]) || isa<GlobalValue>(GEPOps[0]) ||
        !cast<Constant>(GEPOps[0])->isNullValue())
      break;  // Don't handle folding arbitrary pointer offsets yet...
    GEPOps.erase(GEPOps.begin());  // Drop the zero index
    GEPOps.insert(GEPOps.begin(), G->op_begin()+1, G->op_end());
    V = G->getOperand(0);
  }
  return V;
}

/// isKnownNonNull - Return true if we know that the specified value is never
/// null.
static bool isKnownNonNull(const Value *V) {
  // Alloca never returns null, malloc might.
  if (isa<AllocaInst>(V)) return true;

  // A byval argument is never null.
  if (const Argument *A = dyn_cast<Argument>(V))
    return A->hasByValAttr();

  // Global values are not null unless extern weak.
  if (const GlobalValue *GV = dyn_cast<GlobalValue>(V))
    return !GV->hasExternalWeakLinkage();
  return false;
}

/// isNonEscapingLocalObject - Return true if the pointer is to a function-local
/// object that never escapes from the function.
static bool isNonEscapingLocalObject(const Value *V) {
  // If this is a local allocation, check to see if it escapes.
  if (isa<AllocaInst>(V) || isNoAliasCall(V))
    return !PointerMayBeCaptured(V, false);

  // If this is an argument that corresponds to a byval or noalias argument,
  // then it has not escaped before entering the function.  Check if it escapes
  // inside the function.
  if (const Argument *A = dyn_cast<Argument>(V))
    if (A->hasByValAttr() || A->hasNoAliasAttr()) {
      // Don't bother analyzing arguments already known not to escape.
      if (A->hasNoCaptureAttr())
        return true;
      return !PointerMayBeCaptured(V, false);
    }
  return false;
}

/// isObjectSmallerThan - Return true if we can prove that the object specified
/// by V is smaller than Size.
static bool isObjectSmallerThan(const Value *V, unsigned Size,
                                LLVMContext &Context, const TargetData &TD) {
  const Type *AccessTy;
  if (const GlobalVariable *GV = dyn_cast<GlobalVariable>(V)) {
    AccessTy = GV->getType()->getElementType();
  } else if (const AllocaInst *AI = dyn_cast<AllocaInst>(V)) {
    if (!AI->isArrayAllocation())
      AccessTy = AI->getType()->getElementType();
    else
      return false;
  } else if (const CallInst* CI = extractMallocCall(V)) {
    if (!isArrayMalloc(V, Context, &TD))
      // The size is the argument to the malloc call.
      if (const ConstantInt* C = dyn_cast<ConstantInt>(CI->getOperand(1)))
        return (C->getZExtValue() < Size);
    return false;
  } else if (const Argument *A = dyn_cast<Argument>(V)) {
    if (A->hasByValAttr())
      AccessTy = cast<PointerType>(A->getType())->getElementType();
    else
      return false;
  } else {
    return false;
  }

  if (AccessTy->isSized())
    return TD.getTypeAllocSize(AccessTy) < Size;
  return false;
}

//===----------------------------------------------------------------------===//
// NoAA Pass
//===----------------------------------------------------------------------===//

namespace {
  /// NoAA - This class implements the -no-aa pass, which always returns "I
  /// don't know" for alias queries.  NoAA is unlike other alias analysis
  /// implementations, in that it does not chain to a previous analysis.  As
  /// such it doesn't follow many of the rules that other alias analyses must.
  ///
  struct NoAA : public ImmutablePass, public AliasAnalysis {
    static char ID; // Class identification, replacement for typeinfo
    NoAA() : ImmutablePass(&ID) {}
    explicit NoAA(void *PID) : ImmutablePass(PID) { }

    virtual void getAnalysisUsage(AnalysisUsage &AU) const {
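      // NoAA does not chain to another alias analysis, so it requires nothing.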
    }

    virtual void initializePass() {
      TD = getAnalysisIfAvailable<TargetData>();
    }

    virtual AliasResult alias(const Value *V1, unsigned V1Size,
                              const Value *V2, unsigned V2Size) {
      return MayAlias;
    }

    virtual void getArgumentAccesses(Function *F, CallSite CS,
                                     std::vector<PointerAccessInfo> &Info) {
      llvm_unreachable("This method may not be called on this function!");
    }

    virtual void getMustAliases(Value *P, std::vector<Value*> &RetVals) { }
    virtual bool pointsToConstantMemory(const Value *P) { return false; }
    virtual ModRefResult getModRefInfo(CallSite CS, Value *P, unsigned Size) {
      return ModRef;
    }
    virtual ModRefResult getModRefInfo(CallSite CS1, CallSite CS2) {
      return ModRef;
    }
    virtual bool hasNoModRefInfoForCalls() const { return true; }
    virtual void deleteValue(Value *V) {}
    virtual void copyValue(Value *From, Value *To) {}
  };
}  // End of anonymous namespace

// Register this pass...
char NoAA::ID = 0;
static RegisterPass<NoAA>
U("no-aa", "No Alias Analysis (always returns 'may' alias)", true, true);

// Declare that we implement the AliasAnalysis interface
static RegisterAnalysisGroup<AliasAnalysis> V(U);

ImmutablePass *llvm::createNoAAPass() { return new NoAA(); }

//===----------------------------------------------------------------------===//
// BasicAA Pass
//===----------------------------------------------------------------------===//

namespace {
  /// BasicAliasAnalysis - This is the default alias analysis implementation.
  /// Because it doesn't chain to a previous alias analysis (like -no-aa), it
  /// derives from the NoAA class.
  struct BasicAliasAnalysis : public NoAA {
    static char ID; // Class identification, replacement for typeinfo
    BasicAliasAnalysis() : NoAA(&ID) {}

    AliasResult alias(const Value *V1, unsigned V1Size,
                      const Value *V2, unsigned V2Size) {
      assert(VisitedPHIs.empty() && "VisitedPHIs must be cleared after use!");
      AliasResult Alias = aliasCheck(V1, V1Size, V2, V2Size);
      VisitedPHIs.clear();
      return Alias;
    }

    ModRefResult getModRefInfo(CallSite CS, Value *P, unsigned Size);
    ModRefResult getModRefInfo(CallSite CS1, CallSite CS2);

    /// hasNoModRefInfoForCalls - We can provide mod/ref information against
    /// non-escaping allocations.
    virtual bool hasNoModRefInfoForCalls() const { return false; }

    /// pointsToConstantMemory - Chase pointers until we find a (constant
    /// global) or not.
    bool pointsToConstantMemory(const Value *P);

  private:
    // VisitedPHIs - Track the PHI nodes visited by an aliasCheck() call.
    SmallPtrSet<const PHINode*, 16> VisitedPHIs;

    // aliasGEP - Provide a bunch of ad-hoc rules to disambiguate a GEP
    // instruction against another.
    AliasResult aliasGEP(const Value *V1, unsigned V1Size,
                         const Value *V2, unsigned V2Size);

    // aliasPHI - Provide a bunch of ad-hoc rules to disambiguate a PHI
    // instruction against another.
    AliasResult aliasPHI(const PHINode *PN, unsigned PNSize,
                         const Value *V2, unsigned V2Size);

    AliasResult aliasCheck(const Value *V1, unsigned V1Size,
                           const Value *V2, unsigned V2Size);

    // CheckGEPInstructions - Check two GEP instructions with known
    // must-aliasing base pointers.  This checks to see if the index expressions
    // preclude the pointers from aliasing...
    AliasResult
    CheckGEPInstructions(const Type* BasePtr1Ty,
                         Value **GEP1Ops, unsigned NumGEP1Ops, unsigned G1Size,
                         const Type *BasePtr2Ty,
                         Value **GEP2Ops, unsigned NumGEP2Ops, unsigned G2Size);
  };
}  // End of anonymous namespace

// Register this pass...
char BasicAliasAnalysis::ID = 0;
static RegisterPass<BasicAliasAnalysis>
X("basicaa", "Basic Alias Analysis (default AA impl)", false, true);

// Declare that we implement the AliasAnalysis interface
static RegisterAnalysisGroup<AliasAnalysis, true> Y(X);

ImmutablePass *llvm::createBasicAliasAnalysisPass() {
  return new BasicAliasAnalysis();
}

/// pointsToConstantMemory - Chase pointers until we find a (constant
/// global) or not.
bool BasicAliasAnalysis::pointsToConstantMemory(const Value *P) {
  if (const GlobalVariable *GV =
        dyn_cast<GlobalVariable>(P->getUnderlyingObject()))
    return GV->isConstant();
  return false;
}

// getModRefInfo - Check to see if the specified callsite can clobber the
// specified memory object.  Since we only look at local properties of this
// function, we really can't say much about this query.  We do, however, use
// simple "address taken" analysis on local objects.
//
AliasAnalysis::ModRefResult
BasicAliasAnalysis::getModRefInfo(CallSite CS, Value *P, unsigned Size) {
  if (!isa<Constant>(P)) {
    const Value *Object = P->getUnderlyingObject();

    // If this is a tail call and P points to a stack location, we know that
    // the tail call cannot access or modify the local stack.
    // We cannot exclude byval arguments here; these belong to the caller of
    // the current function not to the current function, and a tail callee
    // may reference them.
    if (isa<AllocaInst>(Object))
      if (CallInst *CI = dyn_cast<CallInst>(CS.getInstruction()))
        if (CI->isTailCall())
          return NoModRef;

    // If the pointer is to a locally allocated object that does not escape,
    // then the call can not mod/ref the pointer unless the call takes the
    // argument without capturing it.
    if (isNonEscapingLocalObject(Object) && CS.getInstruction() != Object) {
      bool passedAsArg = false;
      // TODO: Eventually only check 'nocapture' arguments.
      for (CallSite::arg_iterator CI = CS.arg_begin(), CE = CS.arg_end();
           CI != CE; ++CI)
        if (isa<PointerType>((*CI)->getType()) &&
            alias(cast<Value>(CI), ~0U, P, ~0U) != NoAlias)
          passedAsArg = true;

      if (!passedAsArg)
        return NoModRef;
    }
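
    // Handle a few intrinsics whose memory behavior is known precisely enough
    // that the answer can be refined when the accessed locations provably do
    // not alias P.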
    if (IntrinsicInst *II = dyn_cast<IntrinsicInst>(CS.getInstruction())) {
      switch (II->getIntrinsicID()) {
      default: break;
      case Intrinsic::memcpy:
      case Intrinsic::memmove: {
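        // If P cannot alias the destination, the transfer at most reads P
        // (through the source); if P cannot alias the source either, the call
        // does not touch P at all.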
        unsigned Len = ~0U;
        if (ConstantInt *LenCI = dyn_cast<ConstantInt>(II->getOperand(3)))
          Len = LenCI->getZExtValue();
        Value *Dest = II->getOperand(1);
        Value *Src = II->getOperand(2);
        if (alias(Dest, Len, P, Size) == NoAlias) {
          if (alias(Src, Len, P, Size) == NoAlias)
            return NoModRef;
          return Ref;
        }
      }
      break;
      case Intrinsic::memset:
        if (ConstantInt *LenCI = dyn_cast<ConstantInt>(II->getOperand(3))) {
          unsigned Len = LenCI->getZExtValue();
          Value *Dest = II->getOperand(1);
          if (alias(Dest, Len, P, Size) == NoAlias)
            return NoModRef;
        }
        break;
      case Intrinsic::atomic_cmp_swap:
      case Intrinsic::atomic_swap:
      case Intrinsic::atomic_load_add:
      case Intrinsic::atomic_load_sub:
      case Intrinsic::atomic_load_and:
      case Intrinsic::atomic_load_nand:
      case Intrinsic::atomic_load_or:
      case Intrinsic::atomic_load_xor:
      case Intrinsic::atomic_load_max:
      case Intrinsic::atomic_load_min:
      case Intrinsic::atomic_load_umax:
      case Intrinsic::atomic_load_umin:
        if (TD) {
          Value *Op1 = II->getOperand(1);
          unsigned Op1Size = TD->getTypeStoreSize(Op1->getType());
          if (alias(Op1, Op1Size, P, Size) == NoAlias)
            return NoModRef;
        }
        break;
      case Intrinsic::lifetime_start:
      case Intrinsic::lifetime_end:
      case Intrinsic::invariant_start: {
        unsigned PtrSize = cast<ConstantInt>(II->getOperand(1))->getZExtValue();
        if (alias(II->getOperand(2), PtrSize, P, Size) == NoAlias)
          return NoModRef;
      }
      break;
      case Intrinsic::invariant_end: {
        unsigned PtrSize = cast<ConstantInt>(II->getOperand(2))->getZExtValue();
        if (alias(II->getOperand(3), PtrSize, P, Size) == NoAlias)
          return NoModRef;
      }
      break;
      }
    }
  }

  // The AliasAnalysis base class has some smarts, let's use them.
  return AliasAnalysis::getModRefInfo(CS, P, Size);
}

AliasAnalysis::ModRefResult
BasicAliasAnalysis::getModRefInfo(CallSite CS1, CallSite CS2) {
  // If CS1 or CS2 are readnone, they don't interact.
  ModRefBehavior CS1B = AliasAnalysis::getModRefBehavior(CS1);
  if (CS1B == DoesNotAccessMemory) return NoModRef;

  ModRefBehavior CS2B = AliasAnalysis::getModRefBehavior(CS2);
  if (CS2B == DoesNotAccessMemory) return NoModRef;

  // If they both only read from memory, just return ref.
  if (CS1B == OnlyReadsMemory && CS2B == OnlyReadsMemory)
    return Ref;

  // Otherwise, fall back to NoAA (mod+ref).
  return NoAA::getModRefInfo(CS1, CS2);
}

// aliasGEP - Provide a bunch of ad-hoc rules to disambiguate a GEP instruction
// against another.
//
AliasAnalysis::AliasResult
BasicAliasAnalysis::aliasGEP(const Value *V1, unsigned V1Size,
                             const Value *V2, unsigned V2Size) {
  // If we have two gep instructions with must-alias'ing base pointers, figure
  // out if the indexes to the GEP tell us anything about the derived pointer.
  // Note that we also handle chains of getelementptr instructions as well as
  // constant expression getelementptrs here.
  //
  if (isa<GEPOperator>(V1) && isa<GEPOperator>(V2)) {
    const User *GEP1 = cast<User>(V1);
    const User *GEP2 = cast<User>(V2);

    // If V1 and V2 are identical GEPs, just recurse down on both of them.
    // This allows us to analyze things like:
    //   P = gep A, 0, i, 1
    //   Q = gep B, 0, i, 1
    // by just analyzing A and B.  This is even safe for variable indices.
    if (GEP1->getType() == GEP2->getType() &&
        GEP1->getNumOperands() == GEP2->getNumOperands() &&
        GEP1->getOperand(0)->getType() == GEP2->getOperand(0)->getType() &&
        // All operands are the same, ignoring the base.
        std::equal(GEP1->op_begin()+1, GEP1->op_end(), GEP2->op_begin()+1))
      return aliasCheck(GEP1->getOperand(0), V1Size,
                        GEP2->getOperand(0), V2Size);

    // Drill down into the first non-gep value, to test for must-aliasing of
    // the base pointers.
    while (isa<GEPOperator>(GEP1->getOperand(0)) &&
           GEP1->getOperand(1) ==
             Constant::getNullValue(GEP1->getOperand(1)->getType()))
      GEP1 = cast<User>(GEP1->getOperand(0));
    const Value *BasePtr1 = GEP1->getOperand(0);

    while (isa<GEPOperator>(GEP2->getOperand(0)) &&
           GEP2->getOperand(1) ==
             Constant::getNullValue(GEP2->getOperand(1)->getType()))
      GEP2 = cast<User>(GEP2->getOperand(0));
    const Value *BasePtr2 = GEP2->getOperand(0);

    // Do the base pointers alias?
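    // (Query with unknown access sizes; only whether the bases themselves can
    // alias matters at this point.)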
    AliasResult BaseAlias = aliasCheck(BasePtr1, ~0U, BasePtr2, ~0U);
    if (BaseAlias == NoAlias) return NoAlias;
    if (BaseAlias == MustAlias) {
      // If the base pointers alias each other exactly, check to see if we can
      // figure out anything about the resultant pointers, to try to prove
      // non-aliasing.

      // Collect all of the chained GEP operands together into one simple place
      SmallVector<Value*, 16> GEP1Ops, GEP2Ops;
      BasePtr1 = GetGEPOperands(V1, GEP1Ops);
      BasePtr2 = GetGEPOperands(V2, GEP2Ops);

      // If GetGEPOperands were able to fold to the same must-aliased pointer,
      // do the comparison.
      if (BasePtr1 == BasePtr2) {
        AliasResult GAlias =
          CheckGEPInstructions(BasePtr1->getType(),
                               &GEP1Ops[0], GEP1Ops.size(), V1Size,
                               BasePtr2->getType(),
                               &GEP2Ops[0], GEP2Ops.size(), V2Size);
        if (GAlias != MayAlias)
          return GAlias;
      }
    }
  }

  // Check to see if these two pointers are related by a getelementptr
  // instruction.  If one pointer is a GEP with a non-zero index of the other
  // pointer, we know they cannot alias.
  //
  if (V1Size == ~0U || V2Size == ~0U)
    return MayAlias;

  SmallVector<Value*, 16> GEPOperands;
  const Value *BasePtr = GetGEPOperands(V1, GEPOperands);

  AliasResult R = aliasCheck(BasePtr, ~0U, V2, V2Size);
  if (R != MustAlias)
    // If V2 may alias the GEP base pointer, conservatively return MayAlias.
    // If V2 is known not to alias the GEP base pointer, then the two values
    // cannot alias per GEP semantics: "A pointer value formed from a
    // getelementptr instruction is associated with the addresses associated
    // with the first operand of the getelementptr".
    return R;

  // If there is at least one non-zero constant index, we know they cannot
  // alias.
  bool ConstantFound = false;
  bool AllZerosFound = true;
  for (unsigned i = 0, e = GEPOperands.size(); i != e; ++i)
    if (const Constant *C = dyn_cast<Constant>(GEPOperands[i])) {
      if (!C->isNullValue()) {
        ConstantFound = true;
        AllZerosFound = false;
        break;
      }
    } else {
      AllZerosFound = false;
    }

  // If we have getelementptr <ptr>, 0, 0, 0, 0, ... and V2 must-aliases
  // the ptr, the end result is a must alias also.
  if (AllZerosFound)
    return MustAlias;

  if (ConstantFound) {
    if (V2Size <= 1 && V1Size <= 1)  // Just pointer check?
      return NoAlias;

    // Otherwise we have to check to see that the distance is more than
    // the size of the argument... build an index vector that is equal to
    // the arguments provided, except substitute 0's for any variable
    // indexes we find...
    if (TD &&
        cast<PointerType>(BasePtr->getType())->getElementType()->isSized()) {
      for (unsigned i = 0; i != GEPOperands.size(); ++i)
        if (!isa<ConstantInt>(GEPOperands[i]))
          GEPOperands[i] = Constant::getNullValue(GEPOperands[i]->getType());
      int64_t Offset = TD->getIndexedOffset(BasePtr->getType(),
                                            &GEPOperands[0],
                                            GEPOperands.size());

      if (Offset >= (int64_t)V2Size || Offset <= -(int64_t)V1Size)
        return NoAlias;
    }
  }

  return MayAlias;
}

// aliasPHI - Provide a bunch of ad-hoc rules to disambiguate a PHI instruction
// against another.
AliasAnalysis::AliasResult
BasicAliasAnalysis::aliasPHI(const PHINode *PN, unsigned PNSize,
                             const Value *V2, unsigned V2Size) {
  // If the PHI node has already been visited, avoid recursing any further.
  if (!VisitedPHIs.insert(PN))
    return MayAlias;

  SmallPtrSet<Value*, 4> UniqueSrc;
  SmallVector<Value*, 4> V1Srcs;
  for (unsigned i = 0, e = PN->getNumIncomingValues(); i != e; ++i) {
    Value *PV1 = PN->getIncomingValue(i);
    if (isa<PHINode>(PV1))
      // If any source itself is a PHI, return MayAlias conservatively to avoid
      // compile time explosion.  The worst possible case is if both sides are
      // PHI nodes.  In that case, this is O(m x n) time where 'm' and 'n' are
      // the number of PHI sources.
      return MayAlias;
    if (UniqueSrc.insert(PV1))
      V1Srcs.push_back(PV1);
  }
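
  // A PHI always has at least one incoming value, and nested PHIs bail out
  // above, so V1Srcs is guaranteed to be non-empty here.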
  AliasResult Alias = aliasCheck(V1Srcs[0], PNSize, V2, V2Size);
  // Early exit if the check of the first PHI source against V2 is MayAlias.
  // Other results are not possible.
  if (Alias == MayAlias)
    return MayAlias;

  // If all sources of the PHI node NoAlias or MustAlias V2, then return
  // NoAlias / MustAlias.  Otherwise, return MayAlias.
  for (unsigned i = 1, e = V1Srcs.size(); i != e; ++i) {
    Value *V = V1Srcs[i];
    AliasResult ThisAlias = aliasCheck(V2, V2Size, V, PNSize);
    if (ThisAlias != Alias || ThisAlias == MayAlias)
      return MayAlias;
  }
  return Alias;
}

// aliasCheck - Provide a bunch of ad-hoc rules to disambiguate in common cases,
// such as array references.
//
AliasAnalysis::AliasResult
BasicAliasAnalysis::aliasCheck(const Value *V1, unsigned V1Size,
                               const Value *V2, unsigned V2Size) {
  // Strip off any casts if they exist.
  V1 = V1->stripPointerCasts();
  V2 = V2->stripPointerCasts();

  // Are we checking for alias of the same value?
  if (V1 == V2) return MustAlias;

  if (!isa<PointerType>(V1->getType()) || !isa<PointerType>(V2->getType()))
    return NoAlias;  // Scalars cannot alias each other

  // Figure out what objects these things are pointing to if we can.
  const Value *O1 = V1->getUnderlyingObject();
  const Value *O2 = V2->getUnderlyingObject();

  if (O1 != O2) {
    // If V1/V2 point to two different objects we know that we have no alias.
    if (isIdentifiedObject(O1) && isIdentifiedObject(O2))
      return NoAlias;

    // Arguments can't alias with local allocations or noalias calls.
    if ((isa<Argument>(O1) && (isa<AllocaInst>(O2) || isNoAliasCall(O2))) ||
        (isa<Argument>(O2) && (isa<AllocaInst>(O1) || isNoAliasCall(O1))))
      return NoAlias;

    // Most objects can't alias null.
    if ((isa<ConstantPointerNull>(V2) && isKnownNonNull(O1)) ||
        (isa<ConstantPointerNull>(V1) && isKnownNonNull(O2)))
      return NoAlias;
  }

  // If the size of one access is larger than the entire object on the other
  // side, then we know such behavior is undefined and can assume no alias.
  LLVMContext &Context = V1->getContext();
  if (TD)
    if ((V1Size != ~0U && isObjectSmallerThan(O2, V1Size, Context, *TD)) ||
        (V2Size != ~0U && isObjectSmallerThan(O1, V2Size, Context, *TD)))
      return NoAlias;

  // If one pointer is the result of a call/invoke and the other is a
  // non-escaping local object, then we know the object couldn't escape to a
  // point where the call could return it.
  if ((isa<CallInst>(O1) || isa<InvokeInst>(O1)) &&
      isNonEscapingLocalObject(O2) && O1 != O2)
    return NoAlias;
  if ((isa<CallInst>(O2) || isa<InvokeInst>(O2)) &&
      isNonEscapingLocalObject(O1) && O1 != O2)
    return NoAlias;
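
  // Canonicalize so that any GEP (and, below, any PHI) ends up in V1; the
  // helper routines only need to handle one orientation.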
  if (!isa<GEPOperator>(V1) && isa<GEPOperator>(V2)) {
    std::swap(V1, V2);
    std::swap(V1Size, V2Size);
  }
  if (isa<GEPOperator>(V1))
    return aliasGEP(V1, V1Size, V2, V2Size);

  if (isa<PHINode>(V2) && !isa<PHINode>(V1)) {
    std::swap(V1, V2);
    std::swap(V1Size, V2Size);
  }
  if (const PHINode *PN = dyn_cast<PHINode>(V1))
    return aliasPHI(PN, V1Size, V2, V2Size);

  return MayAlias;
}

// This function is used to determine if the indices of two GEP instructions
// are equal.  V1 and V2 are the indices.
static bool IndexOperandsEqual(Value *V1, Value *V2, LLVMContext &Context) {
  if (V1->getType() == V2->getType())
    return V1 == V2;
  if (Constant *C1 = dyn_cast<Constant>(V1))
    if (Constant *C2 = dyn_cast<Constant>(V2)) {
      // Sign extend the constants to long types, if necessary
      if (C1->getType() != Type::getInt64Ty(Context))
        C1 = ConstantExpr::getSExt(C1, Type::getInt64Ty(Context));
      if (C2->getType() != Type::getInt64Ty(Context))
        C2 = ConstantExpr::getSExt(C2, Type::getInt64Ty(Context));
      return C1 == C2;
    }
  return false;
}

/// CheckGEPInstructions - Check two GEP instructions with known must-aliasing
/// base pointers.  This checks to see if the index expressions preclude the
/// pointers from aliasing...
AliasAnalysis::AliasResult
BasicAliasAnalysis::CheckGEPInstructions(
  const Type* BasePtr1Ty, Value **GEP1Ops, unsigned NumGEP1Ops, unsigned G1S,
  const Type *BasePtr2Ty, Value **GEP2Ops, unsigned NumGEP2Ops, unsigned G2S) {
  // We currently can't handle the case when the base pointers have different
  // primitive types.  Since this is uncommon anyway, we are happy being
  // extremely conservative.
  if (BasePtr1Ty != BasePtr2Ty)
    return MayAlias;

  const PointerType *GEPPointerTy = cast<PointerType>(BasePtr1Ty);
  LLVMContext &Context = GEPPointerTy->getContext();

  // Find the (possibly empty) initial sequence of equal values... which are not
  // necessarily constants.
  unsigned NumGEP1Operands = NumGEP1Ops, NumGEP2Operands = NumGEP2Ops;
  unsigned MinOperands = std::min(NumGEP1Operands, NumGEP2Operands);
  unsigned MaxOperands = std::max(NumGEP1Operands, NumGEP2Operands);
  unsigned UnequalOper = 0;
  while (UnequalOper != MinOperands &&
         IndexOperandsEqual(GEP1Ops[UnequalOper], GEP2Ops[UnequalOper],
                            Context)) {
    // Advance through the type as we go...
    ++UnequalOper;
    if (const CompositeType *CT = dyn_cast<CompositeType>(BasePtr1Ty))
      BasePtr1Ty = CT->getTypeAtIndex(GEP1Ops[UnequalOper-1]);
    else {
      // If all operands equal each other, then the derived pointers must
      // alias each other...
      BasePtr1Ty = 0;
      assert(UnequalOper == NumGEP1Operands && UnequalOper == NumGEP2Operands &&
             "Ran out of type nesting, but not out of operands?");
      return MustAlias;
    }
  }

  // If we have seen all constant operands, and run out of indexes on one of the
  // getelementptrs, check to see if the tail of the leftover one is all zeros.
  // If so, return mustalias.
  if (UnequalOper == MinOperands) {
    if (NumGEP1Ops < NumGEP2Ops) {
      std::swap(GEP1Ops, GEP2Ops);
      std::swap(NumGEP1Ops, NumGEP2Ops);
    }

    bool AllAreZeros = true;
    for (unsigned i = UnequalOper; i != MaxOperands; ++i)
      if (!isa<Constant>(GEP1Ops[i]) ||
          !cast<Constant>(GEP1Ops[i])->isNullValue()) {
        AllAreZeros = false;
        break;
      }
    if (AllAreZeros) return MustAlias;
  }

  // So now we know that the indexes derived from the base pointers,
  // which are known to alias, are different.  We can still determine a
  // no-alias result if there are differing constant pairs in the index
  // chain.  For example:
  //        A[i][0] != A[j][1] iff (&A[0][1]-&A[0][0] >= std::max(G1S, G2S))
  //
  // We have to be careful here about array accesses.  In particular, consider:
  //       A[1][0] vs A[0][i]
  // In this case, we don't *know* that the array will be accessed in bounds:
  // the index could even be negative.  Because of this, we have to
  // conservatively *give up* and return may alias.  We disregard differing
  // array subscripts that are followed by a variable index without going
  // through a struct.
  //
  unsigned SizeMax = std::max(G1S, G2S);
  if (SizeMax == ~0U) return MayAlias;  // Avoid frivolous work.

  // Scan for the first operand that is constant and unequal in the
  // two getelementptrs...
  unsigned FirstConstantOper = UnequalOper;
  for (; FirstConstantOper != MinOperands; ++FirstConstantOper) {
    const Value *G1Oper = GEP1Ops[FirstConstantOper];
    const Value *G2Oper = GEP2Ops[FirstConstantOper];

    if (G1Oper != G2Oper)   // Found non-equal constant indexes...
      if (Constant *G1OC = dyn_cast<ConstantInt>(const_cast<Value*>(G1Oper)))
        if (Constant *G2OC = dyn_cast<ConstantInt>(const_cast<Value*>(G2Oper))){
          if (G1OC->getType() != G2OC->getType()) {
            // Sign extend both operands to long.
            if (G1OC->getType() != Type::getInt64Ty(Context))
              G1OC = ConstantExpr::getSExt(G1OC, Type::getInt64Ty(Context));
            if (G2OC->getType() != Type::getInt64Ty(Context))
              G2OC = ConstantExpr::getSExt(G2OC, Type::getInt64Ty(Context));
            GEP1Ops[FirstConstantOper] = G1OC;
            GEP2Ops[FirstConstantOper] = G2OC;
          }

          if (G1OC != G2OC) {
            // Handle the "be careful" case above: if this is an array/vector
            // subscript, scan for a subsequent variable array index.
            if (const SequentialType *STy =
                  dyn_cast<SequentialType>(BasePtr1Ty)) {
              const Type *NextTy = STy;
              bool isBadCase = false;

              for (unsigned Idx = FirstConstantOper;
                   Idx != MinOperands && isa<SequentialType>(NextTy); ++Idx) {
                const Value *V1 = GEP1Ops[Idx], *V2 = GEP2Ops[Idx];
                if (!isa<Constant>(V1) || !isa<Constant>(V2)) {
                  isBadCase = true;
                  break;
                }
                // If the array is indexed beyond the bounds of the static type
                // at this level, it will also fall into the "be careful" case.
                // It would theoretically be possible to analyze these cases,
                // but for now just be conservatively correct.
                if (const ArrayType *ATy = dyn_cast<ArrayType>(STy))
                  if (cast<ConstantInt>(G1OC)->getZExtValue() >=
                        ATy->getNumElements() ||
                      cast<ConstantInt>(G2OC)->getZExtValue() >=
                        ATy->getNumElements()) {
                    isBadCase = true;
                    break;
                  }
                if (const VectorType *VTy = dyn_cast<VectorType>(STy))
                  if (cast<ConstantInt>(G1OC)->getZExtValue() >=
                        VTy->getNumElements() ||
                      cast<ConstantInt>(G2OC)->getZExtValue() >=
                        VTy->getNumElements()) {
                    isBadCase = true;
                    break;
                  }
                STy = cast<SequentialType>(NextTy);
                NextTy = cast<SequentialType>(NextTy)->getElementType();
              }
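
              // When the scan hits a bad case, clear G1OC so the comparison
              // below is skipped and this differing pair draws no conclusion.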
              if (isBadCase) G1OC = 0;
            }

            // Make sure they are comparable (ie, not constant expressions), and
            // make sure the GEP with the smaller leading constant is GEP1.
            if (G1OC) {
              Constant *Compare = ConstantExpr::getICmp(ICmpInst::ICMP_SGT,
                                                        G1OC, G2OC);
              if (ConstantInt *CV = dyn_cast<ConstantInt>(Compare)) {
                if (CV->getZExtValue()) {   // If they are comparable and G2 > G1
                  std::swap(GEP1Ops, GEP2Ops);  // Make GEP1 < GEP2
                  std::swap(NumGEP1Ops, NumGEP2Ops);
                }
                break;
              }
            }
          }
        }
    BasePtr1Ty = cast<CompositeType>(BasePtr1Ty)->getTypeAtIndex(G1Oper);
  }

  // No shared constant operands, and we ran out of common operands.  At this
  // point, the GEP instructions have run through all of their operands, and we
  // haven't found evidence that there are any deltas between the GEP's.
  // However, one GEP may have more operands than the other.  If this is the
  // case, there may still be hope.  Check this now.
  if (FirstConstantOper == MinOperands) {
    // Without TargetData, we won't know what the offsets are.
    if (!TD)
      return MayAlias;

    // Make GEP1Ops be the longer one if there is a longer one.
    if (NumGEP1Ops < NumGEP2Ops) {
      std::swap(GEP1Ops, GEP2Ops);
      std::swap(NumGEP1Ops, NumGEP2Ops);
    }

    // Is there anything to check?
    if (NumGEP1Ops > MinOperands) {
      for (unsigned i = FirstConstantOper; i != MaxOperands; ++i)
        if (isa<ConstantInt>(GEP1Ops[i]) &&
            !cast<ConstantInt>(GEP1Ops[i])->isZero()) {
          // Yup, there's a constant in the tail.  Set all variables to
          // constants in the GEP instruction to make it suitable for
          // TargetData::getIndexedOffset.
          for (i = 0; i != MaxOperands; ++i)
            if (!isa<ConstantInt>(GEP1Ops[i]))
              GEP1Ops[i] = Constant::getNullValue(GEP1Ops[i]->getType());

          // Okay, now get the offset.  This is the relative offset for the full
          // instruction.
          int64_t Offset1 = TD->getIndexedOffset(GEPPointerTy, GEP1Ops,
                                                 NumGEP1Ops);

          // Now check without any constants at the end.
          int64_t Offset2 = TD->getIndexedOffset(GEPPointerTy, GEP1Ops,
                                                 MinOperands);

          // Make sure we compare the absolute difference.
          if (Offset1 > Offset2)
            std::swap(Offset1, Offset2);

          // If the tail provided a big enough offset, return noalias!
          if ((uint64_t)(Offset2-Offset1) >= SizeMax)
            return NoAlias;
          // Otherwise break - we don't look for another constant in the tail.
          break;
        }
    }

    // Couldn't find anything useful.
    return MayAlias;
  }

  // If there are non-equal constant arguments, then we can figure
  // out a minimum known delta between the two index expressions... at
  // this point we know that the first constant index of GEP1 is less
  // than the first constant index of GEP2.

  // Advance BasePtr[12]Ty over this first differing constant operand.
  BasePtr2Ty = cast<CompositeType>(BasePtr1Ty)->
                 getTypeAtIndex(GEP2Ops[FirstConstantOper]);
  BasePtr1Ty = cast<CompositeType>(BasePtr1Ty)->
                 getTypeAtIndex(GEP1Ops[FirstConstantOper]);

  // We are going to be using TargetData::getIndexedOffset to determine the
  // offset that each of the GEP's is reaching.  To do this, we have to convert
  // all variable references to constant references; we start by converting the
  // initial sequence of array subscripts into constant zeros.
  const Type *ZeroIdxTy = GEPPointerTy;
  for (unsigned i = 0; i != FirstConstantOper; ++i) {
    if (!isa<StructType>(ZeroIdxTy))
      GEP1Ops[i] = GEP2Ops[i] =
        Constant::getNullValue(Type::getInt32Ty(Context));

    if (const CompositeType *CT = dyn_cast<CompositeType>(ZeroIdxTy))
      ZeroIdxTy = CT->getTypeAtIndex(GEP1Ops[i]);
  }

  // We know that GEP1Ops[FirstConstantOper] & GEP2Ops[FirstConstantOper] are ok

  // Loop over the rest of the operands...
  for (unsigned i = FirstConstantOper+1; i != MaxOperands; ++i) {
    const Value *Op1 = i < NumGEP1Ops ? GEP1Ops[i] : 0;
    const Value *Op2 = i < NumGEP2Ops ? GEP2Ops[i] : 0;
    // If they are equal, use a zero index...
    if (Op1 == Op2 && BasePtr1Ty == BasePtr2Ty) {
      if (!isa<ConstantInt>(Op1))
        GEP1Ops[i] = GEP2Ops[i] = Constant::getNullValue(Op1->getType());
      // Otherwise, just keep the constants we have.
    } else {
      if (Op1) {
        if (const ConstantInt *Op1C = dyn_cast<ConstantInt>(Op1)) {
          // If this is an array index, make sure the array element is in range.
          if (const ArrayType *AT = dyn_cast<ArrayType>(BasePtr1Ty)) {
            if (Op1C->getZExtValue() >= AT->getNumElements())
              return MayAlias;  // Be conservative with out-of-range accesses
          } else if (const VectorType *VT = dyn_cast<VectorType>(BasePtr1Ty)) {
            if (Op1C->getZExtValue() >= VT->getNumElements())
              return MayAlias;  // Be conservative with out-of-range accesses
          }
        } else {
          // GEP1 is known to produce a value less than GEP2.  To be
          // conservatively correct, we must assume the largest possible
          // constant is used in this position.  This cannot be the initial
          // index to the GEP instructions (because we know we have at least one
          // element before this one with the different constant arguments), so
          // we know that the current index must be into either a struct or
          // array.  Because we know it's not constant, this cannot be a
          // structure index.  Because of this, we can calculate the maximum
          // value possible.
          //
          if (const ArrayType *AT = dyn_cast<ArrayType>(BasePtr1Ty))
            GEP1Ops[i] =
              ConstantInt::get(Type::getInt64Ty(Context),
                               AT->getNumElements()-1);
          else if (const VectorType *VT = dyn_cast<VectorType>(BasePtr1Ty))
            GEP1Ops[i] =
              ConstantInt::get(Type::getInt64Ty(Context),
                               VT->getNumElements()-1);
        }
      }

      if (Op2) {
        if (const ConstantInt *Op2C = dyn_cast<ConstantInt>(Op2)) {
          // If this is an array index, make sure the array element is in range.
          if (const ArrayType *AT = dyn_cast<ArrayType>(BasePtr2Ty)) {
            if (Op2C->getZExtValue() >= AT->getNumElements())
              return MayAlias;  // Be conservative with out-of-range accesses
          } else if (const VectorType *VT = dyn_cast<VectorType>(BasePtr2Ty)) {
            if (Op2C->getZExtValue() >= VT->getNumElements())
              return MayAlias;  // Be conservative with out-of-range accesses
          }
        } else {  // Conservatively assume the minimum value for this index
          GEP2Ops[i] = Constant::getNullValue(Op2->getType());
        }
      }
    }

    if (BasePtr1Ty && Op1) {
      if (const CompositeType *CT = dyn_cast<CompositeType>(BasePtr1Ty))
        BasePtr1Ty = CT->getTypeAtIndex(GEP1Ops[i]);
      else
        BasePtr1Ty = 0;
    }

    if (BasePtr2Ty && Op2) {
      if (const CompositeType *CT = dyn_cast<CompositeType>(BasePtr2Ty))
        BasePtr2Ty = CT->getTypeAtIndex(GEP2Ops[i]);
      else
        BasePtr2Ty = 0;
    }
  }
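
  // With variable indices conservatively replaced by constants above, compute
  // each GEP's byte offset from the common base and compare the distance
  // against the largest access size.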
  if (TD && GEPPointerTy->getElementType()->isSized()) {
    int64_t Offset1 =
      TD->getIndexedOffset(GEPPointerTy, GEP1Ops, NumGEP1Ops);
    int64_t Offset2 =
      TD->getIndexedOffset(GEPPointerTy, GEP2Ops, NumGEP2Ops);
    assert(Offset1 != Offset2 &&
           "There is at least one different constant here!");

    // Make sure we compare the absolute difference.
    if (Offset1 > Offset2)
      std::swap(Offset1, Offset2);

    if ((uint64_t)(Offset2-Offset1) >= SizeMax) {
      //cerr << "Determined that these two GEP's don't alias ["
      //     << SizeMax << " bytes]: \n" << *GEP1 << *GEP2;
      return NoAlias;
    }
  }
  return MayAlias;
}

// Make sure that anything that uses AliasAnalysis pulls in this file...
DEFINING_FILE_FOR(BasicAliasAnalysis)