//===- BasicAliasAnalysis.cpp - Local Alias Analysis Impl -----------------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file defines the default implementation of the Alias Analysis interface
// that simply implements a few identities (two different globals cannot alias,
// etc), but otherwise does no analysis.
//
//===----------------------------------------------------------------------===//

#include "llvm/Analysis/AliasAnalysis.h"
#include "llvm/Analysis/Passes.h"
#include "llvm/Constants.h"
#include "llvm/DerivedTypes.h"
#include "llvm/Function.h"
#include "llvm/ParameterAttributes.h"
#include "llvm/GlobalVariable.h"
#include "llvm/Instructions.h"
#include "llvm/IntrinsicInst.h"
#include "llvm/Pass.h"
#include "llvm/Target/TargetData.h"
#include "llvm/ADT/SmallVector.h"
#include "llvm/ADT/STLExtras.h"
#include "llvm/Support/Compiler.h"
#include "llvm/Support/GetElementPtrTypeIterator.h"
#include "llvm/Support/ManagedStatic.h"
#include <algorithm>
using namespace llvm;
//===----------------------------------------------------------------------===//
// Useful predicates
//===----------------------------------------------------------------------===//

// Determine if an AllocationInst instruction escapes from the function it is
// contained in. If it does not escape, there is no way for another function to
// mod/ref it. We do this by looking at its uses and determining if the uses
// can escape (recursively).
static bool AddressMightEscape(const Value *V) {
  for (Value::use_const_iterator UI = V->use_begin(), E = V->use_end();
       UI != E; ++UI) {
    const Instruction *I = cast<Instruction>(*UI);
    switch (I->getOpcode()) {
    case Instruction::Load:
      break; // next use.
    case Instruction::Store:
      if (I->getOperand(0) == V)
        return true; // Escapes if the pointer is stored.
      break; // next use.
    case Instruction::GetElementPtr:
      if (AddressMightEscape(I))
        return true;
      break; // next use.
    case Instruction::BitCast:
      if (AddressMightEscape(I))
        return true;
      break; // next use
    case Instruction::Ret:
      // If returned, the address will escape to calling functions, but no
      // callees could modify it.
      break; // next use
    case Instruction::Call:
      // If the call is to a few known safe intrinsics, we know that it does
      // not escape.
      // TODO: Eventually just check the 'nocapture' attribute.
      if (!isa<MemIntrinsic>(I))
        return true;
      break; // next use
    default:
      return true;
    }
  }
  return false;
}
/// getUnderlyingObject - This traverses the use chain to figure out what object
/// the specified value points to. If the value points to, or is derived from,
/// a unique object or an argument, return it. This returns:
///     Arguments, GlobalVariables, Functions, Allocas, Mallocs.
static const Value *getUnderlyingObject(const Value *V) {
  if (!isa<PointerType>(V->getType())) return V;

  // If we are at some type of object, return it. GlobalValues and Allocations
  // have unique addresses.
  if (isa<GlobalValue>(V) || isa<AllocationInst>(V) || isa<Argument>(V))
    return V;

  // Traverse through different addressing mechanisms...
  if (const Instruction *I = dyn_cast<Instruction>(V)) {
    if (isa<BitCastInst>(I) || isa<GetElementPtrInst>(I))
      return getUnderlyingObject(I->getOperand(0));
  } else if (const ConstantExpr *CE = dyn_cast<ConstantExpr>(V)) {
    if (CE->getOpcode() == Instruction::BitCast ||
        CE->getOpcode() == Instruction::GetElementPtr)
      return getUnderlyingObject(CE->getOperand(0));
  }
  return V;
}
static const User *isGEP(const Value *V) {
  if (isa<GetElementPtrInst>(V) ||
      (isa<ConstantExpr>(V) &&
       cast<ConstantExpr>(V)->getOpcode() == Instruction::GetElementPtr))
    return cast<User>(V);
  return 0;
}

static const Value *GetGEPOperands(const Value *V,
                                   SmallVector<Value*, 16> &GEPOps) {
  assert(GEPOps.empty() && "Expect empty list to populate!");
  GEPOps.insert(GEPOps.end(), cast<User>(V)->op_begin()+1,
                cast<User>(V)->op_end());

  // Accumulate all of the chained indexes into the operand array.
  V = cast<User>(V)->getOperand(0);

  while (const User *G = isGEP(V)) {
    if (!isa<Constant>(GEPOps[0]) || isa<GlobalValue>(GEPOps[0]) ||
        !cast<Constant>(GEPOps[0])->isNullValue())
      break;  // Don't handle folding arbitrary pointer offsets yet...
    GEPOps.erase(GEPOps.begin());  // Drop the zero index
    GEPOps.insert(GEPOps.begin(), G->op_begin()+1, G->op_end());
    V = G->getOperand(0);
  }
  return V;
}
/// isIdentifiedObject - Return true if this pointer refers to a distinct and
/// identifiable object. This returns true for:
///    Global Variables and Functions
///    Allocas and Mallocs
///    ByVal and NoAlias Arguments
///
static bool isIdentifiedObject(const Value *V) {
  if (isa<GlobalValue>(V) || isa<AllocationInst>(V))
    return true;
  if (const Argument *A = dyn_cast<Argument>(V))
    return A->hasNoAliasAttr() || A->hasByValAttr();
  return false;
}
/// isKnownNonNull - Return true if we know that the specified value is never
/// null.
static bool isKnownNonNull(const Value *V) {
  // Alloca never returns null, malloc might.
  if (isa<AllocaInst>(V)) return true;

  // A byval argument is never null.
  if (const Argument *A = dyn_cast<Argument>(V))
    return A->hasByValAttr();

  // Global values are not null unless extern weak.
  if (const GlobalValue *GV = dyn_cast<GlobalValue>(V))
    return !GV->hasExternalWeakLinkage();
  return false;
}
/// isNonEscapingLocalObject - Return true if the pointer is to a function-local
/// object that never escapes from the function.
static bool isNonEscapingLocalObject(const Value *V) {
  // If this is a local allocation, check to see if it escapes.
  if (isa<AllocationInst>(V))
    return !AddressMightEscape(V);

  // If this is an argument that corresponds to a byval or noalias argument,
  // it can't escape either.
  if (const Argument *A = dyn_cast<Argument>(V))
    if (A->hasByValAttr() || A->hasNoAliasAttr())
      return !AddressMightEscape(V);
  return false;
}
/// isObjectSmallerThan - Return true if we can prove that the object specified
/// by V is smaller than Size.
static bool isObjectSmallerThan(const Value *V, unsigned Size,
                                const TargetData &TD) {
  const Type *AccessTy = 0;
  if (const GlobalVariable *GV = dyn_cast<GlobalVariable>(V))
    AccessTy = GV->getType()->getElementType();
  if (const AllocationInst *AI = dyn_cast<AllocationInst>(V))
    if (!AI->isArrayAllocation())
      AccessTy = AI->getType()->getElementType();
  if (const Argument *A = dyn_cast<Argument>(V))
    if (A->hasByValAttr())
      AccessTy = cast<PointerType>(A->getType())->getElementType();
  if (AccessTy && AccessTy->isSized())
    return TD.getABITypeSize(AccessTy) < Size;
  return false;
}
//===----------------------------------------------------------------------===//
// NoAA Pass
//===----------------------------------------------------------------------===//

namespace {
  /// NoAA - This class implements the -no-aa pass, which always returns "I
  /// don't know" for alias queries. NoAA is unlike other alias analysis
  /// implementations, in that it does not chain to a previous analysis. As
  /// such it doesn't follow many of the rules that other alias analyses must.
  ///
  struct VISIBILITY_HIDDEN NoAA : public ImmutablePass, public AliasAnalysis {
    static char ID; // Class identification, replacement for typeinfo
    NoAA() : ImmutablePass(&ID) {}
    explicit NoAA(void *PID) : ImmutablePass(PID) { }

    virtual void getAnalysisUsage(AnalysisUsage &AU) const {
      AU.addRequired<TargetData>();
    }

    virtual void initializePass() {
      TD = &getAnalysis<TargetData>();
    }

    virtual AliasResult alias(const Value *V1, unsigned V1Size,
                              const Value *V2, unsigned V2Size) {
      return MayAlias;
    }

    virtual ModRefBehavior getModRefBehavior(Function *F, CallSite CS,
                                         std::vector<PointerAccessInfo> *Info) {
      return UnknownModRefBehavior;
    }

    virtual void getArgumentAccesses(Function *F, CallSite CS,
                                     std::vector<PointerAccessInfo> &Info) {
      assert(0 && "This method may not be called on this function!");
    }

    virtual void getMustAliases(Value *P, std::vector<Value*> &RetVals) { }
    virtual bool pointsToConstantMemory(const Value *P) { return false; }
    virtual ModRefResult getModRefInfo(CallSite CS, Value *P, unsigned Size) {
      return ModRef;
    }
    virtual ModRefResult getModRefInfo(CallSite CS1, CallSite CS2) {
      return ModRef;
    }
    virtual bool hasNoModRefInfoForCalls() const { return true; }

    virtual void deleteValue(Value *V) {}
    virtual void copyValue(Value *From, Value *To) {}
  };
}  // End of anonymous namespace

// Register this pass...
char NoAA::ID = 0;
static RegisterPass<NoAA>
U("no-aa", "No Alias Analysis (always returns 'may' alias)", true, true);

// Declare that we implement the AliasAnalysis interface
static RegisterAnalysisGroup<AliasAnalysis> V(U);

ImmutablePass *llvm::createNoAAPass() { return new NoAA(); }
//===----------------------------------------------------------------------===//
// BasicAA Pass
//===----------------------------------------------------------------------===//

namespace {
  /// BasicAliasAnalysis - This is the default alias analysis implementation.
  /// Because it doesn't chain to a previous alias analysis (like -no-aa), it
  /// derives from the NoAA class.
  struct VISIBILITY_HIDDEN BasicAliasAnalysis : public NoAA {
    static char ID; // Class identification, replacement for typeinfo
    BasicAliasAnalysis() : NoAA(&ID) {}
    AliasResult alias(const Value *V1, unsigned V1Size,
                      const Value *V2, unsigned V2Size);

    ModRefResult getModRefInfo(CallSite CS, Value *P, unsigned Size);
    ModRefResult getModRefInfo(CallSite CS1, CallSite CS2) {
      return NoAA::getModRefInfo(CS1, CS2);
    }

    /// hasNoModRefInfoForCalls - We can provide mod/ref information against
    /// non-escaping allocations.
    virtual bool hasNoModRefInfoForCalls() const { return false; }

    /// pointsToConstantMemory - Chase pointers until we find a (constant
    /// global) or not.
    bool pointsToConstantMemory(const Value *P);

  private:
    // CheckGEPInstructions - Check two GEP instructions with known
    // must-aliasing base pointers. This checks to see if the index expressions
    // preclude the pointers from aliasing...
    AliasResult
    CheckGEPInstructions(const Type* BasePtr1Ty,
                         Value **GEP1Ops, unsigned NumGEP1Ops, unsigned G1Size,
                         const Type *BasePtr2Ty,
                         Value **GEP2Ops, unsigned NumGEP2Ops, unsigned G2Size);
  };
}  // End of anonymous namespace

// Register this pass...
char BasicAliasAnalysis::ID = 0;
static RegisterPass<BasicAliasAnalysis>
X("basicaa", "Basic Alias Analysis (default AA impl)", false, true);

// Declare that we implement the AliasAnalysis interface
static RegisterAnalysisGroup<AliasAnalysis, true> Y(X);

ImmutablePass *llvm::createBasicAliasAnalysisPass() {
  return new BasicAliasAnalysis();
}
/// pointsToConstantMemory - Chase pointers until we find a (constant
/// global) or not.
bool BasicAliasAnalysis::pointsToConstantMemory(const Value *P) {
  if (const GlobalVariable *GV =
        dyn_cast<GlobalVariable>(getUnderlyingObject(P)))
    return GV->isConstant();
  return false;
}
// getModRefInfo - Check to see if the specified callsite can clobber the
// specified memory object. Since we only look at local properties of this
// function, we really can't say much about this query. We do, however, use
// simple "address taken" analysis on local objects.
//
AliasAnalysis::ModRefResult
BasicAliasAnalysis::getModRefInfo(CallSite CS, Value *P, unsigned Size) {
  if (!isa<Constant>(P)) {
    const Value *Object = getUnderlyingObject(P);

    // If this is a tail call and P points to a stack location, we know that
    // the tail call cannot access or modify the local stack.
    // We cannot exclude byval arguments here; these belong to the caller of
    // the current function, not to the current function, and a tail callee
    // may reference them.
    if (isa<AllocaInst>(Object))
      if (CallInst *CI = dyn_cast<CallInst>(CS.getInstruction()))
        if (CI->isTailCall())
          return NoModRef;

    // If the pointer is to a locally allocated object that does not escape,
    // then the call cannot mod/ref the pointer unless the call takes the
    // argument without capturing it.
    if (isNonEscapingLocalObject(Object)) {
      bool passedAsArg = false;
      // TODO: Eventually only check 'nocapture' arguments.
      for (CallSite::arg_iterator CI = CS.arg_begin(), CE = CS.arg_end();
           CI != CE; ++CI)
        if (isa<PointerType>((*CI)->getType()) &&
            alias(cast<Value>(CI), ~0U, P, ~0U) != NoAlias)
          passedAsArg = true;

      if (!passedAsArg)
        return NoModRef;
    }
  }

  // The AliasAnalysis base class has some smarts; let's use them.
  return AliasAnalysis::getModRefInfo(CS, P, Size);
}
// alias - Provide a bunch of ad-hoc rules to disambiguate in common cases, such
// as array references. Note that this function is heavily tail recursive.
// Hopefully we have a smart C++ compiler. :)
//
AliasAnalysis::AliasResult
BasicAliasAnalysis::alias(const Value *V1, unsigned V1Size,
                          const Value *V2, unsigned V2Size) {
  // Strip off any constant expression casts if they exist.
  if (const ConstantExpr *CE = dyn_cast<ConstantExpr>(V1))
    if (CE->isCast() && isa<PointerType>(CE->getOperand(0)->getType()))
      V1 = CE->getOperand(0);
  if (const ConstantExpr *CE = dyn_cast<ConstantExpr>(V2))
    if (CE->isCast() && isa<PointerType>(CE->getOperand(0)->getType()))
      V2 = CE->getOperand(0);

  // Are we checking for alias of the same value?
  if (V1 == V2) return MustAlias;

  if ((!isa<PointerType>(V1->getType()) || !isa<PointerType>(V2->getType())) &&
      V1->getType() != Type::Int64Ty && V2->getType() != Type::Int64Ty)
    return NoAlias;  // Scalars cannot alias each other

  // Strip off cast instructions...
  if (const BitCastInst *I = dyn_cast<BitCastInst>(V1))
    return alias(I->getOperand(0), V1Size, V2, V2Size);
  if (const BitCastInst *I = dyn_cast<BitCastInst>(V2))
    return alias(V1, V1Size, I->getOperand(0), V2Size);

  // Figure out what objects these things are pointing to if we can...
  const Value *O1 = getUnderlyingObject(V1);
  const Value *O2 = getUnderlyingObject(V2);

  if (O1 != O2) {
    // If V1/V2 point to two different objects we know that we have no alias.
    if (isIdentifiedObject(O1) && isIdentifiedObject(O2))
      return NoAlias;

    // Incoming argument cannot alias locally allocated object!
    if ((isa<Argument>(O1) && isa<AllocationInst>(O2)) ||
        (isa<Argument>(O2) && isa<AllocationInst>(O1)))
      return NoAlias;

    // Most objects can't alias null.
    if ((isa<ConstantPointerNull>(V2) && isKnownNonNull(O1)) ||
        (isa<ConstantPointerNull>(V1) && isKnownNonNull(O2)))
      return NoAlias;
  }

  // If the size of one access is larger than the entire object on the other
  // side, then we know such behavior is undefined and can assume no alias.
  const TargetData &TD = getTargetData();
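  // Note: a size of ~0U is the "unknown access size" sentinel used throughout
  // this analysis (see the ~0U queries above and in getModRefInfo), so the
  // object-size check below only fires when the access size is actually known.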
  if ((V1Size != ~0U && isObjectSmallerThan(O2, V1Size, TD)) ||
      (V2Size != ~0U && isObjectSmallerThan(O1, V2Size, TD)))
    return NoAlias;

  // If one pointer is the result of a call/invoke and the other is a
  // non-escaping local object, then we know the object couldn't escape to a
  // point where the call could return it.
  if ((isa<CallInst>(O1) || isa<InvokeInst>(O1)) &&
      isNonEscapingLocalObject(O2))
    return NoAlias;
  if ((isa<CallInst>(O2) || isa<InvokeInst>(O2)) &&
      isNonEscapingLocalObject(O1))
    return NoAlias;

  // If we have two gep instructions with must-alias'ing base pointers, figure
  // out if the indexes to the GEP tell us anything about the derived pointer.
  // Note that we also handle chains of getelementptr instructions as well as
  // constant expression getelementptrs here.
  //
  if (isGEP(V1) && isGEP(V2)) {
    // Drill down into the first non-gep value, to test for must-aliasing of
    // the base pointers.
    const User *G = cast<User>(V1);
    while (isGEP(G->getOperand(0)) &&
           G->getOperand(1) ==
             Constant::getNullValue(G->getOperand(1)->getType()))
      G = cast<User>(G->getOperand(0));
    const Value *BasePtr1 = G->getOperand(0);

    G = cast<User>(V2);
    while (isGEP(G->getOperand(0)) &&
           G->getOperand(1) ==
             Constant::getNullValue(G->getOperand(1)->getType()))
      G = cast<User>(G->getOperand(0));
    const Value *BasePtr2 = G->getOperand(0);

    // Do the base pointers alias?
    AliasResult BaseAlias = alias(BasePtr1, ~0U, BasePtr2, ~0U);
    if (BaseAlias == NoAlias) return NoAlias;
    if (BaseAlias == MustAlias) {
      // If the base pointers alias each other exactly, check to see if we can
      // figure out anything about the resultant pointers, to try to prove
      // non-aliasing.

      // Collect all of the chained GEP operands together into one simple place
      SmallVector<Value*, 16> GEP1Ops, GEP2Ops;
      BasePtr1 = GetGEPOperands(V1, GEP1Ops);
      BasePtr2 = GetGEPOperands(V2, GEP2Ops);

      // If GetGEPOperands were able to fold to the same must-aliased pointer,
      // do the comparison.
      if (BasePtr1 == BasePtr2) {
        AliasResult GAlias =
          CheckGEPInstructions(BasePtr1->getType(),
                               &GEP1Ops[0], GEP1Ops.size(), V1Size,
                               BasePtr2->getType(),
                               &GEP2Ops[0], GEP2Ops.size(), V2Size);
        if (GAlias != MayAlias)
          return GAlias;
      }
    }
  }

  // Check to see if these two pointers are related by a getelementptr
  // instruction. If one pointer is a GEP with a non-zero index of the other
  // pointer, we know they cannot alias.
  //
  if (isGEP(V2)) {
    std::swap(V1, V2);
    std::swap(V1Size, V2Size);
  }
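  // The offset-based GEP check below is only attempted when both access sizes
  // are known (i.e. neither is the ~0U "unknown size" sentinel).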
  if (V1Size != ~0U && V2Size != ~0U)
    if (isGEP(V1)) {
      SmallVector<Value*, 16> GEPOperands;
      const Value *BasePtr = GetGEPOperands(V1, GEPOperands);

      AliasResult R = alias(BasePtr, V1Size, V2, V2Size);
      if (R == MustAlias) {
        // If there is at least one non-zero constant index, we know they cannot
        // alias.
        bool ConstantFound = false;
        bool AllZerosFound = true;
        for (unsigned i = 0, e = GEPOperands.size(); i != e; ++i)
          if (const Constant *C = dyn_cast<Constant>(GEPOperands[i])) {
            if (!C->isNullValue()) {
              ConstantFound = true;
              AllZerosFound = false;
              break;
            }
          } else {
            AllZerosFound = false;
          }

        // If we have getelementptr <ptr>, 0, 0, 0, 0, ... and V2 must-aliases
        // the ptr, then the end result is a must alias also.
        if (AllZerosFound)
          return MustAlias;

        if (ConstantFound) {
          if (V2Size <= 1 && V1Size <= 1)  // Just pointer check?
            return NoAlias;

          // Otherwise we have to check to see that the distance is more than
          // the size of the argument... build an index vector that is equal to
          // the arguments provided, except substitute 0's for any variable
          // indexes we find...
          if (cast<PointerType>(
                BasePtr->getType())->getElementType()->isSized()) {
            for (unsigned i = 0; i != GEPOperands.size(); ++i)
              if (!isa<ConstantInt>(GEPOperands[i]))
                GEPOperands[i] =
                  Constant::getNullValue(GEPOperands[i]->getType());
            int64_t Offset =
              getTargetData().getIndexedOffset(BasePtr->getType(),
                                               &GEPOperands[0],
                                               GEPOperands.size());

            if (Offset >= (int64_t)V2Size || Offset <= -(int64_t)V1Size)
              return NoAlias;
          }
        }
      }
    }

  return MayAlias;
}
// This function is used to determine if the indices of two GEP instructions are
// equal. V1 and V2 are the indices.
static bool IndexOperandsEqual(Value *V1, Value *V2) {
  if (V1->getType() == V2->getType())
    return V1 == V2;
  if (Constant *C1 = dyn_cast<Constant>(V1))
    if (Constant *C2 = dyn_cast<Constant>(V2)) {
      // Sign extend the constants to long types, if necessary
      if (C1->getType() != Type::Int64Ty)
        C1 = ConstantExpr::getSExt(C1, Type::Int64Ty);
      if (C2->getType() != Type::Int64Ty)
        C2 = ConstantExpr::getSExt(C2, Type::Int64Ty);
      return C1 == C2;
    }
  return false;
}
/// CheckGEPInstructions - Check two GEP instructions with known must-aliasing
/// base pointers. This checks to see if the index expressions preclude the
/// pointers from aliasing...
AliasAnalysis::AliasResult
BasicAliasAnalysis::CheckGEPInstructions(
  const Type* BasePtr1Ty, Value **GEP1Ops, unsigned NumGEP1Ops, unsigned G1S,
  const Type *BasePtr2Ty, Value **GEP2Ops, unsigned NumGEP2Ops, unsigned G2S) {
  // We currently can't handle the case when the base pointers have different
  // primitive types. Since this is uncommon anyway, we are happy being
  // extremely conservative.
  if (BasePtr1Ty != BasePtr2Ty)
    return MayAlias;

  const PointerType *GEPPointerTy = cast<PointerType>(BasePtr1Ty);

  // Find the (possibly empty) initial sequence of equal values... which are not
  // necessarily constants.
  unsigned NumGEP1Operands = NumGEP1Ops, NumGEP2Operands = NumGEP2Ops;
  unsigned MinOperands = std::min(NumGEP1Operands, NumGEP2Operands);
  unsigned MaxOperands = std::max(NumGEP1Operands, NumGEP2Operands);
  unsigned UnequalOper = 0;
  while (UnequalOper != MinOperands &&
         IndexOperandsEqual(GEP1Ops[UnequalOper], GEP2Ops[UnequalOper])) {
    // Advance through the type as we go...
    ++UnequalOper;
    if (const CompositeType *CT = dyn_cast<CompositeType>(BasePtr1Ty))
      BasePtr1Ty = CT->getTypeAtIndex(GEP1Ops[UnequalOper-1]);
    else {
      // If all operands equal each other, then the derived pointers must
      // alias each other...
      BasePtr1Ty = 0;
      assert(UnequalOper == NumGEP1Operands && UnequalOper == NumGEP2Operands &&
             "Ran out of type nesting, but not out of operands?");
      return MustAlias;
    }
  }

  // If we have seen all constant operands, and run out of indexes on one of the
  // getelementptrs, check to see if the tail of the leftover one is all zeros.
  // If so, return mustalias.
  if (UnequalOper == MinOperands) {
    if (NumGEP1Ops < NumGEP2Ops) {
      std::swap(GEP1Ops, GEP2Ops);
      std::swap(NumGEP1Ops, NumGEP2Ops);
    }

    bool AllAreZeros = true;
    for (unsigned i = UnequalOper; i != MaxOperands; ++i)
      if (!isa<Constant>(GEP1Ops[i]) ||
          !cast<Constant>(GEP1Ops[i])->isNullValue()) {
        AllAreZeros = false;
        break;
      }
    if (AllAreZeros) return MustAlias;
  }

  // So now we know that the indexes derived from the base pointers,
  // which are known to alias, are different. We can still determine a
  // no-alias result if there are differing constant pairs in the index
  // chain. For example:
  //  A[i][0] != A[j][1] iff (&A[0][1]-&A[0][0] >= std::max(G1S, G2S))
  //
  // We have to be careful here about array accesses. In particular, consider:
  //       A[1][0] vs A[0][i]
  // In this case, we don't *know* that the array will be accessed in bounds:
  // the index could even be negative. Because of this, we have to
  // conservatively *give up* and return may alias. We disregard differing
  // array subscripts that are followed by a variable index without going
  // through a struct.
  //
  unsigned SizeMax = std::max(G1S, G2S);
  if (SizeMax == ~0U) return MayAlias;  // Avoid frivolous work.
  // Scan for the first operand that is constant and unequal in the
  // two getelementptrs...
  unsigned FirstConstantOper = UnequalOper;
  for (; FirstConstantOper != MinOperands; ++FirstConstantOper) {
    const Value *G1Oper = GEP1Ops[FirstConstantOper];
    const Value *G2Oper = GEP2Ops[FirstConstantOper];

    if (G1Oper != G2Oper)   // Found non-equal constant indexes...
      if (Constant *G1OC = dyn_cast<ConstantInt>(const_cast<Value*>(G1Oper)))
        if (Constant *G2OC = dyn_cast<ConstantInt>(const_cast<Value*>(G2Oper))){
          if (G1OC->getType() != G2OC->getType()) {
            // Sign extend both operands to long.
            if (G1OC->getType() != Type::Int64Ty)
              G1OC = ConstantExpr::getSExt(G1OC, Type::Int64Ty);
            if (G2OC->getType() != Type::Int64Ty)
              G2OC = ConstantExpr::getSExt(G2OC, Type::Int64Ty);
            GEP1Ops[FirstConstantOper] = G1OC;
            GEP2Ops[FirstConstantOper] = G2OC;
          }

          if (G1OC != G2OC) {
            // Handle the "be careful" case above: if this is an array/vector
            // subscript, scan for a subsequent variable array index.
            if (isa<SequentialType>(BasePtr1Ty)) {
              const Type *NextTy =
                cast<SequentialType>(BasePtr1Ty)->getElementType();
              bool isBadCase = false;

              for (unsigned Idx = FirstConstantOper+1;
                   Idx != MinOperands && isa<SequentialType>(NextTy); ++Idx) {
                const Value *V1 = GEP1Ops[Idx], *V2 = GEP2Ops[Idx];
                if (!isa<Constant>(V1) || !isa<Constant>(V2)) {
                  isBadCase = true;
                  break;
                }
                NextTy = cast<SequentialType>(NextTy)->getElementType();
              }

              if (isBadCase) G1OC = 0;
            }

            // Make sure they are comparable (ie, not constant expressions), and
            // make sure the GEP with the smaller leading constant is GEP1.
            if (G1OC) {
              Constant *Compare = ConstantExpr::getICmp(ICmpInst::ICMP_SGT,
                                                        G1OC, G2OC);
              if (ConstantInt *CV = dyn_cast<ConstantInt>(Compare)) {
                if (CV->getZExtValue()) {  // If they are comparable and G2 > G1
                  std::swap(GEP1Ops, GEP2Ops);  // Make GEP1 < GEP2
                  std::swap(NumGEP1Ops, NumGEP2Ops);
                }
                break;
              }
            }
          }
        }
    BasePtr1Ty = cast<CompositeType>(BasePtr1Ty)->getTypeAtIndex(G1Oper);
  }
  // No shared constant operands, and we ran out of common operands. At this
  // point, the GEP instructions have run through all of their operands, and we
  // haven't found evidence that there are any deltas between the GEPs.
  // However, one GEP may have more operands than the other. If this is the
  // case, there may still be hope. Check this now.
  if (FirstConstantOper == MinOperands) {
    // Make GEP1Ops be the longer one if there is a longer one.
    if (NumGEP1Ops < NumGEP2Ops) {
      std::swap(GEP1Ops, GEP2Ops);
      std::swap(NumGEP1Ops, NumGEP2Ops);
    }

    // Is there anything to check?
    if (NumGEP1Ops > MinOperands) {
      for (unsigned i = FirstConstantOper; i != MaxOperands; ++i)
        if (isa<ConstantInt>(GEP1Ops[i]) &&
            !cast<ConstantInt>(GEP1Ops[i])->isZero()) {
          // Yup, there's a constant in the tail. Set all variables to
          // constants in the GEP instruction to make it suitable for
          // TargetData::getIndexedOffset.
          for (i = 0; i != MaxOperands; ++i)
            if (!isa<ConstantInt>(GEP1Ops[i]))
              GEP1Ops[i] = Constant::getNullValue(GEP1Ops[i]->getType());

          // Okay, now get the offset. This is the relative offset for the full
          // instruction.
          const TargetData &TD = getTargetData();
          int64_t Offset1 = TD.getIndexedOffset(GEPPointerTy, GEP1Ops,
                                                NumGEP1Ops);

          // Now check without any constants at the end.
          int64_t Offset2 = TD.getIndexedOffset(GEPPointerTy, GEP1Ops,
                                                MinOperands);

          // Make sure we compare the absolute difference.
          if (Offset1 > Offset2)
            std::swap(Offset1, Offset2);

          // If the tail provided a big enough offset, return noalias!
          if ((uint64_t)(Offset2-Offset1) >= SizeMax)
            return NoAlias;
          // Otherwise break - we don't look for another constant in the tail.
          break;
        }
    }

    // Couldn't find anything useful.
    return MayAlias;
  }
  // If there are non-equal constant arguments, then we can figure
  // out a minimum known delta between the two index expressions... at
  // this point we know that the first constant index of GEP1 is less
  // than the first constant index of GEP2.

  // Advance BasePtr[12]Ty over this first differing constant operand.
  BasePtr2Ty = cast<CompositeType>(BasePtr1Ty)->
      getTypeAtIndex(GEP2Ops[FirstConstantOper]);
  BasePtr1Ty = cast<CompositeType>(BasePtr1Ty)->
      getTypeAtIndex(GEP1Ops[FirstConstantOper]);

  // We are going to be using TargetData::getIndexedOffset to determine the
  // offset that each of the GEPs is reaching. To do this, we have to convert
  // all variable references to constant references, so we convert the initial
  // sequence of array subscripts into constant zeros to start with.
  const Type *ZeroIdxTy = GEPPointerTy;
  for (unsigned i = 0; i != FirstConstantOper; ++i) {
    if (!isa<StructType>(ZeroIdxTy))
      GEP1Ops[i] = GEP2Ops[i] = Constant::getNullValue(Type::Int32Ty);

    if (const CompositeType *CT = dyn_cast<CompositeType>(ZeroIdxTy))
      ZeroIdxTy = CT->getTypeAtIndex(GEP1Ops[i]);
  }
  // We know that GEP1Ops[FirstConstantOper] & GEP2Ops[FirstConstantOper] are ok

  // Loop over the rest of the operands...
  for (unsigned i = FirstConstantOper+1; i != MaxOperands; ++i) {
    const Value *Op1 = i < NumGEP1Ops ? GEP1Ops[i] : 0;
    const Value *Op2 = i < NumGEP2Ops ? GEP2Ops[i] : 0;
    // If they are equal, use a zero index...
    if (Op1 == Op2 && BasePtr1Ty == BasePtr2Ty) {
      if (!isa<ConstantInt>(Op1))
        GEP1Ops[i] = GEP2Ops[i] = Constant::getNullValue(Op1->getType());
      // Otherwise, just keep the constants we have.
    } else {
      if (Op1) {
        if (const ConstantInt *Op1C = dyn_cast<ConstantInt>(Op1)) {
          // If this is an array index, make sure the array element is in range.
          if (const ArrayType *AT = dyn_cast<ArrayType>(BasePtr1Ty)) {
            if (Op1C->getZExtValue() >= AT->getNumElements())
              return MayAlias;  // Be conservative with out-of-range accesses
          } else if (const VectorType *VT = dyn_cast<VectorType>(BasePtr1Ty)) {
            if (Op1C->getZExtValue() >= VT->getNumElements())
              return MayAlias;  // Be conservative with out-of-range accesses
          }
        } else {
          // GEP1 is known to produce a value less than GEP2. To be
          // conservatively correct, we must assume the largest possible
          // constant is used in this position. This cannot be the initial
          // index to the GEP instructions (because we know we have at least one
          // element before this one with the different constant arguments), so
          // we know that the current index must be into either a struct or
          // array. Because we know it's not constant, this cannot be a
          // structure index. Because of this, we can calculate the maximum
          // value possible.
          //
          if (const ArrayType *AT = dyn_cast<ArrayType>(BasePtr1Ty))
            GEP1Ops[i] = ConstantInt::get(Type::Int64Ty,AT->getNumElements()-1);
          else if (const VectorType *VT = dyn_cast<VectorType>(BasePtr1Ty))
            GEP1Ops[i] = ConstantInt::get(Type::Int64Ty,VT->getNumElements()-1);
        }
      }

      if (Op2) {
        if (const ConstantInt *Op2C = dyn_cast<ConstantInt>(Op2)) {
          // If this is an array index, make sure the array element is in range.
          if (const ArrayType *AT = dyn_cast<ArrayType>(BasePtr2Ty)) {
            if (Op2C->getZExtValue() >= AT->getNumElements())
              return MayAlias;  // Be conservative with out-of-range accesses
          } else if (const VectorType *VT = dyn_cast<VectorType>(BasePtr2Ty)) {
            if (Op2C->getZExtValue() >= VT->getNumElements())
              return MayAlias;  // Be conservative with out-of-range accesses
          }
        } else {  // Conservatively assume the minimum value for this index
          GEP2Ops[i] = Constant::getNullValue(Op2->getType());
        }
      }
    }

    if (BasePtr1Ty && Op1) {
      if (const CompositeType *CT = dyn_cast<CompositeType>(BasePtr1Ty))
        BasePtr1Ty = CT->getTypeAtIndex(GEP1Ops[i]);
      else
        BasePtr1Ty = 0;
    }

    if (BasePtr2Ty && Op2) {
      if (const CompositeType *CT = dyn_cast<CompositeType>(BasePtr2Ty))
        BasePtr2Ty = CT->getTypeAtIndex(GEP2Ops[i]);
      else
        BasePtr2Ty = 0;
    }
  }

  if (GEPPointerTy->getElementType()->isSized()) {
    int64_t Offset1 =
      getTargetData().getIndexedOffset(GEPPointerTy, GEP1Ops, NumGEP1Ops);
    int64_t Offset2 =
      getTargetData().getIndexedOffset(GEPPointerTy, GEP2Ops, NumGEP2Ops);
    assert(Offset1 != Offset2 &&
           "There is at least one different constant here!");

    // Make sure we compare the absolute difference.
    if (Offset1 > Offset2)
      std::swap(Offset1, Offset2);

    if ((uint64_t)(Offset2-Offset1) >= SizeMax) {
      //cerr << "Determined that these two GEP's don't alias ["
      //     << SizeMax << " bytes]: \n" << *GEP1 << *GEP2;
      return NoAlias;
    }
  }
  return MayAlias;
}
// Make sure that anything that uses AliasAnalysis pulls in this file...
DEFINING_FILE_FOR(BasicAliasAnalysis)