//===- ImplicitNullChecks.cpp - Fold null checks into memory accesses ----===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// This pass turns explicit null checks of the form
//
//   test %r10, %r10
//   je throw_npe
//   movl (%r10), %esi
//   ...
//
// to
//
//   faulting_load_op("movl (%r10), %esi", throw_npe)
//   ...
//
// With the help of a runtime that understands the .fault_maps section,
// faulting_load_op branches to throw_npe if executing movl (%r10), %esi incurs
// a page fault.
// Store and LoadStore are also supported.
//
//===----------------------------------------------------------------------===//
#include "llvm/ADT/ArrayRef.h"
#include "llvm/ADT/None.h"
#include "llvm/ADT/Optional.h"
#include "llvm/ADT/STLExtras.h"
#include "llvm/ADT/SmallVector.h"
#include "llvm/ADT/Statistic.h"
#include "llvm/Analysis/AliasAnalysis.h"
#include "llvm/Analysis/MemoryLocation.h"
#include "llvm/CodeGen/FaultMaps.h"
#include "llvm/CodeGen/MachineBasicBlock.h"
#include "llvm/CodeGen/MachineFunction.h"
#include "llvm/CodeGen/MachineFunctionPass.h"
#include "llvm/CodeGen/MachineInstr.h"
#include "llvm/CodeGen/MachineInstrBuilder.h"
#include "llvm/CodeGen/MachineMemOperand.h"
#include "llvm/CodeGen/MachineOperand.h"
#include "llvm/CodeGen/MachineRegisterInfo.h"
#include "llvm/CodeGen/PseudoSourceValue.h"
#include "llvm/CodeGen/TargetInstrInfo.h"
#include "llvm/CodeGen/TargetOpcodes.h"
#include "llvm/CodeGen/TargetRegisterInfo.h"
#include "llvm/CodeGen/TargetSubtargetInfo.h"
#include "llvm/IR/BasicBlock.h"
#include "llvm/IR/DebugLoc.h"
#include "llvm/IR/LLVMContext.h"
#include "llvm/MC/MCInstrDesc.h"
#include "llvm/MC/MCRegisterInfo.h"
#include "llvm/Pass.h"
#include "llvm/Support/CommandLine.h"
#include <cassert>
#include <cstdint>
#include <iterator>

using namespace llvm;
static cl::opt<int> PageSize("imp-null-check-page-size",
                             cl::desc("The page size of the target in bytes"),
                             cl::init(4096), cl::Hidden);

static cl::opt<unsigned> MaxInstsToConsider(
    "imp-null-max-insts-to-consider",
    cl::desc("The max number of instructions to consider hoisting loads over "
             "(the algorithm is quadratic over this number)"),
    cl::Hidden, cl::init(8));

#define DEBUG_TYPE "implicit-null-checks"

STATISTIC(NumImplicitNullChecks,
          "Number of explicit null checks made implicit");
namespace {

class ImplicitNullChecks : public MachineFunctionPass {
  /// Return true if \c computeDependence can process \p MI.
  static bool canHandle(const MachineInstr *MI);

  /// Helper function for \c computeDependence. Return true if \p A
  /// and \p B do not have any dependences between them, and can be
  /// re-ordered without changing program semantics.
  bool canReorder(const MachineInstr *A, const MachineInstr *B);

  /// A data type for representing the result computed by \c
  /// computeDependence. States whether it is okay to reorder the
  /// instruction passed to \c computeDependence with at most one
  /// dependency.
  struct DependenceResult {
    /// Can we actually re-order \p MI with \p Insts (see \c
    /// computeDependence).
    bool CanReorder;

    /// If non-None, then an instruction in \p Insts that also must be
    /// hoisted.
    Optional<ArrayRef<MachineInstr *>::iterator> PotentialDependence;

    /*implicit*/ DependenceResult(
        bool CanReorder,
        Optional<ArrayRef<MachineInstr *>::iterator> PotentialDependence)
        : CanReorder(CanReorder), PotentialDependence(PotentialDependence) {
      assert((!PotentialDependence || CanReorder) &&
             "!CanReorder && PotentialDependence.hasValue() not allowed!");
    }
  };
  /// Compute a result for the following question: can \p MI be
  /// re-ordered from after \p Insts to before it.
  ///
  /// \c canHandle should return true for all instructions in \p
  /// Insts.
  DependenceResult computeDependence(const MachineInstr *MI,
                                     ArrayRef<MachineInstr *> Block);

  /// Represents one null check that can be made implicit.
  class NullCheck {
    // The memory operation the null check can be folded into.
    MachineInstr *MemOperation;

    // The instruction actually doing the null check (Ptr != 0).
    MachineInstr *CheckOperation;

    // The block the check resides in.
    MachineBasicBlock *CheckBlock;

    // The block branched to if the pointer is non-null.
    MachineBasicBlock *NotNullSucc;

    // The block branched to if the pointer is null.
    MachineBasicBlock *NullSucc;

    // If this is non-null, then MemOperation has a dependency on this
    // instruction; and it needs to be hoisted to execute before MemOperation.
    MachineInstr *OnlyDependency;

  public:
    explicit NullCheck(MachineInstr *memOperation, MachineInstr *checkOperation,
                       MachineBasicBlock *checkBlock,
                       MachineBasicBlock *notNullSucc,
                       MachineBasicBlock *nullSucc,
                       MachineInstr *onlyDependency)
        : MemOperation(memOperation), CheckOperation(checkOperation),
          CheckBlock(checkBlock), NotNullSucc(notNullSucc), NullSucc(nullSucc),
          OnlyDependency(onlyDependency) {}

    MachineInstr *getMemOperation() const { return MemOperation; }
    MachineInstr *getCheckOperation() const { return CheckOperation; }
    MachineBasicBlock *getCheckBlock() const { return CheckBlock; }
    MachineBasicBlock *getNotNullSucc() const { return NotNullSucc; }
    MachineBasicBlock *getNullSucc() const { return NullSucc; }
    MachineInstr *getOnlyDependency() const { return OnlyDependency; }
  };

  const TargetInstrInfo *TII = nullptr;
  const TargetRegisterInfo *TRI = nullptr;
  AliasAnalysis *AA = nullptr;
  MachineFrameInfo *MFI = nullptr;

  bool analyzeBlockForNullChecks(MachineBasicBlock &MBB,
                                 SmallVectorImpl<NullCheck> &NullCheckList);
  MachineInstr *insertFaultingInstr(MachineInstr *MI, MachineBasicBlock *MBB,
                                    MachineBasicBlock *HandlerMBB);
  void rewriteNullChecks(ArrayRef<NullCheck> NullCheckList);
  enum AliasResult {
    AR_NoAlias,
    AR_MayAlias,
    AR_WillAliasEverything
  };

  /// Returns AR_NoAlias if the memory operation in \p MI does not alias with
  /// \p PrevMI, AR_MayAlias if they may alias, and AR_WillAliasEverything if
  /// they may alias and any further memory operation may also alias with
  /// \p PrevMI.
  AliasResult areMemoryOpsAliased(const MachineInstr &MI,
                                  const MachineInstr *PrevMI) const;

  enum SuitabilityResult {
    SR_Suitable,
    SR_Unsuitable,
    SR_Impossible
  };

  /// Return SR_Suitable if \p MI is a memory operation that can be used to
  /// implicitly null check the value in \p PointerReg, SR_Unsuitable if
  /// \p MI cannot be used for a null check, and SR_Impossible if there is no
  /// point in continuing the search because no later instruction will be
  /// usable either. \p PrevInsts is the set of instructions seen since the
  /// explicit null check on \p PointerReg.
  SuitabilityResult isSuitableMemoryOp(const MachineInstr &MI,
                                       unsigned PointerReg,
                                       ArrayRef<MachineInstr *> PrevInsts);

  /// Return true if \p FaultingMI can be hoisted from after the
  /// instructions in \p InstsSeenSoFar to before them. Set \p Dependence to a
  /// non-null value if we also need to (and legally can) hoist a dependency.
  bool canHoistInst(MachineInstr *FaultingMI, unsigned PointerReg,
                    ArrayRef<MachineInstr *> InstsSeenSoFar,
                    MachineBasicBlock *NullSucc, MachineInstr *&Dependence);
public:
  static char ID;

  ImplicitNullChecks() : MachineFunctionPass(ID) {
    initializeImplicitNullChecksPass(*PassRegistry::getPassRegistry());
  }

  bool runOnMachineFunction(MachineFunction &MF) override;

  void getAnalysisUsage(AnalysisUsage &AU) const override {
    AU.addRequired<AAResultsWrapperPass>();
    MachineFunctionPass::getAnalysisUsage(AU);
  }
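  // Requiring the NoVRegs property means this pass runs after register
  // allocation, so every register operand handled below is a physical
  // register.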
  MachineFunctionProperties getRequiredProperties() const override {
    return MachineFunctionProperties().set(
        MachineFunctionProperties::Property::NoVRegs);
  }
};

} // end anonymous namespace
bool ImplicitNullChecks::canHandle(const MachineInstr *MI) {
  if (MI->isCall() || MI->mayRaiseFPException() ||
      MI->hasUnmodeledSideEffects())
    return false;
  auto IsRegMask = [](const MachineOperand &MO) { return MO.isRegMask(); };
  (void)IsRegMask;

  assert(!llvm::any_of(MI->operands(), IsRegMask) &&
         "Calls were filtered out above!");

  auto IsUnordered = [](MachineMemOperand *MMO) { return MMO->isUnordered(); };
  return llvm::all_of(MI->memoperands(), IsUnordered);
}
ImplicitNullChecks::DependenceResult
ImplicitNullChecks::computeDependence(const MachineInstr *MI,
                                      ArrayRef<MachineInstr *> Block) {
  assert(llvm::all_of(Block, canHandle) && "Check this first!");
  assert(!is_contained(Block, MI) && "Block must be exclusive of MI!");

  Optional<ArrayRef<MachineInstr *>::iterator> Dep;

  for (auto I = Block.begin(), E = Block.end(); I != E; ++I) {
    if (canReorder(*I, MI))
      continue;

    if (Dep == None) {
      // Found one possible dependency, keep track of it.
      Dep = I;
    } else {
      // We found two dependencies, so bail out.
      return {false, None};
    }
  }

  return {true, Dep};
}
bool ImplicitNullChecks::canReorder(const MachineInstr *A,
                                    const MachineInstr *B) {
  assert(canHandle(A) && canHandle(B) && "Precondition!");

  // canHandle makes sure that we _can_ correctly analyze the dependencies
  // between A and B here -- for instance, we should not be dealing with heap
  // load-store dependencies here.
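  // Conservatively refuse to reorder A and B if any register operand of one
  // overlaps a register operand of the other while at least one of the two
  // operands is a def, i.e. there may be a register dependence between them.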
  for (auto MOA : A->operands()) {
    if (!(MOA.isReg() && MOA.getReg()))
      continue;

    Register RegA = MOA.getReg();
    for (auto MOB : B->operands()) {
      if (!(MOB.isReg() && MOB.getReg()))
        continue;

      Register RegB = MOB.getReg();

      if (TRI->regsOverlap(RegA, RegB) && (MOA.isDef() || MOB.isDef()))
        return false;
    }
  }

  return true;
}
bool ImplicitNullChecks::runOnMachineFunction(MachineFunction &MF) {
  TII = MF.getSubtarget().getInstrInfo();
  TRI = MF.getRegInfo().getTargetRegisterInfo();
  MFI = &MF.getFrameInfo();
  AA = &getAnalysis<AAResultsWrapperPass>().getAAResults();

  SmallVector<NullCheck, 16> NullCheckList;

  for (auto &MBB : MF)
    analyzeBlockForNullChecks(MBB, NullCheckList);

  if (!NullCheckList.empty())
    rewriteNullChecks(NullCheckList);

  return !NullCheckList.empty();
}
// Return true if any register aliasing \p Reg is live-in into \p MBB.
static bool AnyAliasLiveIn(const TargetRegisterInfo *TRI,
                           MachineBasicBlock *MBB, unsigned Reg) {
  for (MCRegAliasIterator AR(Reg, TRI, /*IncludeSelf*/ true); AR.isValid();
       ++AR)
    if (MBB->isLiveIn(*AR))
      return true;
  return false;
}
ImplicitNullChecks::AliasResult
ImplicitNullChecks::areMemoryOpsAliased(const MachineInstr &MI,
                                        const MachineInstr *PrevMI) const {
  // If it is not a memory access, skip the check.
  if (!(PrevMI->mayStore() || PrevMI->mayLoad()))
    return AR_NoAlias;
  // Load-Load may alias, but that is harmless here.
  if (!(MI.mayStore() || PrevMI->mayStore()))
    return AR_NoAlias;
  // We lost info, so conservatively assume they alias. If the instruction
  // missing memory operands is a store, there is no point in continuing
  // because we won't be able to check any later instruction against it either.
  if (MI.memoperands_empty())
    return MI.mayStore() ? AR_WillAliasEverything : AR_MayAlias;
  if (PrevMI->memoperands_empty())
    return PrevMI->mayStore() ? AR_WillAliasEverything : AR_MayAlias;

  for (MachineMemOperand *MMO1 : MI.memoperands()) {
    // MMO1 should have a value because it comes from the operation we'd like
    // to use as the implicit null check.
    assert(MMO1->getValue() && "MMO1 should have a Value!");
    for (MachineMemOperand *MMO2 : PrevMI->memoperands()) {
      if (const PseudoSourceValue *PSV = MMO2->getPseudoValue()) {
        if (PSV->mayAlias(MFI))
          return AR_MayAlias;
        continue;
      }
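      // Query alias analysis with unknown location sizes so the answer stays
      // conservative regardless of the actual access widths.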
      llvm::AliasResult AAResult =
          AA->alias(MemoryLocation(MMO1->getValue(), LocationSize::unknown(),
                                   MMO1->getAAInfo()),
                    MemoryLocation(MMO2->getValue(), LocationSize::unknown(),
                                   MMO2->getAAInfo()));
      if (AAResult != NoAlias)
        return AR_MayAlias;
    }
  }
  return AR_NoAlias;
}
ImplicitNullChecks::SuitabilityResult
ImplicitNullChecks::isSuitableMemoryOp(const MachineInstr &MI,
                                       unsigned PointerReg,
                                       ArrayRef<MachineInstr *> PrevInsts) {
  int64_t Offset;
  const MachineOperand *BaseOp;

  if (!TII->getMemOperandWithOffset(MI, BaseOp, Offset, TRI) ||
      !BaseOp->isReg() || BaseOp->getReg() != PointerReg)
    return SR_Unsuitable;

  // We want the mem access to be issued at a sane offset from PointerReg,
  // so that if PointerReg is null then the access reliably page faults.
  if (!((MI.mayLoad() || MI.mayStore()) && !MI.isPredicable() &&
        -PageSize < Offset && Offset < PageSize))
    return SR_Unsuitable;

  // Finally, check whether the current memory access aliases with a previous
  // one.
  for (auto *PrevMI : PrevInsts) {
    AliasResult AR = areMemoryOpsAliased(MI, PrevMI);
    if (AR == AR_WillAliasEverything)
      return SR_Impossible;
    if (AR == AR_MayAlias)
      return SR_Unsuitable;
  }
  return SR_Suitable;
}
bool ImplicitNullChecks::canHoistInst(MachineInstr *FaultingMI,
                                      unsigned PointerReg,
                                      ArrayRef<MachineInstr *> InstsSeenSoFar,
                                      MachineBasicBlock *NullSucc,
                                      MachineInstr *&Dependence) {
  auto DepResult = computeDependence(FaultingMI, InstsSeenSoFar);
  if (!DepResult.CanReorder)
    return false;

  if (!DepResult.PotentialDependence) {
    Dependence = nullptr;
    return true;
  }

  auto DependenceItr = *DepResult.PotentialDependence;
  auto *DependenceMI = *DependenceItr;

  // We don't want to reason about speculating loads. Note -- at this point
  // we should have already filtered out all of the other non-speculatable
  // things, like calls and stores.
  // We also do not want to hoist stores because they might change memory
  // even though FaultingMI may end up faulting.
  assert(canHandle(DependenceMI) && "Should never have reached here!");
  if (DependenceMI->mayLoadOrStore())
    return false;

  for (auto &DependenceMO : DependenceMI->operands()) {
    if (!(DependenceMO.isReg() && DependenceMO.getReg()))
      continue;

    // Make sure that we won't clobber any live ins to the sibling block by
    // hoisting Dependency. For instance, we can't hoist INST to before the
    // null check (even if it is safe, and does not violate any dependencies
    // in the non_null_block) if %rdx is live-in to _null_block.
    //
    //    test %rcx, %rcx
    //    je _null_block
    //  _non_null_block:
    //    %rdx = INST
    //    ...
    //
    // This restriction does not apply to the faulting load inst because in
    // case the pointer loaded from is in the null page, the load will not
    // semantically execute, and will not affect machine state. That is, if
    // the load was loading into %rax and it faults, the value of %rax should
    // stay the same as it would have been had the load not executed and we'd
    // have branched to NullSucc directly.
    if (AnyAliasLiveIn(TRI, NullSucc, DependenceMO.getReg()))
      return false;

    // The Dependency can't be re-defining the base register -- then we won't
    // get the memory operation on the address we want. This is already
    // checked in \c isSuitableMemoryOp.
    assert(!(DependenceMO.isDef() &&
             TRI->regsOverlap(DependenceMO.getReg(), PointerReg)) &&
           "Should have been checked before!");
  }
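  // The dependency we are about to hoist must itself be hoistable over
  // everything that precedes it: it must not have a potential dependence of
  // its own among the earlier instructions.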
  auto DepDepResult =
      computeDependence(DependenceMI, {InstsSeenSoFar.begin(), DependenceItr});

  if (!DepDepResult.CanReorder || DepDepResult.PotentialDependence)
    return false;

  Dependence = DependenceMI;
  return true;
}
/// Analyze MBB to check if its terminating branch can be turned into an
/// implicit null check. If yes, append a description of the said null check to
/// NullCheckList and return true, else return false.
bool ImplicitNullChecks::analyzeBlockForNullChecks(
    MachineBasicBlock &MBB, SmallVectorImpl<NullCheck> &NullCheckList) {
  using MachineBranchPredicate = TargetInstrInfo::MachineBranchPredicate;

  MDNode *BranchMD = nullptr;
  if (auto *BB = MBB.getBasicBlock())
    BranchMD = BB->getTerminator()->getMetadata(LLVMContext::MD_make_implicit);

  if (!BranchMD)
    return false;

  MachineBranchPredicate MBP;

  if (TII->analyzeBranchPredicate(MBB, MBP, true))
    return false;

  // Is the predicate comparing an integer to zero?
  if (!(MBP.LHS.isReg() && MBP.RHS.isImm() && MBP.RHS.getImm() == 0 &&
        (MBP.Predicate == MachineBranchPredicate::PRED_NE ||
         MBP.Predicate == MachineBranchPredicate::PRED_EQ)))
    return false;

  // If we cannot erase the test instruction itself, then making the null check
  // implicit does not buy us much.
  if (!MBP.SingleUseCondition)
    return false;

  MachineBasicBlock *NotNullSucc, *NullSucc;

  if (MBP.Predicate == MachineBranchPredicate::PRED_NE) {
    NotNullSucc = MBP.TrueDest;
    NullSucc = MBP.FalseDest;
  } else {
    NotNullSucc = MBP.FalseDest;
    NullSucc = MBP.TrueDest;
  }

  // We handle the simplest case for now. We can potentially do better by using
  // the machine dominator tree.
  if (NotNullSucc->pred_size() != 1)
    return false;

  // To prevent the invalid transformation of the following code:
  //
  //   mov %rax, %rcx
  //   test %rax, %rax
  //   %rax = ...
  //   je throw_npe
  //   mov(%rcx), %r9
  //   mov(%rax), %r10
  //
  // into:
  //
  //   mov %rax, %rcx
  //   %rax = ....
  //   faulting_load_op("movl (%rax), %r10", throw_npe)
  //   mov(%rcx), %r9
  //
  // we must ensure that there are no instructions between the 'test' and
  // conditional jump that modify %rax.
  const Register PointerReg = MBP.LHS.getReg();

  assert(MBP.ConditionDef->getParent() == &MBB && "Should be in basic block");

  for (auto I = MBB.rbegin(); MBP.ConditionDef != &*I; ++I)
    if (I->modifiesRegister(PointerReg, TRI))
      return false;
  // Starting with a code fragment like:
  //
  //   test %rax, %rax
  //   jne LblNotNull
  //
  //  LblNull:
  //   callq throw_NullPointerException
  //
  //  LblNotNull:
  //   Inst0
  //   Inst1
  //   ...
  //   Def = Load (%rax + <offset>)
  //   ...
  //
  //
  // we want to end up with
  //
  //   Def = FaultingLoad (%rax + <offset>), LblNull
  //   jmp LblNotNull ;; explicit or fallthrough
  //
  //  LblNotNull:
  //   Inst0
  //   Inst1
  //   ...
  //
  //  LblNull:
  //   callq throw_NullPointerException
  //
  //
  // To see why this is legal, consider the two possibilities:
  //
  //  1. %rax is null: since we constrain <offset> to be less than PageSize,
  //     the load instruction dereferences the null page, causing a
  //     segmentation fault.
  //
  //  2. %rax is not null: in this case we know that the load cannot fault, as
  //     otherwise the load would've faulted in the original program too and
  //     the original program would've been undefined.
  //
  // This reasoning cannot be extended to justify hoisting through arbitrary
  // control flow. For instance, in the example below (in pseudo-C)
  //
  //    if (ptr == null) { throw_npe(); unreachable; }
  //    if (some_cond) { return 42; }
  //    v = ptr->field;  // LD
  //    ...
  //
  // we cannot (without code duplication) use the load marked "LD" to null
  // check ptr -- clause (2) above does not apply in this case. In the above
  // program the safety of ptr->field can be dependent on some_cond; and, for
  // instance, ptr could be some non-null invalid reference that never gets
  // loaded from because some_cond is always true.
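  // Scan the not-null successor for the first memory access that is suitable
  // for the implicit null check and that can legally be hoisted above the
  // instructions already seen; give up as soon as we hit an instruction we
  // cannot reason about or one that redefines PointerReg.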
  SmallVector<MachineInstr *, 8> InstsSeenSoFar;

  for (auto &MI : *NotNullSucc) {
    if (!canHandle(&MI) || InstsSeenSoFar.size() >= MaxInstsToConsider)
      return false;

    MachineInstr *Dependence;
    SuitabilityResult SR = isSuitableMemoryOp(MI, PointerReg, InstsSeenSoFar);
    if (SR == SR_Impossible)
      return false;
    if (SR == SR_Suitable &&
        canHoistInst(&MI, PointerReg, InstsSeenSoFar, NullSucc, Dependence)) {
      NullCheckList.emplace_back(&MI, MBP.ConditionDef, &MBB, NotNullSucc,
                                 NullSucc, Dependence);
      return true;
    }

    // If MI re-defines the PointerReg then we cannot move further.
    if (llvm::any_of(MI.operands(), [&](MachineOperand &MO) {
          return MO.isReg() && MO.getReg() && MO.isDef() &&
                 TRI->regsOverlap(MO.getReg(), PointerReg);
        }))
      return false;
    InstsSeenSoFar.push_back(&MI);
  }

  return false;
}
/// Wrap a machine instruction, MI, into a FAULTING machine instruction.
/// The FAULTING instruction does the same load/store as MI
/// (defining the same register), and branches to HandlerMBB if the mem access
/// faults. The FAULTING instruction is inserted at the end of MBB.
MachineInstr *ImplicitNullChecks::insertFaultingInstr(
    MachineInstr *MI, MachineBasicBlock *MBB, MachineBasicBlock *HandlerMBB) {
  const unsigned NoRegister = 0; // Guaranteed to be the NoRegister value for
                                 // all targets.

  DebugLoc DL;
  unsigned NumDefs = MI->getDesc().getNumDefs();
  assert(NumDefs <= 1 && "other cases unhandled!");

  unsigned DefReg = NoRegister;
  if (NumDefs != 0) {
    DefReg = MI->getOperand(0).getReg();
    assert(NumDefs == 1 && "expected exactly one def!");
  }

  FaultMaps::FaultKind FK;
  if (MI->mayLoad())
    FK =
        MI->mayStore() ? FaultMaps::FaultingLoadStore : FaultMaps::FaultingLoad;
  else
    FK = FaultMaps::FaultingStore;
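  // The FAULTING_OP's operands are: the (possibly absent) def register, the
  // fault kind, the handler block, the wrapped instruction's opcode, and then
  // the wrapped instruction's remaining operands (everything past its
  // explicit defs) with kill/dead flags cleared.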
  auto MIB = BuildMI(MBB, DL, TII->get(TargetOpcode::FAULTING_OP), DefReg)
                 .addImm(FK)
                 .addMBB(HandlerMBB)
                 .addImm(MI->getOpcode());

  for (auto &MO : MI->uses()) {
    if (MO.isReg()) {
      MachineOperand NewMO = MO;
      if (MO.isUse()) {
        NewMO.setIsKill(false);
      } else {
        assert(MO.isDef() && "Expected def or use");
        NewMO.setIsDead(false);
      }
      MIB.add(NewMO);
    } else {
      MIB.add(MO);
    }
  }

  MIB.setMemRefs(MI->memoperands());

  return MIB;
}
/// Rewrite the null checks in NullCheckList into implicit null checks.
void ImplicitNullChecks::rewriteNullChecks(
    ArrayRef<ImplicitNullChecks::NullCheck> NullCheckList) {
  DebugLoc DL;

  for (auto &NC : NullCheckList) {
    // Remove the conditional branch dependent on the null check.
    unsigned BranchesRemoved = TII->removeBranch(*NC.getCheckBlock());
    (void)BranchesRemoved;
    assert(BranchesRemoved > 0 && "expected at least one branch!");
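    // If the memory operation had a single dependency, move it into the check
    // block so it still executes before the faulting instruction inserted
    // below.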
    if (auto *DepMI = NC.getOnlyDependency()) {
      DepMI->removeFromParent();
      NC.getCheckBlock()->insert(NC.getCheckBlock()->end(), DepMI);
    }

    // Insert a faulting instruction where the conditional branch was
    // originally. The check we did earlier ensures that this bit of code
    // motion is legal. We do not touch the successors list for any basic
    // block since we haven't changed control flow, we've just made it
    // implicit.
    MachineInstr *FaultingInstr = insertFaultingInstr(
        NC.getMemOperation(), NC.getCheckBlock(), NC.getNullSucc());
    // Now the values defined by MemOperation, if any, are live-in of
    // the block of MemOperation.
    // The original operation may define implicit-defs alongside
    // the value.
    MachineBasicBlock *MBB = NC.getMemOperation()->getParent();
    for (const MachineOperand &MO : FaultingInstr->operands()) {
      if (!MO.isReg() || !MO.isDef())
        continue;
      Register Reg = MO.getReg();
      if (!Reg || MBB->isLiveIn(Reg))
        continue;
      MBB->addLiveIn(Reg);
    }

    if (auto *DepMI = NC.getOnlyDependency()) {
      for (auto &MO : DepMI->operands()) {
        if (!MO.isReg() || !MO.getReg() || !MO.isDef())
          continue;
        if (!NC.getNotNullSucc()->isLiveIn(MO.getReg()))
          NC.getNotNullSucc()->addLiveIn(MO.getReg());
      }
    }

    NC.getMemOperation()->eraseFromParent();
    NC.getCheckOperation()->eraseFromParent();

    // Insert an *unconditional* branch to not-null successor.
    TII->insertBranch(*NC.getCheckBlock(), NC.getNotNullSucc(), nullptr,
                      /*Cond=*/None, DL);

    NumImplicitNullChecks++;
  }
}
char ImplicitNullChecks::ID = 0;

char &llvm::ImplicitNullChecksID = ImplicitNullChecks::ID;

INITIALIZE_PASS_BEGIN(ImplicitNullChecks, DEBUG_TYPE,
                      "Implicit null checks", false, false)
INITIALIZE_PASS_DEPENDENCY(AAResultsWrapperPass)
INITIALIZE_PASS_END(ImplicitNullChecks, DEBUG_TYPE,
                    "Implicit null checks", false, false)