DataFlowSanitizer.cpp 66 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045104610471048104910501051105210531054105510561057105810591060106110621063106410651066106710681069107010711072107310741075107610771078107910801081108210831084108510861087108810891090109110921093109410951096109710981099110011011102110311041105110611071108110911101111111211131114111511161117111811191120112111221123112411251126112711281129113011311132113311341135113611371138113911401141114211431144114511461147114811491150115111521153115411551156115711581159116011611162116311641165116611671168116911701171117211731174117511761177117811791180118111821183118411851186118711881189119011911192119311941195119611971198119912001201120212031204120512061207120812091210121112121213121412151216121712181219122012211222122312241225122612271228122912301231123212331234123512361237123812391240124112421243124412451246124712481249125012511252125312541255125612571258125912601261126212631264126512661267126812691270127112721273127412751276127
71278127912801281128212831284128512861287128812891290129112921293129412951296129712981299130013011302130313041305130613071308130913101311131213131314131513161317131813191320132113221323132413251326132713281329133013311332133313341335133613371338133913401341134213431344134513461347134813491350135113521353135413551356135713581359136013611362136313641365136613671368136913701371137213731374137513761377137813791380138113821383138413851386138713881389139013911392139313941395139613971398139914001401140214031404140514061407140814091410141114121413141414151416141714181419142014211422142314241425142614271428142914301431143214331434143514361437143814391440144114421443144414451446144714481449145014511452145314541455145614571458145914601461146214631464146514661467146814691470147114721473147414751476147714781479148014811482148314841485148614871488148914901491149214931494149514961497149814991500150115021503150415051506150715081509151015111512151315141515151615171518151915201521152215231524152515261527152815291530153115321533153415351536153715381539154015411542154315441545154615471548154915501551155215531554155515561557155815591560156115621563156415651566156715681569157015711572157315741575157615771578157915801581158215831584158515861587158815891590159115921593159415951596159715981599160016011602160316041605160616071608160916101611161216131614161516161617161816191620162116221623162416251626162716281629163016311632163316341635163616371638163916401641164216431644164516461647164816491650165116521653165416551656165716581659166016611662166316641665166616671668166916701671167216731674167516761677167816791680168116821683168416851686168716881689169016911692169316941695169616971698169917001701170217031704170517061707170817091710171117121713171417151716171717181719172017211722172317241725172617271728172917301731173217331734173517361737173817391740174117421743174417451746174717481749175017511752175317541755175617571758175917601761176217631764176517661767176817691770177117721773
  1. //===- DataFlowSanitizer.cpp - dynamic data flow analysis -----------------===//
  2. //
  3. // Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
  4. // See https://llvm.org/LICENSE.txt for license information.
  5. // SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
  6. //
  7. //===----------------------------------------------------------------------===//
  8. //
  9. /// \file
  10. /// This file is a part of DataFlowSanitizer, a generalised dynamic data flow
  11. /// analysis.
  12. ///
  13. /// Unlike other Sanitizer tools, this tool is not designed to detect a specific
  14. /// class of bugs on its own. Instead, it provides a generic dynamic data flow
  15. /// analysis framework to be used by clients to help detect application-specific
  16. /// issues within their own code.
  17. ///
  18. /// The analysis is based on automatic propagation of data flow labels (also
  19. /// known as taint labels) through a program as it performs computation. Each
  20. /// byte of application memory is backed by two bytes of shadow memory which
  21. /// hold the label. On Linux/x86_64, memory is laid out as follows:
  22. ///
  23. /// +--------------------+ 0x800000000000 (top of memory)
  24. /// | application memory |
  25. /// +--------------------+ 0x700000008000 (kAppAddr)
  26. /// | |
  27. /// | unused |
  28. /// | |
  29. /// +--------------------+ 0x200200000000 (kUnusedAddr)
  30. /// | union table |
  31. /// +--------------------+ 0x200000000000 (kUnionTableAddr)
  32. /// | shadow memory |
  33. /// +--------------------+ 0x000000010000 (kShadowAddr)
  34. /// | reserved by kernel |
  35. /// +--------------------+ 0x000000000000
  36. ///
  37. /// To derive a shadow memory address from an application memory address,
  38. /// bits 44-46 are cleared to bring the address into the range
  39. /// [0x000000008000,0x100000000000). Then the address is shifted left by 1 to
  40. /// account for the double byte representation of shadow labels and move the
  41. /// address into the shadow memory range. See the function
  42. /// DataFlowSanitizer::getShadowAddress below.
  43. ///
  44. /// For more information, please refer to the design document:
  45. /// http://clang.llvm.org/docs/DataFlowSanitizerDesign.html
  46. //
  47. //===----------------------------------------------------------------------===//
  48. #include "llvm/ADT/DenseMap.h"
  49. #include "llvm/ADT/DenseSet.h"
  50. #include "llvm/ADT/DepthFirstIterator.h"
  51. #include "llvm/ADT/None.h"
  52. #include "llvm/ADT/SmallPtrSet.h"
  53. #include "llvm/ADT/SmallVector.h"
  54. #include "llvm/ADT/StringExtras.h"
  55. #include "llvm/ADT/StringRef.h"
  56. #include "llvm/ADT/Triple.h"
  57. #include "llvm/Transforms/Utils/Local.h"
  58. #include "llvm/Analysis/ValueTracking.h"
  59. #include "llvm/IR/Argument.h"
  60. #include "llvm/IR/Attributes.h"
  61. #include "llvm/IR/BasicBlock.h"
  62. #include "llvm/IR/CallSite.h"
  63. #include "llvm/IR/Constant.h"
  64. #include "llvm/IR/Constants.h"
  65. #include "llvm/IR/DataLayout.h"
  66. #include "llvm/IR/DerivedTypes.h"
  67. #include "llvm/IR/Dominators.h"
  68. #include "llvm/IR/Function.h"
  69. #include "llvm/IR/GlobalAlias.h"
  70. #include "llvm/IR/GlobalValue.h"
  71. #include "llvm/IR/GlobalVariable.h"
  72. #include "llvm/IR/IRBuilder.h"
  73. #include "llvm/IR/InlineAsm.h"
  74. #include "llvm/IR/InstVisitor.h"
  75. #include "llvm/IR/InstrTypes.h"
  76. #include "llvm/IR/Instruction.h"
  77. #include "llvm/IR/Instructions.h"
  78. #include "llvm/IR/IntrinsicInst.h"
  79. #include "llvm/IR/LLVMContext.h"
  80. #include "llvm/IR/MDBuilder.h"
  81. #include "llvm/IR/Module.h"
  82. #include "llvm/IR/Type.h"
  83. #include "llvm/IR/User.h"
  84. #include "llvm/IR/Value.h"
  85. #include "llvm/Pass.h"
  86. #include "llvm/Support/Casting.h"
  87. #include "llvm/Support/CommandLine.h"
  88. #include "llvm/Support/ErrorHandling.h"
  89. #include "llvm/Support/SpecialCaseList.h"
  90. #include "llvm/Transforms/Instrumentation.h"
  91. #include "llvm/Transforms/Utils/BasicBlockUtils.h"
  92. #include <algorithm>
  93. #include <cassert>
  94. #include <cstddef>
  95. #include <cstdint>
  96. #include <iterator>
  97. #include <memory>
  98. #include <set>
  99. #include <string>
  100. #include <utility>
  101. #include <vector>
  102. using namespace llvm;
// External symbol to be used when generating the shadow address for
// architectures with multiple VMAs. Instead of using a constant integer
// the runtime will set the external mask based on the VMA range.
static const char *const kDFSanExternShadowPtrMask = "__dfsan_shadow_ptr_mask";

// The -dfsan-preserve-alignment flag controls whether this pass assumes that
// alignment requirements provided by the input IR are correct. For example,
// if the input IR contains a load with alignment 8, this flag will cause
// the shadow load to have alignment 16. This flag is disabled by default as
// we have unfortunately encountered too much code (including Clang itself;
// see PR14291) which performs misaligned access.
static cl::opt<bool> ClPreserveAlignment(
    "dfsan-preserve-alignment",
    cl::desc("respect alignment requirements provided by input IR"), cl::Hidden,
    cl::init(false));

// The ABI list files control how shadow parameters are passed. The pass treats
// every function labelled "uninstrumented" in the ABI list file as conforming
// to the "native" (i.e. unsanitized) ABI. Unless the ABI list contains
// additional annotations for those functions, a call to one of those functions
// will produce a warning message, as the labelling behaviour of the function is
// unknown. The other supported annotations are "functional" and "discard",
// which are described below under DataFlowSanitizer::WrapperKind.
static cl::list<std::string> ClABIListFiles(
    "dfsan-abilist",
    cl::desc("File listing native ABI functions and how the pass treats them"),
    cl::Hidden);

// Controls whether the pass uses IA_Args or IA_TLS as the ABI for instrumented
// functions (see DataFlowSanitizer::InstrumentedABI below).
static cl::opt<bool> ClArgsABI(
    "dfsan-args-abi",
    cl::desc("Use the argument ABI rather than the TLS ABI"),
    cl::Hidden);

// Controls whether the pass includes or ignores the labels of pointers in load
// instructions.
static cl::opt<bool> ClCombinePointerLabelsOnLoad(
    "dfsan-combine-pointer-labels-on-load",
    cl::desc("Combine the label of the pointer with the label of the data when "
             "loading from memory."),
    cl::Hidden, cl::init(true));

// Controls whether the pass includes or ignores the labels of pointers in
// stores instructions.
static cl::opt<bool> ClCombinePointerLabelsOnStore(
    "dfsan-combine-pointer-labels-on-store",
    cl::desc("Combine the label of the pointer with the label of the data when "
             "storing in memory."),
    cl::Hidden, cl::init(false));

// Debugging aid: instrument parameters, loads and returns so that observing a
// nonzero label calls __dfsan_nonzero_label at runtime.
static cl::opt<bool> ClDebugNonzeroLabels(
    "dfsan-debug-nonzero-labels",
    cl::desc("Insert calls to __dfsan_nonzero_label on observing a parameter, "
             "load or return with a nonzero label"),
    cl::Hidden);
  153. static StringRef GetGlobalTypeString(const GlobalValue &G) {
  154. // Types of GlobalVariables are always pointer types.
  155. Type *GType = G.getValueType();
  156. // For now we support blacklisting struct types only.
  157. if (StructType *SGType = dyn_cast<StructType>(GType)) {
  158. if (!SGType->isLiteral())
  159. return SGType->getName();
  160. }
  161. return "<unknown type>";
  162. }
  163. namespace {
  164. class DFSanABIList {
  165. std::unique_ptr<SpecialCaseList> SCL;
  166. public:
  167. DFSanABIList() = default;
  168. void set(std::unique_ptr<SpecialCaseList> List) { SCL = std::move(List); }
  169. /// Returns whether either this function or its source file are listed in the
  170. /// given category.
  171. bool isIn(const Function &F, StringRef Category) const {
  172. return isIn(*F.getParent(), Category) ||
  173. SCL->inSection("dataflow", "fun", F.getName(), Category);
  174. }
  175. /// Returns whether this global alias is listed in the given category.
  176. ///
  177. /// If GA aliases a function, the alias's name is matched as a function name
  178. /// would be. Similarly, aliases of globals are matched like globals.
  179. bool isIn(const GlobalAlias &GA, StringRef Category) const {
  180. if (isIn(*GA.getParent(), Category))
  181. return true;
  182. if (isa<FunctionType>(GA.getValueType()))
  183. return SCL->inSection("dataflow", "fun", GA.getName(), Category);
  184. return SCL->inSection("dataflow", "global", GA.getName(), Category) ||
  185. SCL->inSection("dataflow", "type", GetGlobalTypeString(GA),
  186. Category);
  187. }
  188. /// Returns whether this module is listed in the given category.
  189. bool isIn(const Module &M, StringRef Category) const {
  190. return SCL->inSection("dataflow", "src", M.getModuleIdentifier(), Category);
  191. }
  192. };
  193. /// TransformedFunction is used to express the result of transforming one
  194. /// function type into another. This struct is immutable. It holds metadata
  195. /// useful for updating calls of the old function to the new type.
  196. struct TransformedFunction {
  197. TransformedFunction(FunctionType* OriginalType,
  198. FunctionType* TransformedType,
  199. std::vector<unsigned> ArgumentIndexMapping)
  200. : OriginalType(OriginalType),
  201. TransformedType(TransformedType),
  202. ArgumentIndexMapping(ArgumentIndexMapping) {}
  203. // Disallow copies.
  204. TransformedFunction(const TransformedFunction&) = delete;
  205. TransformedFunction& operator=(const TransformedFunction&) = delete;
  206. // Allow moves.
  207. TransformedFunction(TransformedFunction&&) = default;
  208. TransformedFunction& operator=(TransformedFunction&&) = default;
  209. /// Type of the function before the transformation.
  210. FunctionType *OriginalType;
  211. /// Type of the function after the transformation.
  212. FunctionType *TransformedType;
  213. /// Transforming a function may change the position of arguments. This
  214. /// member records the mapping from each argument's old position to its new
  215. /// position. Argument positions are zero-indexed. If the transformation
  216. /// from F to F' made the first argument of F into the third argument of F',
  217. /// then ArgumentIndexMapping[0] will equal 2.
  218. std::vector<unsigned> ArgumentIndexMapping;
  219. };
/// Given function attributes from a call site for the original function,
/// return function attributes appropriate for a call to the transformed
/// function.
AttributeList TransformFunctionAttributes(
    const TransformedFunction& TransformedFunction,
    LLVMContext& Ctx, AttributeList CallSiteAttrs) {

  // Construct a vector of AttributeSet for each function argument.
  std::vector<llvm::AttributeSet> ArgumentAttributes(
      TransformedFunction.TransformedType->getNumParams());

  // Copy attributes from the parameter of the original function to the
  // transformed version. 'ArgumentIndexMapping' holds the mapping from
  // old argument position to new.
  for (unsigned i=0, ie = TransformedFunction.ArgumentIndexMapping.size();
       i < ie; ++i) {
    unsigned TransformedIndex = TransformedFunction.ArgumentIndexMapping[i];
    ArgumentAttributes[TransformedIndex] = CallSiteAttrs.getParamAttributes(i);
  }

  // Copy annotations on varargs arguments.
  // NOTE(review): the upper bound is getNumAttrSets(), which also counts the
  // function and return attribute sets -- presumably the surplus trailing
  // sets correspond to vararg positions at this call site; confirm against
  // the AttributeList index scheme.
  for (unsigned i = TransformedFunction.OriginalType->getNumParams(),
       ie = CallSiteAttrs.getNumAttrSets(); i<ie; ++i) {
    ArgumentAttributes.push_back(CallSiteAttrs.getParamAttributes(i));
  }

  // Function- and return-level attributes are carried over unchanged.
  return AttributeList::get(
      Ctx,
      CallSiteAttrs.getFnAttributes(),
      CallSiteAttrs.getRetAttributes(),
      llvm::makeArrayRef(ArgumentAttributes));
}
/// The legacy-PM module pass implementing DataFlowSanitizer instrumentation.
class DataFlowSanitizer : public ModulePass {
  friend struct DFSanFunction;
  friend class DFSanVisitor;

  // Width in bits of one shadow label (two bytes of shadow per app byte).
  enum {
    ShadowWidth = 16
  };

  /// Which ABI should be used for instrumented functions?
  enum InstrumentedABI {
    /// Argument and return value labels are passed through additional
    /// arguments and by modifying the return type.
    IA_Args,

    /// Argument and return value labels are passed through TLS variables
    /// __dfsan_arg_tls and __dfsan_retval_tls.
    IA_TLS
  };

  /// How should calls to uninstrumented functions be handled?
  enum WrapperKind {
    /// This function is present in an uninstrumented form but we don't know
    /// how it should be handled. Print a warning and call the function anyway.
    /// Don't label the return value.
    WK_Warning,

    /// This function does not write to (user-accessible) memory, and its return
    /// value is unlabelled.
    WK_Discard,

    /// This function does not write to (user-accessible) memory, and the label
    /// of its return value is the union of the label of its arguments.
    WK_Functional,

    /// Instead of calling the function, a custom wrapper __dfsw_F is called,
    /// where F is the name of the function. This function may wrap the
    /// original function or provide its own implementation. This is similar to
    /// the IA_Args ABI, except that IA_Args uses a struct return type to
    /// pass the return value shadow in a register, while WK_Custom uses an
    /// extra pointer argument to return the shadow. This allows the wrapped
    /// form of the function type to be expressed in C.
    WK_Custom
  };

  Module *Mod;
  LLVMContext *Ctx;
  // i16 -- the IR type of a single shadow label (see ShadowWidth).
  IntegerType *ShadowTy;
  PointerType *ShadowPtrTy;
  IntegerType *IntptrTy;
  ConstantInt *ZeroShadow;
  // Platform-specific mask applied to application addresses to derive shadow
  // addresses; unset when the mask is provided by the runtime instead
  // (see DFSanRuntimeShadowMask).
  ConstantInt *ShadowPtrMask;
  // Bytes of shadow per application byte (ShadowWidth / 8).
  ConstantInt *ShadowPtrMul;
  // TLS slots carrying argument and return-value labels under IA_TLS.
  Constant *ArgTLS;
  Constant *RetvalTLS;
  // Optional embedder-supplied accessors for the TLS slots; when non-null,
  // doInitialization builds the GetArgTLS/GetRetvalTLS constants from them.
  void *(*GetArgTLSPtr)();
  void *(*GetRetvalTLSPtr)();
  FunctionType *GetArgTLSTy;
  FunctionType *GetRetvalTLSTy;
  Constant *GetArgTLS;
  Constant *GetRetvalTLS;
  // Refers to kDFSanExternShadowPtrMask on targets where the runtime picks
  // the shadow mask (multiple-VMA architectures).
  Constant *ExternalShadowMask;
  // Function types of the dfsan runtime entry points...
  FunctionType *DFSanUnionFnTy;
  FunctionType *DFSanUnionLoadFnTy;
  FunctionType *DFSanUnimplementedFnTy;
  FunctionType *DFSanSetLabelFnTy;
  FunctionType *DFSanNonzeroLabelFnTy;
  FunctionType *DFSanVarargWrapperFnTy;
  // ...and the corresponding callees resolved in the module.
  FunctionCallee DFSanUnionFn;
  FunctionCallee DFSanCheckedUnionFn;
  FunctionCallee DFSanUnionLoadFn;
  FunctionCallee DFSanUnimplementedFn;
  FunctionCallee DFSanSetLabelFn;
  FunctionCallee DFSanNonzeroLabelFn;
  FunctionCallee DFSanVarargWrapperFn;
  // Branch weights marking runtime slow paths as cold.
  MDNode *ColdCallWeights;
  DFSanABIList ABIList;
  // NOTE(review): presumably maps wrapper functions back to the functions
  // they wrap -- confirm in runOnModule (outside this chunk).
  DenseMap<Value *, Function *> UnwrappedFnMap;
  AttrBuilder ReadOnlyNoneAttrs;
  // True when the shadow mask must be read from the runtime (set for AArch64
  // in doInitialization).
  bool DFSanRuntimeShadowMask = false;

  Value *getShadowAddress(Value *Addr, Instruction *Pos);
  bool isInstrumented(const Function *F);
  bool isInstrumented(const GlobalAlias *GA);
  FunctionType *getArgsFunctionType(FunctionType *T);
  FunctionType *getTrampolineFunctionType(FunctionType *T);
  TransformedFunction getCustomFunctionType(FunctionType *T);
  InstrumentedABI getInstrumentedABI();
  WrapperKind getWrapperKind(Function *F);
  void addGlobalNamePrefix(GlobalValue *GV);
  Function *buildWrapperFunction(Function *F, StringRef NewFName,
                                 GlobalValue::LinkageTypes NewFLink,
                                 FunctionType *NewFT);
  Constant *getOrBuildTrampolineFunction(FunctionType *FT, StringRef FName);

public:
  static char ID;

  DataFlowSanitizer(
      const std::vector<std::string> &ABIListFiles = std::vector<std::string>(),
      void *(*getArgTLS)() = nullptr, void *(*getRetValTLS)() = nullptr);

  bool doInitialization(Module &M) override;
  bool runOnModule(Module &M) override;
};
/// Per-function instrumentation state, shared between the pass and
/// DFSanVisitor while a single function is being instrumented.
struct DFSanFunction {
  DataFlowSanitizer &DFS;
  Function *F;
  DominatorTree DT;
  DataFlowSanitizer::InstrumentedABI IA;
  // True when F keeps the native (unsanitized) ABI.
  bool IsNativeABI;
  Value *ArgTLSPtr = nullptr;
  Value *RetvalTLSPtr = nullptr;
  AllocaInst *LabelReturnAlloca = nullptr;
  // Cached shadow value computed for each IR value in F.
  DenseMap<Value *, Value *> ValShadowMap;
  // Shadow alloca paired with each application alloca.
  DenseMap<AllocaInst *, AllocaInst *> AllocaShadowMap;
  // Pairs of (app PHI, shadow PHI) whose incoming shadows are presumably
  // filled in after the whole function has been visited -- confirm in
  // visitPHINode / runOnModule (outside this chunk).
  std::vector<std::pair<PHINode *, PHINode *>> PHIFixups;
  // Instructions the visitor should not process.
  DenseSet<Instruction *> SkipInsts;
  // Values whose labels are checked when -dfsan-debug-nonzero-labels is set.
  std::vector<Value *> NonZeroChecks;
  // When true, instrumentation avoids creating new basic blocks (set for
  // functions with more than 1000 blocks; see constructor FIXME).
  bool AvoidNewBlocks;

  struct CachedCombinedShadow {
    BasicBlock *Block;
    Value *Shadow;
  };
  // Cache of combined (unioned) shadows, keyed by the operand-shadow pair.
  DenseMap<std::pair<Value *, Value *>, CachedCombinedShadow>
      CachedCombinedShadows;
  DenseMap<Value *, std::set<Value *>> ShadowElements;

  DFSanFunction(DataFlowSanitizer &DFS, Function *F, bool IsNativeABI)
      : DFS(DFS), F(F), IA(DFS.getInstrumentedABI()), IsNativeABI(IsNativeABI) {
    DT.recalculate(*F);
    // FIXME: Need to track down the register allocator issue which causes poor
    // performance in pathological cases with large numbers of basic blocks.
    AvoidNewBlocks = F->size() > 1000;
  }

  Value *getArgTLSPtr();
  Value *getArgTLS(unsigned Index, Instruction *Pos);
  Value *getRetvalTLS();
  Value *getShadow(Value *V);
  void setShadow(Instruction *I, Value *Shadow);
  Value *combineShadows(Value *V1, Value *V2, Instruction *Pos);
  Value *combineOperandShadows(Instruction *Inst);
  Value *loadShadow(Value *ShadowAddr, uint64_t Size, uint64_t Align,
                    Instruction *Pos);
  void storeShadow(Value *Addr, uint64_t Size, uint64_t Align, Value *Shadow,
                   Instruction *Pos);
};
/// Instruction visitor that emits shadow-propagation code for each supported
/// instruction kind of the function described by DFSF.
class DFSanVisitor : public InstVisitor<DFSanVisitor> {
public:
  DFSanFunction &DFSF;

  DFSanVisitor(DFSanFunction &DFSF) : DFSF(DFSF) {}

  const DataLayout &getDataLayout() const {
    return DFSF.F->getParent()->getDataLayout();
  }

  // Shared handler -- presumably sets the instruction's shadow to the union
  // of its operand shadows (cf. DFSanFunction::combineOperandShadows).
  void visitOperandShadowInst(Instruction &I);

  void visitBinaryOperator(BinaryOperator &BO);
  void visitCastInst(CastInst &CI);
  void visitCmpInst(CmpInst &CI);
  void visitGetElementPtrInst(GetElementPtrInst &GEPI);
  void visitLoadInst(LoadInst &LI);
  void visitStoreInst(StoreInst &SI);
  void visitReturnInst(ReturnInst &RI);
  void visitCallSite(CallSite CS);
  void visitPHINode(PHINode &PN);
  void visitExtractElementInst(ExtractElementInst &I);
  void visitInsertElementInst(InsertElementInst &I);
  void visitShuffleVectorInst(ShuffleVectorInst &I);
  void visitExtractValueInst(ExtractValueInst &I);
  void visitInsertValueInst(InsertValueInst &I);
  void visitAllocaInst(AllocaInst &I);
  void visitSelectInst(SelectInst &I);
  void visitMemSetInst(MemSetInst &I);
  void visitMemTransferInst(MemTransferInst &I);
};
  408. } // end anonymous namespace
// Legacy pass identification; the address of ID is the unique key, the value
// is unused.
char DataFlowSanitizer::ID;

INITIALIZE_PASS(DataFlowSanitizer, "dfsan",
                "DataFlowSanitizer: dynamic data flow analysis.", false, false)
  412. ModulePass *
  413. llvm::createDataFlowSanitizerPass(const std::vector<std::string> &ABIListFiles,
  414. void *(*getArgTLS)(),
  415. void *(*getRetValTLS)()) {
  416. return new DataFlowSanitizer(ABIListFiles, getArgTLS, getRetValTLS);
  417. }
  418. DataFlowSanitizer::DataFlowSanitizer(
  419. const std::vector<std::string> &ABIListFiles, void *(*getArgTLS)(),
  420. void *(*getRetValTLS)())
  421. : ModulePass(ID), GetArgTLSPtr(getArgTLS), GetRetvalTLSPtr(getRetValTLS) {
  422. std::vector<std::string> AllABIListFiles(std::move(ABIListFiles));
  423. AllABIListFiles.insert(AllABIListFiles.end(), ClABIListFiles.begin(),
  424. ClABIListFiles.end());
  425. ABIList.set(SpecialCaseList::createOrDie(AllABIListFiles));
  426. }
  427. FunctionType *DataFlowSanitizer::getArgsFunctionType(FunctionType *T) {
  428. SmallVector<Type *, 4> ArgTypes(T->param_begin(), T->param_end());
  429. ArgTypes.append(T->getNumParams(), ShadowTy);
  430. if (T->isVarArg())
  431. ArgTypes.push_back(ShadowPtrTy);
  432. Type *RetType = T->getReturnType();
  433. if (!RetType->isVoidTy())
  434. RetType = StructType::get(RetType, ShadowTy);
  435. return FunctionType::get(RetType, ArgTypes, T->isVarArg());
  436. }
  437. FunctionType *DataFlowSanitizer::getTrampolineFunctionType(FunctionType *T) {
  438. assert(!T->isVarArg());
  439. SmallVector<Type *, 4> ArgTypes;
  440. ArgTypes.push_back(T->getPointerTo());
  441. ArgTypes.append(T->param_begin(), T->param_end());
  442. ArgTypes.append(T->getNumParams(), ShadowTy);
  443. Type *RetType = T->getReturnType();
  444. if (!RetType->isVoidTy())
  445. ArgTypes.push_back(ShadowPtrTy);
  446. return FunctionType::get(T->getReturnType(), ArgTypes, false);
  447. }
  448. TransformedFunction DataFlowSanitizer::getCustomFunctionType(FunctionType *T) {
  449. SmallVector<Type *, 4> ArgTypes;
  450. // Some parameters of the custom function being constructed are
  451. // parameters of T. Record the mapping from parameters of T to
  452. // parameters of the custom function, so that parameter attributes
  453. // at call sites can be updated.
  454. std::vector<unsigned> ArgumentIndexMapping;
  455. for (unsigned i = 0, ie = T->getNumParams(); i != ie; ++i) {
  456. Type* param_type = T->getParamType(i);
  457. FunctionType *FT;
  458. if (isa<PointerType>(param_type) && (FT = dyn_cast<FunctionType>(
  459. cast<PointerType>(param_type)->getElementType()))) {
  460. ArgumentIndexMapping.push_back(ArgTypes.size());
  461. ArgTypes.push_back(getTrampolineFunctionType(FT)->getPointerTo());
  462. ArgTypes.push_back(Type::getInt8PtrTy(*Ctx));
  463. } else {
  464. ArgumentIndexMapping.push_back(ArgTypes.size());
  465. ArgTypes.push_back(param_type);
  466. }
  467. }
  468. for (unsigned i = 0, e = T->getNumParams(); i != e; ++i)
  469. ArgTypes.push_back(ShadowTy);
  470. if (T->isVarArg())
  471. ArgTypes.push_back(ShadowPtrTy);
  472. Type *RetType = T->getReturnType();
  473. if (!RetType->isVoidTy())
  474. ArgTypes.push_back(ShadowPtrTy);
  475. return TransformedFunction(
  476. T, FunctionType::get(T->getReturnType(), ArgTypes, T->isVarArg()),
  477. ArgumentIndexMapping);
  478. }
// One-time module setup: cache frequently used types and constants, pick the
// platform-specific shadow address mask, and declare the function types of
// the dfsan runtime entry points.
bool DataFlowSanitizer::doInitialization(Module &M) {
  Triple TargetTriple(M.getTargetTriple());
  bool IsX86_64 = TargetTriple.getArch() == Triple::x86_64;
  bool IsMIPS64 = TargetTriple.isMIPS64();
  bool IsAArch64 = TargetTriple.getArch() == Triple::aarch64 ||
                   TargetTriple.getArch() == Triple::aarch64_be;

  const DataLayout &DL = M.getDataLayout();

  Mod = &M;
  Ctx = &M.getContext();
  // Each application byte is shadowed by a ShadowWidth-bit (16-bit) label.
  ShadowTy = IntegerType::get(*Ctx, ShadowWidth);
  ShadowPtrTy = PointerType::getUnqual(ShadowTy);
  IntptrTy = DL.getIntPtrType(*Ctx);
  ZeroShadow = ConstantInt::getSigned(ShadowTy, 0);
  // Scale factor from application offsets to shadow offsets: 2 shadow bytes
  // per application byte.
  ShadowPtrMul = ConstantInt::getSigned(IntptrTy, ShadowWidth / 8);
  if (IsX86_64)
    // Clearing bits 44-46 brings an application address into the shadow
    // range; see the memory layout diagram at the top of this file.
    ShadowPtrMask = ConstantInt::getSigned(IntptrTy, ~0x700000000000LL);
  else if (IsMIPS64)
    ShadowPtrMask = ConstantInt::getSigned(IntptrTy, ~0xF000000000LL);
  // AArch64 supports multiple VMAs and the shadow mask is set at runtime.
  else if (IsAArch64)
    DFSanRuntimeShadowMask = true;
  else
    report_fatal_error("unsupported triple");

  // Types of the runtime entry points (__dfsan_union, __dfsan_union_load,
  // __dfsan_unimplemented, __dfsan_set_label, __dfsan_nonzero_label,
  // __dfsan_vararg_wrapper).
  Type *DFSanUnionArgs[2] = { ShadowTy, ShadowTy };
  DFSanUnionFnTy =
      FunctionType::get(ShadowTy, DFSanUnionArgs, /*isVarArg=*/ false);
  Type *DFSanUnionLoadArgs[2] = { ShadowPtrTy, IntptrTy };
  DFSanUnionLoadFnTy =
      FunctionType::get(ShadowTy, DFSanUnionLoadArgs, /*isVarArg=*/ false);
  DFSanUnimplementedFnTy = FunctionType::get(
      Type::getVoidTy(*Ctx), Type::getInt8PtrTy(*Ctx), /*isVarArg=*/false);
  Type *DFSanSetLabelArgs[3] = { ShadowTy, Type::getInt8PtrTy(*Ctx), IntptrTy };
  DFSanSetLabelFnTy = FunctionType::get(Type::getVoidTy(*Ctx),
                                        DFSanSetLabelArgs, /*isVarArg=*/false);
  DFSanNonzeroLabelFnTy = FunctionType::get(
      Type::getVoidTy(*Ctx), None, /*isVarArg=*/false);
  DFSanVarargWrapperFnTy = FunctionType::get(
      Type::getVoidTy(*Ctx), Type::getInt8PtrTy(*Ctx), /*isVarArg=*/false);

  // When the embedder supplied TLS accessor callbacks, build intptr->function
  // pointer constants for them; the TLS globals themselves stay null.
  if (GetArgTLSPtr) {
    Type *ArgTLSTy = ArrayType::get(ShadowTy, 64);
    ArgTLS = nullptr;
    GetArgTLSTy = FunctionType::get(PointerType::getUnqual(ArgTLSTy), false);
    GetArgTLS = ConstantExpr::getIntToPtr(
        ConstantInt::get(IntptrTy, uintptr_t(GetArgTLSPtr)),
        PointerType::getUnqual(GetArgTLSTy));
  }
  if (GetRetvalTLSPtr) {
    RetvalTLS = nullptr;
    GetRetvalTLSTy = FunctionType::get(PointerType::getUnqual(ShadowTy), false);
    GetRetvalTLS = ConstantExpr::getIntToPtr(
        ConstantInt::get(IntptrTy, uintptr_t(GetRetvalTLSPtr)),
        PointerType::getUnqual(GetRetvalTLSTy));
  }

  // 1:1000 weights mark runtime calls as unlikely (cold) branches.
  ColdCallWeights = MDBuilder(*Ctx).createBranchWeights(1, 1000);
  return true;
}
  535. bool DataFlowSanitizer::isInstrumented(const Function *F) {
  536. return !ABIList.isIn(*F, "uninstrumented");
  537. }
  538. bool DataFlowSanitizer::isInstrumented(const GlobalAlias *GA) {
  539. return !ABIList.isIn(*GA, "uninstrumented");
  540. }
  541. DataFlowSanitizer::InstrumentedABI DataFlowSanitizer::getInstrumentedABI() {
  542. return ClArgsABI ? IA_Args : IA_TLS;
  543. }
  544. DataFlowSanitizer::WrapperKind DataFlowSanitizer::getWrapperKind(Function *F) {
  545. if (ABIList.isIn(*F, "functional"))
  546. return WK_Functional;
  547. if (ABIList.isIn(*F, "discard"))
  548. return WK_Discard;
  549. if (ABIList.isIn(*F, "custom"))
  550. return WK_Custom;
  551. return WK_Warning;
  552. }
  553. void DataFlowSanitizer::addGlobalNamePrefix(GlobalValue *GV) {
  554. std::string GVName = GV->getName(), Prefix = "dfs$";
  555. GV->setName(Prefix + GVName);
  556. // Try to change the name of the function in module inline asm. We only do
  557. // this for specific asm directives, currently only ".symver", to try to avoid
  558. // corrupting asm which happens to contain the symbol name as a substring.
  559. // Note that the substitution for .symver assumes that the versioned symbol
  560. // also has an instrumented name.
  561. std::string Asm = GV->getParent()->getModuleInlineAsm();
  562. std::string SearchStr = ".symver " + GVName + ",";
  563. size_t Pos = Asm.find(SearchStr);
  564. if (Pos != std::string::npos) {
  565. Asm.replace(Pos, SearchStr.size(),
  566. ".symver " + Prefix + GVName + "," + Prefix);
  567. GV->getParent()->setModuleInlineAsm(Asm);
  568. }
  569. }
// Builds a function named NewFName with type NewFT that simply forwards its
// (non-shadow) arguments to F and returns F's result. Attributes are copied
// from F, minus return attributes that are invalid for NewFT's return type.
// Variadic functions cannot be forwarded argument-by-argument in IR, so their
// wrapper instead reports the call through __dfsan_vararg_wrapper and traps.
Function *
DataFlowSanitizer::buildWrapperFunction(Function *F, StringRef NewFName,
                                        GlobalValue::LinkageTypes NewFLink,
                                        FunctionType *NewFT) {
  FunctionType *FT = F->getFunctionType();
  Function *NewF = Function::Create(NewFT, NewFLink, F->getAddressSpace(),
                                    NewFName, F->getParent());
  NewF->copyAttributesFrom(F);
  // Strip return attributes that do not apply to NewFT's return type.
  NewF->removeAttributes(
      AttributeList::ReturnIndex,
      AttributeFuncs::typeIncompatible(NewFT->getReturnType()));
  BasicBlock *BB = BasicBlock::Create(*Ctx, "entry", NewF);
  if (F->isVarArg()) {
    // NOTE(review): "split-stack" is dropped here, presumably because this
    // stub body does not follow the split-stack discipline — confirm.
    NewF->removeAttributes(AttributeList::FunctionIndex,
                           AttrBuilder().addAttribute("split-stack"));
    // Report the (unsupported) vararg call at runtime, then trap.
    CallInst::Create(DFSanVarargWrapperFn,
                     IRBuilder<>(BB).CreateGlobalStringPtr(F->getName()), "",
                     BB);
    new UnreachableInst(*Ctx, BB);
  } else {
    // Forward only the first getNumParams() wrapper arguments; any extra
    // (shadow) parameters of NewFT are ignored by the call to F.
    std::vector<Value *> Args;
    unsigned n = FT->getNumParams();
    for (Function::arg_iterator ai = NewF->arg_begin(); n != 0; ++ai, --n)
      Args.push_back(&*ai);
    CallInst *CI = CallInst::Create(F, Args, "", BB);
    if (FT->getReturnType()->isVoidTy())
      ReturnInst::Create(*Ctx, BB);
    else
      ReturnInst::Create(*Ctx, CI, BB);
  }
  return NewF;
}
// Returns (building on first use) a trampoline named FName whose type is the
// trampoline variant of FT: it receives the callee pointer first, then the
// original arguments, their shadows, and a pointer through which the return
// shadow is written back. The body calls the callee and instruments that call.
Constant *DataFlowSanitizer::getOrBuildTrampolineFunction(FunctionType *FT,
                                                          StringRef FName) {
  FunctionType *FTT = getTrampolineFunctionType(FT);
  FunctionCallee C = Mod->getOrInsertFunction(FName, FTT);
  Function *F = dyn_cast<Function>(C.getCallee());
  // Only materialize the body once; linkonce_odr lets identical trampolines
  // emitted in other translation units fold at link time.
  if (F && F->isDeclaration()) {
    F->setLinkage(GlobalValue::LinkOnceODRLinkage);
    BasicBlock *BB = BasicBlock::Create(*Ctx, "entry", F);
    std::vector<Value *> Args;
    // Parameter 0 is the callee; parameters 1..N are the original values.
    Function::arg_iterator AI = F->arg_begin(); ++AI;
    for (unsigned N = FT->getNumParams(); N != 0; ++AI, --N)
      Args.push_back(&*AI);
    CallInst *CI = CallInst::Create(FT, &*F->arg_begin(), Args, "", BB);
    ReturnInst *RI;
    if (FT->getReturnType()->isVoidTy())
      RI = ReturnInst::Create(*Ctx, BB);
    else
      RI = ReturnInst::Create(*Ctx, CI, BB);
    // Seed the shadow map: after the loop above, AI points at the first
    // shadow parameter, which pairs with the first value parameter.
    DFSanFunction DFSF(*this, F, /*IsNativeABI=*/true);
    Function::arg_iterator ValAI = F->arg_begin(), ShadowAI = AI; ++ValAI;
    for (unsigned N = FT->getNumParams(); N != 0; ++ValAI, ++ShadowAI, --N)
      DFSF.ValShadowMap[&*ValAI] = &*ShadowAI;
    DFSanVisitor(DFSF).visitCallInst(*CI);
    // Write the computed return shadow through the trailing pointer argument.
    if (!FT->getReturnType()->isVoidTy())
      new StoreInst(DFSF.getShadow(RI->getReturnValue()),
                    &*std::prev(F->arg_end()), RI);
  }
  return cast<Constant>(C.getCallee());
}
// Pass entry point. Declares the DFSan runtime interface, rewrites function
// ABIs (prefixing instrumented functions with "dfs$" and wrapping
// uninstrumented ones), then instruments every function body.
bool DataFlowSanitizer::runOnModule(Module &M) {
  if (ABIList.isIn(M, "skip"))
    return false;

  // Create the TLS globals used to pass argument/return shadows, unless an
  // external accessor was supplied to locate them at runtime.
  if (!GetArgTLSPtr) {
    Type *ArgTLSTy = ArrayType::get(ShadowTy, 64);
    ArgTLS = Mod->getOrInsertGlobal("__dfsan_arg_tls", ArgTLSTy);
    if (GlobalVariable *G = dyn_cast<GlobalVariable>(ArgTLS))
      G->setThreadLocalMode(GlobalVariable::InitialExecTLSModel);
  }
  if (!GetRetvalTLSPtr) {
    RetvalTLS = Mod->getOrInsertGlobal("__dfsan_retval_tls", ShadowTy);
    if (GlobalVariable *G = dyn_cast<GlobalVariable>(RetvalTLS))
      G->setThreadLocalMode(GlobalVariable::InitialExecTLSModel);
  }

  ExternalShadowMask =
      Mod->getOrInsertGlobal(kDFSanExternShadowPtrMask, IntptrTy);

  // Declare the runtime entry points with the attributes instrumented call
  // sites rely on (zero-extended label parameters/results).
  {
    AttributeList AL;
    AL = AL.addAttribute(M.getContext(), AttributeList::FunctionIndex,
                         Attribute::NoUnwind);
    AL = AL.addAttribute(M.getContext(), AttributeList::FunctionIndex,
                         Attribute::ReadNone);
    AL = AL.addAttribute(M.getContext(), AttributeList::ReturnIndex,
                         Attribute::ZExt);
    AL = AL.addParamAttribute(M.getContext(), 0, Attribute::ZExt);
    AL = AL.addParamAttribute(M.getContext(), 1, Attribute::ZExt);
    DFSanUnionFn =
        Mod->getOrInsertFunction("__dfsan_union", DFSanUnionFnTy, AL);
  }
  {
    AttributeList AL;
    AL = AL.addAttribute(M.getContext(), AttributeList::FunctionIndex,
                         Attribute::NoUnwind);
    AL = AL.addAttribute(M.getContext(), AttributeList::FunctionIndex,
                         Attribute::ReadNone);
    AL = AL.addAttribute(M.getContext(), AttributeList::ReturnIndex,
                         Attribute::ZExt);
    AL = AL.addParamAttribute(M.getContext(), 0, Attribute::ZExt);
    AL = AL.addParamAttribute(M.getContext(), 1, Attribute::ZExt);
    DFSanCheckedUnionFn =
        Mod->getOrInsertFunction("dfsan_union", DFSanUnionFnTy, AL);
  }
  {
    AttributeList AL;
    AL = AL.addAttribute(M.getContext(), AttributeList::FunctionIndex,
                         Attribute::NoUnwind);
    AL = AL.addAttribute(M.getContext(), AttributeList::FunctionIndex,
                         Attribute::ReadOnly);
    AL = AL.addAttribute(M.getContext(), AttributeList::ReturnIndex,
                         Attribute::ZExt);
    DFSanUnionLoadFn =
        Mod->getOrInsertFunction("__dfsan_union_load", DFSanUnionLoadFnTy, AL);
  }
  DFSanUnimplementedFn =
      Mod->getOrInsertFunction("__dfsan_unimplemented", DFSanUnimplementedFnTy);
  {
    AttributeList AL;
    AL = AL.addParamAttribute(M.getContext(), 0, Attribute::ZExt);
    DFSanSetLabelFn =
        Mod->getOrInsertFunction("__dfsan_set_label", DFSanSetLabelFnTy, AL);
  }
  DFSanNonzeroLabelFn =
      Mod->getOrInsertFunction("__dfsan_nonzero_label", DFSanNonzeroLabelFnTy);
  DFSanVarargWrapperFn = Mod->getOrInsertFunction("__dfsan_vararg_wrapper",
                                                  DFSanVarargWrapperFnTy);

  // Collect functions to instrument, excluding intrinsics and the runtime
  // functions declared above.
  std::vector<Function *> FnsToInstrument;
  SmallPtrSet<Function *, 2> FnsWithNativeABI;
  for (Function &i : M) {
    if (!i.isIntrinsic() &&
        &i != DFSanUnionFn.getCallee()->stripPointerCasts() &&
        &i != DFSanCheckedUnionFn.getCallee()->stripPointerCasts() &&
        &i != DFSanUnionLoadFn.getCallee()->stripPointerCasts() &&
        &i != DFSanUnimplementedFn.getCallee()->stripPointerCasts() &&
        &i != DFSanSetLabelFn.getCallee()->stripPointerCasts() &&
        &i != DFSanNonzeroLabelFn.getCallee()->stripPointerCasts() &&
        &i != DFSanVarargWrapperFn.getCallee()->stripPointerCasts())
      FnsToInstrument.push_back(&i);
  }

  // Give function aliases prefixes when necessary, and build wrappers where the
  // instrumentedness is inconsistent.
  for (Module::alias_iterator i = M.alias_begin(), e = M.alias_end(); i != e;) {
    GlobalAlias *GA = &*i;
    // Advance before any mutation: the alias may be erased below.
    ++i;
    // Don't stop on weak.  We assume people aren't playing games with the
    // instrumentedness of overridden weak aliases.
    if (auto F = dyn_cast<Function>(GA->getBaseObject())) {
      bool GAInst = isInstrumented(GA), FInst = isInstrumented(F);
      if (GAInst && FInst) {
        addGlobalNamePrefix(GA);
      } else if (GAInst != FInst) {
        // Non-instrumented alias of an instrumented function, or vice versa.
        // Replace the alias with a native-ABI wrapper of the aliasee. The pass
        // below will take care of instrumenting it.
        Function *NewF =
            buildWrapperFunction(F, "", GA->getLinkage(), F->getFunctionType());
        GA->replaceAllUsesWith(ConstantExpr::getBitCast(NewF, GA->getType()));
        NewF->takeName(GA);
        GA->eraseFromParent();
        FnsToInstrument.push_back(NewF);
      }
    }
  }

  ReadOnlyNoneAttrs.addAttribute(Attribute::ReadOnly)
      .addAttribute(Attribute::ReadNone);

  // First, change the ABI of every function in the module.  ABI-listed
  // functions keep their original ABI and get a wrapper function.
  for (std::vector<Function *>::iterator i = FnsToInstrument.begin(),
                                         e = FnsToInstrument.end();
       i != e; ++i) {
    Function &F = **i;
    FunctionType *FT = F.getFunctionType();
    bool IsZeroArgsVoidRet = (FT->getNumParams() == 0 && !FT->isVarArg() &&
                              FT->getReturnType()->isVoidTy());
    if (isInstrumented(&F)) {
      // Instrumented functions get a 'dfs$' prefix.  This allows us to more
      // easily identify cases of mismatching ABIs.
      if (getInstrumentedABI() == IA_Args && !IsZeroArgsVoidRet) {
        // Rebuild F with the args-ABI signature, moving its body and
        // redirecting arguments, block addresses, and all other users.
        FunctionType *NewFT = getArgsFunctionType(FT);
        Function *NewF = Function::Create(NewFT, F.getLinkage(),
                                          F.getAddressSpace(), "", &M);
        NewF->copyAttributesFrom(&F);
        NewF->removeAttributes(
            AttributeList::ReturnIndex,
            AttributeFuncs::typeIncompatible(NewFT->getReturnType()));
        for (Function::arg_iterator FArg = F.arg_begin(),
                                    NewFArg = NewF->arg_begin(),
                                    FArgEnd = F.arg_end();
             FArg != FArgEnd; ++FArg, ++NewFArg) {
          FArg->replaceAllUsesWith(&*NewFArg);
        }
        NewF->getBasicBlockList().splice(NewF->begin(), F.getBasicBlockList());
        // Block addresses must be remade against NewF, which now owns the
        // blocks; the iterator is advanced before each replacement.
        for (Function::user_iterator UI = F.user_begin(), UE = F.user_end();
             UI != UE;) {
          BlockAddress *BA = dyn_cast<BlockAddress>(*UI);
          ++UI;
          if (BA) {
            BA->replaceAllUsesWith(
                BlockAddress::get(NewF, BA->getBasicBlock()));
            delete BA;
          }
        }
        F.replaceAllUsesWith(
            ConstantExpr::getBitCast(NewF, PointerType::getUnqual(FT)));
        NewF->takeName(&F);
        F.eraseFromParent();
        *i = NewF;
        addGlobalNamePrefix(NewF);
      } else {
        addGlobalNamePrefix(&F);
      }
    } else if (!IsZeroArgsVoidRet || getWrapperKind(&F) == WK_Custom) {
      // Build a wrapper function for F.  The wrapper simply calls F, and is
      // added to FnsToInstrument so that any instrumentation according to its
      // WrapperKind is done in the second pass below.
      FunctionType *NewFT = getInstrumentedABI() == IA_Args
                                ? getArgsFunctionType(FT)
                                : FT;
      // If the function being wrapped has local linkage, then preserve the
      // function's linkage in the wrapper function.
      GlobalValue::LinkageTypes wrapperLinkage =
          F.hasLocalLinkage()
              ? F.getLinkage()
              : GlobalValue::LinkOnceODRLinkage;
      Function *NewF = buildWrapperFunction(
          &F, std::string("dfsw$") + std::string(F.getName()),
          wrapperLinkage, NewFT);
      if (getInstrumentedABI() == IA_TLS)
        NewF->removeAttributes(AttributeList::FunctionIndex, ReadOnlyNoneAttrs);
      Value *WrappedFnCst =
          ConstantExpr::getBitCast(NewF, PointerType::getUnqual(FT));
      F.replaceAllUsesWith(WrappedFnCst);
      UnwrappedFnMap[WrappedFnCst] = &F;
      *i = NewF;
      if (!F.isDeclaration()) {
        // This function is probably defining an interposition of an
        // uninstrumented function and hence needs to keep the original ABI.
        // But any functions it may call need to use the instrumented ABI, so
        // we instrument it in a mode which preserves the original ABI.
        FnsWithNativeABI.insert(&F);
        // This code needs to rebuild the iterators, as they may be invalidated
        // by the push_back, taking care that the new range does not include
        // any functions added by this code.
        size_t N = i - FnsToInstrument.begin(),
               Count = e - FnsToInstrument.begin();
        FnsToInstrument.push_back(&F);
        i = FnsToInstrument.begin() + N;
        e = FnsToInstrument.begin() + Count;
      }
      // Hopefully, nobody will try to indirectly call a vararg
      // function... yet.
    } else if (FT->isVarArg()) {
      UnwrappedFnMap[&F] = &F;
      *i = nullptr;
    }
  }

  // Second pass: instrument every function body.
  for (Function *i : FnsToInstrument) {
    if (!i || i->isDeclaration())
      continue;
    removeUnreachableBlocks(*i);
    DFSanFunction DFSF(*this, i, FnsWithNativeABI.count(i));
    // DFSanVisitor may create new basic blocks, which confuses df_iterator.
    // Build a copy of the list before iterating over it.
    SmallVector<BasicBlock *, 4> BBList(depth_first(&i->getEntryBlock()));
    for (BasicBlock *i : BBList) {
      Instruction *Inst = &i->front();
      while (true) {
        // DFSanVisitor may split the current basic block, changing the current
        // instruction's next pointer and moving the next instruction to the
        // tail block from which we should continue.
        Instruction *Next = Inst->getNextNode();
        // DFSanVisitor may delete Inst, so keep track of whether it was a
        // terminator.
        bool IsTerminator = Inst->isTerminator();
        if (!DFSF.SkipInsts.count(Inst))
          DFSanVisitor(DFSF).visit(Inst);
        if (IsTerminator)
          break;
        Inst = Next;
      }
    }
    // We will not necessarily be able to compute the shadow for every phi node
    // until we have visited every block.  Therefore, the code that handles phi
    // nodes adds them to the PHIFixups list so that they can be properly
    // handled here.
    for (std::vector<std::pair<PHINode *, PHINode *>>::iterator
             i = DFSF.PHIFixups.begin(),
             e = DFSF.PHIFixups.end();
         i != e; ++i) {
      for (unsigned val = 0, n = i->first->getNumIncomingValues(); val != n;
           ++val) {
        i->second->setIncomingValue(
            val, DFSF.getShadow(i->first->getIncomingValue(val)));
      }
    }
    // -dfsan-debug-nonzero-labels will split the CFG in all kinds of crazy
    // places (i.e. instructions in basic blocks we haven't even begun visiting
    // yet).  To make our life easier, do this work in a pass after the main
    // instrumentation.
    if (ClDebugNonzeroLabels) {
      for (Value *V : DFSF.NonZeroChecks) {
        Instruction *Pos;
        if (Instruction *I = dyn_cast<Instruction>(V))
          Pos = I->getNextNode();
        else
          Pos = &DFSF.F->getEntryBlock().front();
        // Skip past phis and allocas, which must stay at the block head.
        while (isa<PHINode>(Pos) || isa<AllocaInst>(Pos))
          Pos = Pos->getNextNode();
        IRBuilder<> IRB(Pos);
        Value *Ne = IRB.CreateICmpNE(V, DFSF.DFS.ZeroShadow);
        BranchInst *BI = cast<BranchInst>(SplitBlockAndInsertIfThen(
            Ne, Pos, /*Unreachable=*/false, ColdCallWeights));
        IRBuilder<> ThenIRB(BI);
        ThenIRB.CreateCall(DFSF.DFS.DFSanNonzeroLabelFn, {});
      }
    }
  }
  return false;
}
  889. Value *DFSanFunction::getArgTLSPtr() {
  890. if (ArgTLSPtr)
  891. return ArgTLSPtr;
  892. if (DFS.ArgTLS)
  893. return ArgTLSPtr = DFS.ArgTLS;
  894. IRBuilder<> IRB(&F->getEntryBlock().front());
  895. return ArgTLSPtr = IRB.CreateCall(DFS.GetArgTLSTy, DFS.GetArgTLS, {});
  896. }
  897. Value *DFSanFunction::getRetvalTLS() {
  898. if (RetvalTLSPtr)
  899. return RetvalTLSPtr;
  900. if (DFS.RetvalTLS)
  901. return RetvalTLSPtr = DFS.RetvalTLS;
  902. IRBuilder<> IRB(&F->getEntryBlock().front());
  903. return RetvalTLSPtr =
  904. IRB.CreateCall(DFS.GetRetvalTLSTy, DFS.GetRetvalTLS, {});
  905. }
  906. Value *DFSanFunction::getArgTLS(unsigned Idx, Instruction *Pos) {
  907. IRBuilder<> IRB(Pos);
  908. return IRB.CreateConstGEP2_64(ArrayType::get(DFS.ShadowTy, 64),
  909. getArgTLSPtr(), 0, Idx);
  910. }
// Returns the shadow value for V, computing and caching it on first request.
// Only arguments and instructions can carry taint; everything else (constants,
// globals) maps to the zero shadow.
Value *DFSanFunction::getShadow(Value *V) {
  if (!isa<Argument>(V) && !isa<Instruction>(V))
    return DFS.ZeroShadow;
  // Shadow is a reference into the map, so the assignments below also cache
  // the computed value for later queries.
  Value *&Shadow = ValShadowMap[V];
  if (!Shadow) {
    if (Argument *A = dyn_cast<Argument>(V)) {
      // Native-ABI functions receive no shadow for their arguments.
      if (IsNativeABI)
        return DFS.ZeroShadow;
      switch (IA) {
      case DataFlowSanitizer::IA_TLS: {
        // Load the argument shadow from its TLS slot. Place the load at the
        // top of the entry block, or right after the TLS-accessor call when
        // one had to be emitted.
        Value *ArgTLSPtr = getArgTLSPtr();
        Instruction *ArgTLSPos =
            DFS.ArgTLS ? &*F->getEntryBlock().begin()
                       : cast<Instruction>(ArgTLSPtr)->getNextNode();
        IRBuilder<> IRB(ArgTLSPos);
        Shadow =
            IRB.CreateLoad(DFS.ShadowTy, getArgTLS(A->getArgNo(), ArgTLSPos));
        break;
      }
      case DataFlowSanitizer::IA_Args: {
        // Args ABI: shadows are trailing parameters, so argument i's shadow
        // is parameter i plus half the (doubled) parameter count.
        unsigned ArgIdx = A->getArgNo() + F->arg_size() / 2;
        Function::arg_iterator i = F->arg_begin();
        while (ArgIdx--)
          ++i;
        Shadow = &*i;
        assert(Shadow->getType() == DFS.ShadowTy);
        break;
      }
      }
      NonZeroChecks.push_back(Shadow);
    } else {
      // An instruction with no recorded shadow is treated as untainted.
      Shadow = DFS.ZeroShadow;
    }
  }
  return Shadow;
}
// Records Shadow as the shadow of I. Each instruction gets exactly one
// shadow, and it must already have the shadow type.
void DFSanFunction::setShadow(Instruction *I, Value *Shadow) {
  assert(!ValShadowMap.count(I));
  assert(Shadow->getType() == DFS.ShadowTy);
  ValShadowMap[I] = Shadow;
}
  952. Value *DataFlowSanitizer::getShadowAddress(Value *Addr, Instruction *Pos) {
  953. assert(Addr != RetvalTLS && "Reinstrumenting?");
  954. IRBuilder<> IRB(Pos);
  955. Value *ShadowPtrMaskValue;
  956. if (DFSanRuntimeShadowMask)
  957. ShadowPtrMaskValue = IRB.CreateLoad(IntptrTy, ExternalShadowMask);
  958. else
  959. ShadowPtrMaskValue = ShadowPtrMask;
  960. return IRB.CreateIntToPtr(
  961. IRB.CreateMul(
  962. IRB.CreateAnd(IRB.CreatePtrToInt(Addr, IntptrTy),
  963. IRB.CreatePtrToInt(ShadowPtrMaskValue, IntptrTy)),
  964. ShadowPtrMul),
  965. ShadowPtrTy);
  966. }
// Generates IR to compute the union of the two given shadows, inserting it
// before Pos. Returns the computed union Value.
Value *DFSanFunction::combineShadows(Value *V1, Value *V2, Instruction *Pos) {
  // Trivial cases: zero is the identity of union, and equal shadows union to
  // themselves.
  if (V1 == DFS.ZeroShadow)
    return V2;
  if (V2 == DFS.ZeroShadow)
    return V1;
  if (V1 == V2)
    return V1;
  // ShadowElements records which primitive shadows each previously built
  // union contains; when one operand already subsumes the other, no new IR is
  // needed.
  auto V1Elems = ShadowElements.find(V1);
  auto V2Elems = ShadowElements.find(V2);
  if (V1Elems != ShadowElements.end() && V2Elems != ShadowElements.end()) {
    if (std::includes(V1Elems->second.begin(), V1Elems->second.end(),
                      V2Elems->second.begin(), V2Elems->second.end())) {
      return V1;
    } else if (std::includes(V2Elems->second.begin(), V2Elems->second.end(),
                             V1Elems->second.begin(), V1Elems->second.end())) {
      return V2;
    }
  } else if (V1Elems != ShadowElements.end()) {
    if (V1Elems->second.count(V2))
      return V1;
  } else if (V2Elems != ShadowElements.end()) {
    if (V2Elems->second.count(V1))
      return V2;
  }
  // Canonicalize the pair so (V1,V2) and (V2,V1) share one cache slot; a
  // cached union is reusable only where its defining block dominates Pos.
  auto Key = std::make_pair(V1, V2);
  if (V1 > V2)
    std::swap(Key.first, Key.second);
  CachedCombinedShadow &CCS = CachedCombinedShadows[Key];
  if (CCS.Block && DT.dominates(CCS.Block, Pos->getParent()))
    return CCS.Shadow;
  IRBuilder<> IRB(Pos);
  if (AvoidNewBlocks) {
    // Keep the CFG intact: call the runtime union helper unconditionally.
    CallInst *Call = IRB.CreateCall(DFS.DFSanCheckedUnionFn, {V1, V2});
    Call->addAttribute(AttributeList::ReturnIndex, Attribute::ZExt);
    Call->addParamAttr(0, Attribute::ZExt);
    Call->addParamAttr(1, Attribute::ZExt);
    CCS.Block = Pos->getParent();
    CCS.Shadow = Call;
  } else {
    // Inline the equal-label fast path: call __dfsan_union only when the two
    // labels differ, then merge the call result with V1 via a phi.
    BasicBlock *Head = Pos->getParent();
    Value *Ne = IRB.CreateICmpNE(V1, V2);
    BranchInst *BI = cast<BranchInst>(SplitBlockAndInsertIfThen(
        Ne, Pos, /*Unreachable=*/false, DFS.ColdCallWeights, &DT));
    IRBuilder<> ThenIRB(BI);
    CallInst *Call = ThenIRB.CreateCall(DFS.DFSanUnionFn, {V1, V2});
    Call->addAttribute(AttributeList::ReturnIndex, Attribute::ZExt);
    Call->addParamAttr(0, Attribute::ZExt);
    Call->addParamAttr(1, Attribute::ZExt);
    BasicBlock *Tail = BI->getSuccessor(0);
    PHINode *Phi = PHINode::Create(DFS.ShadowTy, 2, "", &Tail->front());
    Phi->addIncoming(Call, Call->getParent());
    Phi->addIncoming(V1, Head);
    CCS.Block = Tail;
    CCS.Shadow = Phi;
  }
  // Record the element set of the new union for the subsumption checks above.
  std::set<Value *> UnionElems;
  if (V1Elems != ShadowElements.end()) {
    UnionElems = V1Elems->second;
  } else {
    UnionElems.insert(V1);
  }
  if (V2Elems != ShadowElements.end()) {
    UnionElems.insert(V2Elems->second.begin(), V2Elems->second.end());
  } else {
    UnionElems.insert(V2);
  }
  ShadowElements[CCS.Shadow] = std::move(UnionElems);
  return CCS.Shadow;
}
  1038. // A convenience function which folds the shadows of each of the operands
  1039. // of the provided instruction Inst, inserting the IR before Inst. Returns
  1040. // the computed union Value.
  1041. Value *DFSanFunction::combineOperandShadows(Instruction *Inst) {
  1042. if (Inst->getNumOperands() == 0)
  1043. return DFS.ZeroShadow;
  1044. Value *Shadow = getShadow(Inst->getOperand(0));
  1045. for (unsigned i = 1, n = Inst->getNumOperands(); i != n; ++i) {
  1046. Shadow = combineShadows(Shadow, getShadow(Inst->getOperand(i)), Inst);
  1047. }
  1048. return Shadow;
  1049. }
  1050. void DFSanVisitor::visitOperandShadowInst(Instruction &I) {
  1051. Value *CombinedShadow = DFSF.combineOperandShadows(&I);
  1052. DFSF.setShadow(&I, CombinedShadow);
  1053. }
// Generates IR to load shadow corresponding to bytes [Addr, Addr+Size), where
// Addr has alignment Align, and take the union of each of those shadows.
Value *DFSanFunction::loadShadow(Value *Addr, uint64_t Size, uint64_t Align,
                                 Instruction *Pos) {
  // Fast case: the address is an alloca whose shadow is kept in a parallel
  // alloca (see visitAllocaInst); load it directly.
  if (AllocaInst *AI = dyn_cast<AllocaInst>(Addr)) {
    const auto i = AllocaShadowMap.find(AI);
    if (i != AllocaShadowMap.end()) {
      IRBuilder<> IRB(Pos);
      return IRB.CreateLoad(DFS.ShadowTy, i->second);
    }
  }
  uint64_t ShadowAlign = Align * DFS.ShadowWidth / 8;
  // If every underlying object is provably read-only (functions, block
  // addresses, constant globals), the loaded bytes can never be tainted.
  SmallVector<const Value *, 2> Objs;
  GetUnderlyingObjects(Addr, Objs, Pos->getModule()->getDataLayout());
  bool AllConstants = true;
  for (const Value *Obj : Objs) {
    if (isa<Function>(Obj) || isa<BlockAddress>(Obj))
      continue;
    if (isa<GlobalVariable>(Obj) && cast<GlobalVariable>(Obj)->isConstant())
      continue;
    AllConstants = false;
    break;
  }
  if (AllConstants)
    return DFS.ZeroShadow;
  Value *ShadowAddr = DFS.getShadowAddress(Addr, Pos);
  // Small sizes get dedicated straight-line code.
  switch (Size) {
  case 0:
    return DFS.ZeroShadow;
  case 1: {
    LoadInst *LI = new LoadInst(DFS.ShadowTy, ShadowAddr, "", Pos);
    LI->setAlignment(ShadowAlign);
    return LI;
  }
  case 2: {
    IRBuilder<> IRB(Pos);
    Value *ShadowAddr1 = IRB.CreateGEP(DFS.ShadowTy, ShadowAddr,
                                       ConstantInt::get(DFS.IntptrTy, 1));
    return combineShadows(
        IRB.CreateAlignedLoad(DFS.ShadowTy, ShadowAddr, ShadowAlign),
        IRB.CreateAlignedLoad(DFS.ShadowTy, ShadowAddr1, ShadowAlign), Pos);
  }
  }
  if (!AvoidNewBlocks && Size % (64 / DFS.ShadowWidth) == 0) {
    // Fast path for the common case where each byte has identical shadow: load
    // shadow 64 bits at a time, fall out to a __dfsan_union_load call if any
    // shadow is non-equal.
    BasicBlock *FallbackBB = BasicBlock::Create(*DFS.Ctx, "", F);
    IRBuilder<> FallbackIRB(FallbackBB);
    CallInst *FallbackCall = FallbackIRB.CreateCall(
        DFS.DFSanUnionLoadFn,
        {ShadowAddr, ConstantInt::get(DFS.IntptrTy, Size)});
    FallbackCall->addAttribute(AttributeList::ReturnIndex, Attribute::ZExt);
    // Compare each of the shadows stored in the loaded 64 bits to each other,
    // by computing (WideShadow rotl ShadowWidth) == WideShadow.
    IRBuilder<> IRB(Pos);
    Value *WideAddr =
        IRB.CreateBitCast(ShadowAddr, Type::getInt64PtrTy(*DFS.Ctx));
    Value *WideShadow =
        IRB.CreateAlignedLoad(IRB.getInt64Ty(), WideAddr, ShadowAlign);
    Value *TruncShadow = IRB.CreateTrunc(WideShadow, DFS.ShadowTy);
    Value *ShlShadow = IRB.CreateShl(WideShadow, DFS.ShadowWidth);
    Value *ShrShadow = IRB.CreateLShr(WideShadow, 64 - DFS.ShadowWidth);
    Value *RotShadow = IRB.CreateOr(ShlShadow, ShrShadow);
    Value *ShadowsEq = IRB.CreateICmpEQ(WideShadow, RotShadow);
    // Split at Pos and keep the dominator tree consistent: Tail inherits
    // Head's children.
    BasicBlock *Head = Pos->getParent();
    BasicBlock *Tail = Head->splitBasicBlock(Pos->getIterator());
    if (DomTreeNode *OldNode = DT.getNode(Head)) {
      std::vector<DomTreeNode *> Children(OldNode->begin(), OldNode->end());
      DomTreeNode *NewNode = DT.addNewBlock(Tail, Head);
      for (auto Child : Children)
        DT.changeImmediateDominator(Child, NewNode);
    }
    // In the following code LastBr will refer to the previous basic block's
    // conditional branch instruction, whose true successor is fixed up to point
    // to the next block during the loop below or to the tail after the final
    // iteration.
    BranchInst *LastBr = BranchInst::Create(FallbackBB, FallbackBB, ShadowsEq);
    ReplaceInstWithInst(Head->getTerminator(), LastBr);
    DT.addNewBlock(FallbackBB, Head);
    // One extra 64-bit comparison per additional 64-bit chunk; every chunk
    // must equal the first, otherwise fall back to the runtime union load.
    for (uint64_t Ofs = 64 / DFS.ShadowWidth; Ofs != Size;
         Ofs += 64 / DFS.ShadowWidth) {
      BasicBlock *NextBB = BasicBlock::Create(*DFS.Ctx, "", F);
      DT.addNewBlock(NextBB, LastBr->getParent());
      IRBuilder<> NextIRB(NextBB);
      WideAddr = NextIRB.CreateGEP(Type::getInt64Ty(*DFS.Ctx), WideAddr,
                                   ConstantInt::get(DFS.IntptrTy, 1));
      Value *NextWideShadow = NextIRB.CreateAlignedLoad(NextIRB.getInt64Ty(),
                                                        WideAddr, ShadowAlign);
      ShadowsEq = NextIRB.CreateICmpEQ(WideShadow, NextWideShadow);
      LastBr->setSuccessor(0, NextBB);
      LastBr = NextIRB.CreateCondBr(ShadowsEq, FallbackBB, FallbackBB);
    }
    LastBr->setSuccessor(0, Tail);
    FallbackIRB.CreateBr(Tail);
    PHINode *Shadow = PHINode::Create(DFS.ShadowTy, 2, "", &Tail->front());
    Shadow->addIncoming(FallbackCall, FallbackBB);
    Shadow->addIncoming(TruncShadow, LastBr->getParent());
    return Shadow;
  }
  // General case: delegate the whole range to the runtime.
  IRBuilder<> IRB(Pos);
  CallInst *FallbackCall = IRB.CreateCall(
      DFS.DFSanUnionLoadFn, {ShadowAddr, ConstantInt::get(DFS.IntptrTy, Size)});
  FallbackCall->addAttribute(AttributeList::ReturnIndex, Attribute::ZExt);
  return FallbackCall;
}
  1160. void DFSanVisitor::visitLoadInst(LoadInst &LI) {
  1161. auto &DL = LI.getModule()->getDataLayout();
  1162. uint64_t Size = DL.getTypeStoreSize(LI.getType());
  1163. if (Size == 0) {
  1164. DFSF.setShadow(&LI, DFSF.DFS.ZeroShadow);
  1165. return;
  1166. }
  1167. uint64_t Align;
  1168. if (ClPreserveAlignment) {
  1169. Align = LI.getAlignment();
  1170. if (Align == 0)
  1171. Align = DL.getABITypeAlignment(LI.getType());
  1172. } else {
  1173. Align = 1;
  1174. }
  1175. IRBuilder<> IRB(&LI);
  1176. Value *Shadow = DFSF.loadShadow(LI.getPointerOperand(), Size, Align, &LI);
  1177. if (ClCombinePointerLabelsOnLoad) {
  1178. Value *PtrShadow = DFSF.getShadow(LI.getPointerOperand());
  1179. Shadow = DFSF.combineShadows(Shadow, PtrShadow, &LI);
  1180. }
  1181. if (Shadow != DFSF.DFS.ZeroShadow)
  1182. DFSF.NonZeroChecks.push_back(Shadow);
  1183. DFSF.setShadow(&LI, Shadow);
  1184. }
// Generates IR that stores Shadow into the shadow memory for the Size bytes
// at Addr, where Addr has alignment Align.
void DFSanFunction::storeShadow(Value *Addr, uint64_t Size, uint64_t Align,
                                Value *Shadow, Instruction *Pos) {
  // Fast case: shadow kept in a parallel alloca (see visitAllocaInst).
  if (AllocaInst *AI = dyn_cast<AllocaInst>(Addr)) {
    const auto i = AllocaShadowMap.find(AI);
    if (i != AllocaShadowMap.end()) {
      IRBuilder<> IRB(Pos);
      IRB.CreateStore(Shadow, i->second);
      return;
    }
  }
  uint64_t ShadowAlign = Align * DFS.ShadowWidth / 8;
  IRBuilder<> IRB(Pos);
  Value *ShadowAddr = DFS.getShadowAddress(Addr, Pos);
  if (Shadow == DFS.ZeroShadow) {
    // Clearing taint: one wide integer store of zero covers the whole range.
    IntegerType *ShadowTy = IntegerType::get(*DFS.Ctx, Size * DFS.ShadowWidth);
    Value *ExtZeroShadow = ConstantInt::get(ShadowTy, 0);
    Value *ExtShadowAddr =
        IRB.CreateBitCast(ShadowAddr, PointerType::getUnqual(ShadowTy));
    IRB.CreateAlignedStore(ExtZeroShadow, ExtShadowAddr, ShadowAlign);
    return;
  }
  // Splat the shadow: 128-bit vector stores while enough elements remain,
  // then scalar stores for the tail.
  const unsigned ShadowVecSize = 128 / DFS.ShadowWidth;
  uint64_t Offset = 0;
  if (Size >= ShadowVecSize) {
    VectorType *ShadowVecTy = VectorType::get(DFS.ShadowTy, ShadowVecSize);
    Value *ShadowVec = UndefValue::get(ShadowVecTy);
    for (unsigned i = 0; i != ShadowVecSize; ++i) {
      ShadowVec = IRB.CreateInsertElement(
          ShadowVec, Shadow, ConstantInt::get(Type::getInt32Ty(*DFS.Ctx), i));
    }
    Value *ShadowVecAddr =
        IRB.CreateBitCast(ShadowAddr, PointerType::getUnqual(ShadowVecTy));
    do {
      Value *CurShadowVecAddr =
          IRB.CreateConstGEP1_32(ShadowVecTy, ShadowVecAddr, Offset);
      IRB.CreateAlignedStore(ShadowVec, CurShadowVecAddr, ShadowAlign);
      Size -= ShadowVecSize;
      ++Offset;
    } while (Size >= ShadowVecSize);
    // Offset counted vector elements above; convert to scalar-shadow units
    // for the tail loop below.
    Offset *= ShadowVecSize;
  }
  while (Size > 0) {
    Value *CurShadowAddr =
        IRB.CreateConstGEP1_32(DFS.ShadowTy, ShadowAddr, Offset);
    IRB.CreateAlignedStore(Shadow, CurShadowAddr, ShadowAlign);
    --Size;
    ++Offset;
  }
}
  1234. void DFSanVisitor::visitStoreInst(StoreInst &SI) {
  1235. auto &DL = SI.getModule()->getDataLayout();
  1236. uint64_t Size = DL.getTypeStoreSize(SI.getValueOperand()->getType());
  1237. if (Size == 0)
  1238. return;
  1239. uint64_t Align;
  1240. if (ClPreserveAlignment) {
  1241. Align = SI.getAlignment();
  1242. if (Align == 0)
  1243. Align = DL.getABITypeAlignment(SI.getValueOperand()->getType());
  1244. } else {
  1245. Align = 1;
  1246. }
  1247. Value* Shadow = DFSF.getShadow(SI.getValueOperand());
  1248. if (ClCombinePointerLabelsOnStore) {
  1249. Value *PtrShadow = DFSF.getShadow(SI.getPointerOperand());
  1250. Shadow = DFSF.combineShadows(Shadow, PtrShadow, &SI);
  1251. }
  1252. DFSF.storeShadow(SI.getPointerOperand(), Size, Align, Shadow, &SI);
  1253. }
// All of the following instruction kinds propagate taint the same way: the
// result's shadow is simply the union of the operands' shadows.
void DFSanVisitor::visitBinaryOperator(BinaryOperator &BO) {
  visitOperandShadowInst(BO);
}

void DFSanVisitor::visitCastInst(CastInst &CI) { visitOperandShadowInst(CI); }

void DFSanVisitor::visitCmpInst(CmpInst &CI) { visitOperandShadowInst(CI); }

void DFSanVisitor::visitGetElementPtrInst(GetElementPtrInst &GEPI) {
  visitOperandShadowInst(GEPI);
}

void DFSanVisitor::visitExtractElementInst(ExtractElementInst &I) {
  visitOperandShadowInst(I);
}

void DFSanVisitor::visitInsertElementInst(InsertElementInst &I) {
  visitOperandShadowInst(I);
}

void DFSanVisitor::visitShuffleVectorInst(ShuffleVectorInst &I) {
  visitOperandShadowInst(I);
}

void DFSanVisitor::visitExtractValueInst(ExtractValueInst &I) {
  visitOperandShadowInst(I);
}

void DFSanVisitor::visitInsertValueInst(InsertValueInst &I) {
  visitOperandShadowInst(I);
}
  1277. void DFSanVisitor::visitAllocaInst(AllocaInst &I) {
  1278. bool AllLoadsStores = true;
  1279. for (User *U : I.users()) {
  1280. if (isa<LoadInst>(U))
  1281. continue;
  1282. if (StoreInst *SI = dyn_cast<StoreInst>(U)) {
  1283. if (SI->getPointerOperand() == &I)
  1284. continue;
  1285. }
  1286. AllLoadsStores = false;
  1287. break;
  1288. }
  1289. if (AllLoadsStores) {
  1290. IRBuilder<> IRB(&I);
  1291. DFSF.AllocaShadowMap[&I] = IRB.CreateAlloca(DFSF.DFS.ShadowTy);
  1292. }
  1293. DFSF.setShadow(&I, DFSF.DFS.ZeroShadow);
  1294. }
  1295. void DFSanVisitor::visitSelectInst(SelectInst &I) {
  1296. Value *CondShadow = DFSF.getShadow(I.getCondition());
  1297. Value *TrueShadow = DFSF.getShadow(I.getTrueValue());
  1298. Value *FalseShadow = DFSF.getShadow(I.getFalseValue());
  1299. if (isa<VectorType>(I.getCondition()->getType())) {
  1300. DFSF.setShadow(
  1301. &I,
  1302. DFSF.combineShadows(
  1303. CondShadow, DFSF.combineShadows(TrueShadow, FalseShadow, &I), &I));
  1304. } else {
  1305. Value *ShadowSel;
  1306. if (TrueShadow == FalseShadow) {
  1307. ShadowSel = TrueShadow;
  1308. } else {
  1309. ShadowSel =
  1310. SelectInst::Create(I.getCondition(), TrueShadow, FalseShadow, "", &I);
  1311. }
  1312. DFSF.setShadow(&I, DFSF.combineShadows(CondShadow, ShadowSel, &I));
  1313. }
  1314. }
  1315. void DFSanVisitor::visitMemSetInst(MemSetInst &I) {
  1316. IRBuilder<> IRB(&I);
  1317. Value *ValShadow = DFSF.getShadow(I.getValue());
  1318. IRB.CreateCall(DFSF.DFS.DFSanSetLabelFn,
  1319. {ValShadow, IRB.CreateBitCast(I.getDest(), Type::getInt8PtrTy(
  1320. *DFSF.DFS.Ctx)),
  1321. IRB.CreateZExtOrTrunc(I.getLength(), DFSF.DFS.IntptrTy)});
  1322. }
  1323. void DFSanVisitor::visitMemTransferInst(MemTransferInst &I) {
  1324. IRBuilder<> IRB(&I);
  1325. Value *DestShadow = DFSF.DFS.getShadowAddress(I.getDest(), &I);
  1326. Value *SrcShadow = DFSF.DFS.getShadowAddress(I.getSource(), &I);
  1327. Value *LenShadow = IRB.CreateMul(
  1328. I.getLength(),
  1329. ConstantInt::get(I.getLength()->getType(), DFSF.DFS.ShadowWidth / 8));
  1330. Type *Int8Ptr = Type::getInt8PtrTy(*DFSF.DFS.Ctx);
  1331. DestShadow = IRB.CreateBitCast(DestShadow, Int8Ptr);
  1332. SrcShadow = IRB.CreateBitCast(SrcShadow, Int8Ptr);
  1333. auto *MTI = cast<MemTransferInst>(
  1334. IRB.CreateCall(I.getFunctionType(), I.getCalledValue(),
  1335. {DestShadow, SrcShadow, LenShadow, I.getVolatileCst()}));
  1336. if (ClPreserveAlignment) {
  1337. MTI->setDestAlignment(I.getDestAlignment() * (DFSF.DFS.ShadowWidth / 8));
  1338. MTI->setSourceAlignment(I.getSourceAlignment() * (DFSF.DFS.ShadowWidth / 8));
  1339. } else {
  1340. MTI->setDestAlignment(DFSF.DFS.ShadowWidth / 8);
  1341. MTI->setSourceAlignment(DFSF.DFS.ShadowWidth / 8);
  1342. }
  1343. }
  1344. void DFSanVisitor::visitReturnInst(ReturnInst &RI) {
  1345. if (!DFSF.IsNativeABI && RI.getReturnValue()) {
  1346. switch (DFSF.IA) {
  1347. case DataFlowSanitizer::IA_TLS: {
  1348. Value *S = DFSF.getShadow(RI.getReturnValue());
  1349. IRBuilder<> IRB(&RI);
  1350. IRB.CreateStore(S, DFSF.getRetvalTLS());
  1351. break;
  1352. }
  1353. case DataFlowSanitizer::IA_Args: {
  1354. IRBuilder<> IRB(&RI);
  1355. Type *RT = DFSF.F->getFunctionType()->getReturnType();
  1356. Value *InsVal =
  1357. IRB.CreateInsertValue(UndefValue::get(RT), RI.getReturnValue(), 0);
  1358. Value *InsShadow =
  1359. IRB.CreateInsertValue(InsVal, DFSF.getShadow(RI.getReturnValue()), 1);
  1360. RI.setOperand(0, InsShadow);
  1361. break;
  1362. }
  1363. }
  1364. }
  1365. }
// Instruments a call or invoke site. Dispatch order:
//  1. Intrinsics and inline asm are handled like ordinary instructions
//     (result shadow = union of operand shadows).
//  2. Calls synthesized by DFSan's own wrappers are skipped.
//  3. Functions with a known wrapper kind get kind-specific treatment
//     (warn / discard / functional / custom __dfsw_ wrapper).
//  4. Otherwise, shadows are passed per the instrumented ABI: through TLS
//     slots (IA_TLS) or by rewriting the call to take extra shadow
//     arguments and return a {value, shadow} pair (IA_Args).
void DFSanVisitor::visitCallSite(CallSite CS) {
  Function *F = CS.getCalledFunction();
  if ((F && F->isIntrinsic()) || isa<InlineAsm>(CS.getCalledValue())) {
    visitOperandShadowInst(*CS.getInstruction());
    return;
  }

  // Calls to this function are synthesized in wrappers, and we shouldn't
  // instrument them.
  if (F == DFSF.DFS.DFSanVarargWrapperFn.getCallee()->stripPointerCasts())
    return;

  IRBuilder<> IRB(CS.getInstruction());

  DenseMap<Value *, Function *>::iterator i =
      DFSF.DFS.UnwrappedFnMap.find(CS.getCalledValue());
  if (i != DFSF.DFS.UnwrappedFnMap.end()) {
    Function *F = i->second;
    switch (DFSF.DFS.getWrapperKind(F)) {
    case DataFlowSanitizer::WK_Warning:
      // Emit a runtime "unimplemented" warning, then treat the result as
      // unlabelled.
      CS.setCalledFunction(F);
      IRB.CreateCall(DFSF.DFS.DFSanUnimplementedFn,
                     IRB.CreateGlobalStringPtr(F->getName()));
      DFSF.setShadow(CS.getInstruction(), DFSF.DFS.ZeroShadow);
      return;
    case DataFlowSanitizer::WK_Discard:
      // Deliberately drop taint: the result is unlabelled.
      CS.setCalledFunction(F);
      DFSF.setShadow(CS.getInstruction(), DFSF.DFS.ZeroShadow);
      return;
    case DataFlowSanitizer::WK_Functional:
      // Pure function: result shadow is the union of the argument shadows.
      CS.setCalledFunction(F);
      visitOperandShadowInst(*CS.getInstruction());
      return;
    case DataFlowSanitizer::WK_Custom:
      // Don't try to handle invokes of custom functions, it's too complicated.
      // Instead, invoke the dfsw$ wrapper, which will in turn call the __dfsw_
      // wrapper.
      if (CallInst *CI = dyn_cast<CallInst>(CS.getInstruction())) {
        FunctionType *FT = F->getFunctionType();
        TransformedFunction CustomFn = DFSF.DFS.getCustomFunctionType(FT);
        std::string CustomFName = "__dfsw_";
        CustomFName += F->getName();
        FunctionCallee CustomF = DFSF.DFS.Mod->getOrInsertFunction(
            CustomFName, CustomFn.TransformedType);
        if (Function *CustomFn = dyn_cast<Function>(CustomF.getCallee())) {
          CustomFn->copyAttributesFrom(F);

          // Custom functions returning non-void will write to the return label.
          if (!FT->getReturnType()->isVoidTy()) {
            CustomFn->removeAttributes(AttributeList::FunctionIndex,
                                       DFSF.DFS.ReadOnlyNoneAttrs);
          }
        }

        std::vector<Value *> Args;

        // First pass over the fixed arguments: a function-pointer argument is
        // replaced by a (trampoline, i8* context) pair so the custom wrapper
        // can invoke it with shadow propagation.
        CallSite::arg_iterator i = CS.arg_begin();
        for (unsigned n = FT->getNumParams(); n != 0; ++i, --n) {
          Type *T = (*i)->getType();
          FunctionType *ParamFT;
          if (isa<PointerType>(T) &&
              (ParamFT = dyn_cast<FunctionType>(
                   cast<PointerType>(T)->getElementType()))) {
            std::string TName = "dfst";
            TName += utostr(FT->getNumParams() - n);
            TName += "$";
            TName += F->getName();
            Constant *T = DFSF.DFS.getOrBuildTrampolineFunction(ParamFT, TName);
            Args.push_back(T);
            Args.push_back(
                IRB.CreateBitCast(*i, Type::getInt8PtrTy(*DFSF.DFS.Ctx)));
          } else {
            Args.push_back(*i);
          }
        }

        // Second pass: append one shadow argument per fixed argument.
        i = CS.arg_begin();
        const unsigned ShadowArgStart = Args.size();
        for (unsigned n = FT->getNumParams(); n != 0; ++i, --n)
          Args.push_back(DFSF.getShadow(*i));

        // Variadic callee: pack the variadic arguments' shadows into a stack
        // array and pass a pointer to its first element.
        if (FT->isVarArg()) {
          auto *LabelVATy = ArrayType::get(DFSF.DFS.ShadowTy,
                                           CS.arg_size() - FT->getNumParams());
          auto *LabelVAAlloca = new AllocaInst(
              LabelVATy, getDataLayout().getAllocaAddrSpace(),
              "labelva", &DFSF.F->getEntryBlock().front());

          for (unsigned n = 0; i != CS.arg_end(); ++i, ++n) {
            auto LabelVAPtr = IRB.CreateStructGEP(LabelVATy, LabelVAAlloca, n);
            IRB.CreateStore(DFSF.getShadow(*i), LabelVAPtr);
          }

          Args.push_back(IRB.CreateStructGEP(LabelVATy, LabelVAAlloca, 0));
        }

        // Non-void callee: pass a pointer through which the callee writes the
        // return label. The alloca is created once per function and reused.
        if (!FT->getReturnType()->isVoidTy()) {
          if (!DFSF.LabelReturnAlloca) {
            DFSF.LabelReturnAlloca =
                new AllocaInst(DFSF.DFS.ShadowTy,
                               getDataLayout().getAllocaAddrSpace(),
                               "labelreturn", &DFSF.F->getEntryBlock().front());
          }
          Args.push_back(DFSF.LabelReturnAlloca);
        }

        // Finally, forward the variadic arguments themselves unchanged.
        for (i = CS.arg_begin() + FT->getNumParams(); i != CS.arg_end(); ++i)
          Args.push_back(*i);

        CallInst *CustomCI = IRB.CreateCall(CustomF, Args);
        CustomCI->setCallingConv(CI->getCallingConv());
        CustomCI->setAttributes(TransformFunctionAttributes(CustomFn,
            CI->getContext(), CI->getAttributes()));

        // Update the parameter attributes of the custom call instruction to
        // zero extend the shadow parameters. This is required for targets
        // which consider ShadowTy an illegal type.
        for (unsigned n = 0; n < FT->getNumParams(); n++) {
          const unsigned ArgNo = ShadowArgStart + n;
          if (CustomCI->getArgOperand(ArgNo)->getType() == DFSF.DFS.ShadowTy)
            CustomCI->addParamAttr(ArgNo, Attribute::ZExt);
        }

        // Read back the return label the callee wrote.
        if (!FT->getReturnType()->isVoidTy()) {
          LoadInst *LabelLoad =
              IRB.CreateLoad(DFSF.DFS.ShadowTy, DFSF.LabelReturnAlloca);
          DFSF.setShadow(CustomCI, LabelLoad);
        }

        CI->replaceAllUsesWith(CustomCI);
        CI->eraseFromParent();
        return;
      }
      break;
    }
  }

  FunctionType *FT = cast<FunctionType>(
      CS.getCalledValue()->getType()->getPointerElementType());
  if (DFSF.DFS.getInstrumentedABI() == DataFlowSanitizer::IA_TLS) {
    // Store each argument's shadow into the callee's TLS argument slots.
    for (unsigned i = 0, n = FT->getNumParams(); i != n; ++i) {
      IRB.CreateStore(DFSF.getShadow(CS.getArgument(i)),
                      DFSF.getArgTLS(i, CS.getInstruction()));
    }
  }

  // Find where to load the return shadow: immediately after a call, or at
  // the front of the (possibly split) normal destination of an invoke.
  Instruction *Next = nullptr;
  if (!CS.getType()->isVoidTy()) {
    if (InvokeInst *II = dyn_cast<InvokeInst>(CS.getInstruction())) {
      if (II->getNormalDest()->getSinglePredecessor()) {
        Next = &II->getNormalDest()->front();
      } else {
        BasicBlock *NewBB =
            SplitEdge(II->getParent(), II->getNormalDest(), &DFSF.DT);
        Next = &NewBB->front();
      }
    } else {
      assert(CS->getIterator() != CS->getParent()->end());
      Next = CS->getNextNode();
    }

    if (DFSF.DFS.getInstrumentedABI() == DataFlowSanitizer::IA_TLS) {
      // The return shadow comes back through TLS; load it after the call.
      IRBuilder<> NextIRB(Next);
      LoadInst *LI = NextIRB.CreateLoad(DFSF.DFS.ShadowTy, DFSF.getRetvalTLS());
      DFSF.SkipInsts.insert(LI);
      DFSF.setShadow(CS.getInstruction(), LI);
      DFSF.NonZeroChecks.push_back(LI);
    }
  }

  // Do all instrumentation for IA_Args down here to defer tampering with the
  // CFG in a way that SplitEdge may be able to detect.
  if (DFSF.DFS.getInstrumentedABI() == DataFlowSanitizer::IA_Args) {
    // Rewrite the call to the args-ABI signature: original fixed args, then
    // their shadows, then (for varargs) a pointer to an array of variadic
    // shadows followed by the variadic args themselves.
    FunctionType *NewFT = DFSF.DFS.getArgsFunctionType(FT);
    Value *Func =
        IRB.CreateBitCast(CS.getCalledValue(), PointerType::getUnqual(NewFT));
    std::vector<Value *> Args;

    CallSite::arg_iterator i = CS.arg_begin(), e = CS.arg_end();
    for (unsigned n = FT->getNumParams(); n != 0; ++i, --n)
      Args.push_back(*i);

    i = CS.arg_begin();
    for (unsigned n = FT->getNumParams(); n != 0; ++i, --n)
      Args.push_back(DFSF.getShadow(*i));

    if (FT->isVarArg()) {
      unsigned VarArgSize = CS.arg_size() - FT->getNumParams();
      ArrayType *VarArgArrayTy = ArrayType::get(DFSF.DFS.ShadowTy, VarArgSize);
      AllocaInst *VarArgShadow =
          new AllocaInst(VarArgArrayTy, getDataLayout().getAllocaAddrSpace(),
                         "", &DFSF.F->getEntryBlock().front());
      Args.push_back(IRB.CreateConstGEP2_32(VarArgArrayTy, VarArgShadow, 0, 0));
      for (unsigned n = 0; i != e; ++i, ++n) {
        IRB.CreateStore(
            DFSF.getShadow(*i),
            IRB.CreateConstGEP2_32(VarArgArrayTy, VarArgShadow, 0, n));
        Args.push_back(*i);
      }
    }

    CallSite NewCS;
    if (InvokeInst *II = dyn_cast<InvokeInst>(CS.getInstruction())) {
      NewCS = IRB.CreateInvoke(NewFT, Func, II->getNormalDest(),
                               II->getUnwindDest(), Args);
    } else {
      NewCS = IRB.CreateCall(NewFT, Func, Args);
    }
    NewCS.setCallingConv(CS.getCallingConv());
    NewCS.setAttributes(CS.getAttributes().removeAttributes(
        *DFSF.DFS.Ctx, AttributeList::ReturnIndex,
        AttributeFuncs::typeIncompatible(NewCS.getInstruction()->getType())));

    if (Next) {
      // The rewritten call returns {value, shadow}; unpack both and redirect
      // users of the old call to the extracted value.
      ExtractValueInst *ExVal =
          ExtractValueInst::Create(NewCS.getInstruction(), 0, "", Next);
      DFSF.SkipInsts.insert(ExVal);
      ExtractValueInst *ExShadow =
          ExtractValueInst::Create(NewCS.getInstruction(), 1, "", Next);
      DFSF.SkipInsts.insert(ExShadow);
      DFSF.setShadow(ExVal, ExShadow);
      DFSF.NonZeroChecks.push_back(ExShadow);

      CS.getInstruction()->replaceAllUsesWith(ExVal);
    }

    CS.getInstruction()->eraseFromParent();
  }
}
  1568. void DFSanVisitor::visitPHINode(PHINode &PN) {
  1569. PHINode *ShadowPN =
  1570. PHINode::Create(DFSF.DFS.ShadowTy, PN.getNumIncomingValues(), "", &PN);
  1571. // Give the shadow phi node valid predecessors to fool SplitEdge into working.
  1572. Value *UndefShadow = UndefValue::get(DFSF.DFS.ShadowTy);
  1573. for (PHINode::block_iterator i = PN.block_begin(), e = PN.block_end(); i != e;
  1574. ++i) {
  1575. ShadowPN->addIncoming(UndefShadow, *i);
  1576. }
  1577. DFSF.PHIFixups.push_back(std::make_pair(&PN, ShadowPN));
  1578. DFSF.setShadow(&PN, ShadowPN);
  1579. }