CGVTables.cpp 37 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
3704705706707708709710711712713714715716717718719720721722723724725726727728729730731732733734735736737738739740741742743744745746747748749750751752753754755756757758759760761762763764765766767768769770771772773774775776777778779780781782783784785786787788789790791792793794795796797798799800801802803804805806807808809810811812813814815816817818819820821822823824825826827828829830831832833834835836837838839840841842843844845846847848849850851852853854855856857858859860861862863864865866867868869870871872873874875876877878879880881882883884885886887888889890891892893894895896897898899900901902903904905906907908909910911912913914915916917918919920921922923924925926927928929930931932933934935936937938939940941942943944945946947948949950951952953954955956957958959960961962963964965966967968969970971972973974975976977978979980981982983984985986987988989990991
  1. //===--- CGVTables.cpp - Emit LLVM Code for C++ vtables -------------------===//
  2. //
  3. // The LLVM Compiler Infrastructure
  4. //
  5. // This file is distributed under the University of Illinois Open Source
  6. // License. See LICENSE.TXT for details.
  7. //
  8. //===----------------------------------------------------------------------===//
  9. //
  10. // This contains code dealing with C++ code generation of virtual tables.
  11. //
  12. //===----------------------------------------------------------------------===//
  13. #include "CGCXXABI.h"
  14. #include "CodeGenFunction.h"
  15. #include "CodeGenModule.h"
  16. #include "ConstantBuilder.h"
  17. #include "clang/AST/CXXInheritance.h"
  18. #include "clang/AST/RecordLayout.h"
  19. #include "clang/CodeGen/CGFunctionInfo.h"
  20. #include "clang/Frontend/CodeGenOptions.h"
  21. #include "llvm/Support/Format.h"
  22. #include "llvm/Transforms/Utils/Cloning.h"
  23. #include <algorithm>
  24. #include <cstdio>
  25. using namespace clang;
  26. using namespace CodeGen;
/// Construct the vtable-emission helper for a module; caches the
/// AST-level vtable context used to compute layouts and thunk lists.
CodeGenVTables::CodeGenVTables(CodeGenModule &CGM)
    : CGM(CGM), VTContext(CGM.getContext().getVTableContext()) {}
/// Return the (possibly not-yet-defined) LLVM function for the thunk
/// described by \p Thunk for the method \p GD.  The declaration is created
/// on first use; the body is emitted separately by emitThunk().
llvm::Constant *CodeGenModule::GetAddrOfThunk(GlobalDecl GD,
                                              const ThunkInfo &Thunk) {
  const CXXMethodDecl *MD = cast<CXXMethodDecl>(GD.getDecl());

  // Compute the mangled name.
  SmallString<256> Name;
  llvm::raw_svector_ostream Out(Name);
  // Destructor thunks have a dedicated mangling that encodes the dtor kind
  // (complete/deleting) together with the 'this' adjustment.
  if (const CXXDestructorDecl* DD = dyn_cast<CXXDestructorDecl>(MD))
    getCXXABI().getMangleContext().mangleCXXDtorThunk(DD, GD.getDtorType(),
                                                      Thunk.This, Out);
  else
    getCXXABI().getMangleContext().mangleThunk(MD, Thunk, Out);

  llvm::Type *Ty = getTypes().GetFunctionTypeForVTable(GD);
  return GetOrCreateLLVMFunction(Name, Ty, GD, /*ForVTable=*/true,
                                 /*DontDefer=*/true, /*IsThunk=*/true);
}
/// Give the thunk the same symbol visibility as the method it forwards to.
static void setThunkVisibility(CodeGenModule &CGM, const CXXMethodDecl *MD,
                               const ThunkInfo &Thunk, llvm::Function *Fn) {
  CGM.setGlobalVisibility(Fn, MD);
}
/// Set linkage, visibility, and COMDAT membership on an emitted thunk so
/// they are consistent with the method it stands in for.
static void setThunkProperties(CodeGenModule &CGM, const ThunkInfo &Thunk,
                               llvm::Function *ThunkFn, bool ForVTable,
                               GlobalDecl GD) {
  CGM.setFunctionLinkage(GD, ThunkFn);
  // The C++ ABI may choose a different linkage for return-adjusting thunks.
  CGM.getCXXABI().setThunkLinkage(ThunkFn, ForVTable, GD,
                                  !Thunk.Return.isEmpty());

  // Set the right visibility.
  const CXXMethodDecl *MD = cast<CXXMethodDecl>(GD.getDecl());
  setThunkVisibility(CGM, MD, Thunk, ThunkFn);

  // Weak-for-linker thunks get their own COMDAT group so duplicate
  // definitions across TUs can be folded by the linker.
  if (CGM.supportsCOMDAT() && ThunkFn->isWeakForLinker())
    ThunkFn->setComdat(CGM.getModule().getOrInsertComdat(ThunkFn->getName()));
}
  60. #ifndef NDEBUG
  61. static bool similar(const ABIArgInfo &infoL, CanQualType typeL,
  62. const ABIArgInfo &infoR, CanQualType typeR) {
  63. return (infoL.getKind() == infoR.getKind() &&
  64. (typeL == typeR ||
  65. (isa<PointerType>(typeL) && isa<PointerType>(typeR)) ||
  66. (isa<ReferenceType>(typeL) && isa<ReferenceType>(typeR))));
  67. }
  68. #endif
/// Apply the covariant return adjustment described by \p Thunk to the value
/// returned from the callee.  For pointer results the adjustment is guarded
/// by a null check, since offsetting a null pointer would fabricate a bogus
/// non-null result; references can never be null and skip the check.
static RValue PerformReturnAdjustment(CodeGenFunction &CGF,
                                      QualType ResultType, RValue RV,
                                      const ThunkInfo &Thunk) {
  // Emit the return adjustment.
  bool NullCheckValue = !ResultType->isReferenceType();

  llvm::BasicBlock *AdjustNull = nullptr;
  llvm::BasicBlock *AdjustNotNull = nullptr;
  llvm::BasicBlock *AdjustEnd = nullptr;

  llvm::Value *ReturnValue = RV.getScalarVal();

  if (NullCheckValue) {
    AdjustNull = CGF.createBasicBlock("adjust.null");
    AdjustNotNull = CGF.createBasicBlock("adjust.notnull");
    AdjustEnd = CGF.createBasicBlock("adjust.end");

    llvm::Value *IsNull = CGF.Builder.CreateIsNull(ReturnValue);
    CGF.Builder.CreateCondBr(IsNull, AdjustNull, AdjustNotNull);
    CGF.EmitBlock(AdjustNotNull);
  }

  auto ClassDecl = ResultType->getPointeeType()->getAsCXXRecordDecl();
  auto ClassAlign = CGF.CGM.getClassPointerAlignment(ClassDecl);
  // Delegate the actual pointer arithmetic to the ABI (Itanium vs MS differ).
  ReturnValue = CGF.CGM.getCXXABI().performReturnAdjustment(CGF,
                                            Address(ReturnValue, ClassAlign),
                                            Thunk.Return);

  if (NullCheckValue) {
    CGF.Builder.CreateBr(AdjustEnd);
    CGF.EmitBlock(AdjustNull);
    CGF.Builder.CreateBr(AdjustEnd);
    CGF.EmitBlock(AdjustEnd);

    // Merge the adjusted pointer with the untouched null from the null path.
    llvm::PHINode *PHI = CGF.Builder.CreatePHI(ReturnValue->getType(), 2);
    PHI->addIncoming(ReturnValue, AdjustNotNull);
    PHI->addIncoming(llvm::Constant::getNullValue(ReturnValue->getType()),
                     AdjustNull);
    ReturnValue = PHI;
  }

  return RValue::get(ReturnValue);
}
// This function does roughly the same thing as GenerateThunk, but in a
// very different way, so that va_start and va_end work correctly.
// FIXME: This function assumes "this" is the first non-sret LLVM argument of
//        a function, and that there is an alloca built in the entry block
//        for all accesses to "this".
// FIXME: This function assumes there is only one "ret" statement per function.
// FIXME: Cloning isn't correct in the presence of indirect goto!
// FIXME: This implementation of thunks bloats codesize by duplicating the
//        function definition. There are alternatives:
//        1. Add some sort of stub support to LLVM for cases where we can
//           do a this adjustment, then a sibcall.
//        2. We could transform the definition to take a va_list instead of an
//           actual variable argument list, then have the thunks (including a
//           no-op thunk for the regular definition) call va_start/va_end.
//           There's a bit of per-call overhead for this solution, but it's
//           better for codesize if the definition is long.
llvm::Function *
CodeGenFunction::GenerateVarArgsThunk(llvm::Function *Fn,
                                      const CGFunctionInfo &FnInfo,
                                      GlobalDecl GD, const ThunkInfo &Thunk) {
  const CXXMethodDecl *MD = cast<CXXMethodDecl>(GD.getDecl());
  const FunctionProtoType *FPT = MD->getType()->getAs<FunctionProtoType>();
  QualType ResultType = FPT->getReturnType();

  // Get the original function
  assert(FnInfo.isVariadic());
  llvm::Type *Ty = CGM.getTypes().GetFunctionType(FnInfo);
  llvm::Value *Callee = CGM.GetAddrOfFunction(GD, Ty, /*ForVTable=*/true);
  llvm::Function *BaseFn = cast<llvm::Function>(Callee);

  // Clone to thunk.  The clone keeps the body of the target, so va_start/
  // va_end see the real (variadic) argument list of the thunk itself.
  llvm::ValueToValueMapTy VMap;
  llvm::Function *NewFn = llvm::CloneFunction(BaseFn, VMap);
  Fn->replaceAllUsesWith(NewFn);
  NewFn->takeName(Fn);
  Fn->eraseFromParent();
  Fn = NewFn;

  // "Initialize" CGF (minimally).
  CurFn = Fn;

  // Get the "this" value
  llvm::Function::arg_iterator AI = Fn->arg_begin();
  if (CGM.ReturnTypeUsesSRet(FnInfo))
    ++AI;

  // Find the first store of "this", which will be to the alloca associated
  // with "this".
  Address ThisPtr(&*AI, CGM.getClassPointerAlignment(MD->getParent()));
  llvm::BasicBlock *EntryBB = &Fn->front();
  llvm::BasicBlock::iterator ThisStore =
      std::find_if(EntryBB->begin(), EntryBB->end(), [&](llvm::Instruction &I) {
        return isa<llvm::StoreInst>(I) &&
               I.getOperand(0) == ThisPtr.getPointer();
      });
  assert(ThisStore != EntryBB->end() &&
         "Store of this should be in entry block?");
  // Adjust "this", if necessary: rewrite the stored value in place so every
  // later load of the alloca observes the adjusted pointer.
  Builder.SetInsertPoint(&*ThisStore);
  llvm::Value *AdjustedThisPtr =
      CGM.getCXXABI().performThisAdjustment(*this, ThisPtr, Thunk.This);
  ThisStore->setOperand(0, AdjustedThisPtr);

  if (!Thunk.Return.isEmpty()) {
    // Fix up the returned value, if necessary: replace each 'ret' with a
    // return of the covariant-adjusted value.
    for (llvm::BasicBlock &BB : *Fn) {
      llvm::Instruction *T = BB.getTerminator();
      if (isa<llvm::ReturnInst>(T)) {
        RValue RV = RValue::get(T->getOperand(0));
        T->eraseFromParent();
        Builder.SetInsertPoint(&BB);
        RV = PerformReturnAdjustment(*this, ResultType, RV, Thunk);
        Builder.CreateRet(RV.getScalarVal());
        break;
      }
    }
  }

  return Fn;
}
/// Begin emitting a thunk body: set up CurGD/CurFn state, build the
/// implicit-'this' plus declared parameter list, and run the instance
/// function prologue.  Paired with FinishThunk().
void CodeGenFunction::StartThunk(llvm::Function *Fn, GlobalDecl GD,
                                 const CGFunctionInfo &FnInfo) {
  assert(!CurGD.getDecl() && "CurGD was already set!");
  CurGD = GD;
  CurFuncIsThunk = true;

  // Build FunctionArgs.
  const CXXMethodDecl *MD = cast<CXXMethodDecl>(GD.getDecl());
  QualType ThisType = MD->getThisType(getContext());
  const FunctionProtoType *FPT = MD->getType()->getAs<FunctionProtoType>();
  // Some ABIs return 'this' (or a most-derived void*) from particular entry
  // points instead of the declared return type.
  QualType ResultType = CGM.getCXXABI().HasThisReturn(GD)
                            ? ThisType
                            : CGM.getCXXABI().hasMostDerivedReturn(GD)
                                  ? CGM.getContext().VoidPtrTy
                                  : FPT->getReturnType();
  FunctionArgList FunctionArgs;

  // Create the implicit 'this' parameter declaration.
  CGM.getCXXABI().buildThisParam(*this, FunctionArgs);

  // Add the rest of the parameters.
  FunctionArgs.append(MD->param_begin(), MD->param_end());

  if (isa<CXXDestructorDecl>(MD))
    CGM.getCXXABI().addImplicitStructorParams(*this, ResultType, FunctionArgs);

  // Start defining the function.
  auto NL = ApplyDebugLocation::CreateEmpty(*this);
  StartFunction(GlobalDecl(), ResultType, Fn, FnInfo, FunctionArgs,
                MD->getLocation());
  // Create a scope with an artificial location for the body of this function.
  auto AL = ApplyDebugLocation::CreateArtificial(*this);

  // Since we didn't pass a GlobalDecl to StartFunction, do this ourselves.
  CGM.getCXXABI().EmitInstanceFunctionProlog(*this);
  CXXThisValue = CXXABIThisValue;
  CurCodeDecl = MD;
  CurFuncDecl = MD;
}
/// End a thunk started with StartThunk(): reset the decl state that
/// StartThunk set manually, then run the normal function epilogue.
void CodeGenFunction::FinishThunk() {
  // Clear these to restore the invariants expected by
  // StartFunction/FinishFunction.
  CurCodeDecl = nullptr;
  CurFuncDecl = nullptr;

  FinishFunction();
}
/// Emit the forwarding call of a thunk body and the return of its result.
/// Adjusts 'this' (when \p Thunk is non-null), forwards all declared
/// parameters, optionally applies the covariant return adjustment, and
/// finishes the function.  Inalloca-using targets take the musttail path.
void CodeGenFunction::EmitCallAndReturnForThunk(llvm::Constant *CalleePtr,
                                                const ThunkInfo *Thunk) {
  assert(isa<CXXMethodDecl>(CurGD.getDecl()) &&
         "Please use a new CGF for this thunk");
  const CXXMethodDecl *MD = cast<CXXMethodDecl>(CurGD.getDecl());

  // Adjust the 'this' pointer if necessary
  llvm::Value *AdjustedThisPtr =
    Thunk ? CGM.getCXXABI().performThisAdjustment(
                *this, LoadCXXThisAddress(), Thunk->This)
          : LoadCXXThis();

  if (CurFnInfo->usesInAlloca()) {
    // We don't handle return adjusting thunks, because they require us to call
    // the copy constructor.  For now, fall through and pretend the return
    // adjustment was empty so we don't crash.
    if (Thunk && !Thunk->Return.isEmpty()) {
      CGM.ErrorUnsupported(
          MD, "non-trivial argument copy for return-adjusting thunk");
    }
    EmitMustTailThunk(MD, AdjustedThisPtr, CalleePtr);
    return;
  }

  // Start building CallArgs.
  CallArgList CallArgs;
  QualType ThisType = MD->getThisType(getContext());
  CallArgs.add(RValue::get(AdjustedThisPtr), ThisType);

  if (isa<CXXDestructorDecl>(MD))
    CGM.getCXXABI().adjustCallArgsForDestructorThunk(*this, CurGD, CallArgs);

  // Count the implicit prefix args ('this' plus any ABI destructor args);
  // they are not part of the source-level parameter list.
  unsigned PrefixArgs = CallArgs.size() - 1;

  // Add the rest of the arguments.
  for (const ParmVarDecl *PD : MD->parameters())
    EmitDelegateCallArg(CallArgs, PD, SourceLocation());

  const FunctionProtoType *FPT = MD->getType()->getAs<FunctionProtoType>();

#ifndef NDEBUG
  // Cross-check: the ABI info of the call we are about to make must be
  // compatible with the thunk's own function info.
  const CGFunctionInfo &CallFnInfo = CGM.getTypes().arrangeCXXMethodCall(
      CallArgs, FPT, RequiredArgs::forPrototypePlus(FPT, 1, MD), PrefixArgs);
  assert(CallFnInfo.getRegParm() == CurFnInfo->getRegParm() &&
         CallFnInfo.isNoReturn() == CurFnInfo->isNoReturn() &&
         CallFnInfo.getCallingConvention() == CurFnInfo->getCallingConvention());
  assert(isa<CXXDestructorDecl>(MD) || // ignore dtor return types
         similar(CallFnInfo.getReturnInfo(), CallFnInfo.getReturnType(),
                 CurFnInfo->getReturnInfo(), CurFnInfo->getReturnType()));
  assert(CallFnInfo.arg_size() == CurFnInfo->arg_size());
  for (unsigned i = 0, e = CurFnInfo->arg_size(); i != e; ++i)
    assert(similar(CallFnInfo.arg_begin()[i].info,
                   CallFnInfo.arg_begin()[i].type,
                   CurFnInfo->arg_begin()[i].info,
                   CurFnInfo->arg_begin()[i].type));
#endif

  // Determine whether we have a return value slot to use.
  QualType ResultType = CGM.getCXXABI().HasThisReturn(CurGD)
                            ? ThisType
                            : CGM.getCXXABI().hasMostDerivedReturn(CurGD)
                                  ? CGM.getContext().VoidPtrTy
                                  : FPT->getReturnType();
  ReturnValueSlot Slot;
  if (!ResultType->isVoidType() &&
      CurFnInfo->getReturnInfo().getKind() == ABIArgInfo::Indirect &&
      !hasScalarEvaluationKind(CurFnInfo->getReturnType()))
    Slot = ReturnValueSlot(ReturnValue, ResultType.isVolatileQualified());

  // Now emit our call.
  llvm::Instruction *CallOrInvoke;
  CGCallee Callee = CGCallee::forDirect(CalleePtr, MD);
  RValue RV = EmitCall(*CurFnInfo, Callee, Slot, CallArgs, &CallOrInvoke);

  // Consider return adjustment if we have ThunkInfo.
  if (Thunk && !Thunk->Return.isEmpty())
    RV = PerformReturnAdjustment(*this, ResultType, RV, *Thunk);
  else if (llvm::CallInst* Call = dyn_cast<llvm::CallInst>(CallOrInvoke))
    // Pure forwarding call: mark it tail so the backend can turn the thunk
    // into a jump where the target supports it.
    Call->setTailCallKind(llvm::CallInst::TCK_Tail);

  // Emit return.
  if (!ResultType->isVoidType() && Slot.isNull())
    CGM.getCXXABI().EmitReturnFromThunk(*this, RV, ResultType);

  // Disable the final ARC autorelease.
  AutoreleaseResult = false;

  FinishThunk();
}
/// Emit a thunk body as a raw musttail call that forwards the LLVM-level
/// arguments verbatim (with 'this' swapped for its adjusted value).  Used
/// when the prototype uses inalloca, where re-marshalling the arguments via
/// CGCall would require copy constructors.
void CodeGenFunction::EmitMustTailThunk(const CXXMethodDecl *MD,
                                        llvm::Value *AdjustedThisPtr,
                                        llvm::Value *CalleePtr) {
  // Emitting a musttail call thunk doesn't use any of the CGCall.cpp machinery
  // to translate AST arguments into LLVM IR arguments.  For thunks, we know
  // that the caller prototype more or less matches the callee prototype with
  // the exception of 'this'.
  SmallVector<llvm::Value *, 8> Args;
  for (llvm::Argument &A : CurFn->args())
    Args.push_back(&A);

  // Set the adjusted 'this' pointer.
  const ABIArgInfo &ThisAI = CurFnInfo->arg_begin()->info;
  if (ThisAI.isDirect()) {
    // 'this' is passed as a plain argument; it is slot 1 when an sret
    // pointer precedes it, otherwise slot 0.
    const ABIArgInfo &RetAI = CurFnInfo->getReturnInfo();
    int ThisArgNo = RetAI.isIndirect() && !RetAI.isSRetAfterThis() ? 1 : 0;
    llvm::Type *ThisType = Args[ThisArgNo]->getType();
    if (ThisType != AdjustedThisPtr->getType())
      AdjustedThisPtr = Builder.CreateBitCast(AdjustedThisPtr, ThisType);
    Args[ThisArgNo] = AdjustedThisPtr;
  } else {
    // 'this' lives in the inalloca buffer; overwrite it in place.
    assert(ThisAI.isInAlloca() && "this is passed directly or inalloca");
    Address ThisAddr = GetAddrOfLocalVar(CXXABIThisDecl);
    llvm::Type *ThisType = ThisAddr.getElementType();
    if (ThisType != AdjustedThisPtr->getType())
      AdjustedThisPtr = Builder.CreateBitCast(AdjustedThisPtr, ThisType);
    Builder.CreateStore(AdjustedThisPtr, ThisAddr);
  }

  // Emit the musttail call manually.  Even if the prologue pushed cleanups, we
  // don't actually want to run them.
  llvm::CallInst *Call = Builder.CreateCall(CalleePtr, Args);
  Call->setTailCallKind(llvm::CallInst::TCK_MustTail);

  // Apply the standard set of call attributes.
  unsigned CallingConv;
  CodeGen::AttributeListType AttributeList;
  CGM.ConstructAttributeList(CalleePtr->getName(),
                             *CurFnInfo, MD, AttributeList,
                             CallingConv, /*AttrOnCallSite=*/true);
  llvm::AttributeSet Attrs =
      llvm::AttributeSet::get(getLLVMContext(), AttributeList);
  Call->setAttributes(Attrs);
  Call->setCallingConv(static_cast<llvm::CallingConv::ID>(CallingConv));

  // A musttail call must be immediately followed by the return.
  if (Call->getType()->isVoidTy())
    Builder.CreateRetVoid();
  else
    Builder.CreateRet(Call);

  // Finish the function to maintain CodeGenFunction invariants.
  // FIXME: Don't emit unreachable code.
  EmitBlock(createBasicBlock());
  FinishFunction();
}
/// Emit the body of a (non-variadic) thunk: start the thunk prologue, then
/// emit a forwarding call to the real method with the required 'this' and
/// return adjustments.
void CodeGenFunction::generateThunk(llvm::Function *Fn,
                                    const CGFunctionInfo &FnInfo,
                                    GlobalDecl GD, const ThunkInfo &Thunk) {
  StartThunk(Fn, GD, FnInfo);
  // Create a scope with an artificial location for the body of this function.
  auto AL = ApplyDebugLocation::CreateArtificial(*this);

  // Get our callee.
  llvm::Type *Ty =
    CGM.getTypes().GetFunctionType(CGM.getTypes().arrangeGlobalDeclaration(GD));
  llvm::Constant *Callee = CGM.GetAddrOfFunction(GD, Ty, /*ForVTable=*/true);

  // Make the call and return the result.
  EmitCallAndReturnForThunk(Callee, &Thunk);
}
/// Emit (or re-emit) the definition of the thunk for \p GD described by
/// \p Thunk.  Handles replacing a previously-emitted declaration whose type
/// no longer matches, varargs thunks (which clone the target), and the
/// choice of available_externally linkage for vtable-driven emission.
void CodeGenVTables::emitThunk(GlobalDecl GD, const ThunkInfo &Thunk,
                               bool ForVTable) {
  const CGFunctionInfo &FnInfo = CGM.getTypes().arrangeGlobalDeclaration(GD);

  // FIXME: re-use FnInfo in this computation.
  llvm::Constant *C = CGM.GetAddrOfThunk(GD, Thunk);
  llvm::GlobalValue *Entry;

  // Strip off a bitcast if we got one back.
  if (llvm::ConstantExpr *CE = dyn_cast<llvm::ConstantExpr>(C)) {
    assert(CE->getOpcode() == llvm::Instruction::BitCast);
    Entry = cast<llvm::GlobalValue>(CE->getOperand(0));
  } else {
    Entry = cast<llvm::GlobalValue>(C);
  }

  // There's already a declaration with the same name, check if it has the same
  // type or if we need to replace it.
  if (Entry->getType()->getElementType() !=
      CGM.getTypes().GetFunctionTypeForVTable(GD)) {
    llvm::GlobalValue *OldThunkFn = Entry;

    // If the types mismatch then we have to rewrite the definition.
    assert(OldThunkFn->isDeclaration() &&
           "Shouldn't replace non-declaration");

    // Remove the name from the old thunk function and get a new thunk.
    OldThunkFn->setName(StringRef());
    Entry = cast<llvm::GlobalValue>(CGM.GetAddrOfThunk(GD, Thunk));

    // If needed, replace the old thunk with a bitcast.
    if (!OldThunkFn->use_empty()) {
      llvm::Constant *NewPtrForOldDecl =
        llvm::ConstantExpr::getBitCast(Entry, OldThunkFn->getType());
      OldThunkFn->replaceAllUsesWith(NewPtrForOldDecl);
    }

    // Remove the old thunk.
    OldThunkFn->eraseFromParent();
  }

  llvm::Function *ThunkFn = cast<llvm::Function>(Entry);
  bool ABIHasKeyFunctions = CGM.getTarget().getCXXABI().hasKeyFunctions();
  bool UseAvailableExternallyLinkage = ForVTable && ABIHasKeyFunctions;

  if (!ThunkFn->isDeclaration()) {
    if (!ABIHasKeyFunctions || UseAvailableExternallyLinkage) {
      // There is already a thunk emitted for this function, do nothing.
      return;
    }

    // A previous available_externally copy exists; re-emit with the real
    // linkage/visibility properties.
    setThunkProperties(CGM, Thunk, ThunkFn, ForVTable, GD);
    return;
  }

  CGM.SetLLVMFunctionAttributesForDefinition(GD.getDecl(), ThunkFn);

  if (ThunkFn->isVarArg()) {
    // Varargs thunks are special; we can't just generate a call because
    // we can't copy the varargs.  Our implementation is rather
    // expensive/sucky at the moment, so don't generate the thunk unless
    // we have to.
    // FIXME: Do something better here; GenerateVarArgsThunk is extremely ugly.
    if (UseAvailableExternallyLinkage)
      return;
    ThunkFn =
        CodeGenFunction(CGM).GenerateVarArgsThunk(ThunkFn, FnInfo, GD, Thunk);
  } else {
    // Normal thunk body generation.
    CodeGenFunction(CGM).generateThunk(ThunkFn, FnInfo, GD, Thunk);
  }

  setThunkProperties(CGM, Thunk, ThunkFn, ForVTable, GD);
}
/// Possibly emit a vtable-referenced thunk speculatively in this TU.
void CodeGenVTables::maybeEmitThunkForVTable(GlobalDecl GD,
                                             const ThunkInfo &Thunk) {
  // If the ABI has key functions, only the TU with the key function should emit
  // the thunk. However, we can allow inlining of thunks if we emit them with
  // available_externally linkage together with vtables when optimizations are
  // enabled.
  if (CGM.getTarget().getCXXABI().hasKeyFunctions() &&
      !CGM.getCodeGenOpts().OptimizationLevel)
    return;

  // We can't emit thunks for member functions with incomplete types.
  const CXXMethodDecl *MD = cast<CXXMethodDecl>(GD.getDecl());
  if (!CGM.getTypes().isFuncTypeConvertible(
          MD->getType()->castAs<FunctionType>()))
    return;

  emitThunk(GD, Thunk, /*ForVTable=*/true);
}
/// Emit every thunk required for the method \p GD (as computed by the
/// vtable context), skipping base-object destructors, which never need one.
void CodeGenVTables::EmitThunks(GlobalDecl GD)
{
  const CXXMethodDecl *MD =
    cast<CXXMethodDecl>(GD.getDecl())->getCanonicalDecl();

  // We don't need to generate thunks for the base destructor.
  if (isa<CXXDestructorDecl>(MD) && GD.getDtorType() == Dtor_Base)
    return;

  const VTableContextBase::ThunkInfoVectorTy *ThunkInfoVector =
      VTContext->getThunkInfo(GD);

  // No entry means the method needs no thunks at all.
  if (!ThunkInfoVector)
    return;

  for (const ThunkInfo& Thunk : *ThunkInfoVector)
    emitThunk(GD, Thunk, /*ForVTable=*/false);
}
/// Append the constant for vtable component \p idx of \p layout to
/// \p builder (as an i8*).  \p nextVTableThunkIndex is a cursor into the
/// layout's flat thunk list and is advanced when a thunk entry is consumed;
/// callers iterate components in order so the cursor stays in sync.
void CodeGenVTables::addVTableComponent(
    ConstantArrayBuilder &builder, const VTableLayout &layout,
    unsigned idx, llvm::Constant *rtti, unsigned &nextVTableThunkIndex) {
  auto &component = layout.vtable_components()[idx];

  // Offsets are encoded as ptrdiff_t values cast to i8*.
  auto addOffsetConstant = [&](CharUnits offset) {
    builder.add(llvm::ConstantExpr::getIntToPtr(
        llvm::ConstantInt::get(CGM.PtrDiffTy, offset.getQuantity()),
        CGM.Int8PtrTy));
  };

  switch (component.getKind()) {
  case VTableComponent::CK_VCallOffset:
    return addOffsetConstant(component.getVCallOffset());

  case VTableComponent::CK_VBaseOffset:
    return addOffsetConstant(component.getVBaseOffset());

  case VTableComponent::CK_OffsetToTop:
    return addOffsetConstant(component.getOffsetToTop());

  case VTableComponent::CK_RTTI:
    return builder.add(llvm::ConstantExpr::getBitCast(rtti, CGM.Int8PtrTy));

  case VTableComponent::CK_FunctionPointer:
  case VTableComponent::CK_CompleteDtorPointer:
  case VTableComponent::CK_DeletingDtorPointer: {
    GlobalDecl GD;

    // Get the right global decl.
    switch (component.getKind()) {
    default:
      llvm_unreachable("Unexpected vtable component kind");
    case VTableComponent::CK_FunctionPointer:
      GD = component.getFunctionDecl();
      break;
    case VTableComponent::CK_CompleteDtorPointer:
      GD = GlobalDecl(component.getDestructorDecl(), Dtor_Complete);
      break;
    case VTableComponent::CK_DeletingDtorPointer:
      GD = GlobalDecl(component.getDestructorDecl(), Dtor_Deleting);
      break;
    }

    if (CGM.getLangOpts().CUDA) {
      // Emit NULL for methods we can't codegen on this
      // side. Otherwise we'd end up with vtable with unresolved
      // references.
      const CXXMethodDecl *MD = cast<CXXMethodDecl>(GD.getDecl());
      // OK on device side: functions w/ __device__ attribute
      // OK on host side: anything except __device__-only functions.
      bool CanEmitMethod =
          CGM.getLangOpts().CUDAIsDevice
              ? MD->hasAttr<CUDADeviceAttr>()
              : (MD->hasAttr<CUDAHostAttr>() || !MD->hasAttr<CUDADeviceAttr>());
      if (!CanEmitMethod)
        return builder.addNullPointer(CGM.Int8PtrTy);
      // Method is acceptable, continue processing as usual.
    }

    // Lazily create the runtime handler function used for pure/deleted
    // virtual slots (e.g. __cxa_pure_virtual).
    auto getSpecialVirtualFn = [&](StringRef name) {
      llvm::FunctionType *fnTy =
          llvm::FunctionType::get(CGM.VoidTy, /*isVarArg=*/false);
      llvm::Constant *fn = CGM.CreateRuntimeFunction(fnTy, name);
      if (auto f = dyn_cast<llvm::Function>(fn))
        f->setUnnamedAddr(llvm::GlobalValue::UnnamedAddr::Global);
      return llvm::ConstantExpr::getBitCast(fn, CGM.Int8PtrTy);
    };

    llvm::Constant *fnPtr;

    // Pure virtual member functions.
    if (cast<CXXMethodDecl>(GD.getDecl())->isPure()) {
      if (!PureVirtualFn)
        PureVirtualFn =
          getSpecialVirtualFn(CGM.getCXXABI().GetPureVirtualCallName());
      fnPtr = PureVirtualFn;

    // Deleted virtual member functions.
    } else if (cast<CXXMethodDecl>(GD.getDecl())->isDeleted()) {
      if (!DeletedVirtualFn)
        DeletedVirtualFn =
          getSpecialVirtualFn(CGM.getCXXABI().GetDeletedVirtualCallName());
      fnPtr = DeletedVirtualFn;

    // Thunks.
    } else if (nextVTableThunkIndex < layout.vtable_thunks().size() &&
               layout.vtable_thunks()[nextVTableThunkIndex].first == idx) {
      auto &thunkInfo = layout.vtable_thunks()[nextVTableThunkIndex].second;

      maybeEmitThunkForVTable(GD, thunkInfo);
      nextVTableThunkIndex++;
      fnPtr = CGM.GetAddrOfThunk(GD, thunkInfo);

    // Otherwise we can use the method definition directly.
    } else {
      llvm::Type *fnTy = CGM.getTypes().GetFunctionTypeForVTable(GD);
      fnPtr = CGM.GetAddrOfFunction(GD, fnTy, /*ForVTable=*/true);
    }

    fnPtr = llvm::ConstantExpr::getBitCast(fnPtr, CGM.Int8PtrTy);
    builder.add(fnPtr);
    return;
  }

  case VTableComponent::CK_UnusedFunctionPointer:
    return builder.addNullPointer(CGM.Int8PtrTy);
  }

  llvm_unreachable("Unexpected vtable component kind");
}
  539. llvm::Type *CodeGenVTables::getVTableType(const VTableLayout &layout) {
  540. SmallVector<llvm::Type *, 4> tys;
  541. for (unsigned i = 0, e = layout.getNumVTables(); i != e; ++i) {
  542. tys.push_back(llvm::ArrayType::get(CGM.Int8PtrTy, layout.getVTableSize(i)));
  543. }
  544. return llvm::StructType::get(CGM.getLLVMContext(), tys);
  545. }
  546. void CodeGenVTables::createVTableInitializer(ConstantStructBuilder &builder,
  547. const VTableLayout &layout,
  548. llvm::Constant *rtti) {
  549. unsigned nextVTableThunkIndex = 0;
  550. for (unsigned i = 0, e = layout.getNumVTables(); i != e; ++i) {
  551. auto vtableElem = builder.beginArray(CGM.Int8PtrTy);
  552. size_t thisIndex = layout.getVTableOffset(i);
  553. size_t nextIndex = thisIndex + layout.getVTableSize(i);
  554. for (unsigned i = thisIndex; i != nextIndex; ++i) {
  555. addVTableComponent(vtableElem, layout, i, rtti, nextVTableThunkIndex);
  556. }
  557. vtableElem.finishAndAddTo(builder);
  558. }
  559. }
/// Emit the construction vtable for base subobject \p Base of class \p RD.
/// Construction vtables are used while constructing/destroying virtual
/// bases; their address points are returned via \p AddressPoints.
llvm::GlobalVariable *
CodeGenVTables::GenerateConstructionVTable(const CXXRecordDecl *RD,
                                      const BaseSubobject &Base,
                                      bool BaseIsVirtual,
                                   llvm::GlobalVariable::LinkageTypes Linkage,
                                      VTableAddressPointsMapTy& AddressPoints) {
  if (CGDebugInfo *DI = CGM.getModuleDebugInfo())
    DI->completeClassData(Base.getBase());

  std::unique_ptr<VTableLayout> VTLayout(
      getItaniumVTableContext().createConstructionVTableLayout(
          Base.getBase(), Base.getBaseOffset(), BaseIsVirtual, RD));

  // Add the address points.
  AddressPoints = VTLayout->getAddressPoints();

  // Get the mangled construction vtable name.
  SmallString<256> OutName;
  llvm::raw_svector_ostream Out(OutName);
  cast<ItaniumMangleContext>(CGM.getCXXABI().getMangleContext())
      .mangleCXXCtorVTable(RD, Base.getBaseOffset().getQuantity(),
                           Base.getBase(), Out);
  StringRef Name = OutName.str();

  llvm::Type *VTType = getVTableType(*VTLayout);

  // Construction vtable symbols are not part of the Itanium ABI, so we cannot
  // guarantee that they actually will be available externally. Instead, when
  // emitting an available_externally VTT, we provide references to an internal
  // linkage construction vtable. The ABI only requires complete-object vtables
  // to be the same for all instances of a type, not construction vtables.
  if (Linkage == llvm::GlobalVariable::AvailableExternallyLinkage)
    Linkage = llvm::GlobalVariable::InternalLinkage;

  // Create the variable that will hold the construction vtable.
  llvm::GlobalVariable *VTable =
      CGM.CreateOrReplaceCXXRuntimeVariable(Name, VTType, Linkage);
  CGM.setGlobalVisibility(VTable, RD);

  // V-tables are always unnamed_addr.
  VTable->setUnnamedAddr(llvm::GlobalValue::UnnamedAddr::Global);

  llvm::Constant *RTTI = CGM.GetAddrOfRTTIDescriptor(
      CGM.getContext().getTagDeclType(Base.getBase()));

  // Create and set the initializer.
  ConstantInitBuilder builder(CGM);
  auto components = builder.beginStruct();
  createVTableInitializer(components, *VTLayout, RTTI);
  components.finishAndSetAsInitializer(VTable);

  // Register type metadata (used by whole-program devirtualization / CFI).
  CGM.EmitVTableTypeMetadata(VTable, *VTLayout.get());

  return VTable;
}
  604. static bool shouldEmitAvailableExternallyVTable(const CodeGenModule &CGM,
  605. const CXXRecordDecl *RD) {
  606. return CGM.getCodeGenOpts().OptimizationLevel > 0 &&
  607. CGM.getCXXABI().canSpeculativelyEmitVTable(RD);
  608. }
/// Compute the required linkage of the vtable for the given class.
///
/// Note that we only call this at the end of the translation unit.
llvm::GlobalVariable::LinkageTypes
CodeGenModule::getVTableLinkage(const CXXRecordDecl *RD) {
  // A class that is not externally visible gets an internal vtable.
  if (!RD->isExternallyVisible())
    return llvm::GlobalVariable::InternalLinkage;

  // We're at the end of the translation unit, so the current key
  // function is fully correct.
  const CXXMethodDecl *keyFunction = Context.getCurrentKeyFunction(RD);
  // dllimport classes skip the key-function path: their vtable is handled
  // by the DLLImportAttr logic further below.
  if (keyFunction && !RD->hasAttr<DLLImportAttr>()) {
    // If this class has a key function, use that to determine the
    // linkage of the vtable.
    const FunctionDecl *def = nullptr;
    // Prefer the key function's definition, if there is one: its properties
    // (inline-ness, template specialization kind) decide the linkage.
    if (keyFunction->hasBody(def))
      keyFunction = cast<CXXMethodDecl>(def);

    switch (keyFunction->getTemplateSpecializationKind()) {
      case TSK_Undeclared:
      case TSK_ExplicitSpecialization:
        // We may only be asked about an external vtable when optimizing or
        // emitting debug info; otherwise the vtable should not be queried
        // at all without a key-function definition in this TU.
        assert((def || CodeGenOpts.OptimizationLevel > 0 ||
                CodeGenOpts.getDebugInfo() != codegenoptions::NoDebugInfo) &&
               "Shouldn't query vtable linkage without key function, "
               "optimizations, or debug info");
        // No definition here, but the optimizer may still want a
        // speculative available_externally copy.
        if (!def && CodeGenOpts.OptimizationLevel > 0)
          return llvm::GlobalVariable::AvailableExternallyLinkage;

        // An inline key function yields a discardable ODR vtable; kexts
        // cannot use weak linkage, so they fall back to internal.
        if (keyFunction->isInlined())
          return !Context.getLangOpts().AppleKext ?
                   llvm::GlobalVariable::LinkOnceODRLinkage :
                   llvm::Function::InternalLinkage;

        return llvm::GlobalVariable::ExternalLinkage;

      case TSK_ImplicitInstantiation:
        return !Context.getLangOpts().AppleKext ?
                 llvm::GlobalVariable::LinkOnceODRLinkage :
                 llvm::Function::InternalLinkage;

      case TSK_ExplicitInstantiationDefinition:
        return !Context.getLangOpts().AppleKext ?
                 llvm::GlobalVariable::WeakODRLinkage :
                 llvm::Function::InternalLinkage;

      case TSK_ExplicitInstantiationDeclaration:
        llvm_unreachable("Should not have been asked to emit this");
    }
  }

  // -fapple-kext mode does not support weak linkage, so we must use
  // internal linkage.
  if (Context.getLangOpts().AppleKext)
    return llvm::Function::InternalLinkage;

  // Default ODR linkages; dll attributes below may override both.
  llvm::GlobalVariable::LinkageTypes DiscardableODRLinkage =
      llvm::GlobalValue::LinkOnceODRLinkage;
  llvm::GlobalVariable::LinkageTypes NonDiscardableODRLinkage =
      llvm::GlobalValue::WeakODRLinkage;
  if (RD->hasAttr<DLLExportAttr>()) {
    // Cannot discard exported vtables.
    DiscardableODRLinkage = NonDiscardableODRLinkage;
  } else if (RD->hasAttr<DLLImportAttr>()) {
    // Imported vtables are available externally.
    DiscardableODRLinkage = llvm::GlobalVariable::AvailableExternallyLinkage;
    NonDiscardableODRLinkage = llvm::GlobalVariable::AvailableExternallyLinkage;
  }

  switch (RD->getTemplateSpecializationKind()) {
    case TSK_Undeclared:
    case TSK_ExplicitSpecialization:
    case TSK_ImplicitInstantiation:
      return DiscardableODRLinkage;

    case TSK_ExplicitInstantiationDeclaration:
      // Explicit instantiations in MSVC do not provide vtables, so we must emit
      // our own.
      if (getTarget().getCXXABI().isMicrosoft())
        return DiscardableODRLinkage;
      return shouldEmitAvailableExternallyVTable(*this, RD)
                 ? llvm::GlobalVariable::AvailableExternallyLinkage
                 : llvm::GlobalVariable::ExternalLinkage;

    case TSK_ExplicitInstantiationDefinition:
      return NonDiscardableODRLinkage;
  }

  llvm_unreachable("Invalid TemplateSpecializationKind!");
}
/// This is a callback from Sema to tell us that a particular vtable is
/// required to be emitted in this translation unit.
///
/// This is only called for vtables that _must_ be emitted (mainly due to key
/// functions). For weak vtables, CodeGen tracks when they are needed and
/// emits them as-needed.
void CodeGenModule::EmitVTable(CXXRecordDecl *theClass) {
  // Delegate all the work (vtable, VTT, ABI-specific tables) to CodeGenVTables.
  VTables.GenerateClassData(theClass);
}
  694. void
  695. CodeGenVTables::GenerateClassData(const CXXRecordDecl *RD) {
  696. if (CGDebugInfo *DI = CGM.getModuleDebugInfo())
  697. DI->completeClassData(RD);
  698. if (RD->getNumVBases())
  699. CGM.getCXXABI().emitVirtualInheritanceTables(RD);
  700. CGM.getCXXABI().emitVTableDefinitions(*this, RD);
  701. }
  702. /// At this point in the translation unit, does it appear that can we
  703. /// rely on the vtable being defined elsewhere in the program?
  704. ///
  705. /// The response is really only definitive when called at the end of
  706. /// the translation unit.
  707. ///
  708. /// The only semantic restriction here is that the object file should
  709. /// not contain a vtable definition when that vtable is defined
  710. /// strongly elsewhere. Otherwise, we'd just like to avoid emitting
  711. /// vtables when unnecessary.
  712. bool CodeGenVTables::isVTableExternal(const CXXRecordDecl *RD) {
  713. assert(RD->isDynamicClass() && "Non-dynamic classes have no VTable.");
  714. // We always synthesize vtables if they are needed in the MS ABI. MSVC doesn't
  715. // emit them even if there is an explicit template instantiation.
  716. if (CGM.getTarget().getCXXABI().isMicrosoft())
  717. return false;
  718. // If we have an explicit instantiation declaration (and not a
  719. // definition), the vtable is defined elsewhere.
  720. TemplateSpecializationKind TSK = RD->getTemplateSpecializationKind();
  721. if (TSK == TSK_ExplicitInstantiationDeclaration)
  722. return true;
  723. // Otherwise, if the class is an instantiated template, the
  724. // vtable must be defined here.
  725. if (TSK == TSK_ImplicitInstantiation ||
  726. TSK == TSK_ExplicitInstantiationDefinition)
  727. return false;
  728. // Otherwise, if the class doesn't have a key function (possibly
  729. // anymore), the vtable must be defined here.
  730. const CXXMethodDecl *keyFunction = CGM.getContext().getCurrentKeyFunction(RD);
  731. if (!keyFunction)
  732. return false;
  733. // Otherwise, if we don't have a definition of the key function, the
  734. // vtable must be defined somewhere else.
  735. return !keyFunction->hasBody();
  736. }
  737. /// Given that we're currently at the end of the translation unit, and
  738. /// we've emitted a reference to the vtable for this class, should
  739. /// we define that vtable?
  740. static bool shouldEmitVTableAtEndOfTranslationUnit(CodeGenModule &CGM,
  741. const CXXRecordDecl *RD) {
  742. // If vtable is internal then it has to be done.
  743. if (!CGM.getVTables().isVTableExternal(RD))
  744. return true;
  745. // If it's external then maybe we will need it as available_externally.
  746. return shouldEmitAvailableExternallyVTable(CGM, RD);
  747. }
  748. /// Given that at some point we emitted a reference to one or more
  749. /// vtables, and that we are now at the end of the translation unit,
  750. /// decide whether we should emit them.
  751. void CodeGenModule::EmitDeferredVTables() {
  752. #ifndef NDEBUG
  753. // Remember the size of DeferredVTables, because we're going to assume
  754. // that this entire operation doesn't modify it.
  755. size_t savedSize = DeferredVTables.size();
  756. #endif
  757. for (const CXXRecordDecl *RD : DeferredVTables)
  758. if (shouldEmitVTableAtEndOfTranslationUnit(*this, RD))
  759. VTables.GenerateClassData(RD);
  760. assert(savedSize == DeferredVTables.size() &&
  761. "deferred extra vtables during vtable emission?");
  762. DeferredVTables.clear();
  763. }
  764. bool CodeGenModule::HasHiddenLTOVisibility(const CXXRecordDecl *RD) {
  765. LinkageInfo LV = RD->getLinkageAndVisibility();
  766. if (!isExternallyVisible(LV.getLinkage()))
  767. return true;
  768. if (RD->hasAttr<LTOVisibilityPublicAttr>() || RD->hasAttr<UuidAttr>())
  769. return false;
  770. if (getTriple().isOSBinFormatCOFF()) {
  771. if (RD->hasAttr<DLLExportAttr>() || RD->hasAttr<DLLImportAttr>())
  772. return false;
  773. } else {
  774. if (LV.getVisibility() != HiddenVisibility)
  775. return false;
  776. }
  777. if (getCodeGenOpts().LTOVisibilityPublicStd) {
  778. const DeclContext *DC = RD;
  779. while (1) {
  780. auto *D = cast<Decl>(DC);
  781. DC = DC->getParent();
  782. if (isa<TranslationUnitDecl>(DC->getRedeclContext())) {
  783. if (auto *ND = dyn_cast<NamespaceDecl>(D))
  784. if (const IdentifierInfo *II = ND->getIdentifier())
  785. if (II->isStr("std") || II->isStr("stdext"))
  786. return false;
  787. break;
  788. }
  789. }
  790. }
  791. return true;
  792. }
  793. void CodeGenModule::EmitVTableTypeMetadata(llvm::GlobalVariable *VTable,
  794. const VTableLayout &VTLayout) {
  795. if (!getCodeGenOpts().LTOUnit)
  796. return;
  797. CharUnits PointerWidth =
  798. Context.toCharUnitsFromBits(Context.getTargetInfo().getPointerWidth(0));
  799. typedef std::pair<const CXXRecordDecl *, unsigned> BSEntry;
  800. std::vector<BSEntry> BitsetEntries;
  801. // Create a bit set entry for each address point.
  802. for (auto &&AP : VTLayout.getAddressPoints())
  803. BitsetEntries.push_back(
  804. std::make_pair(AP.first.getBase(),
  805. VTLayout.getVTableOffset(AP.second.VTableIndex) +
  806. AP.second.AddressPointIndex));
  807. // Sort the bit set entries for determinism.
  808. std::sort(BitsetEntries.begin(), BitsetEntries.end(),
  809. [this](const BSEntry &E1, const BSEntry &E2) {
  810. if (&E1 == &E2)
  811. return false;
  812. std::string S1;
  813. llvm::raw_string_ostream O1(S1);
  814. getCXXABI().getMangleContext().mangleTypeName(
  815. QualType(E1.first->getTypeForDecl(), 0), O1);
  816. O1.flush();
  817. std::string S2;
  818. llvm::raw_string_ostream O2(S2);
  819. getCXXABI().getMangleContext().mangleTypeName(
  820. QualType(E2.first->getTypeForDecl(), 0), O2);
  821. O2.flush();
  822. if (S1 < S2)
  823. return true;
  824. if (S1 != S2)
  825. return false;
  826. return E1.second < E2.second;
  827. });
  828. for (auto BitsetEntry : BitsetEntries)
  829. AddVTableTypeMetadata(VTable, PointerWidth * BitsetEntry.second,
  830. BitsetEntry.first);
  831. }