//===--- CGDeclCXX.cpp - Emit LLVM Code for C++ declarations --------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// This contains code dealing with code generation of C++ declarations
//
//===----------------------------------------------------------------------===//

#include "CodeGenFunction.h"
#include "CGCXXABI.h"
#include "CGObjCRuntime.h"
#include "CGOpenMPRuntime.h"
#include "TargetInfo.h"
#include "clang/Basic/CodeGenOptions.h"
#include "llvm/ADT/StringExtras.h"
#include "llvm/IR/Intrinsics.h"
#include "llvm/IR/MDBuilder.h"
#include "llvm/Support/Path.h"

using namespace clang;
using namespace CodeGen;

static void EmitDeclInit(CodeGenFunction &CGF, const VarDecl &D,
                         ConstantAddress DeclPtr) {
  assert(
      (D.hasGlobalStorage() ||
       (D.hasLocalStorage() && CGF.getContext().getLangOpts().OpenCLCPlusPlus)) &&
      "VarDecl must have global or local (in the case of OpenCL) storage!");
  assert(!D.getType()->isReferenceType() &&
         "Should not call EmitDeclInit on a reference!");

  QualType type = D.getType();
  LValue lv = CGF.MakeAddrLValue(DeclPtr, type);

  const Expr *Init = D.getInit();
  switch (CGF.getEvaluationKind(type)) {
  case TEK_Scalar: {
    CodeGenModule &CGM = CGF.CGM;
    if (lv.isObjCStrong())
      CGM.getObjCRuntime().EmitObjCGlobalAssign(CGF, CGF.EmitScalarExpr(Init),
                                                DeclPtr, D.getTLSKind());
    else if (lv.isObjCWeak())
      CGM.getObjCRuntime().EmitObjCWeakAssign(CGF, CGF.EmitScalarExpr(Init),
                                              DeclPtr);
    else
      CGF.EmitScalarInit(Init, &D, lv, false);
    return;
  }
  case TEK_Complex:
    CGF.EmitComplexExprIntoLValue(Init, lv, /*isInit*/ true);
    return;
  case TEK_Aggregate:
    CGF.EmitAggExpr(Init,
                    AggValueSlot::forLValue(lv, AggValueSlot::IsDestructed,
                                            AggValueSlot::DoesNotNeedGCBarriers,
                                            AggValueSlot::IsNotAliased,
                                            AggValueSlot::DoesNotOverlap));
    return;
  }
  llvm_unreachable("bad evaluation kind");
}

/// Emit code to cause the destruction of the given variable with
/// static storage duration.
static void EmitDeclDestroy(CodeGenFunction &CGF, const VarDecl &D,
                            ConstantAddress Addr) {
  // Honor __attribute__((no_destroy)) and bail instead of attempting
  // to emit a reference to a possibly nonexistent destructor, which
  // in turn can cause a crash. This will result in a global constructor
  // that isn't balanced out by a destructor call as intended by the
  // attribute. This also checks for -fno-c++-static-destructors and
  // bails even if the attribute is not present.
  QualType::DestructionKind DtorKind = D.needsDestruction(CGF.getContext());

  // FIXME: __attribute__((cleanup)) ?

  switch (DtorKind) {
  case QualType::DK_none:
    return;

  case QualType::DK_cxx_destructor:
    break;

  case QualType::DK_objc_strong_lifetime:
  case QualType::DK_objc_weak_lifetime:
  case QualType::DK_nontrivial_c_struct:
    // We don't care about releasing objects during process teardown.
    assert(!D.getTLSKind() && "should have rejected this");
    return;
  }

  llvm::FunctionCallee Func;
  llvm::Constant *Argument;

  CodeGenModule &CGM = CGF.CGM;
  QualType Type = D.getType();

  // Special-case non-array C++ destructors, if they have the right signature.
  // Under some ABIs, destructors return this instead of void, and cannot be
  // passed directly to __cxa_atexit if the target does not allow this
  // mismatch.
  const CXXRecordDecl *Record = Type->getAsCXXRecordDecl();
  bool CanRegisterDestructor =
      Record && (!CGM.getCXXABI().HasThisReturn(
                     GlobalDecl(Record->getDestructor(), Dtor_Complete)) ||
                 CGM.getCXXABI().canCallMismatchedFunctionType());
  // If __cxa_atexit is disabled via a flag, a different helper function is
  // generated elsewhere which uses atexit instead, and it takes the destructor
  // directly.
  bool UsingExternalHelper = !CGM.getCodeGenOpts().CXAAtExit;
  if (Record && (CanRegisterDestructor || UsingExternalHelper)) {
    assert(!Record->hasTrivialDestructor());
    CXXDestructorDecl *Dtor = Record->getDestructor();

    Func = CGM.getAddrAndTypeOfCXXStructor(GlobalDecl(Dtor, Dtor_Complete));
    if (CGF.getContext().getLangOpts().OpenCL) {
      auto DestAS =
          CGM.getTargetCodeGenInfo().getAddrSpaceOfCxaAtexitPtrParam();
      auto DestTy = CGF.getTypes().ConvertType(Type)->getPointerTo(
          CGM.getContext().getTargetAddressSpace(DestAS));
      auto SrcAS = D.getType().getQualifiers().getAddressSpace();
      if (DestAS == SrcAS)
        Argument = llvm::ConstantExpr::getBitCast(Addr.getPointer(), DestTy);
      else
        // FIXME: On addr space mismatch we are passing NULL. The generation
        // of the global destructor function should be adjusted accordingly.
        Argument = llvm::ConstantPointerNull::get(DestTy);
    } else {
      Argument = llvm::ConstantExpr::getBitCast(
          Addr.getPointer(), CGF.getTypes().ConvertType(Type)->getPointerTo());
    }
  // Otherwise, the standard logic requires a helper function.
  } else {
    Func = CodeGenFunction(CGM)
               .generateDestroyHelper(Addr, Type, CGF.getDestroyer(DtorKind),
                                      CGF.needsEHCleanup(DtorKind), &D);
    Argument = llvm::Constant::getNullValue(CGF.Int8PtrTy);
  }

  CGM.getCXXABI().registerGlobalDtor(CGF, D, Func, Argument);
}
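
// Illustrative sketch (not part of the original source): for a namespace-scope
// variable `S s;`, where S stands for some class type with a non-trivial
// destructor, the fast path above typically lets the Itanium ABI register the
// destructor roughly as
//
//   __cxa_atexit((void (*)(void *))&S::~S, &s, &__dso_handle);
//
// while the fallback path hands registerGlobalDtor a synthesized
// __cxx_global_array_dtor helper and a null argument instead. The actual
// runtime call is chosen by the ABI's registerGlobalDtor implementation.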

/// Emit code to cause the variable at the given address to be considered as
/// constant from this point onwards.
static void EmitDeclInvariant(CodeGenFunction &CGF, const VarDecl &D,
                              llvm::Constant *Addr) {
  return CGF.EmitInvariantStart(
      Addr, CGF.getContext().getTypeSizeInChars(D.getType()));
}

void CodeGenFunction::EmitInvariantStart(llvm::Constant *Addr, CharUnits Size) {
  // Do not emit the intrinsic if we're not optimizing.
  if (!CGM.getCodeGenOpts().OptimizationLevel)
    return;

  // Grab the llvm.invariant.start intrinsic.
  llvm::Intrinsic::ID InvStartID = llvm::Intrinsic::invariant_start;
  // Overloaded address space type.
  llvm::Type *ObjectPtr[1] = {Int8PtrTy};
  llvm::Function *InvariantStart = CGM.getIntrinsic(InvStartID, ObjectPtr);

  // Emit a call with the size in bytes of the object.
  uint64_t Width = Size.getQuantity();
  llvm::Value *Args[2] = {llvm::ConstantInt::getSigned(Int64Ty, Width),
                          llvm::ConstantExpr::getBitCast(Addr, Int8PtrTy)};
  Builder.CreateCall(InvariantStart, Args);
}
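
// Illustrative sketch (not part of the original source): for a global `g` whose
// value is known to be constant after initialization, the call built above
// corresponds to IR along the lines of
//
//   %inv = call {}* @llvm.invariant.start.p0i8(i64 <size-in-bytes>,
//                                              i8* bitcast (i32* @g to i8*))
//
// telling the optimizer that the object's contents will not change after this
// point; note that no matching llvm.invariant.end is ever emitted here.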

void CodeGenFunction::EmitCXXGlobalVarDeclInit(const VarDecl &D,
                                               llvm::Constant *DeclPtr,
                                               bool PerformInit) {

  const Expr *Init = D.getInit();
  QualType T = D.getType();

  // The address space of a static local variable (DeclPtr) may be different
  // from the address space of the "this" argument of the constructor. In that
  // case, we need an addrspacecast before calling the constructor.
  //
  // struct StructWithCtor {
  //   __device__ StructWithCtor() {...}
  // };
  // __device__ void foo() {
  //   __shared__ StructWithCtor s;
  //   ...
  // }
  //
  // For example, in the above CUDA code, the static local variable s has a
  // "shared" address space qualifier, but the constructor of StructWithCtor
  // expects "this" in the "generic" address space.
  unsigned ExpectedAddrSpace = getContext().getTargetAddressSpace(T);
  unsigned ActualAddrSpace = DeclPtr->getType()->getPointerAddressSpace();
  if (ActualAddrSpace != ExpectedAddrSpace) {
    llvm::Type *LTy = CGM.getTypes().ConvertTypeForMem(T);
    llvm::PointerType *PTy = llvm::PointerType::get(LTy, ExpectedAddrSpace);
    DeclPtr = llvm::ConstantExpr::getAddrSpaceCast(DeclPtr, PTy);
  }

  ConstantAddress DeclAddr(DeclPtr, getContext().getDeclAlign(&D));

  if (!T->isReferenceType()) {
    if (getLangOpts().OpenMP && !getLangOpts().OpenMPSimd &&
        D.hasAttr<OMPThreadPrivateDeclAttr>()) {
      (void)CGM.getOpenMPRuntime().emitThreadPrivateVarDefinition(
          &D, DeclAddr, D.getAttr<OMPThreadPrivateDeclAttr>()->getLocation(),
          PerformInit, this);
    }
    if (PerformInit)
      EmitDeclInit(*this, D, DeclAddr);
    if (CGM.isTypeConstant(D.getType(), true))
      EmitDeclInvariant(*this, D, DeclPtr);
    else
      EmitDeclDestroy(*this, D, DeclAddr);
    return;
  }

  assert(PerformInit && "cannot have constant initializer which needs "
                        "destruction for reference");
  RValue RV = EmitReferenceBindingToExpr(Init);
  EmitStoreOfScalar(RV.getScalarVal(), DeclAddr, false, T);
}

/// Create a stub function, suitable for being passed to atexit,
/// which passes the given address to the given destructor function.
llvm::Function *CodeGenFunction::createAtExitStub(const VarDecl &VD,
                                                  llvm::FunctionCallee dtor,
                                                  llvm::Constant *addr) {
  // Get the destructor function type, void(*)(void).
  llvm::FunctionType *ty = llvm::FunctionType::get(CGM.VoidTy, false);
  SmallString<256> FnName;
  {
    llvm::raw_svector_ostream Out(FnName);
    CGM.getCXXABI().getMangleContext().mangleDynamicAtExitDestructor(&VD, Out);
  }

  const CGFunctionInfo &FI = CGM.getTypes().arrangeNullaryFunction();
  llvm::Function *fn = CGM.CreateGlobalInitOrDestructFunction(
      ty, FnName.str(), FI, VD.getLocation());

  CodeGenFunction CGF(CGM);

  CGF.StartFunction(GlobalDecl(&VD, DynamicInitKind::AtExit),
                    CGM.getContext().VoidTy, fn, FI, FunctionArgList());

  llvm::CallInst *call = CGF.Builder.CreateCall(dtor, addr);

  // Make sure the call and the callee agree on calling convention.
  if (auto *dtorFn = dyn_cast<llvm::Function>(
          dtor.getCallee()->stripPointerCastsAndAliases()))
    call->setCallingConv(dtorFn->getCallingConv());

  CGF.FinishFunction();

  return fn;
}

/// Register a global destructor using the C atexit runtime function.
void CodeGenFunction::registerGlobalDtorWithAtExit(const VarDecl &VD,
                                                   llvm::FunctionCallee dtor,
                                                   llvm::Constant *addr) {
  // Create a function which calls the destructor.
  llvm::Constant *dtorStub = createAtExitStub(VD, dtor, addr);
  registerGlobalDtorWithAtExit(dtorStub);
}

void CodeGenFunction::registerGlobalDtorWithAtExit(llvm::Constant *dtorStub) {
  // extern "C" int atexit(void (*f)(void));
  llvm::FunctionType *atexitTy =
      llvm::FunctionType::get(IntTy, dtorStub->getType(), false);

  llvm::FunctionCallee atexit =
      CGM.CreateRuntimeFunction(atexitTy, "atexit", llvm::AttributeList(),
                                /*Local=*/true);
  if (llvm::Function *atexitFn = dyn_cast<llvm::Function>(atexit.getCallee()))
    atexitFn->setDoesNotThrow();

  EmitNounwindRuntimeCall(atexit, dtorStub);
}
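
// Illustrative sketch (not part of the original source): when __cxa_atexit is
// not used, the two functions above cooperate to emit roughly
//
//   static void <mangled dtor stub>(void) { s.~S(); }  // createAtExitStub
//   atexit(<mangled dtor stub>);                        // registerGlobalDtorWithAtExit
//
// where `s` and `S` stand for a hypothetical variable and its class type; the
// stub's real name comes from mangleDynamicAtExitDestructor.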

void CodeGenFunction::EmitCXXGuardedInit(const VarDecl &D,
                                         llvm::GlobalVariable *DeclPtr,
                                         bool PerformInit) {
  // If we've been asked to forbid guard variables, emit an error now.
  // This diagnostic is hard-coded for Darwin's use case; we can find
  // better phrasing if someone else needs it.
  if (CGM.getCodeGenOpts().ForbidGuardVariables)
    CGM.Error(D.getLocation(),
              "this initialization requires a guard variable, which "
              "the kernel does not support");

  CGM.getCXXABI().EmitGuardedInit(*this, D, DeclPtr, PerformInit);
}

void CodeGenFunction::EmitCXXGuardedInitBranch(llvm::Value *NeedsInit,
                                               llvm::BasicBlock *InitBlock,
                                               llvm::BasicBlock *NoInitBlock,
                                               GuardKind Kind,
                                               const VarDecl *D) {
  assert((Kind == GuardKind::TlsGuard || D) && "no guarded variable");

  // A guess at how many times we will enter the initialization of a
  // variable, depending on the kind of variable.
  static const uint64_t InitsPerTLSVar = 1024;
  static const uint64_t InitsPerLocalVar = 1024 * 1024;

  llvm::MDNode *Weights;
  if (Kind == GuardKind::VariableGuard && !D->isLocalVarDecl()) {
    // For non-local variables, don't apply any weighting for now. Due to our
    // use of COMDATs, we expect there to be at most one initialization of the
    // variable per DSO, but we have no way to know how many DSOs will try to
    // initialize the variable.
    Weights = nullptr;
  } else {
    uint64_t NumInits;
    // FIXME: For the TLS case, collect and use profiling information to
    // determine a more accurate branch weight.
    if (Kind == GuardKind::TlsGuard || D->getTLSKind())
      NumInits = InitsPerTLSVar;
    else
      NumInits = InitsPerLocalVar;

    // The probability of us entering the initializer is
    //   1 / (total number of times we attempt to initialize the variable).
    llvm::MDBuilder MDHelper(CGM.getLLVMContext());
    Weights = MDHelper.createBranchWeights(1, NumInits - 1);
  }

  Builder.CreateCondBr(NeedsInit, InitBlock, NoInitBlock, Weights);
}
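
// Illustrative sketch (not part of the original source): for a function-local
// static, the conditional branch created above carries profile metadata of
// roughly the form
//
//   br i1 %guard.uninitialized, label %init, label %init.end, !prof !N
//   !N = !{!"branch_weights", i32 1, i32 1048575}
//
// i.e. an estimated 1-in-(1024*1024) chance of entering the initializer,
// matching InitsPerLocalVar above; TLS guards use 1 in 1024 instead.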

llvm::Function *CodeGenModule::CreateGlobalInitOrDestructFunction(
    llvm::FunctionType *FTy, const Twine &Name, const CGFunctionInfo &FI,
    SourceLocation Loc, bool TLS) {
  llvm::Function *Fn =
      llvm::Function::Create(FTy, llvm::GlobalValue::InternalLinkage,
                             Name, &getModule());

  if (!getLangOpts().AppleKext && !TLS) {
    // Set the section if needed.
    if (const char *Section = getTarget().getStaticInitSectionSpecifier())
      Fn->setSection(Section);
  }

  SetInternalFunctionAttributes(GlobalDecl(), Fn, FI);

  Fn->setCallingConv(getRuntimeCC());

  if (!getLangOpts().Exceptions)
    Fn->setDoesNotThrow();

  if (getLangOpts().Sanitize.has(SanitizerKind::Address) &&
      !isInSanitizerBlacklist(SanitizerKind::Address, Fn, Loc))
    Fn->addFnAttr(llvm::Attribute::SanitizeAddress);

  if (getLangOpts().Sanitize.has(SanitizerKind::KernelAddress) &&
      !isInSanitizerBlacklist(SanitizerKind::KernelAddress, Fn, Loc))
    Fn->addFnAttr(llvm::Attribute::SanitizeAddress);

  if (getLangOpts().Sanitize.has(SanitizerKind::HWAddress) &&
      !isInSanitizerBlacklist(SanitizerKind::HWAddress, Fn, Loc))
    Fn->addFnAttr(llvm::Attribute::SanitizeHWAddress);

  if (getLangOpts().Sanitize.has(SanitizerKind::KernelHWAddress) &&
      !isInSanitizerBlacklist(SanitizerKind::KernelHWAddress, Fn, Loc))
    Fn->addFnAttr(llvm::Attribute::SanitizeHWAddress);

  if (getLangOpts().Sanitize.has(SanitizerKind::MemTag) &&
      !isInSanitizerBlacklist(SanitizerKind::MemTag, Fn, Loc))
    Fn->addFnAttr(llvm::Attribute::SanitizeMemTag);

  if (getLangOpts().Sanitize.has(SanitizerKind::Thread) &&
      !isInSanitizerBlacklist(SanitizerKind::Thread, Fn, Loc))
    Fn->addFnAttr(llvm::Attribute::SanitizeThread);

  if (getLangOpts().Sanitize.has(SanitizerKind::Memory) &&
      !isInSanitizerBlacklist(SanitizerKind::Memory, Fn, Loc))
    Fn->addFnAttr(llvm::Attribute::SanitizeMemory);

  if (getLangOpts().Sanitize.has(SanitizerKind::KernelMemory) &&
      !isInSanitizerBlacklist(SanitizerKind::KernelMemory, Fn, Loc))
    Fn->addFnAttr(llvm::Attribute::SanitizeMemory);

  if (getLangOpts().Sanitize.has(SanitizerKind::SafeStack) &&
      !isInSanitizerBlacklist(SanitizerKind::SafeStack, Fn, Loc))
    Fn->addFnAttr(llvm::Attribute::SafeStack);

  if (getLangOpts().Sanitize.has(SanitizerKind::ShadowCallStack) &&
      !isInSanitizerBlacklist(SanitizerKind::ShadowCallStack, Fn, Loc))
    Fn->addFnAttr(llvm::Attribute::ShadowCallStack);

  auto RASignKind = getCodeGenOpts().getSignReturnAddress();
  if (RASignKind != CodeGenOptions::SignReturnAddressScope::None) {
    Fn->addFnAttr("sign-return-address",
                  RASignKind == CodeGenOptions::SignReturnAddressScope::All
                      ? "all"
                      : "non-leaf");
    auto RASignKey = getCodeGenOpts().getSignReturnAddressKey();
    Fn->addFnAttr("sign-return-address-key",
                  RASignKey == CodeGenOptions::SignReturnAddressKeyValue::AKey
                      ? "a_key"
                      : "b_key");
  }

  if (getCodeGenOpts().BranchTargetEnforcement)
    Fn->addFnAttr("branch-target-enforcement");

  return Fn;
}

/// Create a global pointer to a function that will initialize a global
/// variable. The user has requested that this pointer be emitted in a specific
/// section.
void CodeGenModule::EmitPointerToInitFunc(const VarDecl *D,
                                          llvm::GlobalVariable *GV,
                                          llvm::Function *InitFunc,
                                          InitSegAttr *ISA) {
  llvm::GlobalVariable *PtrArray = new llvm::GlobalVariable(
      TheModule, InitFunc->getType(), /*isConstant=*/true,
      llvm::GlobalValue::PrivateLinkage, InitFunc, "__cxx_init_fn_ptr");
  PtrArray->setSection(ISA->getSection());
  addUsedGlobal(PtrArray);

  // If the GV is already in a comdat group, then we have to join it.
  if (llvm::Comdat *C = GV->getComdat())
    PtrArray->setComdat(C);
}

void
CodeGenModule::EmitCXXGlobalVarDeclInitFunc(const VarDecl *D,
                                            llvm::GlobalVariable *Addr,
                                            bool PerformInit) {
  // According to E.2.3.1 in CUDA-7.5 Programming guide: __device__,
  // __constant__ and __shared__ variables defined in namespace scope,
  // that are of class type, cannot have a non-empty constructor. All
  // the checks have been done in Sema by now. Whatever initializers
  // are allowed are empty and we just need to ignore them here.
  if (getLangOpts().CUDA && getLangOpts().CUDAIsDevice &&
      (D->hasAttr<CUDADeviceAttr>() || D->hasAttr<CUDAConstantAttr>() ||
       D->hasAttr<CUDASharedAttr>()))
    return;

  if (getLangOpts().OpenMP &&
      getOpenMPRuntime().emitDeclareTargetVarDefinition(D, Addr, PerformInit))
    return;

  // Check if we've already initialized this decl.
  auto I = DelayedCXXInitPosition.find(D);
  if (I != DelayedCXXInitPosition.end() && I->second == ~0U)
    return;

  llvm::FunctionType *FTy = llvm::FunctionType::get(VoidTy, false);
  SmallString<256> FnName;
  {
    llvm::raw_svector_ostream Out(FnName);
    getCXXABI().getMangleContext().mangleDynamicInitializer(D, Out);
  }

  // Create a variable initialization function.
  llvm::Function *Fn =
      CreateGlobalInitOrDestructFunction(FTy, FnName.str(),
                                         getTypes().arrangeNullaryFunction(),
                                         D->getLocation());

  auto *ISA = D->getAttr<InitSegAttr>();
  CodeGenFunction(*this).GenerateCXXGlobalVarDeclInitFunc(Fn, D, Addr,
                                                          PerformInit);

  llvm::GlobalVariable *COMDATKey =
      supportsCOMDAT() && D->isExternallyVisible() ? Addr : nullptr;

  if (D->getTLSKind()) {
    // FIXME: Should we support init_priority for thread_local?
    // FIXME: We only need to register one __cxa_thread_atexit function for the
    // entire TU.
    CXXThreadLocalInits.push_back(Fn);
    CXXThreadLocalInitVars.push_back(D);
  } else if (PerformInit && ISA) {
    EmitPointerToInitFunc(D, Addr, Fn, ISA);
  } else if (auto *IPA = D->getAttr<InitPriorityAttr>()) {
    OrderGlobalInits Key(IPA->getPriority(), PrioritizedCXXGlobalInits.size());
    PrioritizedCXXGlobalInits.push_back(std::make_pair(Key, Fn));
  } else if (isTemplateInstantiation(D->getTemplateSpecializationKind()) ||
             getContext().GetGVALinkageForVariable(D) == GVA_DiscardableODR) {
    // C++ [basic.start.init]p2:
    //   Definitions of explicitly specialized class template static data
    //   members have ordered initialization. Other class template static data
    //   members (i.e., implicitly or explicitly instantiated specializations)
    //   have unordered initialization.
    //
    // As a consequence, we can put them into their own llvm.global_ctors entry.
    //
    // If the global is externally visible, put the initializer into a COMDAT
    // group with the global being initialized. On most platforms, this is a
    // minor startup time optimization. In the MS C++ ABI, there are no guard
    // variables, so this COMDAT key is required for correctness.
    AddGlobalCtor(Fn, 65535, COMDATKey);
    if (getTarget().getCXXABI().isMicrosoft() && COMDATKey) {
      // In the MS C++ ABI, template static data members are also added to the
      // linker directives, so mark the COMDAT key as used.
      addUsedGlobal(COMDATKey);
    }
  } else if (D->hasAttr<SelectAnyAttr>()) {
    // SelectAny globals will be comdat-folded. Put the initializer into a
    // COMDAT group associated with the global, so the initializers get folded
    // too.
    AddGlobalCtor(Fn, 65535, COMDATKey);
  } else {
    I = DelayedCXXInitPosition.find(D); // Re-do lookup in case of re-hash.
    if (I == DelayedCXXInitPosition.end()) {
      CXXGlobalInits.push_back(Fn);
    } else if (I->second != ~0U) {
      assert(I->second < CXXGlobalInits.size() &&
             CXXGlobalInits[I->second] == nullptr);
      CXXGlobalInits[I->second] = Fn;
    }
  }

  // Remember that we already emitted the initializer for this global.
  DelayedCXXInitPosition[D] = ~0U;
}
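
// Illustrative sketch (not part of the original source): assuming a class S
// with a non-trivial constructor, declarations that take the different
// branches above include, for example,
//
//   thread_local S tls_obj;                             // TLS-init list
//   #pragma init_seg(".CRT$XCU")                        // InitSegAttr (MSVC)
//   __attribute__((init_priority(300))) S prioritized;  // InitPriorityAttr
//   template <class T> S Holder<T>::member;             // unordered COMDAT init
//
// Anything else falls through to the plain CXXGlobalInits list.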

void CodeGenModule::EmitCXXThreadLocalInitFunc() {
  getCXXABI().EmitThreadLocalInitFuncs(
      *this, CXXThreadLocals, CXXThreadLocalInits, CXXThreadLocalInitVars);

  CXXThreadLocalInits.clear();
  CXXThreadLocalInitVars.clear();
  CXXThreadLocals.clear();
}

void
CodeGenModule::EmitCXXGlobalInitFunc() {
  while (!CXXGlobalInits.empty() && !CXXGlobalInits.back())
    CXXGlobalInits.pop_back();

  if (CXXGlobalInits.empty() && PrioritizedCXXGlobalInits.empty())
    return;

  llvm::FunctionType *FTy = llvm::FunctionType::get(VoidTy, false);
  const CGFunctionInfo &FI = getTypes().arrangeNullaryFunction();

  // Create our global initialization function.
  if (!PrioritizedCXXGlobalInits.empty()) {
    SmallVector<llvm::Function *, 8> LocalCXXGlobalInits;
    llvm::array_pod_sort(PrioritizedCXXGlobalInits.begin(),
                         PrioritizedCXXGlobalInits.end());
    // Iterate over "chunks" of ctors with the same priority and emit each
    // chunk into a separate function. Note that everything is sorted first by
    // priority, second by lexical order, so we emit the ctor functions in the
    // proper order.
    for (SmallVectorImpl<GlobalInitData>::iterator
             I = PrioritizedCXXGlobalInits.begin(),
             E = PrioritizedCXXGlobalInits.end();
         I != E;) {
      SmallVectorImpl<GlobalInitData>::iterator PrioE =
          std::upper_bound(I + 1, E, *I, GlobalInitPriorityCmp());

      LocalCXXGlobalInits.clear();
      unsigned Priority = I->first.priority;
      // Compute the function suffix from priority. Prepend with zeroes to make
      // sure the function names are also ordered as priorities.
      std::string PrioritySuffix = llvm::utostr(Priority);
      // Priority is always <= 65535 (enforced by sema).
      PrioritySuffix =
          std::string(6 - PrioritySuffix.size(), '0') + PrioritySuffix;
      llvm::Function *Fn = CreateGlobalInitOrDestructFunction(
          FTy, "_GLOBAL__I_" + PrioritySuffix, FI);

      for (; I < PrioE; ++I)
        LocalCXXGlobalInits.push_back(I->second);

      CodeGenFunction(*this).GenerateCXXGlobalInitFunc(Fn, LocalCXXGlobalInits);
      AddGlobalCtor(Fn, Priority);
    }
    PrioritizedCXXGlobalInits.clear();
  }

  // Include the filename in the symbol name. Including "sub_" matches gcc and
  // makes sure these symbols appear lexicographically behind the symbols with
  // priority emitted above.
  SmallString<128> FileName = llvm::sys::path::filename(getModule().getName());

  if (FileName.empty())
    FileName = "<null>";

  for (size_t i = 0; i < FileName.size(); ++i) {
    // Replace everything that's not [a-zA-Z0-9._] with a _. This set happens
    // to be the set of C preprocessing numbers.
    if (!isPreprocessingNumberBody(FileName[i]))
      FileName[i] = '_';
  }

  llvm::Function *Fn = CreateGlobalInitOrDestructFunction(
      FTy, llvm::Twine("_GLOBAL__sub_I_", FileName), FI);

  CodeGenFunction(*this).GenerateCXXGlobalInitFunc(Fn, CXXGlobalInits);
  AddGlobalCtor(Fn);

  // In OpenCL, global init functions must be converted to kernels so that they
  // can be launched from the host.
  // FIXME: Some more work might be needed to handle destructors correctly.
  // The current initialization function relies on function-pointer callbacks,
  // which we cannot support, especially between host and device. However,
  // global destruction has little meaning without dynamic resource allocation
  // on the device, and program-scope variables are destroyed by the runtime
  // when the program is released.
  if (getLangOpts().OpenCL) {
    GenOpenCLArgMetadata(Fn);
    Fn->setCallingConv(llvm::CallingConv::SPIR_KERNEL);
  }

  CXXGlobalInits.clear();
}
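
// Illustrative sketch (not part of the original source): for a translation unit
// named foo.cpp, the code above typically produces
//
//   _GLOBAL__I_000300        ; one function per init_priority value, if any
//   _GLOBAL__sub_I_foo.cpp   ; runs the remaining dynamic initializers
//
// both registered via llvm.global_ctors; the zero-padded priority suffix and
// the "sub_" prefix keep the prioritized chunks sorted ahead of the per-file
// function.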

void CodeGenModule::EmitCXXGlobalDtorFunc() {
  if (CXXGlobalDtors.empty())
    return;

  llvm::FunctionType *FTy = llvm::FunctionType::get(VoidTy, false);

  // Create our global destructor function.
  const CGFunctionInfo &FI = getTypes().arrangeNullaryFunction();
  llvm::Function *Fn =
      CreateGlobalInitOrDestructFunction(FTy, "_GLOBAL__D_a", FI);

  CodeGenFunction(*this).GenerateCXXGlobalDtorsFunc(Fn, CXXGlobalDtors);
  AddGlobalDtor(Fn);
}

/// Emit the code necessary to initialize the given global variable.
void CodeGenFunction::GenerateCXXGlobalVarDeclInitFunc(llvm::Function *Fn,
                                                       const VarDecl *D,
                                                       llvm::GlobalVariable *Addr,
                                                       bool PerformInit) {
  // Check if we need to emit debug info for variable initializer.
  if (D->hasAttr<NoDebugAttr>())
    DebugInfo = nullptr; // disable debug info indefinitely for this function

  CurEHLocation = D->getBeginLoc();

  StartFunction(GlobalDecl(D, DynamicInitKind::Initializer),
                getContext().VoidTy, Fn, getTypes().arrangeNullaryFunction(),
                FunctionArgList(), D->getLocation(),
                D->getInit()->getExprLoc());

  // Use guarded initialization if the global variable is weak. This
  // occurs for, e.g., instantiated static data members and
  // definitions explicitly marked weak.
  //
  // Also use guarded initialization for a variable with dynamic TLS and
  // unordered initialization. (If the initialization is ordered, the ABI
  // layer will guard the whole-TU initialization for us.)
  if (Addr->hasWeakLinkage() || Addr->hasLinkOnceLinkage() ||
      (D->getTLSKind() == VarDecl::TLS_Dynamic &&
       isTemplateInstantiation(D->getTemplateSpecializationKind()))) {
    EmitCXXGuardedInit(*D, Addr, PerformInit);
  } else {
    EmitCXXGlobalVarDeclInit(*D, Addr, PerformInit);
  }

  FinishFunction();
}
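
// Illustrative sketch (not part of the original source): a weak definition such
// as an instantiated static data member, e.g.
//
//   template <class T> struct Holder { static S member; };
//   template <class T> S Holder<T>::member;  // linkonce_odr definition
//
// where S stands for some class with a dynamic initializer, takes the
// guarded-init branch above, since several TUs may emit the same initializer
// but only one of them may actually run it.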

void
CodeGenFunction::GenerateCXXGlobalInitFunc(llvm::Function *Fn,
                                           ArrayRef<llvm::Function *> Decls,
                                           ConstantAddress Guard) {
  {
    auto NL = ApplyDebugLocation::CreateEmpty(*this);
    StartFunction(GlobalDecl(), getContext().VoidTy, Fn,
                  getTypes().arrangeNullaryFunction(), FunctionArgList());
    // Emit an artificial location for this function.
    auto AL = ApplyDebugLocation::CreateArtificial(*this);

    llvm::BasicBlock *ExitBlock = nullptr;
    if (Guard.isValid()) {
      // If we have a guard variable, check whether we've already performed
      // these initializations. This happens for TLS initialization functions.
      llvm::Value *GuardVal = Builder.CreateLoad(Guard);
      llvm::Value *Uninit = Builder.CreateIsNull(GuardVal,
                                                 "guard.uninitialized");
      llvm::BasicBlock *InitBlock = createBasicBlock("init");
      ExitBlock = createBasicBlock("exit");
      EmitCXXGuardedInitBranch(Uninit, InitBlock, ExitBlock,
                               GuardKind::TlsGuard, nullptr);
      EmitBlock(InitBlock);
      // Mark as initialized before initializing anything else. If the
      // initializers use previously-initialized thread_local vars, that's
      // probably supposed to be OK, but the standard doesn't say.
      Builder.CreateStore(llvm::ConstantInt::get(GuardVal->getType(), 1),
                          Guard);

      // The guard variable can't ever change again.
      EmitInvariantStart(
          Guard.getPointer(),
          CharUnits::fromQuantity(
              CGM.getDataLayout().getTypeAllocSize(GuardVal->getType())));
    }

    RunCleanupsScope Scope(*this);

    // When building in Objective-C++ ARC mode, create an autorelease pool
    // around the global initializers.
    if (getLangOpts().ObjCAutoRefCount && getLangOpts().CPlusPlus) {
      llvm::Value *token = EmitObjCAutoreleasePoolPush();
      EmitObjCAutoreleasePoolCleanup(token);
    }

    for (unsigned i = 0, e = Decls.size(); i != e; ++i)
      if (Decls[i])
        EmitRuntimeCall(Decls[i]);

    Scope.ForceCleanup();

    if (ExitBlock) {
      Builder.CreateBr(ExitBlock);
      EmitBlock(ExitBlock);
    }
  }

  FinishFunction();
}

void CodeGenFunction::GenerateCXXGlobalDtorsFunc(
    llvm::Function *Fn,
    const std::vector<std::tuple<llvm::FunctionType *, llvm::WeakTrackingVH,
                                 llvm::Constant *>> &DtorsAndObjects) {
  {
    auto NL = ApplyDebugLocation::CreateEmpty(*this);
    StartFunction(GlobalDecl(), getContext().VoidTy, Fn,
                  getTypes().arrangeNullaryFunction(), FunctionArgList());
    // Emit an artificial location for this function.
    auto AL = ApplyDebugLocation::CreateArtificial(*this);

    // Emit the dtors, in reverse order from construction.
    for (unsigned i = 0, e = DtorsAndObjects.size(); i != e; ++i) {
      llvm::FunctionType *CalleeTy;
      llvm::Value *Callee;
      llvm::Constant *Arg;
      std::tie(CalleeTy, Callee, Arg) = DtorsAndObjects[e - i - 1];
      llvm::CallInst *CI = Builder.CreateCall(CalleeTy, Callee, Arg);
      // Make sure the call and the callee agree on calling convention.
      if (llvm::Function *F = dyn_cast<llvm::Function>(Callee))
        CI->setCallingConv(F->getCallingConv());
    }
  }

  FinishFunction();
}

/// generateDestroyHelper - Generates a helper function which, when
/// invoked, destroys the given object. The address of the object
/// should be in global memory.
llvm::Function *CodeGenFunction::generateDestroyHelper(
    Address addr, QualType type, Destroyer *destroyer,
    bool useEHCleanupForArray, const VarDecl *VD) {
  FunctionArgList args;
  ImplicitParamDecl Dst(getContext(), getContext().VoidPtrTy,
                        ImplicitParamDecl::Other);
  args.push_back(&Dst);

  const CGFunctionInfo &FI =
      CGM.getTypes().arrangeBuiltinFunctionDeclaration(getContext().VoidTy,
                                                       args);
  llvm::FunctionType *FTy = CGM.getTypes().GetFunctionType(FI);
  llvm::Function *fn = CGM.CreateGlobalInitOrDestructFunction(
      FTy, "__cxx_global_array_dtor", FI, VD->getLocation());

  CurEHLocation = VD->getBeginLoc();

  StartFunction(VD, getContext().VoidTy, fn, FI, args);

  emitDestroy(addr, type, destroyer, useEHCleanupForArray);

  FinishFunction();

  return fn;
}