//===--- CGDeclCXX.cpp - Emit LLVM Code for C++ declarations --------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// This contains code dealing with code generation of C++ declarations
//
//===----------------------------------------------------------------------===//

#include "CodeGenFunction.h"
#include "CGCXXABI.h"
#include "CGObjCRuntime.h"
#include "CGOpenMPRuntime.h"
#include "TargetInfo.h"
#include "clang/Basic/CodeGenOptions.h"
#include "llvm/ADT/StringExtras.h"
#include "llvm/IR/Intrinsics.h"
#include "llvm/IR/MDBuilder.h"
#include "llvm/Support/Path.h"

using namespace clang;
using namespace CodeGen;
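
/// Emit the initializer for a variable with global storage (or, in the
/// OpenCL C++ case, local storage), dispatching on the evaluation kind of its
/// type.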
static void EmitDeclInit(CodeGenFunction &CGF, const VarDecl &D,
                         ConstantAddress DeclPtr) {
  assert(
      (D.hasGlobalStorage() ||
       (D.hasLocalStorage() &&
        CGF.getContext().getLangOpts().OpenCLCPlusPlus)) &&
      "VarDecl must have global or local (in the case of OpenCL) storage!");
  assert(!D.getType()->isReferenceType() &&
         "Should not call EmitDeclInit on a reference!");

  QualType type = D.getType();
  LValue lv = CGF.MakeAddrLValue(DeclPtr, type);

  const Expr *Init = D.getInit();
  switch (CGF.getEvaluationKind(type)) {
  case TEK_Scalar: {
    CodeGenModule &CGM = CGF.CGM;
    if (lv.isObjCStrong())
      CGM.getObjCRuntime().EmitObjCGlobalAssign(CGF, CGF.EmitScalarExpr(Init),
                                                DeclPtr, D.getTLSKind());
    else if (lv.isObjCWeak())
      CGM.getObjCRuntime().EmitObjCWeakAssign(CGF, CGF.EmitScalarExpr(Init),
                                              DeclPtr);
    else
      CGF.EmitScalarInit(Init, &D, lv, false);
    return;
  }
  case TEK_Complex:
    CGF.EmitComplexExprIntoLValue(Init, lv, /*isInit*/ true);
    return;
  case TEK_Aggregate:
    CGF.EmitAggExpr(Init,
                    AggValueSlot::forLValue(lv, AggValueSlot::IsDestructed,
                                            AggValueSlot::DoesNotNeedGCBarriers,
                                            AggValueSlot::IsNotAliased,
                                            AggValueSlot::DoesNotOverlap));
    return;
  }
  llvm_unreachable("bad evaluation kind");
}

/// Emit code to cause the destruction of the given variable with
/// static storage duration.
static void EmitDeclDestroy(CodeGenFunction &CGF, const VarDecl &D,
                            ConstantAddress Addr) {
  // Honor __attribute__((no_destroy)) and bail instead of attempting
  // to emit a reference to a possibly nonexistent destructor, which
  // in turn can cause a crash. This will result in a global constructor
  // that isn't balanced out by a destructor call as intended by the
  // attribute. This also checks for -fno-c++-static-destructors and
  // bails even if the attribute is not present.
  if (D.isNoDestroy(CGF.getContext()))
    return;

  CodeGenModule &CGM = CGF.CGM;

  // FIXME:  __attribute__((cleanup)) ?

  QualType Type = D.getType();
  QualType::DestructionKind DtorKind = Type.isDestructedType();

  switch (DtorKind) {
  case QualType::DK_none:
    return;

  case QualType::DK_cxx_destructor:
    break;

  case QualType::DK_objc_strong_lifetime:
  case QualType::DK_objc_weak_lifetime:
  case QualType::DK_nontrivial_c_struct:
    // We don't care about releasing objects during process teardown.
    assert(!D.getTLSKind() && "should have rejected this");
    return;
  }

  llvm::FunctionCallee Func;
  llvm::Constant *Argument;

  // Special-case non-array C++ destructors, if they have the right signature.
  // Under some ABIs, destructors return this instead of void, and cannot be
  // passed directly to __cxa_atexit if the target does not allow this
  // mismatch.
  const CXXRecordDecl *Record = Type->getAsCXXRecordDecl();
  bool CanRegisterDestructor =
      Record && (!CGM.getCXXABI().HasThisReturn(
                     GlobalDecl(Record->getDestructor(), Dtor_Complete)) ||
                 CGM.getCXXABI().canCallMismatchedFunctionType());
  // If __cxa_atexit is disabled via a flag, a different helper function is
  // generated elsewhere which uses atexit instead, and it takes the destructor
  // directly.
  bool UsingExternalHelper = !CGM.getCodeGenOpts().CXAAtExit;
  if (Record && (CanRegisterDestructor || UsingExternalHelper)) {
    assert(!Record->hasTrivialDestructor());
    CXXDestructorDecl *Dtor = Record->getDestructor();

    Func = CGM.getAddrAndTypeOfCXXStructor(GlobalDecl(Dtor, Dtor_Complete));
    if (CGF.getContext().getLangOpts().OpenCL) {
      auto DestAS =
          CGM.getTargetCodeGenInfo().getAddrSpaceOfCxaAtexitPtrParam();
      auto DestTy = CGF.getTypes().ConvertType(Type)->getPointerTo(
          CGM.getContext().getTargetAddressSpace(DestAS));
      auto SrcAS = D.getType().getQualifiers().getAddressSpace();
      if (DestAS == SrcAS)
        Argument = llvm::ConstantExpr::getBitCast(Addr.getPointer(), DestTy);
      else
        // FIXME: On addr space mismatch we are passing NULL. The generation
        // of the global destructor function should be adjusted accordingly.
        Argument = llvm::ConstantPointerNull::get(DestTy);
    } else {
      Argument = llvm::ConstantExpr::getBitCast(
          Addr.getPointer(), CGF.getTypes().ConvertType(Type)->getPointerTo());
    }
  // Otherwise, the standard logic requires a helper function.
  } else {
    Func = CodeGenFunction(CGM)
               .generateDestroyHelper(Addr, Type, CGF.getDestroyer(DtorKind),
                                      CGF.needsEHCleanup(DtorKind), &D);
    Argument = llvm::Constant::getNullValue(CGF.Int8PtrTy);
  }

  CGM.getCXXABI().registerGlobalDtor(CGF, D, Func, Argument);
}

/// Emit code to cause the variable at the given address to be considered as
/// constant from this point onwards.
static void EmitDeclInvariant(CodeGenFunction &CGF, const VarDecl &D,
                              llvm::Constant *Addr) {
  return CGF.EmitInvariantStart(
      Addr, CGF.getContext().getTypeSizeInChars(D.getType()));
}
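
/// Emit a call to the llvm.invariant.start intrinsic for the given address
/// and size. This is a no-op when not optimizing, since the intrinsic only
/// exists to aid the optimizer.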
void CodeGenFunction::EmitInvariantStart(llvm::Constant *Addr, CharUnits Size) {
  // Do not emit the intrinsic if we're not optimizing.
  if (!CGM.getCodeGenOpts().OptimizationLevel)
    return;

  // Grab the llvm.invariant.start intrinsic.
  llvm::Intrinsic::ID InvStartID = llvm::Intrinsic::invariant_start;
  // Overloaded address space type.
  llvm::Type *ObjectPtr[1] = {Int8PtrTy};
  llvm::Function *InvariantStart = CGM.getIntrinsic(InvStartID, ObjectPtr);

  // Emit a call with the size in bytes of the object.
  uint64_t Width = Size.getQuantity();
  llvm::Value *Args[2] = {llvm::ConstantInt::getSigned(Int64Ty, Width),
                          llvm::ConstantExpr::getBitCast(Addr, Int8PtrTy)};
  Builder.CreateCall(InvariantStart, Args);
}
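
/// Emit the dynamic initialization of a C++ global variable at the given
/// address and, when needed, register its destructor or mark the object
/// invariant for constant types.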
void CodeGenFunction::EmitCXXGlobalVarDeclInit(const VarDecl &D,
                                               llvm::Constant *DeclPtr,
                                               bool PerformInit) {

  const Expr *Init = D.getInit();
  QualType T = D.getType();

  // The address space of a static local variable (DeclPtr) may be different
  // from the address space of the "this" argument of the constructor. In that
  // case, we need an addrspacecast before calling the constructor.
  //
  // struct StructWithCtor {
  //   __device__ StructWithCtor() {...}
  // };
  // __device__ void foo() {
  //   __shared__ StructWithCtor s;
  //   ...
  // }
  //
  // For example, in the above CUDA code, the static local variable s has a
  // "shared" address space qualifier, but the constructor of StructWithCtor
  // expects "this" in the "generic" address space.
  unsigned ExpectedAddrSpace = getContext().getTargetAddressSpace(T);
  unsigned ActualAddrSpace = DeclPtr->getType()->getPointerAddressSpace();
  if (ActualAddrSpace != ExpectedAddrSpace) {
    llvm::Type *LTy = CGM.getTypes().ConvertTypeForMem(T);
    llvm::PointerType *PTy = llvm::PointerType::get(LTy, ExpectedAddrSpace);
    DeclPtr = llvm::ConstantExpr::getAddrSpaceCast(DeclPtr, PTy);
  }

  ConstantAddress DeclAddr(DeclPtr, getContext().getDeclAlign(&D));

  if (!T->isReferenceType()) {
    if (getLangOpts().OpenMP && !getLangOpts().OpenMPSimd &&
        D.hasAttr<OMPThreadPrivateDeclAttr>()) {
      (void)CGM.getOpenMPRuntime().emitThreadPrivateVarDefinition(
          &D, DeclAddr, D.getAttr<OMPThreadPrivateDeclAttr>()->getLocation(),
          PerformInit, this);
    }
    if (PerformInit)
      EmitDeclInit(*this, D, DeclAddr);
    if (CGM.isTypeConstant(D.getType(), true))
      EmitDeclInvariant(*this, D, DeclPtr);
    else
      EmitDeclDestroy(*this, D, DeclAddr);
    return;
  }

  assert(PerformInit && "cannot have constant initializer which needs "
         "destruction for reference");
  RValue RV = EmitReferenceBindingToExpr(Init);
  EmitStoreOfScalar(RV.getScalarVal(), DeclAddr, false, T);
}

/// Create a stub function, suitable for being passed to atexit,
/// which passes the given address to the given destructor function.
llvm::Function *CodeGenFunction::createAtExitStub(const VarDecl &VD,
                                                  llvm::FunctionCallee dtor,
                                                  llvm::Constant *addr) {
  // Get the destructor function type, void(*)(void).
  llvm::FunctionType *ty = llvm::FunctionType::get(CGM.VoidTy, false);
  SmallString<256> FnName;
  {
    llvm::raw_svector_ostream Out(FnName);
    CGM.getCXXABI().getMangleContext().mangleDynamicAtExitDestructor(&VD, Out);
  }

  const CGFunctionInfo &FI = CGM.getTypes().arrangeNullaryFunction();
  llvm::Function *fn = CGM.CreateGlobalInitOrDestructFunction(
      ty, FnName.str(), FI, VD.getLocation());

  CodeGenFunction CGF(CGM);

  CGF.StartFunction(GlobalDecl(&VD, DynamicInitKind::AtExit),
                    CGM.getContext().VoidTy, fn, FI, FunctionArgList());

  llvm::CallInst *call = CGF.Builder.CreateCall(dtor, addr);

  // Make sure the call and the callee agree on calling convention.
  if (llvm::Function *dtorFn =
          dyn_cast<llvm::Function>(dtor.getCallee()->stripPointerCasts()))
    call->setCallingConv(dtorFn->getCallingConv());

  CGF.FinishFunction();

  return fn;
}

/// Register a global destructor using the C atexit runtime function.
void CodeGenFunction::registerGlobalDtorWithAtExit(const VarDecl &VD,
                                                   llvm::FunctionCallee dtor,
                                                   llvm::Constant *addr) {
  // Create a function which calls the destructor.
  llvm::Constant *dtorStub = createAtExitStub(VD, dtor, addr);
  registerGlobalDtorWithAtExit(dtorStub);
}
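
/// Register the given destructor stub directly with the C atexit runtime
/// function.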
void CodeGenFunction::registerGlobalDtorWithAtExit(llvm::Constant *dtorStub) {
  // extern "C" int atexit(void (*f)(void));
  llvm::FunctionType *atexitTy =
      llvm::FunctionType::get(IntTy, dtorStub->getType(), false);

  llvm::FunctionCallee atexit =
      CGM.CreateRuntimeFunction(atexitTy, "atexit", llvm::AttributeList(),
                                /*Local=*/true);
  if (llvm::Function *atexitFn = dyn_cast<llvm::Function>(atexit.getCallee()))
    atexitFn->setDoesNotThrow();

  EmitNounwindRuntimeCall(atexit, dtorStub);
}
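
/// Emit a guarded ("run once") dynamic initialization for the given variable,
/// delegating the details of the guard scheme to the C++ ABI.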
void CodeGenFunction::EmitCXXGuardedInit(const VarDecl &D,
                                         llvm::GlobalVariable *DeclPtr,
                                         bool PerformInit) {
  // If we've been asked to forbid guard variables, emit an error now.
  // This diagnostic is hard-coded for Darwin's use case; we can find
  // better phrasing if someone else needs it.
  if (CGM.getCodeGenOpts().ForbidGuardVariables)
    CGM.Error(D.getLocation(),
              "this initialization requires a guard variable, which "
              "the kernel does not support");

  CGM.getCXXABI().EmitGuardedInit(*this, D, DeclPtr, PerformInit);
}
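
/// Emit the conditional branch on a guard check, attaching branch-weight
/// metadata that reflects how rarely the initialization path is expected to
/// be taken.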
void CodeGenFunction::EmitCXXGuardedInitBranch(llvm::Value *NeedsInit,
                                               llvm::BasicBlock *InitBlock,
                                               llvm::BasicBlock *NoInitBlock,
                                               GuardKind Kind,
                                               const VarDecl *D) {
  assert((Kind == GuardKind::TlsGuard || D) && "no guarded variable");

  // A guess at how many times we will enter the initialization of a
  // variable, depending on the kind of variable.
  static const uint64_t InitsPerTLSVar = 1024;
  static const uint64_t InitsPerLocalVar = 1024 * 1024;

  llvm::MDNode *Weights;
  if (Kind == GuardKind::VariableGuard && !D->isLocalVarDecl()) {
    // For non-local variables, don't apply any weighting for now. Due to our
    // use of COMDATs, we expect there to be at most one initialization of the
    // variable per DSO, but we have no way to know how many DSOs will try to
    // initialize the variable.
    Weights = nullptr;
  } else {
    uint64_t NumInits;
    // FIXME: For the TLS case, collect and use profiling information to
    // determine a more accurate branch weight.
    if (Kind == GuardKind::TlsGuard || D->getTLSKind())
      NumInits = InitsPerTLSVar;
    else
      NumInits = InitsPerLocalVar;

    // The probability of us entering the initializer is
    //   1 / (total number of times we attempt to initialize the variable).
    llvm::MDBuilder MDHelper(CGM.getLLVMContext());
    Weights = MDHelper.createBranchWeights(1, NumInits - 1);
  }

  Builder.CreateCondBr(NeedsInit, InitBlock, NoInitBlock, Weights);
}
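
/// Create an internal-linkage helper function used for global initialization
/// or destruction, applying the section, calling convention, sanitizer, and
/// return-address-signing attributes it needs.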
llvm::Function *CodeGenModule::CreateGlobalInitOrDestructFunction(
    llvm::FunctionType *FTy, const Twine &Name, const CGFunctionInfo &FI,
    SourceLocation Loc, bool TLS) {
  llvm::Function *Fn =
      llvm::Function::Create(FTy, llvm::GlobalValue::InternalLinkage,
                             Name, &getModule());

  if (!getLangOpts().AppleKext && !TLS) {
    // Set the section if needed.
    if (const char *Section = getTarget().getStaticInitSectionSpecifier())
      Fn->setSection(Section);
  }

  SetInternalFunctionAttributes(GlobalDecl(), Fn, FI);

  Fn->setCallingConv(getRuntimeCC());

  if (!getLangOpts().Exceptions)
    Fn->setDoesNotThrow();

  if (getLangOpts().Sanitize.has(SanitizerKind::Address) &&
      !isInSanitizerBlacklist(SanitizerKind::Address, Fn, Loc))
    Fn->addFnAttr(llvm::Attribute::SanitizeAddress);

  if (getLangOpts().Sanitize.has(SanitizerKind::KernelAddress) &&
      !isInSanitizerBlacklist(SanitizerKind::KernelAddress, Fn, Loc))
    Fn->addFnAttr(llvm::Attribute::SanitizeAddress);

  if (getLangOpts().Sanitize.has(SanitizerKind::HWAddress) &&
      !isInSanitizerBlacklist(SanitizerKind::HWAddress, Fn, Loc))
    Fn->addFnAttr(llvm::Attribute::SanitizeHWAddress);

  if (getLangOpts().Sanitize.has(SanitizerKind::KernelHWAddress) &&
      !isInSanitizerBlacklist(SanitizerKind::KernelHWAddress, Fn, Loc))
    Fn->addFnAttr(llvm::Attribute::SanitizeHWAddress);

  if (getLangOpts().Sanitize.has(SanitizerKind::MemTag) &&
      !isInSanitizerBlacklist(SanitizerKind::MemTag, Fn, Loc))
    Fn->addFnAttr(llvm::Attribute::SanitizeMemTag);

  if (getLangOpts().Sanitize.has(SanitizerKind::Thread) &&
      !isInSanitizerBlacklist(SanitizerKind::Thread, Fn, Loc))
    Fn->addFnAttr(llvm::Attribute::SanitizeThread);

  if (getLangOpts().Sanitize.has(SanitizerKind::Memory) &&
      !isInSanitizerBlacklist(SanitizerKind::Memory, Fn, Loc))
    Fn->addFnAttr(llvm::Attribute::SanitizeMemory);

  if (getLangOpts().Sanitize.has(SanitizerKind::KernelMemory) &&
      !isInSanitizerBlacklist(SanitizerKind::KernelMemory, Fn, Loc))
    Fn->addFnAttr(llvm::Attribute::SanitizeMemory);

  if (getLangOpts().Sanitize.has(SanitizerKind::SafeStack) &&
      !isInSanitizerBlacklist(SanitizerKind::SafeStack, Fn, Loc))
    Fn->addFnAttr(llvm::Attribute::SafeStack);

  if (getLangOpts().Sanitize.has(SanitizerKind::ShadowCallStack) &&
      !isInSanitizerBlacklist(SanitizerKind::ShadowCallStack, Fn, Loc))
    Fn->addFnAttr(llvm::Attribute::ShadowCallStack);

  auto RASignKind = getCodeGenOpts().getSignReturnAddress();
  if (RASignKind != CodeGenOptions::SignReturnAddressScope::None) {
    Fn->addFnAttr("sign-return-address",
                  RASignKind == CodeGenOptions::SignReturnAddressScope::All
                      ? "all"
                      : "non-leaf");
    auto RASignKey = getCodeGenOpts().getSignReturnAddressKey();
    Fn->addFnAttr("sign-return-address-key",
                  RASignKey == CodeGenOptions::SignReturnAddressKeyValue::AKey
                      ? "a_key"
                      : "b_key");
  }

  if (getCodeGenOpts().BranchTargetEnforcement)
    Fn->addFnAttr("branch-target-enforcement");

  return Fn;
}

/// Create a global pointer to a function that will initialize a global
/// variable. The user has requested that this pointer be emitted in a specific
/// section.
void CodeGenModule::EmitPointerToInitFunc(const VarDecl *D,
                                          llvm::GlobalVariable *GV,
                                          llvm::Function *InitFunc,
                                          InitSegAttr *ISA) {
  llvm::GlobalVariable *PtrArray = new llvm::GlobalVariable(
      TheModule, InitFunc->getType(), /*isConstant=*/true,
      llvm::GlobalValue::PrivateLinkage, InitFunc, "__cxx_init_fn_ptr");
  PtrArray->setSection(ISA->getSection());
  addUsedGlobal(PtrArray);

  // If the GV is already in a comdat group, then we have to join it.
  if (llvm::Comdat *C = GV->getComdat())
    PtrArray->setComdat(C);
}
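
/// Emit the dynamic initializer function for a global variable and arrange
/// for it to be run at the right time: via the thread_local machinery, an
/// init_seg section, a prioritized or COMDAT-keyed llvm.global_ctors entry,
/// or the default ordered-initialization list.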
void
CodeGenModule::EmitCXXGlobalVarDeclInitFunc(const VarDecl *D,
                                            llvm::GlobalVariable *Addr,
                                            bool PerformInit) {

  // According to E.2.3.1 in CUDA-7.5 Programming guide: __device__,
  // __constant__ and __shared__ variables defined in namespace scope,
  // that are of class type, cannot have a non-empty constructor. All
  // the checks have been done in Sema by now. Whatever initializers
  // are allowed are empty and we just need to ignore them here.
  if (getLangOpts().CUDA && getLangOpts().CUDAIsDevice &&
      (D->hasAttr<CUDADeviceAttr>() || D->hasAttr<CUDAConstantAttr>() ||
       D->hasAttr<CUDASharedAttr>()))
    return;

  if (getLangOpts().OpenMP &&
      getOpenMPRuntime().emitDeclareTargetVarDefinition(D, Addr, PerformInit))
    return;

  // Check if we've already initialized this decl.
  auto I = DelayedCXXInitPosition.find(D);
  if (I != DelayedCXXInitPosition.end() && I->second == ~0U)
    return;

  llvm::FunctionType *FTy = llvm::FunctionType::get(VoidTy, false);
  SmallString<256> FnName;
  {
    llvm::raw_svector_ostream Out(FnName);
    getCXXABI().getMangleContext().mangleDynamicInitializer(D, Out);
  }

  // Create a variable initialization function.
  llvm::Function *Fn =
      CreateGlobalInitOrDestructFunction(FTy, FnName.str(),
                                         getTypes().arrangeNullaryFunction(),
                                         D->getLocation());

  auto *ISA = D->getAttr<InitSegAttr>();
  CodeGenFunction(*this).GenerateCXXGlobalVarDeclInitFunc(Fn, D, Addr,
                                                          PerformInit);

  llvm::GlobalVariable *COMDATKey =
      supportsCOMDAT() && D->isExternallyVisible() ? Addr : nullptr;

  if (D->getTLSKind()) {
    // FIXME: Should we support init_priority for thread_local?
    // FIXME: We only need to register one __cxa_thread_atexit function for the
    // entire TU.
    CXXThreadLocalInits.push_back(Fn);
    CXXThreadLocalInitVars.push_back(D);
  } else if (PerformInit && ISA) {
    EmitPointerToInitFunc(D, Addr, Fn, ISA);
  } else if (auto *IPA = D->getAttr<InitPriorityAttr>()) {
    OrderGlobalInits Key(IPA->getPriority(), PrioritizedCXXGlobalInits.size());
    PrioritizedCXXGlobalInits.push_back(std::make_pair(Key, Fn));
  } else if (isTemplateInstantiation(D->getTemplateSpecializationKind()) ||
             getContext().GetGVALinkageForVariable(D) == GVA_DiscardableODR) {
    // C++ [basic.start.init]p2:
    //   Definitions of explicitly specialized class template static data
    //   members have ordered initialization. Other class template static data
    //   members (i.e., implicitly or explicitly instantiated specializations)
    //   have unordered initialization.
    //
    // As a consequence, we can put them into their own llvm.global_ctors entry.
    //
    // If the global is externally visible, put the initializer into a COMDAT
    // group with the global being initialized. On most platforms, this is a
    // minor startup time optimization. In the MS C++ ABI, there are no guard
    // variables, so this COMDAT key is required for correctness.
    AddGlobalCtor(Fn, 65535, COMDATKey);
    if (getTarget().getCXXABI().isMicrosoft() && COMDATKey) {
      // In the MS C++ ABI, MS adds template static data members to the linker
      // directive.
      addUsedGlobal(COMDATKey);
    }
  } else if (D->hasAttr<SelectAnyAttr>()) {
    // SelectAny globals will be comdat-folded. Put the initializer into a
    // COMDAT group associated with the global, so the initializers get folded
    // too.
    AddGlobalCtor(Fn, 65535, COMDATKey);
  } else {
    I = DelayedCXXInitPosition.find(D); // Re-do lookup in case of re-hash.
    if (I == DelayedCXXInitPosition.end()) {
      CXXGlobalInits.push_back(Fn);
    } else if (I->second != ~0U) {
      assert(I->second < CXXGlobalInits.size() &&
             CXXGlobalInits[I->second] == nullptr);
      CXXGlobalInits[I->second] = Fn;
    }
  }

  // Remember that we already emitted the initializer for this global.
  DelayedCXXInitPosition[D] = ~0U;
}
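
/// Hand the accumulated thread_local initializers to the C++ ABI and clear
/// the per-module lists.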
void CodeGenModule::EmitCXXThreadLocalInitFunc() {
  getCXXABI().EmitThreadLocalInitFuncs(
      *this, CXXThreadLocals, CXXThreadLocalInits, CXXThreadLocalInitVars);

  CXXThreadLocalInits.clear();
  CXXThreadLocalInitVars.clear();
  CXXThreadLocals.clear();
}
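
/// Emit the module-level initialization function that calls all remaining
/// global initializers, emitting separate per-priority functions first for
/// any init_priority initializers.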
void
CodeGenModule::EmitCXXGlobalInitFunc() {
  while (!CXXGlobalInits.empty() && !CXXGlobalInits.back())
    CXXGlobalInits.pop_back();

  if (CXXGlobalInits.empty() && PrioritizedCXXGlobalInits.empty())
    return;

  llvm::FunctionType *FTy = llvm::FunctionType::get(VoidTy, false);
  const CGFunctionInfo &FI = getTypes().arrangeNullaryFunction();

  // Create our global initialization function.
  if (!PrioritizedCXXGlobalInits.empty()) {
    SmallVector<llvm::Function *, 8> LocalCXXGlobalInits;
    llvm::array_pod_sort(PrioritizedCXXGlobalInits.begin(),
                         PrioritizedCXXGlobalInits.end());
    // Iterate over "chunks" of ctors with the same priority and emit each
    // chunk into a separate function. Note: everything is sorted first by
    // priority, second by lexicographic order, so we emit ctor functions in
    // the proper order.
    for (SmallVectorImpl<GlobalInitData>::iterator
             I = PrioritizedCXXGlobalInits.begin(),
             E = PrioritizedCXXGlobalInits.end();
         I != E;) {
      SmallVectorImpl<GlobalInitData>::iterator PrioE =
          std::upper_bound(I + 1, E, *I, GlobalInitPriorityCmp());

      LocalCXXGlobalInits.clear();
      unsigned Priority = I->first.priority;
      // Compute the function suffix from priority. Prepend with zeroes to make
      // sure the function names are also ordered as priorities.
      std::string PrioritySuffix = llvm::utostr(Priority);
      // Priority is always <= 65535 (enforced by sema).
      PrioritySuffix =
          std::string(6 - PrioritySuffix.size(), '0') + PrioritySuffix;
      llvm::Function *Fn = CreateGlobalInitOrDestructFunction(
          FTy, "_GLOBAL__I_" + PrioritySuffix, FI);

      for (; I < PrioE; ++I)
        LocalCXXGlobalInits.push_back(I->second);

      CodeGenFunction(*this).GenerateCXXGlobalInitFunc(Fn, LocalCXXGlobalInits);
      AddGlobalCtor(Fn, Priority);
    }
    PrioritizedCXXGlobalInits.clear();
  }

  // Include the filename in the symbol name. Including "sub_" matches gcc and
  // makes sure these symbols appear lexicographically behind the symbols with
  // priority emitted above.
  SmallString<128> FileName = llvm::sys::path::filename(getModule().getName());

  if (FileName.empty())
    FileName = "<null>";

  for (size_t i = 0; i < FileName.size(); ++i) {
    // Replace everything that's not [a-zA-Z0-9._] with a _. This set happens
    // to be the set of characters allowed in a C preprocessing number.
    if (!isPreprocessingNumberBody(FileName[i]))
      FileName[i] = '_';
  }

  llvm::Function *Fn = CreateGlobalInitOrDestructFunction(
      FTy, llvm::Twine("_GLOBAL__sub_I_", FileName), FI);

  CodeGenFunction(*this).GenerateCXXGlobalInitFunc(Fn, CXXGlobalInits);
  AddGlobalCtor(Fn);

  // In OpenCL, global init functions must be converted to kernels in order to
  // be able to launch them from the host.
  // FIXME: Some more work might be needed to handle destructors correctly.
  // The current initialization function makes use of function pointer
  // callbacks, and we can't support function pointers, especially between
  // host and device. However, global destruction has little meaning without
  // any dynamic resource allocation on the device, and program-scope variables
  // are destroyed by the runtime when the program is released.
  if (getLangOpts().OpenCL) {
    GenOpenCLArgMetadata(Fn);
    Fn->setCallingConv(llvm::CallingConv::SPIR_KERNEL);
  }

  CXXGlobalInits.clear();
}
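
/// Emit the module-level function that runs the registered global
/// destructors.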
void CodeGenModule::EmitCXXGlobalDtorFunc() {
  if (CXXGlobalDtors.empty())
    return;

  llvm::FunctionType *FTy = llvm::FunctionType::get(VoidTy, false);

  // Create our global destructor function.
  const CGFunctionInfo &FI = getTypes().arrangeNullaryFunction();
  llvm::Function *Fn =
      CreateGlobalInitOrDestructFunction(FTy, "_GLOBAL__D_a", FI);

  CodeGenFunction(*this).GenerateCXXGlobalDtorsFunc(Fn, CXXGlobalDtors);
  AddGlobalDtor(Fn);
}

/// Emit the code necessary to initialize the given global variable.
void CodeGenFunction::GenerateCXXGlobalVarDeclInitFunc(llvm::Function *Fn,
                                                       const VarDecl *D,
                                                       llvm::GlobalVariable *Addr,
                                                       bool PerformInit) {
  // Check if we need to emit debug info for variable initializer.
  if (D->hasAttr<NoDebugAttr>())
    DebugInfo = nullptr; // disable debug info indefinitely for this function

  CurEHLocation = D->getBeginLoc();

  StartFunction(GlobalDecl(D, DynamicInitKind::Initializer),
                getContext().VoidTy, Fn, getTypes().arrangeNullaryFunction(),
                FunctionArgList(), D->getLocation(),
                D->getInit()->getExprLoc());

  // Use guarded initialization if the global variable is weak. This
  // occurs for, e.g., instantiated static data members and
  // definitions explicitly marked weak.
  //
  // Also use guarded initialization for a variable with dynamic TLS and
  // unordered initialization. (If the initialization is ordered, the ABI
  // layer will guard the whole-TU initialization for us.)
  if (Addr->hasWeakLinkage() || Addr->hasLinkOnceLinkage() ||
      (D->getTLSKind() == VarDecl::TLS_Dynamic &&
       isTemplateInstantiation(D->getTemplateSpecializationKind()))) {
    EmitCXXGuardedInit(*D, Addr, PerformInit);
  } else {
    EmitCXXGlobalVarDeclInit(*D, Addr, PerformInit);
  }

  FinishFunction();
}
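
/// Emit the body of a global initialization function: optionally test and set
/// a guard variable (used for TLS initialization), then call each
/// per-variable initializer in order.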
void
CodeGenFunction::GenerateCXXGlobalInitFunc(llvm::Function *Fn,
                                           ArrayRef<llvm::Function *> Decls,
                                           ConstantAddress Guard) {
  {
    auto NL = ApplyDebugLocation::CreateEmpty(*this);
    StartFunction(GlobalDecl(), getContext().VoidTy, Fn,
                  getTypes().arrangeNullaryFunction(), FunctionArgList());
    // Emit an artificial location for this function.
    auto AL = ApplyDebugLocation::CreateArtificial(*this);

    llvm::BasicBlock *ExitBlock = nullptr;
    if (Guard.isValid()) {
      // If we have a guard variable, check whether we've already performed
      // these initializations. This happens for TLS initialization functions.
      llvm::Value *GuardVal = Builder.CreateLoad(Guard);
      llvm::Value *Uninit =
          Builder.CreateIsNull(GuardVal, "guard.uninitialized");
      llvm::BasicBlock *InitBlock = createBasicBlock("init");
      ExitBlock = createBasicBlock("exit");
      EmitCXXGuardedInitBranch(Uninit, InitBlock, ExitBlock,
                               GuardKind::TlsGuard, nullptr);
      EmitBlock(InitBlock);
      // Mark as initialized before initializing anything else. If the
      // initializers use previously-initialized thread_local vars, that's
      // probably supposed to be OK, but the standard doesn't say.
      Builder.CreateStore(llvm::ConstantInt::get(GuardVal->getType(), 1),
                          Guard);

      // The guard variable can't ever change again.
      EmitInvariantStart(
          Guard.getPointer(),
          CharUnits::fromQuantity(
              CGM.getDataLayout().getTypeAllocSize(GuardVal->getType())));
    }

    RunCleanupsScope Scope(*this);

    // When building in Objective-C++ ARC mode, create an autorelease pool
    // around the global initializers.
    if (getLangOpts().ObjCAutoRefCount && getLangOpts().CPlusPlus) {
      llvm::Value *token = EmitObjCAutoreleasePoolPush();
      EmitObjCAutoreleasePoolCleanup(token);
    }

    for (unsigned i = 0, e = Decls.size(); i != e; ++i)
      if (Decls[i])
        EmitRuntimeCall(Decls[i]);

    Scope.ForceCleanup();

    if (ExitBlock) {
      Builder.CreateBr(ExitBlock);
      EmitBlock(ExitBlock);
    }
  }

  FinishFunction();
}
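
/// Emit the body of a global destructor function, invoking the recorded
/// destructors in reverse order of registration.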
void CodeGenFunction::GenerateCXXGlobalDtorsFunc(
    llvm::Function *Fn,
    const std::vector<std::tuple<llvm::FunctionType *, llvm::WeakTrackingVH,
                                 llvm::Constant *>> &DtorsAndObjects) {
  {
    auto NL = ApplyDebugLocation::CreateEmpty(*this);
    StartFunction(GlobalDecl(), getContext().VoidTy, Fn,
                  getTypes().arrangeNullaryFunction(), FunctionArgList());
    // Emit an artificial location for this function.
    auto AL = ApplyDebugLocation::CreateArtificial(*this);

    // Emit the dtors, in reverse order from construction.
    for (unsigned i = 0, e = DtorsAndObjects.size(); i != e; ++i) {
      llvm::FunctionType *CalleeTy;
      llvm::Value *Callee;
      llvm::Constant *Arg;
      std::tie(CalleeTy, Callee, Arg) = DtorsAndObjects[e - i - 1];
      llvm::CallInst *CI = Builder.CreateCall(CalleeTy, Callee, Arg);
      // Make sure the call and the callee agree on calling convention.
      if (llvm::Function *F = dyn_cast<llvm::Function>(Callee))
        CI->setCallingConv(F->getCallingConv());
    }
  }

  FinishFunction();
}

/// generateDestroyHelper - Generates a helper function which, when
/// invoked, destroys the given object. The address of the object
/// should be in global memory.
llvm::Function *CodeGenFunction::generateDestroyHelper(
    Address addr, QualType type, Destroyer *destroyer,
    bool useEHCleanupForArray, const VarDecl *VD) {
  FunctionArgList args;
  ImplicitParamDecl Dst(getContext(), getContext().VoidPtrTy,
                        ImplicitParamDecl::Other);
  args.push_back(&Dst);

  const CGFunctionInfo &FI =
      CGM.getTypes().arrangeBuiltinFunctionDeclaration(getContext().VoidTy,
                                                       args);
  llvm::FunctionType *FTy = CGM.getTypes().GetFunctionType(FI);
  llvm::Function *fn = CGM.CreateGlobalInitOrDestructFunction(
      FTy, "__cxx_global_array_dtor", FI, VD->getLocation());

  CurEHLocation = VD->getBeginLoc();

  StartFunction(VD, getContext().VoidTy, fn, FI, args);

  emitDestroy(addr, type, destroyer, useEHCleanupForArray);

  FinishFunction();

  return fn;
}