//===--- CGExprCXX.cpp - Emit LLVM Code for C++ expressions --------------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This contains code dealing with code generation of C++ expressions
//
//===----------------------------------------------------------------------===//

#include "CodeGenFunction.h"
#include "CGCUDARuntime.h"
#include "CGHLSLRuntime.h" // HLSL Change
#include "CGCXXABI.h"
#include "CGDebugInfo.h"
#include "CGObjCRuntime.h"
#include "clang/CodeGen/CGFunctionInfo.h"
#include "clang/Frontend/CodeGenOptions.h"
#include "llvm/IR/CallSite.h"
#include "llvm/IR/Intrinsics.h"

using namespace clang;
using namespace CodeGen;
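
// Shared helper for member and operator calls: performs the type check on
// 'this', pushes 'this' (and any implicit parameter, such as the VTT) onto
// Args, emits the remaining call arguments from argList, and returns the
// RequiredArgs describing what the prototype requires.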
static RequiredArgs commonEmitCXXMemberOrOperatorCall(
    CodeGenFunction &CGF, const CXXMethodDecl *MD, llvm::Value *Callee,
    ReturnValueSlot ReturnValue, llvm::Value *This, llvm::Value *ImplicitParam,
    QualType ImplicitParamTy, const CallExpr *CE, CallArgList &Args,
    ArrayRef<const Stmt *> argList // HLSL Change - use updated argList for out parameter.
    ) {
  assert(CE == nullptr || isa<CXXMemberCallExpr>(CE) ||
         isa<CXXOperatorCallExpr>(CE));
  assert(MD->isInstance() &&
         "Trying to emit a member or operator call expr on a static method!");

  // C++11 [class.mfct.non-static]p2:
  //   If a non-static member function of a class X is called for an object
  //   that is not of type X, or of a type derived from X, the behavior is
  //   undefined.
  SourceLocation CallLoc;
  if (CE)
    CallLoc = CE->getExprLoc();
  CGF.EmitTypeCheck(
      isa<CXXConstructorDecl>(MD) ? CodeGenFunction::TCK_ConstructorCall
                                  : CodeGenFunction::TCK_MemberCall,
      CallLoc, This, CGF.getContext().getRecordType(MD->getParent()));

  // Push the this ptr.
  Args.add(RValue::get(This), MD->getThisType(CGF.getContext()));

  // If there is an implicit parameter (e.g. VTT), emit it.
  if (ImplicitParam) {
    Args.add(RValue::get(ImplicitParam), ImplicitParamTy);
  }

  const FunctionProtoType *FPT = MD->getType()->castAs<FunctionProtoType>();
  RequiredArgs required = RequiredArgs::forPrototypePlus(FPT, Args.size());

  // And the rest of the call args.
  if (CE) {
    // Special case: skip first argument of CXXOperatorCall (it is "this").
    unsigned ArgsToSkip = isa<CXXOperatorCallExpr>(CE) ? 1 : 0;
    CGF.EmitCallArgs(Args, FPT,
                     argList.begin() + ArgsToSkip, // HLSL Change - use updated argList for out parameter.
                     argList.end(),                // HLSL Change - use updated argList for out parameter.
                     CE->getDirectCallee());
  } else {
    assert(
        FPT->getNumParams() == 0 &&
        "No CallExpr specified for function with non-zero number of arguments");
  }
  return required;
}
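
// Note on the HLSL out-parameter pattern used below: because HLSL 'out' and
// 'inout' arguments may require implicit conversions, the runtime first
// rewrites the argument list to reference temporaries
// (EmitHLSLOutParamConversionInit), the call is emitted against those
// temporaries, and the results are then converted and copied back into the
// original l-values (EmitHLSLOutParamConversionCopyBack).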
RValue CodeGenFunction::EmitCXXMemberOrOperatorCall(
    const CXXMethodDecl *MD, llvm::Value *Callee, ReturnValueSlot ReturnValue,
    llvm::Value *This, llvm::Value *ImplicitParam, QualType ImplicitParamTy,
    const CallExpr *CE) {
  const FunctionProtoType *FPT = MD->getType()->castAs<FunctionProtoType>();
  CallArgList Args;
  // HLSL Change Begins
  llvm::SmallVector<LValue, 8> castArgList;
  llvm::SmallVector<LValue, 8> lifetimeCleanupList;
  // The argList of the CallExpr; it may be updated for out parameters.
  llvm::SmallVector<const Stmt *, 8> argList(CE->arg_begin(), CE->arg_end());
  // Out-parameter conversion.
  CodeGenFunction::HLSLOutParamScope OutParamScope(*this);
  auto MapTemp = [&](const VarDecl *LocalVD, llvm::Value *TmpArg) {
    OutParamScope.addTemp(LocalVD, TmpArg);
  };
  if (getLangOpts().HLSL) {
    if (const FunctionDecl *FD = CE->getDirectCallee())
      CGM.getHLSLRuntime().EmitHLSLOutParamConversionInit(
          *this, FD, CE, castArgList, argList, lifetimeCleanupList, MapTemp);
  }
  // HLSL Change Ends
  RequiredArgs required = commonEmitCXXMemberOrOperatorCall(
      *this, MD, Callee, ReturnValue, This, ImplicitParam, ImplicitParamTy, CE,
      Args, argList); // HLSL Change - use updated argList.
  RValue CallVal =
      EmitCall(CGM.getTypes().arrangeCXXMethodCall(Args, FPT, required),
               Callee, ReturnValue, Args, MD);
  // HLSL Change Begins
  // Out-parameter conversion: convert and copy back after the call.
  if (getLangOpts().HLSL)
    CGM.getHLSLRuntime().EmitHLSLOutParamConversionCopyBack(
        *this, castArgList, lifetimeCleanupList);
  // HLSL Change Ends
  return CallVal;
}

RValue CodeGenFunction::EmitCXXStructorCall(
    const CXXMethodDecl *MD, llvm::Value *Callee, ReturnValueSlot ReturnValue,
    llvm::Value *This, llvm::Value *ImplicitParam, QualType ImplicitParamTy,
    const CallExpr *CE, StructorType Type) {
  CallArgList Args;
  // HLSL Change Begins
  llvm::SmallVector<LValue, 8> castArgList;
  llvm::SmallVector<LValue, 8> lifetimeCleanupList;
  // The argList of the CallExpr; it may be updated for out parameters.
  llvm::SmallVector<const Stmt *, 8> argList(CE->arg_begin(), CE->arg_end());
  // Out-parameter conversion.
  CodeGenFunction::HLSLOutParamScope OutParamScope(*this);
  auto MapTemp = [&](const VarDecl *LocalVD, llvm::Value *TmpArg) {
    OutParamScope.addTemp(LocalVD, TmpArg);
  };
  if (getLangOpts().HLSL) {
    if (const FunctionDecl *FD = CE->getDirectCallee())
      CGM.getHLSLRuntime().EmitHLSLOutParamConversionInit(
          *this, FD, CE, castArgList, argList, lifetimeCleanupList, MapTemp);
  }
  // HLSL Change Ends
  commonEmitCXXMemberOrOperatorCall(*this, MD, Callee, ReturnValue, This,
                                    ImplicitParam, ImplicitParamTy, CE, Args,
                                    argList); // HLSL Change - use updated argList.
  RValue CallVal =
      EmitCall(CGM.getTypes().arrangeCXXStructorDeclaration(MD, Type),
               Callee, ReturnValue, Args, MD);
  // HLSL Change Begins
  // Out-parameter conversion: convert and copy back after the call.
  if (getLangOpts().HLSL)
    CGM.getHLSLRuntime().EmitHLSLOutParamConversionCopyBack(
        *this, castArgList, lifetimeCleanupList);
  // HLSL Change Ends
  return CallVal;
}

static CXXRecordDecl *getCXXRecord(const Expr *E) {
  QualType T = E->getType();
  if (const PointerType *PTy = T->getAs<PointerType>())
    T = PTy->getPointeeType();
  const RecordType *Ty = T->castAs<RecordType>();
  return cast<CXXRecordDecl>(Ty->getDecl());
}

// Note: This function also emits constructor calls to support the MSVC
// extension that allows explicit constructor function calls.
RValue CodeGenFunction::EmitCXXMemberCallExpr(const CXXMemberCallExpr *CE,
                                              ReturnValueSlot ReturnValue) {
  const Expr *callee = CE->getCallee()->IgnoreParens();

  if (isa<BinaryOperator>(callee))
    return EmitCXXMemberPointerCallExpr(CE, ReturnValue);

  const MemberExpr *ME = cast<MemberExpr>(callee);
  const CXXMethodDecl *MD = cast<CXXMethodDecl>(ME->getMemberDecl());

  if (MD->isStatic()) {
    // The method is static, emit it as we would a regular call.
    llvm::Value *Callee = CGM.GetAddrOfFunction(MD);
    return EmitCall(getContext().getPointerType(MD->getType()), Callee, CE,
                    ReturnValue);
  }

  bool HasQualifier = ME->hasQualifier();
  NestedNameSpecifier *Qualifier = HasQualifier ? ME->getQualifier() : nullptr;
  bool IsArrow = ME->isArrow();
  const Expr *Base = ME->getBase();

  return EmitCXXMemberOrOperatorMemberCallExpr(
      CE, MD, ReturnValue, HasQualifier, Qualifier, IsArrow, Base);
}
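
// For HLSL, matrix, vector, and patch subscripts are intercepted below and
// lowered directly (via the HLSL runtime or a GEP into a temporary) instead
// of being emitted as ordinary operator[] member calls.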
RValue CodeGenFunction::EmitCXXMemberOrOperatorMemberCallExpr(
    const CallExpr *CE, const CXXMethodDecl *MD, ReturnValueSlot ReturnValue,
    bool HasQualifier, NestedNameSpecifier *Qualifier, bool IsArrow,
    const Expr *Base) {
  assert(isa<CXXMemberCallExpr>(CE) || isa<CXXOperatorCallExpr>(CE));

  // HLSL Change Begins
  if (hlsl::IsHLSLMatType(Base->getType())) {
    if (const CXXOperatorCallExpr *opCall = dyn_cast<CXXOperatorCallExpr>(CE)) {
      assert(opCall->getOperator() == OverloadedOperatorKind::OO_Subscript &&
             "must be subscript");
      llvm::Value *This = nullptr;
      if (Base->getValueKind() != ExprValueKind::VK_RValue) {
        This = EmitLValue(Base).getAddress();
      } else {
        llvm::Value *Val = EmitScalarExpr(Base);
        This = CreateTempAlloca(Val->getType());
        CGM.getHLSLRuntime().EmitHLSLMatrixStore(*this, Val, This,
                                                 Base->getType());
      }
      llvm::Value *Idx = EmitScalarExpr(CE->getArg(1));
      llvm::Type *RetTy =
          ConvertType(getContext().getLValueReferenceType(CE->getType()));
      llvm::Value *matSub = CGM.getHLSLRuntime().EmitHLSLMatrixSubscript(
          *this, RetTy, This, Idx, Base->getType());
      return RValue::get(matSub);
    }
  }
  if (hlsl::IsHLSLVecType(Base->getType())) {
    if (const CXXOperatorCallExpr *opCall = dyn_cast<CXXOperatorCallExpr>(CE)) {
      assert(opCall->getOperator() == OverloadedOperatorKind::OO_Subscript &&
             "must be subscript");
      llvm::Value *This = nullptr;
      if (Base->getValueKind() != ExprValueKind::VK_RValue) {
        LValue LV = EmitLValue(Base);
        if (LV.isSimple()) {
          This = LV.getAddress();
          if (isa<ExtMatrixElementExpr>(Base)) {
            llvm::Value *Val = Builder.CreateLoad(This);
            This = CreateTempAlloca(Val->getType());
            Builder.CreateStore(Val, This);
          }
        } else {
          assert(LV.isExtVectorElt() && "must be ext vector here");
          This = LV.getExtVectorAddr();
          llvm::Constant *Elts = LV.getExtVectorElts();
          llvm::Type *Ty = ConvertType(LV.getType());
          llvm::Constant *zero = Builder.getInt32(0);
          llvm::Value *TmpThis = CreateTempAlloca(Ty);
          for (unsigned i = 0; i < Ty->getVectorNumElements(); i++) {
            llvm::Value *EltIdx = Elts->getAggregateElement(i);
            llvm::Value *EltGEP = Builder.CreateGEP(This, {zero, EltIdx});
            llvm::Value *TmpEltGEP =
                Builder.CreateGEP(TmpThis, {zero, Builder.getInt32(i)});
            llvm::Value *Elt = Builder.CreateLoad(EltGEP);
            Builder.CreateStore(Elt, TmpEltGEP);
          }
          This = TmpThis;
        }
      } else {
        llvm::Value *Val = EmitScalarExpr(Base);
        This = CreateTempAlloca(Val->getType());
        Builder.CreateStore(Val, This);
      }

      bool isBool = false;
      if (llvm::IntegerType *IT =
              dyn_cast<llvm::IntegerType>(This->getType()
                                              ->getPointerElementType()
                                              ->getVectorElementType())) {
        if (IT->getBitWidth() == 1) {
          isBool = true;
        }
      }

      llvm::Value *Idx = EmitScalarExpr(CE->getArg(1));
      llvm::Constant *zero = llvm::ConstantInt::get(Idx->getType(), 0);
      llvm::Value *Elt = Builder.CreateGEP(This, {zero, Idx});
      if (isBool) {
        // bool pointer is not i1 *.
        llvm::Type *BoolTy = llvm::IntegerType::get(
            getLLVMContext(), getContext().getTypeSize(CE->getType()));
        Elt = Builder.CreateBitCast(
            Elt, llvm::PointerType::get(
                     BoolTy, Elt->getType()->getPointerAddressSpace()));
      }
      return RValue::get(Elt);
    }
  }
  if (hlsl::IsHLSLOutputPatchType(Base->getType()) ||
      hlsl::IsHLSLInputPatchType(Base->getType())) {
    if (const CXXOperatorCallExpr *opCall = dyn_cast<CXXOperatorCallExpr>(CE)) {
      assert(opCall->getOperator() == OverloadedOperatorKind::OO_Subscript &&
             "must be subscript");
      llvm::Value *This = EmitLValue(Base).getAddress();
      llvm::Value *Idx = EmitScalarExpr(CE->getArg(1));
      llvm::Constant *zero = llvm::ConstantInt::get(Idx->getType(), 0);
      llvm::Value *Elt = Builder.CreateGEP(This, {zero, Idx});
      return RValue::get(Elt);
    }
  }
  // HLSL Change Ends

  // Compute the object pointer.
  bool CanUseVirtualCall = MD->isVirtual() && !HasQualifier;

  const CXXMethodDecl *DevirtualizedMethod = nullptr;
  if (CanUseVirtualCall && CanDevirtualizeMemberFunctionCall(Base, MD)) {
    const CXXRecordDecl *BestDynamicDecl = Base->getBestDynamicClassType();
    DevirtualizedMethod = MD->getCorrespondingMethodInClass(BestDynamicDecl);
    assert(DevirtualizedMethod);
    const CXXRecordDecl *DevirtualizedClass = DevirtualizedMethod->getParent();
    const Expr *Inner = Base->ignoreParenBaseCasts();
    if (DevirtualizedMethod->getReturnType().getCanonicalType() !=
        MD->getReturnType().getCanonicalType())
      // If the return types are not the same, this might be a case where more
      // code needs to run to compensate for it. For example, the derived
      // method might return a type that inherits from the return type of MD
      // and has a prefix.
      // For now we just avoid devirtualizing these covariant cases.
      DevirtualizedMethod = nullptr;
    else if (getCXXRecord(Inner) == DevirtualizedClass)
      // If the class of the Inner expression is where the dynamic method
      // is defined, build the this pointer from it.
      Base = Inner;
    else if (getCXXRecord(Base) != DevirtualizedClass) {
      // If the method is defined in a class that is not the best dynamic
      // one or the one of the full expression, we would have to build
      // a derived-to-base cast to compute the correct this pointer, but
      // we don't have support for that yet, so do a virtual call.
      DevirtualizedMethod = nullptr;
    }
  }

  llvm::Value *This;
  if (IsArrow)
    This = EmitScalarExpr(Base);
  else
    This = EmitLValue(Base).getAddress();

  if (MD->isTrivial() || (MD->isDefaulted() && MD->getParent()->isUnion())) {
    if (isa<CXXDestructorDecl>(MD)) return RValue::get(nullptr);
    if (isa<CXXConstructorDecl>(MD) &&
        cast<CXXConstructorDecl>(MD)->isDefaultConstructor())
      return RValue::get(nullptr);

    if (!MD->getParent()->mayInsertExtraPadding()) {
      if (MD->isCopyAssignmentOperator() || MD->isMoveAssignmentOperator()) {
        // We don't like to generate the trivial copy/move assignment operator
        // when it isn't necessary; just produce the proper effect here.
        // Special case: skip first argument of CXXOperatorCall (it is "this").
        unsigned ArgsToSkip = isa<CXXOperatorCallExpr>(CE) ? 1 : 0;
        llvm::Value *RHS =
            EmitLValue(*(CE->arg_begin() + ArgsToSkip)).getAddress();
        EmitAggregateAssign(This, RHS, CE->getType());
        return RValue::get(This);
      }

      if (isa<CXXConstructorDecl>(MD) &&
          cast<CXXConstructorDecl>(MD)->isCopyOrMoveConstructor()) {
        // Trivial move and copy ctor are the same.
        assert(CE->getNumArgs() == 1 && "unexpected argcount for trivial ctor");
        llvm::Value *RHS = EmitLValue(*CE->arg_begin()).getAddress();
        EmitAggregateCopy(This, RHS, CE->arg_begin()->getType());
        return RValue::get(This);
      }
      llvm_unreachable("unknown trivial member function");
    }
  }

  // Compute the function type we're calling.
  const CXXMethodDecl *CalleeDecl =
      DevirtualizedMethod ? DevirtualizedMethod : MD;
  const CGFunctionInfo *FInfo = nullptr;
  if (const auto *Dtor = dyn_cast<CXXDestructorDecl>(CalleeDecl))
    FInfo = &CGM.getTypes().arrangeCXXStructorDeclaration(
        Dtor, StructorType::Complete);
  else if (const auto *Ctor = dyn_cast<CXXConstructorDecl>(CalleeDecl))
    FInfo = &CGM.getTypes().arrangeCXXStructorDeclaration(
        Ctor, StructorType::Complete);
  else
    FInfo = &CGM.getTypes().arrangeCXXMethodDeclaration(CalleeDecl);

  llvm::FunctionType *Ty = CGM.getTypes().GetFunctionType(*FInfo);

  // C++ [class.virtual]p12:
  //   Explicit qualification with the scope operator (5.1) suppresses the
  //   virtual call mechanism.
  //
  // We also don't emit a virtual call if the base expression has a record type
  // because then we know what the type is.
  bool UseVirtualCall = CanUseVirtualCall && !DevirtualizedMethod;
  llvm::Value *Callee;

  if (const CXXDestructorDecl *Dtor = dyn_cast<CXXDestructorDecl>(MD)) {
    assert(CE->arg_begin() == CE->arg_end() &&
           "Destructor shouldn't have explicit parameters");
    assert(ReturnValue.isNull() && "Destructor shouldn't have return value");
    if (UseVirtualCall) {
      CGM.getCXXABI().EmitVirtualDestructorCall(
          *this, Dtor, Dtor_Complete, This, cast<CXXMemberCallExpr>(CE));
    } else {
      if (getLangOpts().AppleKext && MD->isVirtual() && HasQualifier)
        Callee = BuildAppleKextVirtualCall(MD, Qualifier, Ty);
      else if (!DevirtualizedMethod)
        Callee =
            CGM.getAddrOfCXXStructor(Dtor, StructorType::Complete, FInfo, Ty);
      else {
        const CXXDestructorDecl *DDtor =
            cast<CXXDestructorDecl>(DevirtualizedMethod);
        Callee = CGM.GetAddrOfFunction(GlobalDecl(DDtor, Dtor_Complete), Ty);
      }
      EmitCXXMemberOrOperatorCall(MD, Callee, ReturnValue, This,
                                  /*ImplicitParam=*/nullptr, QualType(), CE);
    }
    return RValue::get(nullptr);
  }

  if (const CXXConstructorDecl *Ctor = dyn_cast<CXXConstructorDecl>(MD)) {
    Callee = CGM.GetAddrOfFunction(GlobalDecl(Ctor, Ctor_Complete), Ty);
  } else if (UseVirtualCall) {
    Callee = CGM.getCXXABI().getVirtualFunctionPointer(*this, MD, This, Ty,
                                                       CE->getLocStart());
  } else {
    if (SanOpts.has(SanitizerKind::CFINVCall) &&
        MD->getParent()->isDynamicClass()) {
      llvm::Value *VTable = GetVTablePtr(This, Int8PtrTy);
      EmitVTablePtrCheckForCall(MD, VTable, CFITCK_NVCall, CE->getLocStart());
    }

    if (getLangOpts().AppleKext && MD->isVirtual() && HasQualifier)
      Callee = BuildAppleKextVirtualCall(MD, Qualifier, Ty);
    else if (!DevirtualizedMethod)
      Callee = CGM.GetAddrOfFunction(MD, Ty);
    else {
      Callee = CGM.GetAddrOfFunction(DevirtualizedMethod, Ty);
    }
  }

  if (MD->isVirtual()) {
    This = CGM.getCXXABI().adjustThisArgumentForVirtualFunctionCall(
        *this, MD, This, UseVirtualCall);
  }

  return EmitCXXMemberOrOperatorCall(MD, Callee, ReturnValue, This,
                                     /*ImplicitParam=*/nullptr, QualType(), CE);
}
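
// Emit a call through a pointer to member function, e.g. (obj.*memfn)(args).
// The ABI loads the actual function pointer from the member pointer value
// and may adjust 'This' in the process.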
RValue
CodeGenFunction::EmitCXXMemberPointerCallExpr(const CXXMemberCallExpr *E,
                                              ReturnValueSlot ReturnValue) {
  const BinaryOperator *BO =
      cast<BinaryOperator>(E->getCallee()->IgnoreParens());
  const Expr *BaseExpr = BO->getLHS();
  const Expr *MemFnExpr = BO->getRHS();

  const MemberPointerType *MPT =
      MemFnExpr->getType()->castAs<MemberPointerType>();
  const FunctionProtoType *FPT =
      MPT->getPointeeType()->castAs<FunctionProtoType>();
  const CXXRecordDecl *RD =
      cast<CXXRecordDecl>(MPT->getClass()->getAs<RecordType>()->getDecl());

  // Get the member function pointer.
  llvm::Value *MemFnPtr = EmitScalarExpr(MemFnExpr);

  // Emit the 'this' pointer.
  llvm::Value *This;
  if (BO->getOpcode() == BO_PtrMemI)
    This = EmitScalarExpr(BaseExpr);
  else
    This = EmitLValue(BaseExpr).getAddress();

  EmitTypeCheck(TCK_MemberCall, E->getExprLoc(), This,
                QualType(MPT->getClass(), 0));

  // Ask the ABI to load the callee.  Note that This is modified.
  llvm::Value *Callee =
      CGM.getCXXABI().EmitLoadOfMemberFunctionPointer(*this, BO, This,
                                                      MemFnPtr, MPT);

  CallArgList Args;

  QualType ThisType =
      getContext().getPointerType(getContext().getTagDeclType(RD));

  // Push the this ptr.
  Args.add(RValue::get(This), ThisType);

  RequiredArgs required = RequiredArgs::forPrototypePlus(FPT, 1);

  // And the rest of the call args
  EmitCallArgs(Args, FPT, E->arg_begin(), E->arg_end(), E->getDirectCallee());
  return EmitCall(CGM.getTypes().arrangeCXXMethodCall(Args, FPT, required),
                  Callee, ReturnValue, Args);
}

RValue
CodeGenFunction::EmitCXXOperatorMemberCallExpr(const CXXOperatorCallExpr *E,
                                               const CXXMethodDecl *MD,
                                               ReturnValueSlot ReturnValue) {
  assert(MD->isInstance() &&
         "Trying to emit a member call expr on a static method!");
  return EmitCXXMemberOrOperatorMemberCallExpr(
      E, MD, ReturnValue, /*HasQualifier=*/false, /*Qualifier=*/nullptr,
      /*IsArrow=*/false, E->getArg(0));
}

RValue CodeGenFunction::EmitCUDAKernelCallExpr(const CUDAKernelCallExpr *E,
                                               ReturnValueSlot ReturnValue) {
  return CGM.getCUDARuntime().EmitCUDAKernelCallExpr(*this, E, ReturnValue);
}

// HLSL Change Begins
RValue CodeGenFunction::EmitHLSLBuiltinCallExpr(const FunctionDecl *FD,
                                                const CallExpr *E,
                                                ReturnValueSlot ReturnValue) {
  return CGM.getHLSLRuntime().EmitHLSLBuiltinCallExpr(*this, FD, E,
                                                      ReturnValue);
}
// HLSL Change Ends

static void EmitNullBaseClassInitialization(CodeGenFunction &CGF,
                                            llvm::Value *DestPtr,
                                            const CXXRecordDecl *Base) {
  if (Base->isEmpty())
    return;

  DestPtr = CGF.EmitCastToVoidPtr(DestPtr);

  const ASTRecordLayout &Layout = CGF.getContext().getASTRecordLayout(Base);
  CharUnits Size = Layout.getNonVirtualSize();
  CharUnits Align = Layout.getNonVirtualAlignment();

  llvm::Value *SizeVal = CGF.CGM.getSize(Size);

  // If the type contains a pointer to data member we can't memset it to zero.
  // Instead, create a null constant and copy it to the destination.
  // TODO: there are other patterns besides zero that we can usefully memset,
  // like -1, which happens to be the pattern used by member-pointers.
  // TODO: isZeroInitializable can be over-conservative in the case where a
  // virtual base contains a member pointer.
  if (!CGF.CGM.getTypes().isZeroInitializable(Base)) {
    llvm::Constant *NullConstant = CGF.CGM.EmitNullConstantForBase(Base);

    llvm::GlobalVariable *NullVariable =
        new llvm::GlobalVariable(CGF.CGM.getModule(), NullConstant->getType(),
                                 /*isConstant=*/true,
                                 llvm::GlobalVariable::PrivateLinkage,
                                 NullConstant, Twine());
    NullVariable->setAlignment(Align.getQuantity());
    llvm::Value *SrcPtr = CGF.EmitCastToVoidPtr(NullVariable);

    // Get and call the appropriate llvm.memcpy overload.
    CGF.Builder.CreateMemCpy(DestPtr, SrcPtr, SizeVal, Align.getQuantity());
    return;
  }

  // Otherwise, just memset the whole thing to zero.  This is legal
  // because in LLVM, all default initializers (other than the ones we just
  // handled above) are guaranteed to have a bit pattern of all zeros.
  CGF.Builder.CreateMemSet(DestPtr, CGF.Builder.getInt8(0), SizeVal,
                           Align.getQuantity());
}
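
// Emit a CXXConstructExpr into the given aggregate slot: perform any required
// zero-initialization first, elide the construction when permitted, and
// otherwise emit either an array constructor loop or a single constructor
// call of the appropriate kind (complete, base, or delegating).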
void
CodeGenFunction::EmitCXXConstructExpr(const CXXConstructExpr *E,
                                      AggValueSlot Dest) {
  assert(!Dest.isIgnored() && "Must have a destination!");
  const CXXConstructorDecl *CD = E->getConstructor();

  // If we require zero initialization before (or instead of) calling the
  // constructor, as can be the case with a non-user-provided default
  // constructor, emit the zero initialization now, unless destination is
  // already zeroed.
  if (E->requiresZeroInitialization() && !Dest.isZeroed()) {
    switch (E->getConstructionKind()) {
    case CXXConstructExpr::CK_Delegating:
    case CXXConstructExpr::CK_Complete:
      EmitNullInitialization(Dest.getAddr(), E->getType());
      break;
    case CXXConstructExpr::CK_VirtualBase:
    case CXXConstructExpr::CK_NonVirtualBase:
      EmitNullBaseClassInitialization(*this, Dest.getAddr(), CD->getParent());
      break;
    }
  }

  // If this is a call to a trivial default constructor, do nothing.
  if (CD->isTrivial() && CD->isDefaultConstructor())
    return;

  // Elide the constructor if we're constructing from a temporary.
  // The temporary check is required because Sema sets this on NRVO
  // returns.
  if (getLangOpts().ElideConstructors && E->isElidable()) {
    assert(getContext().hasSameUnqualifiedType(E->getType(),
                                               E->getArg(0)->getType()));
    if (E->getArg(0)->isTemporaryObject(getContext(), CD->getParent())) {
      EmitAggExpr(E->getArg(0), Dest);
      return;
    }
  }

  if (const ConstantArrayType *arrayType
        = getContext().getAsConstantArrayType(E->getType())) {
    EmitCXXAggrConstructorCall(CD, arrayType, Dest.getAddr(), E);
  } else {
    CXXCtorType Type = Ctor_Complete;
    bool ForVirtualBase = false;
    bool Delegating = false;

    switch (E->getConstructionKind()) {
    case CXXConstructExpr::CK_Delegating:
      // We should be emitting a constructor; GlobalDecl will assert this
      Type = CurGD.getCtorType();
      Delegating = true;
      break;

    case CXXConstructExpr::CK_Complete:
      Type = Ctor_Complete;
      break;

    case CXXConstructExpr::CK_VirtualBase:
      ForVirtualBase = true;
      // fall-through

    case CXXConstructExpr::CK_NonVirtualBase:
      Type = Ctor_Base;
    }

    // Call the constructor.
    EmitCXXConstructorCall(CD, Type, ForVirtualBase, Delegating, Dest.getAddr(),
                           E);
  }
}

void
CodeGenFunction::EmitSynthesizedCXXCopyCtor(llvm::Value *Dest,
                                            llvm::Value *Src,
                                            const Expr *Exp) {
  if (const ExprWithCleanups *E = dyn_cast<ExprWithCleanups>(Exp))
    Exp = E->getSubExpr();
  assert(isa<CXXConstructExpr>(Exp) &&
         "EmitSynthesizedCXXCopyCtor - unknown copy ctor expr");
  const CXXConstructExpr* E = cast<CXXConstructExpr>(Exp);
  const CXXConstructorDecl *CD = E->getConstructor();
  RunCleanupsScope Scope(*this);

  // If we require zero initialization before (or instead of) calling the
  // constructor, as can be the case with a non-user-provided default
  // constructor, emit the zero initialization now.
  // FIXME. Do I still need this for a copy ctor synthesis?
  if (E->requiresZeroInitialization())
    EmitNullInitialization(Dest, E->getType());

  assert(!getContext().getAsConstantArrayType(E->getType())
         && "EmitSynthesizedCXXCopyCtor - Copied-in Array");
  EmitSynthesizedCXXCopyCtorCall(CD, Dest, Src, E);
}

static CharUnits CalculateCookiePadding(CodeGenFunction &CGF,
                                        const CXXNewExpr *E) {
  if (!E->isArray())
    return CharUnits::Zero();

  // No cookie is required if the operator new[] being used is the
  // reserved placement operator new[].
  if (E->getOperatorNew()->isReservedGlobalPlacementOperator())
    return CharUnits::Zero();

  return CGF.CGM.getCXXABI().GetArrayCookieSize(E);
}
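
// Compute the total allocation size for a new-expression as
//   size = numElements * typeSizeMultiplier + cookieSize
// carried out in size_t with overflow checking. On (possible) overflow the
// returned size is all-ones, so the allocation function will fail rather
// than allocate a too-small buffer. numElements and sizeWithoutCookie are
// returned through the reference parameters.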
static llvm::Value *EmitCXXNewAllocSize(CodeGenFunction &CGF,
                                        const CXXNewExpr *e,
                                        unsigned minElements,
                                        llvm::Value *&numElements,
                                        llvm::Value *&sizeWithoutCookie) {
  QualType type = e->getAllocatedType();

  if (!e->isArray()) {
    CharUnits typeSize = CGF.getContext().getTypeSizeInChars(type);
    sizeWithoutCookie
      = llvm::ConstantInt::get(CGF.SizeTy, typeSize.getQuantity());
    return sizeWithoutCookie;
  }

  // The width of size_t.
  unsigned sizeWidth = CGF.SizeTy->getBitWidth();

  // Figure out the cookie size.
  llvm::APInt cookieSize(sizeWidth,
                         CalculateCookiePadding(CGF, e).getQuantity());

  // Emit the array size expression.
  // We multiply the size of all dimensions for NumElements.
  // e.g. for 'int[2][3]', ElemType is 'int' and NumElements is 6.
  numElements = CGF.EmitScalarExpr(e->getArraySize());
  assert(isa<llvm::IntegerType>(numElements->getType()));

  // The number of elements can have an arbitrary integer type;
  // essentially, we need to multiply it by a constant factor, add a
  // cookie size, and verify that the result is representable as a
  // size_t.  That's just a gloss, though, and it's wrong in one
  // important way: if the count is negative, it's an error even if
  // the cookie size would bring the total size >= 0.
  bool isSigned
    = e->getArraySize()->getType()->isSignedIntegerOrEnumerationType();
  llvm::IntegerType *numElementsType
    = cast<llvm::IntegerType>(numElements->getType());
  unsigned numElementsWidth = numElementsType->getBitWidth();

  // Compute the constant factor.
  llvm::APInt arraySizeMultiplier(sizeWidth, 1);
  while (const ConstantArrayType *CAT
             = CGF.getContext().getAsConstantArrayType(type)) {
    type = CAT->getElementType();
    arraySizeMultiplier *= CAT->getSize();
  }

  CharUnits typeSize = CGF.getContext().getTypeSizeInChars(type);
  llvm::APInt typeSizeMultiplier(sizeWidth, typeSize.getQuantity());
  typeSizeMultiplier *= arraySizeMultiplier;

  // This will be a size_t.
  llvm::Value *size;

  // If someone is doing 'new int[42]' there is no need to do a dynamic check.
  // Don't bloat the -O0 code.
  if (llvm::ConstantInt *numElementsC =
        dyn_cast<llvm::ConstantInt>(numElements)) {
    const llvm::APInt &count = numElementsC->getValue();

    bool hasAnyOverflow = false;

    // If 'count' was a negative number, it's an overflow.
    if (isSigned && count.isNegative())
      hasAnyOverflow = true;

    // We want to do all this arithmetic in size_t.  If numElements is
    // wider than that, check whether it's already too big, and if so,
    // overflow.
    else if (numElementsWidth > sizeWidth &&
             numElementsWidth - sizeWidth > count.countLeadingZeros())
      hasAnyOverflow = true;

    // Okay, compute a count at the right width.
    llvm::APInt adjustedCount = count.zextOrTrunc(sizeWidth);

    // If there is a brace-initializer, we cannot allocate fewer elements than
    // there are initializers. If we do, that's treated like an overflow.
    if (adjustedCount.ult(minElements))
      hasAnyOverflow = true;

    // Scale numElements by that.  This might overflow, but we don't
    // care because it only overflows if allocationSize does, too, and
    // if that overflows then we shouldn't use this.
    numElements = llvm::ConstantInt::get(CGF.SizeTy,
                                         adjustedCount * arraySizeMultiplier);

    // Compute the size before cookie, and track whether it overflowed.
    bool overflow;
    llvm::APInt allocationSize
      = adjustedCount.umul_ov(typeSizeMultiplier, overflow);
    hasAnyOverflow |= overflow;

    // Add in the cookie, and check whether it's overflowed.
    if (cookieSize != 0) {
      // Save the current size without a cookie.  This shouldn't be
      // used if there was overflow.
      sizeWithoutCookie = llvm::ConstantInt::get(CGF.SizeTy, allocationSize);

      allocationSize = allocationSize.uadd_ov(cookieSize, overflow);
      hasAnyOverflow |= overflow;
    }

    // On overflow, produce a -1 so operator new will fail.
    if (hasAnyOverflow) {
      size = llvm::Constant::getAllOnesValue(CGF.SizeTy);
    } else {
      size = llvm::ConstantInt::get(CGF.SizeTy, allocationSize);
    }

  // Otherwise, we might need to use the overflow intrinsics.
  } else {
    // There are up to five conditions we need to test for:
    // 1) if isSigned, we need to check whether numElements is negative;
    // 2) if numElementsWidth > sizeWidth, we need to check whether
    //    numElements is larger than something representable in size_t;
    // 3) if minElements > 0, we need to check whether numElements is smaller
    //    than that.
    // 4) we need to compute
    //      sizeWithoutCookie := numElements * typeSizeMultiplier
    //    and check whether it overflows; and
    // 5) if we need a cookie, we need to compute
    //      size := sizeWithoutCookie + cookieSize
    //    and check whether it overflows.

    llvm::Value *hasOverflow = nullptr;

    // If numElementsWidth > sizeWidth, then one way or another, we're
    // going to have to do a comparison for (2), and this happens to
    // take care of (1), too.
    if (numElementsWidth > sizeWidth) {
      llvm::APInt threshold(numElementsWidth, 1);
      threshold <<= sizeWidth;

      llvm::Value *thresholdV
        = llvm::ConstantInt::get(numElementsType, threshold);

      hasOverflow = CGF.Builder.CreateICmpUGE(numElements, thresholdV);
      numElements = CGF.Builder.CreateTrunc(numElements, CGF.SizeTy);

    // Otherwise, if we're signed, we want to sext up to size_t.
    } else if (isSigned) {
      if (numElementsWidth < sizeWidth)
        numElements = CGF.Builder.CreateSExt(numElements, CGF.SizeTy);

      // If there's a non-1 type size multiplier, then we can do the
      // signedness check at the same time as we do the multiply
      // because a negative number times anything will cause an
      // unsigned overflow.  Otherwise, we have to do it here. But at least
      // in this case, we can subsume the >= minElements check.
      if (typeSizeMultiplier == 1)
        hasOverflow = CGF.Builder.CreateICmpSLT(numElements,
                              llvm::ConstantInt::get(CGF.SizeTy, minElements));

    // Otherwise, zext up to size_t if necessary.
    } else if (numElementsWidth < sizeWidth) {
      numElements = CGF.Builder.CreateZExt(numElements, CGF.SizeTy);
    }

    assert(numElements->getType() == CGF.SizeTy);

    if (minElements) {
      // Don't allow allocation of fewer elements than we have initializers.
      if (!hasOverflow) {
        hasOverflow = CGF.Builder.CreateICmpULT(numElements,
                              llvm::ConstantInt::get(CGF.SizeTy, minElements));
      } else if (numElementsWidth > sizeWidth) {
        // The other existing overflow subsumes this check.
        // We do an unsigned comparison, since any signed value < -1 is
        // taken care of either above or below.
        hasOverflow = CGF.Builder.CreateOr(hasOverflow,
                          CGF.Builder.CreateICmpULT(numElements,
                              llvm::ConstantInt::get(CGF.SizeTy, minElements)));
      }
    }

    size = numElements;

    // Multiply by the type size if necessary.  This multiplier
    // includes all the factors for nested arrays.
    //
    // This step also causes numElements to be scaled up by the
    // nested-array factor if necessary.  Overflow on this computation
    // can be ignored because the result shouldn't be used if
    // allocation fails.
    if (typeSizeMultiplier != 1) {
      llvm::Value *umul_with_overflow
        = CGF.CGM.getIntrinsic(llvm::Intrinsic::umul_with_overflow, CGF.SizeTy);

      llvm::Value *tsmV =
        llvm::ConstantInt::get(CGF.SizeTy, typeSizeMultiplier);
      llvm::Value *result =
          CGF.Builder.CreateCall(umul_with_overflow, {size, tsmV});

      llvm::Value *overflowed = CGF.Builder.CreateExtractValue(result, 1);
      if (hasOverflow)
        hasOverflow = CGF.Builder.CreateOr(hasOverflow, overflowed);
      else
        hasOverflow = overflowed;

      size = CGF.Builder.CreateExtractValue(result, 0);

      // Also scale up numElements by the array size multiplier.
      if (arraySizeMultiplier != 1) {
        // If the base element type size is 1, then we can re-use the
        // multiply we just did.
        if (typeSize.isOne()) {
          assert(arraySizeMultiplier == typeSizeMultiplier);
          numElements = size;

        // Otherwise we need a separate multiply.
        } else {
          llvm::Value *asmV =
            llvm::ConstantInt::get(CGF.SizeTy, arraySizeMultiplier);
          numElements = CGF.Builder.CreateMul(numElements, asmV);
        }
      }
    } else {
      // numElements doesn't need to be scaled.
      assert(arraySizeMultiplier == 1);
    }

    // Add in the cookie size if necessary.
    if (cookieSize != 0) {
      sizeWithoutCookie = size;

      llvm::Value *uadd_with_overflow
        = CGF.CGM.getIntrinsic(llvm::Intrinsic::uadd_with_overflow, CGF.SizeTy);

      llvm::Value *cookieSizeV = llvm::ConstantInt::get(CGF.SizeTy, cookieSize);
      llvm::Value *result =
          CGF.Builder.CreateCall(uadd_with_overflow, {size, cookieSizeV});

      llvm::Value *overflowed = CGF.Builder.CreateExtractValue(result, 1);
      if (hasOverflow)
        hasOverflow = CGF.Builder.CreateOr(hasOverflow, overflowed);
      else
        hasOverflow = overflowed;

      size = CGF.Builder.CreateExtractValue(result, 0);
    }

    // If we had any possibility of dynamic overflow, make a select to
    // overwrite 'size' with an all-ones value, which should cause
    // operator new to throw.
    if (hasOverflow)
      size = CGF.Builder.CreateSelect(hasOverflow,
                                 llvm::Constant::getAllOnesValue(CGF.SizeTy),
                                      size);
  }

  if (cookieSize == 0)
    sizeWithoutCookie = size;
  else
    assert(sizeWithoutCookie && "didn't set sizeWithoutCookie?");

  return size;
}
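
// Illustrative sketch (not in the original source): for 'new S[n]' on a
// 64-bit target with sizeof(S) == 4 and an 8-byte cookie, the path above
// emits roughly
//   %mul  = call { i64, i1 } @llvm.umul.with.overflow.i64(i64 %n, i64 4)
//   %add  = call { i64, i1 } @llvm.uadd.with.overflow.i64(i64 %mul.0, i64 8)
//   %size = select i1 %overflow, i64 -1, i64 %add.0
// so a too-large 'n' makes operator new fail instead of under-allocating.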

static void StoreAnyExprIntoOneUnit(CodeGenFunction &CGF, const Expr *Init,
                                    QualType AllocType, llvm::Value *NewPtr) {
  // FIXME: Refactor with EmitExprAsInit.
  CharUnits Alignment = CGF.getContext().getTypeAlignInChars(AllocType);
  switch (CGF.getEvaluationKind(AllocType)) {
  case TEK_Scalar:
    CGF.EmitScalarInit(Init, nullptr,
                       CGF.MakeAddrLValue(NewPtr, AllocType, Alignment), false);
    return;
  case TEK_Complex:
    CGF.EmitComplexExprIntoLValue(Init, CGF.MakeAddrLValue(NewPtr, AllocType,
                                                           Alignment),
                                  /*isInit*/ true);
    return;
  case TEK_Aggregate: {
    AggValueSlot Slot
      = AggValueSlot::forAddr(NewPtr, Alignment, AllocType.getQualifiers(),
                              AggValueSlot::IsDestructed,
                              AggValueSlot::DoesNotNeedGCBarriers,
                              AggValueSlot::IsNotAliased);
    CGF.EmitAggExpr(Init, Slot);
    return;
  }
  }
  llvm_unreachable("bad evaluation kind");
}
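
// Initialize the elements of an array new-expression. The strategy: emit the
// explicit initializer-list elements one by one (with a partial-destruction
// cleanup if the elements need it), then handle any remaining elements with
// the array filler, preferring a single memset or a constructor-call loop
// over a general element-by-element loop when possible.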
void CodeGenFunction::EmitNewArrayInitializer(
    const CXXNewExpr *E, QualType ElementType, llvm::Type *ElementTy,
    llvm::Value *BeginPtr, llvm::Value *NumElements,
    llvm::Value *AllocSizeWithoutCookie) {
  // If we have a type with trivial initialization and no initializer,
  // there's nothing to do.
  if (!E->hasInitializer())
    return;

  llvm::Value *CurPtr = BeginPtr;

  unsigned InitListElements = 0;

  const Expr *Init = E->getInitializer();
  llvm::AllocaInst *EndOfInit = nullptr;
  QualType::DestructionKind DtorKind = ElementType.isDestructedType();
  EHScopeStack::stable_iterator Cleanup;
  llvm::Instruction *CleanupDominator = nullptr;

  // If the initializer is an initializer list, first do the explicit elements.
  if (const InitListExpr *ILE = dyn_cast<InitListExpr>(Init)) {
    InitListElements = ILE->getNumInits();

    // If this is a multi-dimensional array new, we will initialize multiple
    // elements with each init list element.
    QualType AllocType = E->getAllocatedType();
    if (const ConstantArrayType *CAT = dyn_cast_or_null<ConstantArrayType>(
            AllocType->getAsArrayTypeUnsafe())) {
      unsigned AS = CurPtr->getType()->getPointerAddressSpace();
      ElementTy = ConvertTypeForMem(AllocType);
      llvm::Type *AllocPtrTy = ElementTy->getPointerTo(AS);
      CurPtr = Builder.CreateBitCast(CurPtr, AllocPtrTy);
      InitListElements *= getContext().getConstantArrayElementCount(CAT);
    }

    // Enter a partial-destruction Cleanup if necessary.
    if (needsEHCleanup(DtorKind)) {
      // In principle we could tell the Cleanup where we are more
      // directly, but the control flow can get so varied here that it
      // would actually be quite complex.  Therefore we go through an
      // alloca.
      EndOfInit = CreateTempAlloca(BeginPtr->getType(), "array.init.end");
      CleanupDominator = Builder.CreateStore(BeginPtr, EndOfInit);
      pushIrregularPartialArrayCleanup(BeginPtr, EndOfInit, ElementType,
                                       getDestroyer(DtorKind));
      Cleanup = EHStack.stable_begin();
    }

    for (unsigned i = 0, e = ILE->getNumInits(); i != e; ++i) {
      // Tell the cleanup that it needs to destroy up to this
      // element.  TODO: some of these stores can be trivially
      // observed to be unnecessary.
      if (EndOfInit)
        Builder.CreateStore(Builder.CreateBitCast(CurPtr, BeginPtr->getType()),
                            EndOfInit);
      // FIXME: If the last initializer is an incomplete initializer list for
      // an array, and we have an array filler, we can fold together the two
      // initialization loops.
      StoreAnyExprIntoOneUnit(*this, ILE->getInit(i),
                              ILE->getInit(i)->getType(), CurPtr);
      CurPtr = Builder.CreateConstInBoundsGEP1_32(ElementTy, CurPtr, 1,
                                                  "array.exp.next");
    }

    // The remaining elements are filled with the array filler expression.
    Init = ILE->getArrayFiller();

    // Extract the initializer for the individual array elements by pulling
    // out the array filler from all the nested initializer lists. This avoids
    // generating a nested loop for the initialization.
    while (Init && Init->getType()->isConstantArrayType()) {
      auto *SubILE = dyn_cast<InitListExpr>(Init);
      if (!SubILE)
        break;
      assert(SubILE->getNumInits() == 0 && "explicit inits in array filler?");
      Init = SubILE->getArrayFiller();
    }

    // Switch back to initializing one base element at a time.
    CurPtr = Builder.CreateBitCast(CurPtr, BeginPtr->getType());
  }

  // Attempt to perform zero-initialization using memset.
  auto TryMemsetInitialization = [&]() -> bool {
    // FIXME: If the type is a pointer-to-data-member under the Itanium ABI,
    // we can initialize with a memset to -1.
    if (!CGM.getTypes().isZeroInitializable(ElementType))
      return false;

    // Optimization: since zero initialization will just set the memory
    // to all zeroes, generate a single memset to do it in one shot.

    // Subtract out the size of any elements we've already initialized.
    auto *RemainingSize = AllocSizeWithoutCookie;
    if (InitListElements) {
      // We know this can't overflow; we check this when doing the allocation.
      auto *InitializedSize = llvm::ConstantInt::get(
          RemainingSize->getType(),
          getContext().getTypeSizeInChars(ElementType).getQuantity() *
              InitListElements);
      RemainingSize = Builder.CreateSub(RemainingSize, InitializedSize);
    }

    // Create the memset.
    CharUnits Alignment = getContext().getTypeAlignInChars(ElementType);
    Builder.CreateMemSet(CurPtr, Builder.getInt8(0), RemainingSize,
                         Alignment.getQuantity(), false);
    return true;
  };

  // If all elements have already been initialized, skip any further
  // initialization.
  llvm::ConstantInt *ConstNum = dyn_cast<llvm::ConstantInt>(NumElements);
  if (ConstNum && ConstNum->getZExtValue() <= InitListElements) {
    // If there was a Cleanup, deactivate it.
    if (CleanupDominator)
      DeactivateCleanupBlock(Cleanup, CleanupDominator);
    return;
  }

  assert(Init && "have trailing elements to initialize but no initializer");

  // If this is a constructor call, try to optimize it out, and failing that
  // emit a single loop to initialize all remaining elements.
  if (const CXXConstructExpr *CCE = dyn_cast<CXXConstructExpr>(Init)) {
    CXXConstructorDecl *Ctor = CCE->getConstructor();
    if (Ctor->isTrivial()) {
      // If new expression did not specify value-initialization, then there
      // is no initialization.
      if (!CCE->requiresZeroInitialization() || Ctor->getParent()->isEmpty())
        return;

      if (TryMemsetInitialization())
        return;
    }

    // Store the new Cleanup position for irregular Cleanups.
    //
    // FIXME: Share this cleanup with the constructor call emission rather than
    // having it create a cleanup of its own.
    if (EndOfInit) Builder.CreateStore(CurPtr, EndOfInit);

    // Emit a constructor call loop to initialize the remaining elements.
    if (InitListElements)
      NumElements = Builder.CreateSub(
          NumElements,
          llvm::ConstantInt::get(NumElements->getType(), InitListElements));
    EmitCXXAggrConstructorCall(Ctor, NumElements, CurPtr, CCE,
                               CCE->requiresZeroInitialization());
    return;
  }

  // If this is value-initialization, we can usually use memset.
  ImplicitValueInitExpr IVIE(ElementType);
  if (isa<ImplicitValueInitExpr>(Init)) {
    if (TryMemsetInitialization())
      return;

    // Switch to an ImplicitValueInitExpr for the element type. This handles
    // only one case: multidimensional array new of pointers to members. In
    // all other cases, we already have an initializer for the array element.
    Init = &IVIE;
  }

  // At this point we should have found an initializer for the individual
  // elements of the array.
  assert(getContext().hasSameUnqualifiedType(ElementType, Init->getType()) &&
         "got wrong type of element to initialize");

  // If we have an empty initializer list, we can usually use memset.
  if (auto *ILE = dyn_cast<InitListExpr>(Init))
    if (ILE->getNumInits() == 0 && TryMemsetInitialization())
      return;

  // If we have a struct whose every field is value-initialized, we can
  // usually use memset.
  if (auto *ILE = dyn_cast<InitListExpr>(Init)) {
    if (const RecordType *RType = ILE->getType()->getAs<RecordType>()) {
      if (RType->getDecl()->isStruct()) {
        unsigned NumFields = 0;
        for (auto *Field : RType->getDecl()->fields())
          if (!Field->isUnnamedBitfield())
            ++NumFields;
        if (ILE->getNumInits() == NumFields)
          for (unsigned i = 0, e = ILE->getNumInits(); i != e; ++i)
            if (!isa<ImplicitValueInitExpr>(ILE->getInit(i)))
              --NumFields;
        if (ILE->getNumInits() == NumFields && TryMemsetInitialization())
          return;
      }
    }
  }

  // Create the loop blocks.
  llvm::BasicBlock *EntryBB = Builder.GetInsertBlock();
  llvm::BasicBlock *LoopBB = createBasicBlock("new.loop");
  llvm::BasicBlock *ContBB = createBasicBlock("new.loop.end");

  // Find the end of the array, hoisted out of the loop.
  llvm::Value *EndPtr =
    Builder.CreateInBoundsGEP(BeginPtr, NumElements, "array.end");

  // If the number of elements isn't constant, we have to now check if there is
  // anything left to initialize.
  if (!ConstNum) {
    llvm::Value *IsEmpty = Builder.CreateICmpEQ(CurPtr, EndPtr,
                                                "array.isempty");
    Builder.CreateCondBr(IsEmpty, ContBB, LoopBB);
  }

  // Enter the loop.
  EmitBlock(LoopBB);

  // Set up the current-element phi.
  llvm::PHINode *CurPtrPhi =
    Builder.CreatePHI(CurPtr->getType(), 2, "array.cur");
  CurPtrPhi->addIncoming(CurPtr, EntryBB);
  CurPtr = CurPtrPhi;

  // Store the new Cleanup position for irregular Cleanups.
  if (EndOfInit) Builder.CreateStore(CurPtr, EndOfInit);

  // Enter a partial-destruction Cleanup if necessary.
  if (!CleanupDominator && needsEHCleanup(DtorKind)) {
    pushRegularPartialArrayCleanup(BeginPtr, CurPtr, ElementType,
                                   getDestroyer(DtorKind));
    Cleanup = EHStack.stable_begin();
    CleanupDominator = Builder.CreateUnreachable();
  }

  // Emit the initializer into this element.
  StoreAnyExprIntoOneUnit(*this, Init, Init->getType(), CurPtr);

  // Leave the Cleanup if we entered one.
  if (CleanupDominator) {
    DeactivateCleanupBlock(Cleanup, CleanupDominator);
    CleanupDominator->eraseFromParent();
  }

  // Advance to the next element by adjusting the pointer type as necessary.
  llvm::Value *NextPtr =
      Builder.CreateConstInBoundsGEP1_32(ElementTy, CurPtr, 1, "array.next");

  // Check whether we've gotten to the end of the array and, if so,
  // exit the loop.
  llvm::Value *IsEnd = Builder.CreateICmpEQ(NextPtr, EndPtr, "array.atend");
  Builder.CreateCondBr(IsEnd, ContBB, LoopBB);
  CurPtrPhi->addIncoming(NextPtr, Builder.GetInsertBlock());

  EmitBlock(ContBB);
}
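
// Dispatch a new-expression initializer to the array path above or to a
// single-object store.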
static void EmitNewInitializer(CodeGenFunction &CGF, const CXXNewExpr *E,
                               QualType ElementType, llvm::Type *ElementTy,
                               llvm::Value *NewPtr, llvm::Value *NumElements,
                               llvm::Value *AllocSizeWithoutCookie) {
  ApplyDebugLocation DL(CGF, E);
  if (E->isArray())
    CGF.EmitNewArrayInitializer(E, ElementType, ElementTy, NewPtr, NumElements,
                                AllocSizeWithoutCookie);
  else if (const Expr *Init = E->getInitializer())
    StoreAnyExprIntoOneUnit(CGF, Init, E->getAllocatedType(), NewPtr);
}

/// Emit a call to an operator new or operator delete function, as implicitly
/// created by new-expressions and delete-expressions.
static RValue EmitNewDeleteCall(CodeGenFunction &CGF,
                                const FunctionDecl *Callee,
                                const FunctionProtoType *CalleeType,
                                const CallArgList &Args) {
  llvm::Instruction *CallOrInvoke;
  llvm::Value *CalleeAddr = CGF.CGM.GetAddrOfFunction(Callee);
  RValue RV =
      CGF.EmitCall(CGF.CGM.getTypes().arrangeFreeFunctionCall(
                       Args, CalleeType, /*chainCall=*/false),
                   CalleeAddr, ReturnValueSlot(), Args, Callee, &CallOrInvoke);

  /// C++1y [expr.new]p10:
  ///   [In a new-expression,] an implementation is allowed to omit a call
  ///   to a replaceable global allocation function.
  ///
  /// We model such elidable calls with the 'builtin' attribute.
  llvm::Function *Fn = dyn_cast<llvm::Function>(CalleeAddr);
  if (Callee->isReplaceableGlobalAllocationFunction() &&
      Fn && Fn->hasFnAttribute(llvm::Attribute::NoBuiltin)) {
    // FIXME: Add addAttribute to CallSite.
    if (llvm::CallInst *CI = dyn_cast<llvm::CallInst>(CallOrInvoke))
      CI->addAttribute(llvm::AttributeSet::FunctionIndex,
                       llvm::Attribute::Builtin);
    else if (llvm::InvokeInst *II = dyn_cast<llvm::InvokeInst>(CallOrInvoke))
      II->addAttribute(llvm::AttributeSet::FunctionIndex,
                       llvm::Attribute::Builtin);
    else
      llvm_unreachable("unexpected kind of call instruction");
  }

  return RV;
}
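
// Illustrative: for 'int *p = new int; delete p;' against the replaceable
// global allocator, both calls get the 'builtin' attribute here, which is
// what permits LLVM to treat the matched new/delete pair as elidable per
// C++1y [expr.new]p10.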

RValue CodeGenFunction::EmitBuiltinNewDeleteCall(const FunctionProtoType *Type,
                                                 const Expr *Arg,
                                                 bool IsDelete) {
  CallArgList Args;
  const Stmt *ArgS = Arg;
  EmitCallArgs(Args, *Type->param_type_begin(),
               ConstExprIterator(&ArgS), ConstExprIterator(&ArgS + 1));

  // Find the allocation or deallocation function that we're calling.
  ASTContext &Ctx = getContext();
  DeclarationName Name = Ctx.DeclarationNames
      .getCXXOperatorName(IsDelete ? OO_Delete : OO_New);
  for (auto *Decl : Ctx.getTranslationUnitDecl()->lookup(Name))
    if (auto *FD = dyn_cast<FunctionDecl>(Decl))
      if (Ctx.hasSameType(FD->getType(), QualType(Type, 0)))
        return EmitNewDeleteCall(*this, cast<FunctionDecl>(Decl), Type, Args);
  llvm_unreachable("predeclared global operator new/delete is missing");
}

namespace {
  /// A cleanup to call the given 'operator delete' function upon
  /// abnormal exit from a new expression.
  class CallDeleteDuringNew : public EHScopeStack::Cleanup {
    size_t NumPlacementArgs;
    const FunctionDecl *OperatorDelete;
    llvm::Value *Ptr;
    llvm::Value *AllocSize;

    RValue *getPlacementArgs() { return reinterpret_cast<RValue*>(this+1); }

  public:
    static size_t getExtraSize(size_t NumPlacementArgs) {
      return NumPlacementArgs * sizeof(RValue);
    }

    CallDeleteDuringNew(size_t NumPlacementArgs,
                        const FunctionDecl *OperatorDelete,
                        llvm::Value *Ptr,
                        llvm::Value *AllocSize)
      : NumPlacementArgs(NumPlacementArgs), OperatorDelete(OperatorDelete),
        Ptr(Ptr), AllocSize(AllocSize) {}

    void setPlacementArg(unsigned I, RValue Arg) {
      assert(I < NumPlacementArgs && "index out of range");
      getPlacementArgs()[I] = Arg;
    }

    void Emit(CodeGenFunction &CGF, Flags flags) override {
      const FunctionProtoType *FPT
        = OperatorDelete->getType()->getAs<FunctionProtoType>();
      assert(FPT->getNumParams() == NumPlacementArgs + 1 ||
             (FPT->getNumParams() == 2 && NumPlacementArgs == 0));

      CallArgList DeleteArgs;

      // The first argument is always a void*.
      FunctionProtoType::param_type_iterator AI = FPT->param_type_begin();
      DeleteArgs.add(RValue::get(Ptr), *AI++);

      // A member 'operator delete' can take an extra 'size_t' argument.
      if (FPT->getNumParams() == NumPlacementArgs + 2)
        DeleteArgs.add(RValue::get(AllocSize), *AI++);

      // Pass the rest of the arguments, which must match exactly.
      for (unsigned I = 0; I != NumPlacementArgs; ++I)
        DeleteArgs.add(getPlacementArgs()[I], *AI++);

      // Call 'operator delete'.
      EmitNewDeleteCall(CGF, OperatorDelete, FPT, DeleteArgs);
    }
  };

  /// A cleanup to call the given 'operator delete' function upon
  /// abnormal exit from a new expression when the new expression is
  /// conditional.
  class CallDeleteDuringConditionalNew : public EHScopeStack::Cleanup {
    size_t NumPlacementArgs;
    const FunctionDecl *OperatorDelete;
    DominatingValue<RValue>::saved_type Ptr;
    DominatingValue<RValue>::saved_type AllocSize;

    DominatingValue<RValue>::saved_type *getPlacementArgs() {
      return reinterpret_cast<DominatingValue<RValue>::saved_type*>(this+1);
    }

  public:
    static size_t getExtraSize(size_t NumPlacementArgs) {
      return NumPlacementArgs * sizeof(DominatingValue<RValue>::saved_type);
    }

    CallDeleteDuringConditionalNew(size_t NumPlacementArgs,
                                   const FunctionDecl *OperatorDelete,
                                   DominatingValue<RValue>::saved_type Ptr,
                                   DominatingValue<RValue>::saved_type AllocSize)
      : NumPlacementArgs(NumPlacementArgs), OperatorDelete(OperatorDelete),
        Ptr(Ptr), AllocSize(AllocSize) {}

    void setPlacementArg(unsigned I, DominatingValue<RValue>::saved_type Arg) {
      assert(I < NumPlacementArgs && "index out of range");
      getPlacementArgs()[I] = Arg;
    }

    void Emit(CodeGenFunction &CGF, Flags flags) override {
      const FunctionProtoType *FPT
        = OperatorDelete->getType()->getAs<FunctionProtoType>();
      assert(FPT->getNumParams() == NumPlacementArgs + 1 ||
             (FPT->getNumParams() == 2 && NumPlacementArgs == 0));

      CallArgList DeleteArgs;

      // The first argument is always a void*.
      FunctionProtoType::param_type_iterator AI = FPT->param_type_begin();
      DeleteArgs.add(Ptr.restore(CGF), *AI++);

      // A member 'operator delete' can take an extra 'size_t' argument.
      if (FPT->getNumParams() == NumPlacementArgs + 2) {
        RValue RV = AllocSize.restore(CGF);
        DeleteArgs.add(RV, *AI++);
      }

      // Pass the rest of the arguments, which must match exactly.
      for (unsigned I = 0; I != NumPlacementArgs; ++I) {
        RValue RV = getPlacementArgs()[I].restore(CGF);
        DeleteArgs.add(RV, *AI++);
      }

      // Call 'operator delete'.
      EmitNewDeleteCall(CGF, OperatorDelete, FPT, DeleteArgs);
    }
  };
}
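
// Note on the variable-sized cleanups above: the EHScopeStack allocates
// sizeof(Cleanup) + getExtraSize(N) bytes for them, so the N saved placement
// arguments live immediately after the cleanup object itself; that is why
// getPlacementArgs() is simply a reinterpret_cast over 'this + 1'.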

/// Enter a cleanup to call 'operator delete' if the initializer in a
/// new-expression throws.
static void EnterNewDeleteCleanup(CodeGenFunction &CGF,
                                  const CXXNewExpr *E,
                                  llvm::Value *NewPtr,
                                  llvm::Value *AllocSize,
                                  const CallArgList &NewArgs) {
  // If we're not inside a conditional branch, then the cleanup will
  // dominate and we can do the easier (and more efficient) thing.
  if (!CGF.isInConditionalBranch()) {
    CallDeleteDuringNew *Cleanup = CGF.EHStack
      .pushCleanupWithExtra<CallDeleteDuringNew>(EHCleanup,
                                                 E->getNumPlacementArgs(),
                                                 E->getOperatorDelete(),
                                                 NewPtr, AllocSize);
    for (unsigned I = 0, N = E->getNumPlacementArgs(); I != N; ++I)
      Cleanup->setPlacementArg(I, NewArgs[I+1].RV);

    return;
  }

  // Otherwise, we need to save all this stuff.
  DominatingValue<RValue>::saved_type SavedNewPtr =
    DominatingValue<RValue>::save(CGF, RValue::get(NewPtr));
  DominatingValue<RValue>::saved_type SavedAllocSize =
    DominatingValue<RValue>::save(CGF, RValue::get(AllocSize));

  CallDeleteDuringConditionalNew *Cleanup = CGF.EHStack
    .pushCleanupWithExtra<CallDeleteDuringConditionalNew>(EHCleanup,
                                                 E->getNumPlacementArgs(),
                                                 E->getOperatorDelete(),
                                                 SavedNewPtr,
                                                 SavedAllocSize);
  for (unsigned I = 0, N = E->getNumPlacementArgs(); I != N; ++I)
    Cleanup->setPlacementArg(I,
                     DominatingValue<RValue>::save(CGF, NewArgs[I+1].RV));

  CGF.initFullExprCleanup();
}
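
// For example (illustrative): in 'p = cond ? new T(x) : nullptr', the
// new-expression is emitted inside a conditional branch, so the pointer,
// size, and placement arguments must be saved as DominatingValues and
// restored when (and if) the cleanup actually runs.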

llvm::Value *CodeGenFunction::EmitCXXNewExpr(const CXXNewExpr *E) {
  // The element type being allocated.
  QualType allocType = getContext().getBaseElementType(E->getAllocatedType());

  // 1. Build a call to the allocation function.
  FunctionDecl *allocator = E->getOperatorNew();
  const FunctionProtoType *allocatorType =
    allocator->getType()->castAs<FunctionProtoType>();

  CallArgList allocatorArgs;

  // The allocation size is the first argument.
  QualType sizeType = getContext().getSizeType();

  // If there is a brace-initializer, cannot allocate fewer elements than inits.
  unsigned minElements = 0;
  if (E->isArray() && E->hasInitializer()) {
    if (const InitListExpr *ILE = dyn_cast<InitListExpr>(E->getInitializer()))
      minElements = ILE->getNumInits();
  }

  llvm::Value *numElements = nullptr;
  llvm::Value *allocSizeWithoutCookie = nullptr;
  llvm::Value *allocSize =
    EmitCXXNewAllocSize(*this, E, minElements, numElements,
                        allocSizeWithoutCookie);

  allocatorArgs.add(RValue::get(allocSize), sizeType);

  // We start at 1 here because the first argument (the allocation size)
  // has already been emitted.
  EmitCallArgs(allocatorArgs, allocatorType, E->placement_arg_begin(),
               E->placement_arg_end(), /* CalleeDecl */ nullptr,
               /*ParamsToSkip*/ 1);

  // Emit the allocation call.  If the allocator is a global placement
  // operator, just "inline" it directly.
  RValue RV;
  if (allocator->isReservedGlobalPlacementOperator()) {
    assert(allocatorArgs.size() == 2);
    RV = allocatorArgs[1].RV;
    // TODO: kill any unnecessary computations done for the size
    // argument.
  } else {
    RV = EmitNewDeleteCall(*this, allocator, allocatorType, allocatorArgs);
  }

  // Emit a null check on the allocation result if the allocation
  // function is allowed to return null (because it has a non-throwing
  // exception spec or is the reserved placement new) and we have an
  // interesting initializer.
  bool nullCheck = E->shouldNullCheckAllocation(getContext()) &&
    (!allocType.isPODType(getContext()) || E->hasInitializer());

  llvm::BasicBlock *nullCheckBB = nullptr;
  llvm::BasicBlock *contBB = nullptr;

  llvm::Value *allocation = RV.getScalarVal();
  unsigned AS = allocation->getType()->getPointerAddressSpace();

  // The null-check means that the initializer is conditionally
  // evaluated.
  ConditionalEvaluation conditional(*this);

  if (nullCheck) {
    conditional.begin(*this);

    nullCheckBB = Builder.GetInsertBlock();
    llvm::BasicBlock *notNullBB = createBasicBlock("new.notnull");
    contBB = createBasicBlock("new.cont");

    llvm::Value *isNull = Builder.CreateIsNull(allocation, "new.isnull");
    Builder.CreateCondBr(isNull, contBB, notNullBB);
    EmitBlock(notNullBB);
  }

  // If there's an operator delete, enter a cleanup to call it if an
  // exception is thrown.
  EHScopeStack::stable_iterator operatorDeleteCleanup;
  llvm::Instruction *cleanupDominator = nullptr;
  if (E->getOperatorDelete() &&
      !E->getOperatorDelete()->isReservedGlobalPlacementOperator()) {
    EnterNewDeleteCleanup(*this, E, allocation, allocSize, allocatorArgs);
    operatorDeleteCleanup = EHStack.stable_begin();
    cleanupDominator = Builder.CreateUnreachable();
  }

  assert((allocSize == allocSizeWithoutCookie) ==
         CalculateCookiePadding(*this, E).isZero());
  if (allocSize != allocSizeWithoutCookie) {
    assert(E->isArray());
    allocation = CGM.getCXXABI().InitializeArrayCookie(*this, allocation,
                                                       numElements,
                                                       E, allocType);
  }

  llvm::Type *elementTy = ConvertTypeForMem(allocType);
  llvm::Type *elementPtrTy = elementTy->getPointerTo(AS);
  llvm::Value *result = Builder.CreateBitCast(allocation, elementPtrTy);

  EmitNewInitializer(*this, E, allocType, elementTy, result, numElements,
                     allocSizeWithoutCookie);
  if (E->isArray()) {
    // NewPtr is a pointer to the base element type.  If we're
    // allocating an array of arrays, we'll need to cast back to the
    // array pointer type.
    llvm::Type *resultType = ConvertTypeForMem(E->getType());
    if (result->getType() != resultType)
      result = Builder.CreateBitCast(result, resultType);
  }

  // Deactivate the 'operator delete' cleanup if we finished
  // initialization.
  if (operatorDeleteCleanup.isValid()) {
    DeactivateCleanupBlock(operatorDeleteCleanup, cleanupDominator);
    cleanupDominator->eraseFromParent();
  }

  if (nullCheck) {
    conditional.end(*this);

    llvm::BasicBlock *notNullBB = Builder.GetInsertBlock();
    EmitBlock(contBB);

    llvm::PHINode *PHI = Builder.CreatePHI(result->getType(), 2);
    PHI->addIncoming(result, notNullBB);
    PHI->addIncoming(llvm::Constant::getNullValue(result->getType()),
                     nullCheckBB);

    result = PHI;
  }

  return result;
}
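
// Illustrative overview: for 'S *p = new (std::nothrow) S[n]' where S has a
// non-trivial destructor, the code above (roughly) computes the cookie-padded
// size, calls operator new[], branches around initialization if the result is
// null, writes n into the array cookie, runs the constructor loop under an
// operator delete[] cleanup, and finally phis the element pointer with null.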

void CodeGenFunction::EmitDeleteCall(const FunctionDecl *DeleteFD,
                                     llvm::Value *Ptr,
                                     QualType DeleteTy) {
  assert(DeleteFD->getOverloadedOperator() == OO_Delete);

  const FunctionProtoType *DeleteFTy =
    DeleteFD->getType()->getAs<FunctionProtoType>();

  CallArgList DeleteArgs;

  // Check if we need to pass the size to the delete operator.
  llvm::Value *Size = nullptr;
  QualType SizeTy;
  if (DeleteFTy->getNumParams() == 2) {
    SizeTy = DeleteFTy->getParamType(1);
    CharUnits DeleteTypeSize = getContext().getTypeSizeInChars(DeleteTy);
    Size = llvm::ConstantInt::get(ConvertType(SizeTy),
                                  DeleteTypeSize.getQuantity());
  }

  QualType ArgTy = DeleteFTy->getParamType(0);
  llvm::Value *DeletePtr = Builder.CreateBitCast(Ptr, ConvertType(ArgTy));
  DeleteArgs.add(RValue::get(DeletePtr), ArgTy);

  if (Size)
    DeleteArgs.add(RValue::get(Size), SizeTy);

  // Emit the call to delete.
  EmitNewDeleteCall(*this, DeleteFD, DeleteFTy, DeleteArgs);
}
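
// For example (illustrative): with a two-parameter (sized) operator delete,
// deleting a 'Widget' passes sizeof(Widget) as a constant second argument,
// computed from getTypeSizeInChars above.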

namespace {
  /// Calls the given 'operator delete' on a single object.
  struct CallObjectDelete : EHScopeStack::Cleanup {
    llvm::Value *Ptr;
    const FunctionDecl *OperatorDelete;
    QualType ElementType;

    CallObjectDelete(llvm::Value *Ptr,
                     const FunctionDecl *OperatorDelete,
                     QualType ElementType)
      : Ptr(Ptr), OperatorDelete(OperatorDelete), ElementType(ElementType) {}

    void Emit(CodeGenFunction &CGF, Flags flags) override {
      CGF.EmitDeleteCall(OperatorDelete, Ptr, ElementType);
    }
  };
}

void
CodeGenFunction::pushCallObjectDeleteCleanup(const FunctionDecl *OperatorDelete,
                                             llvm::Value *CompletePtr,
                                             QualType ElementType) {
  EHStack.pushCleanup<CallObjectDelete>(NormalAndEHCleanup, CompletePtr,
                                        OperatorDelete, ElementType);
}

/// Emit the code for deleting a single object.
static void EmitObjectDelete(CodeGenFunction &CGF,
                             const CXXDeleteExpr *DE,
                             llvm::Value *Ptr,
                             QualType ElementType) {
  // Find the destructor for the type, if applicable.  If the
  // destructor is virtual, we'll just emit the vcall and return.
  const CXXDestructorDecl *Dtor = nullptr;
  if (const RecordType *RT = ElementType->getAs<RecordType>()) {
    CXXRecordDecl *RD = cast<CXXRecordDecl>(RT->getDecl());
    if (RD->hasDefinition() && !RD->hasTrivialDestructor()) {
      Dtor = RD->getDestructor();

      if (Dtor->isVirtual()) {
        CGF.CGM.getCXXABI().emitVirtualObjectDelete(CGF, DE, Ptr, ElementType,
                                                    Dtor);
        return;
      }
    }
  }

  // Make sure that we call delete even if the dtor throws.
  // This doesn't have to be a conditional cleanup because we're going
  // to pop it off in a second.
  const FunctionDecl *OperatorDelete = DE->getOperatorDelete();
  CGF.EHStack.pushCleanup<CallObjectDelete>(NormalAndEHCleanup,
                                            Ptr, OperatorDelete, ElementType);

  if (Dtor)
    CGF.EmitCXXDestructorCall(Dtor, Dtor_Complete,
                              /*ForVirtualBase=*/false,
                              /*Delegating=*/false,
                              Ptr);
  else if (CGF.getLangOpts().ObjCAutoRefCount &&
           ElementType->isObjCLifetimeType()) {
    switch (ElementType.getObjCLifetime()) {
    case Qualifiers::OCL_None:
    case Qualifiers::OCL_ExplicitNone:
    case Qualifiers::OCL_Autoreleasing:
      break;

    case Qualifiers::OCL_Strong: {
      // Load the pointer value.
      llvm::Value *PtrValue = CGF.Builder.CreateLoad(Ptr,
                                          ElementType.isVolatileQualified());

      CGF.EmitARCRelease(PtrValue, ARCPreciseLifetime);
      break;
    }

    case Qualifiers::OCL_Weak:
      CGF.EmitARCDestroyWeak(Ptr);
      break;
    }
  }

  CGF.PopCleanupBlock();
}
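
// For example (illustrative): for 'delete b' where 'b' is a 'Base *' and
// ~Base() is virtual, the ABI emits a single virtual call (the deleting
// destructor in the Itanium C++ ABI) that both destroys the most-derived
// object and frees its storage, so nothing further is emitted here.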

namespace {
  /// Calls the given 'operator delete' on an array of objects.
  struct CallArrayDelete : EHScopeStack::Cleanup {
    llvm::Value *Ptr;
    const FunctionDecl *OperatorDelete;
    llvm::Value *NumElements;
    QualType ElementType;
    CharUnits CookieSize;

    CallArrayDelete(llvm::Value *Ptr,
                    const FunctionDecl *OperatorDelete,
                    llvm::Value *NumElements,
                    QualType ElementType,
                    CharUnits CookieSize)
      : Ptr(Ptr), OperatorDelete(OperatorDelete), NumElements(NumElements),
        ElementType(ElementType), CookieSize(CookieSize) {}

    void Emit(CodeGenFunction &CGF, Flags flags) override {
      const FunctionProtoType *DeleteFTy =
        OperatorDelete->getType()->getAs<FunctionProtoType>();
      assert(DeleteFTy->getNumParams() == 1 || DeleteFTy->getNumParams() == 2);

      CallArgList Args;

      // Pass the pointer as the first argument.
      QualType VoidPtrTy = DeleteFTy->getParamType(0);
      llvm::Value *DeletePtr
        = CGF.Builder.CreateBitCast(Ptr, CGF.ConvertType(VoidPtrTy));
      Args.add(RValue::get(DeletePtr), VoidPtrTy);

      // Pass the original requested size as the second argument.
      if (DeleteFTy->getNumParams() == 2) {
        QualType size_t = DeleteFTy->getParamType(1);
        llvm::IntegerType *SizeTy
          = cast<llvm::IntegerType>(CGF.ConvertType(size_t));

        CharUnits ElementTypeSize =
          CGF.CGM.getContext().getTypeSizeInChars(ElementType);

        // The size of an element, multiplied by the number of elements.
        llvm::Value *Size
          = llvm::ConstantInt::get(SizeTy, ElementTypeSize.getQuantity());
        if (NumElements)
          Size = CGF.Builder.CreateMul(Size, NumElements);

        // Plus the size of the cookie if applicable.
        if (!CookieSize.isZero()) {
          llvm::Value *CookieSizeV
            = llvm::ConstantInt::get(SizeTy, CookieSize.getQuantity());
          Size = CGF.Builder.CreateAdd(Size, CookieSizeV);
        }

        Args.add(RValue::get(Size), size_t);
      }

      // Emit the call to delete.
      EmitNewDeleteCall(CGF, OperatorDelete, DeleteFTy, Args);
    }
  };
}
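
// Illustrative size computation for the sized form above:
//   Size = sizeof(ElementType) * NumElements + CookieSize
// i.e. the amount originally requested from operator new[].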

/// Emit the code for deleting an array of objects.
static void EmitArrayDelete(CodeGenFunction &CGF,
                            const CXXDeleteExpr *E,
                            llvm::Value *deletedPtr,
                            QualType elementType) {
  llvm::Value *numElements = nullptr;
  llvm::Value *allocatedPtr = nullptr;
  CharUnits cookieSize;
  CGF.CGM.getCXXABI().ReadArrayCookie(CGF, deletedPtr, E, elementType,
                                      numElements, allocatedPtr, cookieSize);

  assert(allocatedPtr && "ReadArrayCookie didn't set allocated pointer");

  // Make sure that we call delete even if one of the dtors throws.
  const FunctionDecl *operatorDelete = E->getOperatorDelete();
  CGF.EHStack.pushCleanup<CallArrayDelete>(NormalAndEHCleanup,
                                           allocatedPtr, operatorDelete,
                                           numElements, elementType,
                                           cookieSize);

  // Destroy the elements.
  if (QualType::DestructionKind dtorKind = elementType.isDestructedType()) {
    assert(numElements && "no element count for a type with a destructor!");

    llvm::Value *arrayEnd =
      CGF.Builder.CreateInBoundsGEP(deletedPtr, numElements, "delete.end");

    // Note that it is legal to allocate a zero-length array, and we
    // can never fold the check away because the length should always
    // come from a cookie.
    CGF.emitArrayDestroy(deletedPtr, arrayEnd, elementType,
                         CGF.getDestroyer(dtorKind),
                         /*checkZeroLength*/ true,
                         CGF.needsEHCleanup(dtorKind));
  }

  // Pop the cleanup block.
  CGF.PopCleanupBlock();
}
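
// Illustrative cookie layout (Itanium C++ ABI): 'new T[n]' for T with a
// non-trivial destructor stores n just before the first element, so
// ReadArrayCookie can recover both the element count and the pointer that
// operator new[] originally returned.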

void CodeGenFunction::EmitCXXDeleteExpr(const CXXDeleteExpr *E) {
  const Expr *Arg = E->getArgument();
  llvm::Value *Ptr = EmitScalarExpr(Arg);

  // Null check the pointer.
  llvm::BasicBlock *DeleteNotNull = createBasicBlock("delete.notnull");
  llvm::BasicBlock *DeleteEnd = createBasicBlock("delete.end");

  llvm::Value *IsNull = Builder.CreateIsNull(Ptr, "isnull");

  Builder.CreateCondBr(IsNull, DeleteEnd, DeleteNotNull);
  EmitBlock(DeleteNotNull);

  // We might be deleting a pointer to array.  If so, GEP down to the
  // first non-array element.
  // (this assumes that A(*)[3][7] is converted to [3 x [7 x %A]]*)
  QualType DeleteTy = Arg->getType()->getAs<PointerType>()->getPointeeType();
  if (DeleteTy->isConstantArrayType()) {
    llvm::Value *Zero = Builder.getInt32(0);
    SmallVector<llvm::Value*,8> GEP;

    GEP.push_back(Zero); // point at the outermost array

    // For each layer of array type we're pointing at:
    while (const ConstantArrayType *Arr
             = getContext().getAsConstantArrayType(DeleteTy)) {
      // 1. Unpeel the array type.
      DeleteTy = Arr->getElementType();

      // 2. GEP to the first element of the array.
      GEP.push_back(Zero);
    }

    Ptr = Builder.CreateInBoundsGEP(Ptr, GEP, "del.first");
  }

  assert(ConvertTypeForMem(DeleteTy) ==
         cast<llvm::PointerType>(Ptr->getType())->getElementType());

  if (E->isArrayForm()) {
    EmitArrayDelete(*this, E, Ptr, DeleteTy);
  } else {
    EmitObjectDelete(*this, E, Ptr, DeleteTy);
  }

  EmitBlock(DeleteEnd);
}
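
// For example (illustrative): 'delete [] p' with 'A (*p)[3][7]' peels two
// constant-array layers in the loop above, emitting one GEP with indices
// (0, 0, 0) so that element destruction and deallocation see a pointer to
// the first A.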

static bool isGLValueFromPointerDeref(const Expr *E) {
  E = E->IgnoreParens();

  if (const auto *CE = dyn_cast<CastExpr>(E)) {
    if (!CE->getSubExpr()->isGLValue())
      return false;
    return isGLValueFromPointerDeref(CE->getSubExpr());
  }

  if (const auto *OVE = dyn_cast<OpaqueValueExpr>(E))
    return isGLValueFromPointerDeref(OVE->getSourceExpr());

  if (const auto *BO = dyn_cast<BinaryOperator>(E))
    if (BO->getOpcode() == BO_Comma)
      return isGLValueFromPointerDeref(BO->getRHS());

  if (const auto *ACO = dyn_cast<AbstractConditionalOperator>(E))
    return isGLValueFromPointerDeref(ACO->getTrueExpr()) ||
           isGLValueFromPointerDeref(ACO->getFalseExpr());

  // C++11 [expr.sub]p1:
  //   The expression E1[E2] is identical (by definition) to *((E1)+(E2))
  if (isa<ArraySubscriptExpr>(E))
    return true;

  if (const auto *UO = dyn_cast<UnaryOperator>(E))
    if (UO->getOpcode() == UO_Deref)
      return true;

  return false;
}
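
// For example (illustrative): typeid(*p), typeid(p[0]), and
// typeid(cond ? *p : *q) all count as glvalues obtained from a pointer
// dereference under this predicate, whereas typeid(obj) does not.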

static llvm::Value *EmitTypeidFromVTable(CodeGenFunction &CGF, const Expr *E,
                                         llvm::Type *StdTypeInfoPtrTy) {
  // Get the vtable pointer.
  llvm::Value *ThisPtr = CGF.EmitLValue(E).getAddress();

  // C++ [expr.typeid]p2:
  //   If the glvalue expression is obtained by applying the unary * operator
  //   to a pointer and the pointer is a null pointer value, the typeid
  //   expression throws the std::bad_typeid exception.
  //
  // However, this paragraph's intent is not clear.  We choose a very generous
  // interpretation which implores us to consider comma operators, conditional
  // operators, parentheses and other such constructs.
  QualType SrcRecordTy = E->getType();
  if (CGF.CGM.getCXXABI().shouldTypeidBeNullChecked(
          isGLValueFromPointerDeref(E), SrcRecordTy)) {
    llvm::BasicBlock *BadTypeidBlock =
        CGF.createBasicBlock("typeid.bad_typeid");
    llvm::BasicBlock *EndBlock = CGF.createBasicBlock("typeid.end");

    llvm::Value *IsNull = CGF.Builder.CreateIsNull(ThisPtr);
    CGF.Builder.CreateCondBr(IsNull, BadTypeidBlock, EndBlock);

    CGF.EmitBlock(BadTypeidBlock);
    CGF.CGM.getCXXABI().EmitBadTypeidCall(CGF);
    CGF.EmitBlock(EndBlock);
  }

  return CGF.CGM.getCXXABI().EmitTypeid(CGF, SrcRecordTy, ThisPtr,
                                        StdTypeInfoPtrTy);
}

llvm::Value *CodeGenFunction::EmitCXXTypeidExpr(const CXXTypeidExpr *E) {
  llvm::Type *StdTypeInfoPtrTy =
    ConvertType(E->getType())->getPointerTo();

  if (E->isTypeOperand()) {
    llvm::Constant *TypeInfo =
        CGM.GetAddrOfRTTIDescriptor(E->getTypeOperand(getContext()));
    return Builder.CreateBitCast(TypeInfo, StdTypeInfoPtrTy);
  }

  // C++ [expr.typeid]p2:
  //   When typeid is applied to a glvalue expression whose type is a
  //   polymorphic class type, the result refers to a std::type_info object
  //   representing the type of the most derived object (that is, the dynamic
  //   type) to which the glvalue refers.
  if (E->isPotentiallyEvaluated())
    return EmitTypeidFromVTable(*this, E->getExprOperand(),
                                StdTypeInfoPtrTy);

  QualType OperandTy = E->getExprOperand()->getType();
  return Builder.CreateBitCast(CGM.GetAddrOfRTTIDescriptor(OperandTy),
                               StdTypeInfoPtrTy);
}
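
// For example (illustrative): typeid(int), and typeid on an operand that is
// not potentially evaluated, fold to the static RTTI descriptor, while
// typeid(*polymorphicPtr) loads the type_info pointer from the vtable at
// run time via EmitTypeidFromVTable.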

static llvm::Value *EmitDynamicCastToNull(CodeGenFunction &CGF,
                                          QualType DestTy) {
  llvm::Type *DestLTy = CGF.ConvertType(DestTy);
  if (DestTy->isPointerType())
    return llvm::Constant::getNullValue(DestLTy);

  /// C++ [expr.dynamic.cast]p9:
  ///   A failed cast to reference type throws std::bad_cast
  if (!CGF.CGM.getCXXABI().EmitBadCastCall(CGF))
    return nullptr;

  CGF.EmitBlock(CGF.createBasicBlock("dynamic_cast.end"));
  return llvm::UndefValue::get(DestLTy);
}

llvm::Value *CodeGenFunction::EmitDynamicCast(llvm::Value *Value,
                                              const CXXDynamicCastExpr *DCE) {
  QualType DestTy = DCE->getTypeAsWritten();

  if (DCE->isAlwaysNull())
    if (llvm::Value *T = EmitDynamicCastToNull(*this, DestTy))
      return T;

  QualType SrcTy = DCE->getSubExpr()->getType();

  // C++ [expr.dynamic.cast]p7:
  //   If T is "pointer to cv void," then the result is a pointer to the most
  //   derived object pointed to by v.
  const PointerType *DestPTy = DestTy->getAs<PointerType>();

  bool isDynamicCastToVoid;
  QualType SrcRecordTy;
  QualType DestRecordTy;
  if (DestPTy) {
    isDynamicCastToVoid = DestPTy->getPointeeType()->isVoidType();
    SrcRecordTy = SrcTy->castAs<PointerType>()->getPointeeType();
    DestRecordTy = DestPTy->getPointeeType();
  } else {
    isDynamicCastToVoid = false;
    SrcRecordTy = SrcTy;
    DestRecordTy = DestTy->castAs<ReferenceType>()->getPointeeType();
  }

  assert(SrcRecordTy->isRecordType() && "source type must be a record type!");

  // C++ [expr.dynamic.cast]p4:
  //   If the value of v is a null pointer value in the pointer case, the
  //   result is the null pointer value of type T.
  bool ShouldNullCheckSrcValue =
      CGM.getCXXABI().shouldDynamicCastCallBeNullChecked(SrcTy->isPointerType(),
                                                         SrcRecordTy);

  llvm::BasicBlock *CastNull = nullptr;
  llvm::BasicBlock *CastNotNull = nullptr;
  llvm::BasicBlock *CastEnd = createBasicBlock("dynamic_cast.end");

  if (ShouldNullCheckSrcValue) {
    CastNull = createBasicBlock("dynamic_cast.null");
    CastNotNull = createBasicBlock("dynamic_cast.notnull");

    llvm::Value *IsNull = Builder.CreateIsNull(Value);
    Builder.CreateCondBr(IsNull, CastNull, CastNotNull);
    EmitBlock(CastNotNull);
  }

  if (isDynamicCastToVoid) {
    Value = CGM.getCXXABI().EmitDynamicCastToVoid(*this, Value, SrcRecordTy,
                                                  DestTy);
  } else {
    assert(DestRecordTy->isRecordType() &&
           "destination type must be a record type!");
    Value = CGM.getCXXABI().EmitDynamicCastCall(*this, Value, SrcRecordTy,
                                                DestTy, DestRecordTy, CastEnd);
  }

  if (ShouldNullCheckSrcValue) {
    EmitBranch(CastEnd);
    EmitBlock(CastNull);
    EmitBranch(CastEnd);
  }

  EmitBlock(CastEnd);

  if (ShouldNullCheckSrcValue) {
    llvm::PHINode *PHI = Builder.CreatePHI(Value->getType(), 2);
    PHI->addIncoming(Value, CastNotNull);
    PHI->addIncoming(llvm::Constant::getNullValue(Value->getType()), CastNull);

    Value = PHI;
  }

  return Value;
}
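
// For example (illustrative): 'dynamic_cast<Derived *>(basePtr)' null-checks
// basePtr, calls into the ABI's runtime cast machinery (__dynamic_cast in
// the Itanium C++ ABI), and phis the result with null; the reference form
// instead throws std::bad_cast on failure rather than producing null.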

void CodeGenFunction::EmitLambdaExpr(const LambdaExpr *E, AggValueSlot Slot) {
  RunCleanupsScope Scope(*this);
  LValue SlotLV =
      MakeAddrLValue(Slot.getAddr(), E->getType(), Slot.getAlignment());

  CXXRecordDecl::field_iterator CurField = E->getLambdaClass()->field_begin();
  for (LambdaExpr::capture_init_iterator i = E->capture_init_begin(),
                                         e = E->capture_init_end();
       i != e; ++i, ++CurField) {
    // Emit initialization
    LValue LV = EmitLValueForFieldInitialization(SlotLV, *CurField);
    if (CurField->hasCapturedVLAType()) {
      auto VAT = CurField->getCapturedVLAType();
      EmitStoreThroughLValue(RValue::get(VLASizeMap[VAT->getSizeExpr()]), LV);
    } else {
      ArrayRef<VarDecl *> ArrayIndexes;
      if (CurField->getType()->isArrayType())
        ArrayIndexes = E->getCaptureInitIndexVars(i);
      EmitInitializerForField(*CurField, LV, *i, ArrayIndexes);
    }
  }
}
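
// For example (illustrative): for '[x, &y] { return x + y; }', the loop
// above initializes the closure object's fields one by one: x's field is
// copy-initialized from x and y's field is bound to y, much as if they were
// members of an unnamed local class.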