CGExprCXX.cpp

//===--- CGExprCXX.cpp - Emit LLVM Code for C++ expressions ---------------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This contains code dealing with code generation of C++ expressions
//
//===----------------------------------------------------------------------===//

#include "CodeGenFunction.h"
#include "CGCUDARuntime.h"
#include "CGHLSLRuntime.h" // HLSL Change
#include "CGCXXABI.h"
#include "CGDebugInfo.h"
#include "CGObjCRuntime.h"
#include "clang/CodeGen/CGFunctionInfo.h"
#include "clang/Frontend/CodeGenOptions.h"
#include "llvm/IR/CallSite.h"
#include "llvm/IR/Intrinsics.h"

using namespace clang;
using namespace CodeGen;

static RequiredArgs commonEmitCXXMemberOrOperatorCall(
    CodeGenFunction &CGF, const CXXMethodDecl *MD, llvm::Value *Callee,
    ReturnValueSlot ReturnValue, llvm::Value *This, llvm::Value *ImplicitParam,
    QualType ImplicitParamTy, const CallExpr *CE, CallArgList &Args,
    ArrayRef<const Stmt *> argList // HLSL Change - use updated argList for out parameter.
    ) {
  assert(CE == nullptr || isa<CXXMemberCallExpr>(CE) ||
         isa<CXXOperatorCallExpr>(CE));
  assert(MD->isInstance() &&
         "Trying to emit a member or operator call expr on a static method!");

  // C++11 [class.mfct.non-static]p2:
  //   If a non-static member function of a class X is called for an object
  //   that is not of type X, or of a type derived from X, the behavior is
  //   undefined.
  SourceLocation CallLoc;
  if (CE)
    CallLoc = CE->getExprLoc();
  CGF.EmitTypeCheck(
      isa<CXXConstructorDecl>(MD) ? CodeGenFunction::TCK_ConstructorCall
                                  : CodeGenFunction::TCK_MemberCall,
      CallLoc, This, CGF.getContext().getRecordType(MD->getParent()));

  // Push the this ptr.
  Args.add(RValue::get(This), MD->getThisType(CGF.getContext()));

  // If there is an implicit parameter (e.g. VTT), emit it.
  if (ImplicitParam) {
    Args.add(RValue::get(ImplicitParam), ImplicitParamTy);
  }

  const FunctionProtoType *FPT = MD->getType()->castAs<FunctionProtoType>();
  RequiredArgs required = RequiredArgs::forPrototypePlus(FPT, Args.size());

  // And the rest of the call args.
  if (CE) {
    // Special case: skip first argument of CXXOperatorCall (it is "this").
    unsigned ArgsToSkip = isa<CXXOperatorCallExpr>(CE) ? 1 : 0;
    CGF.EmitCallArgs(Args, FPT,
                     argList.begin() + ArgsToSkip, // HLSL Change - use updated argList for out parameter.
                     argList.end(),                // HLSL Change - use updated argList for out parameter.
                     CE->getDirectCallee());
  } else {
    assert(
        FPT->getNumParams() == 0 &&
        "No CallExpr specified for function with non-zero number of arguments");
  }
  return required;
}
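// Illustrative note (not from the original source): the EmitTypeCheck above
// backs the sanitizer checks that can flag calls such as
//   struct X { int n; int get() { return n; } };
//   X *p = reinterpret_cast<X *>(buf);  // buf does not actually hold an X
//   p->get();                           // UB per [class.mfct.non-static]p2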
RValue CodeGenFunction::EmitCXXMemberOrOperatorCall(
    const CXXMethodDecl *MD, llvm::Value *Callee, ReturnValueSlot ReturnValue,
    llvm::Value *This, llvm::Value *ImplicitParam, QualType ImplicitParamTy,
    const CallExpr *CE) {
  const FunctionProtoType *FPT = MD->getType()->castAs<FunctionProtoType>();
  CallArgList Args;
  // HLSL Change Begins
  llvm::SmallVector<LValue, 8> castArgList;
  // The argList of the CallExpr; it may be updated for out parameters.
  llvm::SmallVector<const Stmt *, 8> argList(CE->arg_begin(), CE->arg_end());
  // Out-parameter conversion.
  CodeGenFunction::HLSLOutParamScope OutParamScope(*this);
  auto MapTemp = [&](const VarDecl *LocalVD, llvm::Value *TmpArg) {
    OutParamScope.addTemp(LocalVD, TmpArg);
  };
  if (getLangOpts().HLSL) {
    if (const FunctionDecl *FD = CE->getDirectCallee())
      CGM.getHLSLRuntime().EmitHLSLOutParamConversionInit(*this, FD, CE,
                                                          castArgList, argList,
                                                          MapTemp);
  }
  // HLSL Change Ends
  RequiredArgs required = commonEmitCXXMemberOrOperatorCall(
      *this, MD, Callee, ReturnValue, This, ImplicitParam, ImplicitParamTy, CE,
      Args, argList); // HLSL Change - use updated argList.
  RValue CallVal =
      EmitCall(CGM.getTypes().arrangeCXXMethodCall(Args, FPT, required),
               Callee, ReturnValue, Args, MD);
  // HLSL Change Begins
  // Out-parameter conversion: convert and copy back after the call.
  if (getLangOpts().HLSL)
    CGM.getHLSLRuntime().EmitHLSLOutParamConversionCopyBack(*this, castArgList);
  // HLSL Change Ends
  return CallVal;
}
RValue CodeGenFunction::EmitCXXStructorCall(
    const CXXMethodDecl *MD, llvm::Value *Callee, ReturnValueSlot ReturnValue,
    llvm::Value *This, llvm::Value *ImplicitParam, QualType ImplicitParamTy,
    const CallExpr *CE, StructorType Type) {
  CallArgList Args;
  // HLSL Change Begins
  llvm::SmallVector<LValue, 8> castArgList;
  // The argList of the CallExpr; it may be updated for out parameters.
  llvm::SmallVector<const Stmt *, 8> argList(CE->arg_begin(), CE->arg_end());
  // Out-parameter conversion.
  CodeGenFunction::HLSLOutParamScope OutParamScope(*this);
  auto MapTemp = [&](const VarDecl *LocalVD, llvm::Value *TmpArg) {
    OutParamScope.addTemp(LocalVD, TmpArg);
  };
  if (getLangOpts().HLSL) {
    if (const FunctionDecl *FD = CE->getDirectCallee())
      CGM.getHLSLRuntime().EmitHLSLOutParamConversionInit(*this, FD, CE,
                                                          castArgList, argList,
                                                          MapTemp);
  }
  // HLSL Change Ends
  commonEmitCXXMemberOrOperatorCall(*this, MD, Callee, ReturnValue, This,
                                    ImplicitParam, ImplicitParamTy, CE, Args,
                                    argList); // HLSL Change - use updated argList.
  RValue CallVal =
      EmitCall(CGM.getTypes().arrangeCXXStructorDeclaration(MD, Type),
               Callee, ReturnValue, Args, MD);
  // HLSL Change Begins
  // Out-parameter conversion: convert and copy back after the call.
  if (getLangOpts().HLSL)
    CGM.getHLSLRuntime().EmitHLSLOutParamConversionCopyBack(*this, castArgList);
  // HLSL Change Ends
  return CallVal;
}
static CXXRecordDecl *getCXXRecord(const Expr *E) {
  QualType T = E->getType();
  if (const PointerType *PTy = T->getAs<PointerType>())
    T = PTy->getPointeeType();
  const RecordType *Ty = T->castAs<RecordType>();
  return cast<CXXRecordDecl>(Ty->getDecl());
}
// Note: This function also emits constructor calls, to support the MSVC
// extension that allows explicit constructor function calls.
RValue CodeGenFunction::EmitCXXMemberCallExpr(const CXXMemberCallExpr *CE,
                                              ReturnValueSlot ReturnValue) {
  const Expr *callee = CE->getCallee()->IgnoreParens();

  if (isa<BinaryOperator>(callee))
    return EmitCXXMemberPointerCallExpr(CE, ReturnValue);

  const MemberExpr *ME = cast<MemberExpr>(callee);
  const CXXMethodDecl *MD = cast<CXXMethodDecl>(ME->getMemberDecl());

  if (MD->isStatic()) {
    // The method is static, emit it as we would a regular call.
    llvm::Value *Callee = CGM.GetAddrOfFunction(MD);
    return EmitCall(getContext().getPointerType(MD->getType()), Callee, CE,
                    ReturnValue);
  }

  bool HasQualifier = ME->hasQualifier();
  NestedNameSpecifier *Qualifier = HasQualifier ? ME->getQualifier() : nullptr;
  bool IsArrow = ME->isArrow();
  const Expr *Base = ME->getBase();

  return EmitCXXMemberOrOperatorMemberCallExpr(
      CE, MD, ReturnValue, HasQualifier, Qualifier, IsArrow, Base);
}
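// Illustrative example (not from the original source) of the MSVC extension
// mentioned above, accepted under -fms-extensions:
//   struct S { S(); };
//   void f(S *s) { s->S::S(); }  // explicit constructor function call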
RValue CodeGenFunction::EmitCXXMemberOrOperatorMemberCallExpr(
    const CallExpr *CE, const CXXMethodDecl *MD, ReturnValueSlot ReturnValue,
    bool HasQualifier, NestedNameSpecifier *Qualifier, bool IsArrow,
    const Expr *Base) {
  assert(isa<CXXMemberCallExpr>(CE) || isa<CXXOperatorCallExpr>(CE));

  // HLSL Change Begins
  if (hlsl::IsHLSLMatType(Base->getType())) {
    if (const CXXOperatorCallExpr *opCall = dyn_cast<CXXOperatorCallExpr>(CE)) {
      assert(opCall->getOperator() == OverloadedOperatorKind::OO_Subscript &&
             "must be subscript");
      llvm::Value *This = nullptr;
      if (Base->getValueKind() != ExprValueKind::VK_RValue) {
        This = EmitLValue(Base).getAddress();
      } else {
        llvm::Value *Val = EmitScalarExpr(Base);
        This = CreateTempAlloca(Val->getType());
        CGM.getHLSLRuntime().EmitHLSLMatrixStore(*this, Val, This,
                                                 Base->getType());
      }
      llvm::Value *Idx = EmitScalarExpr(CE->getArg(1));
      llvm::Type *RetTy =
          ConvertType(getContext().getLValueReferenceType(CE->getType()));
      llvm::Value *matSub = CGM.getHLSLRuntime().EmitHLSLMatrixSubscript(
          *this, RetTy, This, Idx, Base->getType());
      return RValue::get(matSub);
    }
  }
  if (hlsl::IsHLSLVecType(Base->getType())) {
    if (const CXXOperatorCallExpr *opCall = dyn_cast<CXXOperatorCallExpr>(CE)) {
      assert(opCall->getOperator() == OverloadedOperatorKind::OO_Subscript &&
             "must be subscript");
      llvm::Value *This = nullptr;
      if (Base->getValueKind() != ExprValueKind::VK_RValue) {
        LValue LV = EmitLValue(Base);
        if (LV.isSimple()) {
          This = LV.getAddress();
          if (isa<ExtMatrixElementExpr>(Base)) {
            llvm::Value *Val = Builder.CreateLoad(This);
            This = CreateTempAlloca(Val->getType());
            Builder.CreateStore(Val, This);
          }
        } else {
          assert(LV.isExtVectorElt() && "must be ext vector here");
          This = LV.getExtVectorAddr();
          llvm::Constant *Elts = LV.getExtVectorElts();
          llvm::Type *Ty = ConvertType(LV.getType());
          llvm::Constant *zero = Builder.getInt32(0);
          llvm::Value *TmpThis = CreateTempAlloca(Ty);
          for (unsigned i = 0; i < Ty->getVectorNumElements(); i++) {
            llvm::Value *EltIdx = Elts->getAggregateElement(i);
            llvm::Value *EltGEP = Builder.CreateGEP(This, {zero, EltIdx});
            llvm::Value *TmpEltGEP =
                Builder.CreateGEP(TmpThis, {zero, Builder.getInt32(i)});
            llvm::Value *Elt = Builder.CreateLoad(EltGEP);
            Builder.CreateStore(Elt, TmpEltGEP);
          }
          This = TmpThis;
        }
      } else {
        llvm::Value *Val = EmitScalarExpr(Base);
        This = CreateTempAlloca(Val->getType());
        Builder.CreateStore(Val, This);
      }
      bool isBool = false;
      if (llvm::IntegerType *IT =
              dyn_cast<llvm::IntegerType>(This->getType()
                                              ->getPointerElementType()
                                              ->getVectorElementType())) {
        if (IT->getBitWidth() == 1) {
          isBool = true;
        }
      }
      llvm::Value *Idx = EmitScalarExpr(CE->getArg(1));
      llvm::Constant *zero = llvm::ConstantInt::get(Idx->getType(), 0);
      llvm::Value *Elt = Builder.CreateGEP(This, {zero, Idx});
      if (isBool) {
        // bool pointer is not i1 *.
        llvm::Type *BoolTy = llvm::IntegerType::get(
            getLLVMContext(), getContext().getTypeSize(CE->getType()));
        Elt = Builder.CreateBitCast(
            Elt, llvm::PointerType::get(
                     BoolTy, Elt->getType()->getPointerAddressSpace()));
      }
      return RValue::get(Elt);
    }
  }
  if (hlsl::IsHLSLOutputPatchType(Base->getType()) ||
      hlsl::IsHLSLInputPatchType(Base->getType())) {
    if (const CXXOperatorCallExpr *opCall = dyn_cast<CXXOperatorCallExpr>(CE)) {
      assert(opCall->getOperator() == OverloadedOperatorKind::OO_Subscript &&
             "must be subscript");
      llvm::Value *This = EmitLValue(Base).getAddress();
      llvm::Value *Idx = EmitScalarExpr(CE->getArg(1));
      llvm::Constant *zero = llvm::ConstantInt::get(Idx->getType(), 0);
      llvm::Value *Elt = Builder.CreateGEP(This, {zero, Idx});
      return RValue::get(Elt);
    }
  }
  // HLSL Change Ends
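  // Illustrative HLSL examples (assumptions, not from the original source) of
  // the subscripts lowered above:
  //   float4x4 m; m[i];          // matrix row -> EmitHLSLMatrixSubscript
  //   bool4 b;   b[i];           // vector element; i1 elements are recast to
  //                              // bool's in-memory integer representation
  //   InputPatch<V, 3> p; p[i];  // patch element -> plain GEP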
  // Compute the object pointer.
  bool CanUseVirtualCall = MD->isVirtual() && !HasQualifier;

  const CXXMethodDecl *DevirtualizedMethod = nullptr;
  if (CanUseVirtualCall && CanDevirtualizeMemberFunctionCall(Base, MD)) {
    const CXXRecordDecl *BestDynamicDecl = Base->getBestDynamicClassType();
    DevirtualizedMethod = MD->getCorrespondingMethodInClass(BestDynamicDecl);
    assert(DevirtualizedMethod);
    const CXXRecordDecl *DevirtualizedClass = DevirtualizedMethod->getParent();
    const Expr *Inner = Base->ignoreParenBaseCasts();
    if (DevirtualizedMethod->getReturnType().getCanonicalType() !=
        MD->getReturnType().getCanonicalType())
      // If the return types are not the same, this might be a case where more
      // code needs to run to compensate for it. For example, the derived
      // method might return a type that inherits from the return type of MD
      // and has a prefix.
      // For now we just avoid devirtualizing these covariant cases.
      DevirtualizedMethod = nullptr;
    else if (getCXXRecord(Inner) == DevirtualizedClass)
      // If the class of the Inner expression is where the dynamic method
      // is defined, build the this pointer from it.
      Base = Inner;
    else if (getCXXRecord(Base) != DevirtualizedClass) {
      // If the method is defined in a class that is not the best dynamic
      // one or the one of the full expression, we would have to build
      // a derived-to-base cast to compute the correct this pointer, but
      // we don't have support for that yet, so do a virtual call.
      DevirtualizedMethod = nullptr;
    }
  }

  llvm::Value *This;
  if (IsArrow)
    This = EmitScalarExpr(Base);
  else
    This = EmitLValue(Base).getAddress();

  if (MD->isTrivial() || (MD->isDefaulted() && MD->getParent()->isUnion())) {
    if (isa<CXXDestructorDecl>(MD)) return RValue::get(nullptr);
    if (isa<CXXConstructorDecl>(MD) &&
        cast<CXXConstructorDecl>(MD)->isDefaultConstructor())
      return RValue::get(nullptr);

    if (!MD->getParent()->mayInsertExtraPadding()) {
      if (MD->isCopyAssignmentOperator() || MD->isMoveAssignmentOperator()) {
        // We don't like to generate the trivial copy/move assignment operator
        // when it isn't necessary; just produce the proper effect here.
        // Special case: skip first argument of CXXOperatorCall (it is "this").
        unsigned ArgsToSkip = isa<CXXOperatorCallExpr>(CE) ? 1 : 0;
        llvm::Value *RHS =
            EmitLValue(*(CE->arg_begin() + ArgsToSkip)).getAddress();
        EmitAggregateAssign(This, RHS, CE->getType());
        return RValue::get(This);
      }

      if (isa<CXXConstructorDecl>(MD) &&
          cast<CXXConstructorDecl>(MD)->isCopyOrMoveConstructor()) {
        // Trivial move and copy ctor are the same.
        assert(CE->getNumArgs() == 1 && "unexpected argcount for trivial ctor");
        llvm::Value *RHS = EmitLValue(*CE->arg_begin()).getAddress();
        EmitAggregateCopy(This, RHS, CE->arg_begin()->getType());
        return RValue::get(This);
      }
      llvm_unreachable("unknown trivial member function");
    }
  }

  // Compute the function type we're calling.
  const CXXMethodDecl *CalleeDecl =
      DevirtualizedMethod ? DevirtualizedMethod : MD;
  const CGFunctionInfo *FInfo = nullptr;
  if (const auto *Dtor = dyn_cast<CXXDestructorDecl>(CalleeDecl))
    FInfo = &CGM.getTypes().arrangeCXXStructorDeclaration(
        Dtor, StructorType::Complete);
  else if (const auto *Ctor = dyn_cast<CXXConstructorDecl>(CalleeDecl))
    FInfo = &CGM.getTypes().arrangeCXXStructorDeclaration(
        Ctor, StructorType::Complete);
  else
    FInfo = &CGM.getTypes().arrangeCXXMethodDeclaration(CalleeDecl);

  llvm::FunctionType *Ty = CGM.getTypes().GetFunctionType(*FInfo);

  // C++ [class.virtual]p12:
  //   Explicit qualification with the scope operator (5.1) suppresses the
  //   virtual call mechanism.
  //
  // We also don't emit a virtual call if the base expression has a record type
  // because then we know what the type is.
  bool UseVirtualCall = CanUseVirtualCall && !DevirtualizedMethod;
  llvm::Value *Callee;

  if (const CXXDestructorDecl *Dtor = dyn_cast<CXXDestructorDecl>(MD)) {
    assert(CE->arg_begin() == CE->arg_end() &&
           "Destructor shouldn't have explicit parameters");
    assert(ReturnValue.isNull() && "Destructor shouldn't have return value");
    if (UseVirtualCall) {
      CGM.getCXXABI().EmitVirtualDestructorCall(
          *this, Dtor, Dtor_Complete, This, cast<CXXMemberCallExpr>(CE));
    } else {
      if (getLangOpts().AppleKext && MD->isVirtual() && HasQualifier)
        Callee = BuildAppleKextVirtualCall(MD, Qualifier, Ty);
      else if (!DevirtualizedMethod)
        Callee =
            CGM.getAddrOfCXXStructor(Dtor, StructorType::Complete, FInfo, Ty);
      else {
        const CXXDestructorDecl *DDtor =
            cast<CXXDestructorDecl>(DevirtualizedMethod);
        Callee = CGM.GetAddrOfFunction(GlobalDecl(DDtor, Dtor_Complete), Ty);
      }
      EmitCXXMemberOrOperatorCall(MD, Callee, ReturnValue, This,
                                  /*ImplicitParam=*/nullptr, QualType(), CE);
    }
    return RValue::get(nullptr);
  }

  if (const CXXConstructorDecl *Ctor = dyn_cast<CXXConstructorDecl>(MD)) {
    Callee = CGM.GetAddrOfFunction(GlobalDecl(Ctor, Ctor_Complete), Ty);
  } else if (UseVirtualCall) {
    Callee = CGM.getCXXABI().getVirtualFunctionPointer(*this, MD, This, Ty,
                                                       CE->getLocStart());
  } else {
    if (SanOpts.has(SanitizerKind::CFINVCall) &&
        MD->getParent()->isDynamicClass()) {
      llvm::Value *VTable = GetVTablePtr(This, Int8PtrTy);
      EmitVTablePtrCheckForCall(MD, VTable, CFITCK_NVCall, CE->getLocStart());
    }

    if (getLangOpts().AppleKext && MD->isVirtual() && HasQualifier)
      Callee = BuildAppleKextVirtualCall(MD, Qualifier, Ty);
    else if (!DevirtualizedMethod)
      Callee = CGM.GetAddrOfFunction(MD, Ty);
    else {
      Callee = CGM.GetAddrOfFunction(DevirtualizedMethod, Ty);
    }
  }

  if (MD->isVirtual()) {
    This = CGM.getCXXABI().adjustThisArgumentForVirtualFunctionCall(
        *this, MD, This, UseVirtualCall);
  }

  return EmitCXXMemberOrOperatorCall(MD, Callee, ReturnValue, This,
                                     /*ImplicitParam=*/nullptr, QualType(), CE);
}
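// Illustrative example (not from the original source) of the devirtualization
// performed above:
//   struct B { virtual int f(); };
//   struct D : B { int f() override; };
//   void g() { D d; d.f(); }  // exact dynamic type known: direct call to D::f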
RValue
CodeGenFunction::EmitCXXMemberPointerCallExpr(const CXXMemberCallExpr *E,
                                              ReturnValueSlot ReturnValue) {
  const BinaryOperator *BO =
      cast<BinaryOperator>(E->getCallee()->IgnoreParens());
  const Expr *BaseExpr = BO->getLHS();
  const Expr *MemFnExpr = BO->getRHS();

  const MemberPointerType *MPT =
      MemFnExpr->getType()->castAs<MemberPointerType>();
  const FunctionProtoType *FPT =
      MPT->getPointeeType()->castAs<FunctionProtoType>();
  const CXXRecordDecl *RD =
      cast<CXXRecordDecl>(MPT->getClass()->getAs<RecordType>()->getDecl());

  // Get the member function pointer.
  llvm::Value *MemFnPtr = EmitScalarExpr(MemFnExpr);

  // Emit the 'this' pointer.
  llvm::Value *This;
  if (BO->getOpcode() == BO_PtrMemI)
    This = EmitScalarExpr(BaseExpr);
  else
    This = EmitLValue(BaseExpr).getAddress();

  EmitTypeCheck(TCK_MemberCall, E->getExprLoc(), This,
                QualType(MPT->getClass(), 0));

  // Ask the ABI to load the callee.  Note that This is modified.
  llvm::Value *Callee =
      CGM.getCXXABI().EmitLoadOfMemberFunctionPointer(*this, BO, This,
                                                      MemFnPtr, MPT);

  CallArgList Args;

  QualType ThisType =
      getContext().getPointerType(getContext().getTagDeclType(RD));

  // Push the this ptr.
  Args.add(RValue::get(This), ThisType);

  RequiredArgs required = RequiredArgs::forPrototypePlus(FPT, 1);

  // And the rest of the call args
  EmitCallArgs(Args, FPT, E->arg_begin(), E->arg_end(), E->getDirectCallee());
  return EmitCall(CGM.getTypes().arrangeCXXMethodCall(Args, FPT, required),
                  Callee, ReturnValue, Args);
}
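// Illustrative example (not from the original source) of the two member
// pointer call forms handled above:
//   (obj.*memFn)(x);   // BO_PtrMemD: 'this' taken from an lvalue
//   (ptr->*memFn)(x);  // BO_PtrMemI: 'this' loaded as a scalar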
RValue
CodeGenFunction::EmitCXXOperatorMemberCallExpr(const CXXOperatorCallExpr *E,
                                               const CXXMethodDecl *MD,
                                               ReturnValueSlot ReturnValue) {
  assert(MD->isInstance() &&
         "Trying to emit a member call expr on a static method!");
  return EmitCXXMemberOrOperatorMemberCallExpr(
      E, MD, ReturnValue, /*HasQualifier=*/false, /*Qualifier=*/nullptr,
      /*IsArrow=*/false, E->getArg(0));
}

RValue CodeGenFunction::EmitCUDAKernelCallExpr(const CUDAKernelCallExpr *E,
                                               ReturnValueSlot ReturnValue) {
  return CGM.getCUDARuntime().EmitCUDAKernelCallExpr(*this, E, ReturnValue);
}
// HLSL Change Begins
RValue CodeGenFunction::EmitHLSLBuiltinCallExpr(const FunctionDecl *FD,
                                                const CallExpr *E,
                                                ReturnValueSlot ReturnValue) {
  return CGM.getHLSLRuntime().EmitHLSLBuiltinCallExpr(*this, FD, E,
                                                      ReturnValue);
}
// HLSL Change Ends
static void EmitNullBaseClassInitialization(CodeGenFunction &CGF,
                                            llvm::Value *DestPtr,
                                            const CXXRecordDecl *Base) {
  if (Base->isEmpty())
    return;

  DestPtr = CGF.EmitCastToVoidPtr(DestPtr);

  const ASTRecordLayout &Layout = CGF.getContext().getASTRecordLayout(Base);
  CharUnits Size = Layout.getNonVirtualSize();
  CharUnits Align = Layout.getNonVirtualAlignment();

  llvm::Value *SizeVal = CGF.CGM.getSize(Size);

  // If the type contains a pointer to data member we can't memset it to zero.
  // Instead, create a null constant and copy it to the destination.
  // TODO: there are other patterns besides zero that we can usefully memset,
  // like -1, which happens to be the pattern used by member-pointers.
  // TODO: isZeroInitializable can be over-conservative in the case where a
  // virtual base contains a member pointer.
  if (!CGF.CGM.getTypes().isZeroInitializable(Base)) {
    llvm::Constant *NullConstant = CGF.CGM.EmitNullConstantForBase(Base);

    llvm::GlobalVariable *NullVariable =
        new llvm::GlobalVariable(CGF.CGM.getModule(), NullConstant->getType(),
                                 /*isConstant=*/true,
                                 llvm::GlobalVariable::PrivateLinkage,
                                 NullConstant, Twine());
    NullVariable->setAlignment(Align.getQuantity());
    llvm::Value *SrcPtr = CGF.EmitCastToVoidPtr(NullVariable);

    // Get and call the appropriate llvm.memcpy overload.
    CGF.Builder.CreateMemCpy(DestPtr, SrcPtr, SizeVal, Align.getQuantity());
    return;
  }

  // Otherwise, just memset the whole thing to zero.  This is legal
  // because in LLVM, all default initializers (other than the ones we just
  // handled above) are guaranteed to have a bit pattern of all zeros.
  CGF.Builder.CreateMemSet(DestPtr, CGF.Builder.getInt8(0), SizeVal,
                           Align.getQuantity());
}
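// Illustrative note (not from the original source): the memcpy path above is
// needed because, under the Itanium ABI, a null pointer to data member is
// represented as -1 rather than 0, so e.g.
//   struct B { int B::*p; };
// cannot be null-initialized with a plain memset-to-zero.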
void
CodeGenFunction::EmitCXXConstructExpr(const CXXConstructExpr *E,
                                      AggValueSlot Dest) {
  assert(!Dest.isIgnored() && "Must have a destination!");
  const CXXConstructorDecl *CD = E->getConstructor();

  // If we require zero initialization before (or instead of) calling the
  // constructor, as can be the case with a non-user-provided default
  // constructor, emit the zero initialization now, unless destination is
  // already zeroed.
  if (E->requiresZeroInitialization() && !Dest.isZeroed()) {
    switch (E->getConstructionKind()) {
    case CXXConstructExpr::CK_Delegating:
    case CXXConstructExpr::CK_Complete:
      EmitNullInitialization(Dest.getAddr(), E->getType());
      break;
    case CXXConstructExpr::CK_VirtualBase:
    case CXXConstructExpr::CK_NonVirtualBase:
      EmitNullBaseClassInitialization(*this, Dest.getAddr(), CD->getParent());
      break;
    }
  }

  // If this is a call to a trivial default constructor, do nothing.
  if (CD->isTrivial() && CD->isDefaultConstructor())
    return;

  // Elide the constructor if we're constructing from a temporary.
  // The temporary check is required because Sema sets this on NRVO
  // returns.
  if (getLangOpts().ElideConstructors && E->isElidable()) {
    assert(getContext().hasSameUnqualifiedType(E->getType(),
                                               E->getArg(0)->getType()));
    if (E->getArg(0)->isTemporaryObject(getContext(), CD->getParent())) {
      EmitAggExpr(E->getArg(0), Dest);
      return;
    }
  }

  if (const ConstantArrayType *arrayType
        = getContext().getAsConstantArrayType(E->getType())) {
    EmitCXXAggrConstructorCall(CD, arrayType, Dest.getAddr(), E);
  } else {
    CXXCtorType Type = Ctor_Complete;
    bool ForVirtualBase = false;
    bool Delegating = false;

    switch (E->getConstructionKind()) {
    case CXXConstructExpr::CK_Delegating:
      // We should be emitting a constructor; GlobalDecl will assert this
      Type = CurGD.getCtorType();
      Delegating = true;
      break;

    case CXXConstructExpr::CK_Complete:
      Type = Ctor_Complete;
      break;

    case CXXConstructExpr::CK_VirtualBase:
      ForVirtualBase = true;
      // fall-through

    case CXXConstructExpr::CK_NonVirtualBase:
      Type = Ctor_Base;
    }

    // Call the constructor.
    EmitCXXConstructorCall(CD, Type, ForVirtualBase, Delegating, Dest.getAddr(),
                           E);
  }
}
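// Illustrative example (not from the original source) of the elision above:
// for 'X x = makeX();' the copy from the temporary is marked elidable, so
// makeX() is emitted directly into x's storage unless -fno-elide-constructors
// is in effect.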
void
CodeGenFunction::EmitSynthesizedCXXCopyCtor(llvm::Value *Dest,
                                            llvm::Value *Src,
                                            const Expr *Exp) {
  if (const ExprWithCleanups *E = dyn_cast<ExprWithCleanups>(Exp))
    Exp = E->getSubExpr();
  assert(isa<CXXConstructExpr>(Exp) &&
         "EmitSynthesizedCXXCopyCtor - unknown copy ctor expr");
  const CXXConstructExpr* E = cast<CXXConstructExpr>(Exp);
  const CXXConstructorDecl *CD = E->getConstructor();
  RunCleanupsScope Scope(*this);

  // If we require zero initialization before (or instead of) calling the
  // constructor, as can be the case with a non-user-provided default
  // constructor, emit the zero initialization now.
  // FIXME: Do I still need this for a copy ctor synthesis?
  if (E->requiresZeroInitialization())
    EmitNullInitialization(Dest, E->getType());

  assert(!getContext().getAsConstantArrayType(E->getType())
         && "EmitSynthesizedCXXCopyCtor - Copied-in Array");
  EmitSynthesizedCXXCopyCtorCall(CD, Dest, Src, E);
}
static CharUnits CalculateCookiePadding(CodeGenFunction &CGF,
                                        const CXXNewExpr *E) {
  if (!E->isArray())
    return CharUnits::Zero();

  // No cookie is required if the operator new[] being used is the
  // reserved placement operator new[].
  if (E->getOperatorNew()->isReservedGlobalPlacementOperator())
    return CharUnits::Zero();

  return CGF.CGM.getCXXABI().GetArrayCookieSize(E);
}
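// Illustrative note (not from the original source): the "cookie" is the
// ABI-mandated bookkeeping (typically the element count) stored in front of
// the elements of a 'new[]' allocation so that 'delete[]' can run the right
// number of destructors; trivially destructible element types usually need
// no cookie.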
static llvm::Value *EmitCXXNewAllocSize(CodeGenFunction &CGF,
                                        const CXXNewExpr *e,
                                        unsigned minElements,
                                        llvm::Value *&numElements,
                                        llvm::Value *&sizeWithoutCookie) {
  QualType type = e->getAllocatedType();

  if (!e->isArray()) {
    CharUnits typeSize = CGF.getContext().getTypeSizeInChars(type);
    sizeWithoutCookie
      = llvm::ConstantInt::get(CGF.SizeTy, typeSize.getQuantity());
    return sizeWithoutCookie;
  }

  // The width of size_t.
  unsigned sizeWidth = CGF.SizeTy->getBitWidth();

  // Figure out the cookie size.
  llvm::APInt cookieSize(sizeWidth,
                         CalculateCookiePadding(CGF, e).getQuantity());

  // Emit the array size expression.
  // We multiply the size of all dimensions for NumElements.
  // e.g. for 'int[2][3]', ElemType is 'int' and NumElements is 6.
  numElements = CGF.EmitScalarExpr(e->getArraySize());
  assert(isa<llvm::IntegerType>(numElements->getType()));

  // The number of elements can have an arbitrary integer type;
  // essentially, we need to multiply it by a constant factor, add a
  // cookie size, and verify that the result is representable as a
  // size_t.  That's just a gloss, though, and it's wrong in one
  // important way: if the count is negative, it's an error even if
  // the cookie size would bring the total size >= 0.
  bool isSigned
    = e->getArraySize()->getType()->isSignedIntegerOrEnumerationType();
  llvm::IntegerType *numElementsType
    = cast<llvm::IntegerType>(numElements->getType());
  unsigned numElementsWidth = numElementsType->getBitWidth();

  // Compute the constant factor.
  llvm::APInt arraySizeMultiplier(sizeWidth, 1);
  while (const ConstantArrayType *CAT
             = CGF.getContext().getAsConstantArrayType(type)) {
    type = CAT->getElementType();
    arraySizeMultiplier *= CAT->getSize();
  }

  CharUnits typeSize = CGF.getContext().getTypeSizeInChars(type);
  llvm::APInt typeSizeMultiplier(sizeWidth, typeSize.getQuantity());
  typeSizeMultiplier *= arraySizeMultiplier;

  // This will be a size_t.
  llvm::Value *size;

  // If someone is doing 'new int[42]' there is no need to do a dynamic check.
  // Don't bloat the -O0 code.
  if (llvm::ConstantInt *numElementsC =
        dyn_cast<llvm::ConstantInt>(numElements)) {
    const llvm::APInt &count = numElementsC->getValue();

    bool hasAnyOverflow = false;

    // If 'count' was a negative number, it's an overflow.
    if (isSigned && count.isNegative())
      hasAnyOverflow = true;

    // We want to do all this arithmetic in size_t.  If numElements is
    // wider than that, check whether it's already too big, and if so,
    // overflow.
    else if (numElementsWidth > sizeWidth &&
             numElementsWidth - sizeWidth > count.countLeadingZeros())
      hasAnyOverflow = true;

    // Okay, compute a count at the right width.
    llvm::APInt adjustedCount = count.zextOrTrunc(sizeWidth);

    // If there is a brace-initializer, we cannot allocate fewer elements than
    // there are initializers. If we do, that's treated like an overflow.
    if (adjustedCount.ult(minElements))
      hasAnyOverflow = true;

    // Scale numElements by that.  This might overflow, but we don't
    // care because it only overflows if allocationSize does, too, and
    // if that overflows then we shouldn't use this.
    numElements = llvm::ConstantInt::get(CGF.SizeTy,
                                         adjustedCount * arraySizeMultiplier);

    // Compute the size before cookie, and track whether it overflowed.
    bool overflow;
    llvm::APInt allocationSize
      = adjustedCount.umul_ov(typeSizeMultiplier, overflow);
    hasAnyOverflow |= overflow;

    // Add in the cookie, and check whether it's overflowed.
    if (cookieSize != 0) {
      // Save the current size without a cookie.  This shouldn't be
      // used if there was overflow.
      sizeWithoutCookie = llvm::ConstantInt::get(CGF.SizeTy, allocationSize);

      allocationSize = allocationSize.uadd_ov(cookieSize, overflow);
      hasAnyOverflow |= overflow;
    }

    // On overflow, produce a -1 so operator new will fail.
    if (hasAnyOverflow) {
      size = llvm::Constant::getAllOnesValue(CGF.SizeTy);
    } else {
      size = llvm::ConstantInt::get(CGF.SizeTy, allocationSize);
    }

  // Otherwise, we might need to use the overflow intrinsics.
  } else {
    // There are up to five conditions we need to test for:
    // 1) if isSigned, we need to check whether numElements is negative;
    // 2) if numElementsWidth > sizeWidth, we need to check whether
    //    numElements is larger than something representable in size_t;
    // 3) if minElements > 0, we need to check whether numElements is smaller
    //    than that.
    // 4) we need to compute
    //      sizeWithoutCookie := numElements * typeSizeMultiplier
    //    and check whether it overflows; and
    // 5) if we need a cookie, we need to compute
    //      size := sizeWithoutCookie + cookieSize
    //    and check whether it overflows.
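    // Worked example (illustrative, not from the original source): for
    // 'new int[n][2][3]' with 4-byte int, arraySizeMultiplier is 2*3 = 6 and
    // typeSizeMultiplier is 6*4 = 24, so the code below computes
    // size = n*24 via umul.with.overflow and scales numElements to n*6; any
    // detected overflow forces size to all-ones so that operator new fails.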
    llvm::Value *hasOverflow = nullptr;

    // If numElementsWidth > sizeWidth, then one way or another, we're
    // going to have to do a comparison for (2), and this happens to
    // take care of (1), too.
    if (numElementsWidth > sizeWidth) {
      llvm::APInt threshold(numElementsWidth, 1);
      threshold <<= sizeWidth;

      llvm::Value *thresholdV
        = llvm::ConstantInt::get(numElementsType, threshold);

      hasOverflow = CGF.Builder.CreateICmpUGE(numElements, thresholdV);
      numElements = CGF.Builder.CreateTrunc(numElements, CGF.SizeTy);

    // Otherwise, if we're signed, we want to sext up to size_t.
    } else if (isSigned) {
      if (numElementsWidth < sizeWidth)
        numElements = CGF.Builder.CreateSExt(numElements, CGF.SizeTy);

      // If there's a non-1 type size multiplier, then we can do the
      // signedness check at the same time as we do the multiply
      // because a negative number times anything will cause an
      // unsigned overflow.  Otherwise, we have to do it here. But at least
      // in this case, we can subsume the >= minElements check.
      if (typeSizeMultiplier == 1)
        hasOverflow = CGF.Builder.CreateICmpSLT(numElements,
                          llvm::ConstantInt::get(CGF.SizeTy, minElements));

    // Otherwise, zext up to size_t if necessary.
    } else if (numElementsWidth < sizeWidth) {
      numElements = CGF.Builder.CreateZExt(numElements, CGF.SizeTy);
    }

    assert(numElements->getType() == CGF.SizeTy);

    if (minElements) {
      // Don't allow allocation of fewer elements than we have initializers.
      if (!hasOverflow) {
        hasOverflow = CGF.Builder.CreateICmpULT(numElements,
                          llvm::ConstantInt::get(CGF.SizeTy, minElements));
      } else if (numElementsWidth > sizeWidth) {
        // The other existing overflow subsumes this check.
        // We do an unsigned comparison, since any signed value < -1 is
        // taken care of either above or below.
        hasOverflow = CGF.Builder.CreateOr(hasOverflow,
                          CGF.Builder.CreateICmpULT(numElements,
                              llvm::ConstantInt::get(CGF.SizeTy, minElements)));
      }
    }

    size = numElements;

    // Multiply by the type size if necessary.  This multiplier
    // includes all the factors for nested arrays.
    //
    // This step also causes numElements to be scaled up by the
    // nested-array factor if necessary.  Overflow on this computation
    // can be ignored because the result shouldn't be used if
    // allocation fails.
    if (typeSizeMultiplier != 1) {
      llvm::Value *umul_with_overflow
        = CGF.CGM.getIntrinsic(llvm::Intrinsic::umul_with_overflow, CGF.SizeTy);

      llvm::Value *tsmV =
        llvm::ConstantInt::get(CGF.SizeTy, typeSizeMultiplier);
      llvm::Value *result =
          CGF.Builder.CreateCall(umul_with_overflow, {size, tsmV});

      llvm::Value *overflowed = CGF.Builder.CreateExtractValue(result, 1);
      if (hasOverflow)
        hasOverflow = CGF.Builder.CreateOr(hasOverflow, overflowed);
      else
        hasOverflow = overflowed;

      size = CGF.Builder.CreateExtractValue(result, 0);

      // Also scale up numElements by the array size multiplier.
      if (arraySizeMultiplier != 1) {
        // If the base element type size is 1, then we can re-use the
        // multiply we just did.
        if (typeSize.isOne()) {
          assert(arraySizeMultiplier == typeSizeMultiplier);
          numElements = size;

        // Otherwise we need a separate multiply.
        } else {
          llvm::Value *asmV =
            llvm::ConstantInt::get(CGF.SizeTy, arraySizeMultiplier);
          numElements = CGF.Builder.CreateMul(numElements, asmV);
        }
      }
    } else {
      // numElements doesn't need to be scaled.
      assert(arraySizeMultiplier == 1);
    }

    // Add in the cookie size if necessary.
    if (cookieSize != 0) {
      sizeWithoutCookie = size;

      llvm::Value *uadd_with_overflow
        = CGF.CGM.getIntrinsic(llvm::Intrinsic::uadd_with_overflow, CGF.SizeTy);

      llvm::Value *cookieSizeV = llvm::ConstantInt::get(CGF.SizeTy, cookieSize);
      llvm::Value *result =
          CGF.Builder.CreateCall(uadd_with_overflow, {size, cookieSizeV});

      llvm::Value *overflowed = CGF.Builder.CreateExtractValue(result, 1);
      if (hasOverflow)
        hasOverflow = CGF.Builder.CreateOr(hasOverflow, overflowed);
      else
        hasOverflow = overflowed;

      size = CGF.Builder.CreateExtractValue(result, 0);
    }

    // If we had any possibility of dynamic overflow, make a select to
    // overwrite 'size' with an all-ones value, which should cause
    // operator new to throw.
    if (hasOverflow)
      size = CGF.Builder.CreateSelect(hasOverflow,
                                 llvm::Constant::getAllOnesValue(CGF.SizeTy),
                                      size);
  }

  if (cookieSize == 0)
    sizeWithoutCookie = size;
  else
    assert(sizeWithoutCookie && "didn't set sizeWithoutCookie?");

  return size;
}
static void StoreAnyExprIntoOneUnit(CodeGenFunction &CGF, const Expr *Init,
                                    QualType AllocType, llvm::Value *NewPtr) {
  // FIXME: Refactor with EmitExprAsInit.
  CharUnits Alignment = CGF.getContext().getTypeAlignInChars(AllocType);
  switch (CGF.getEvaluationKind(AllocType)) {
  case TEK_Scalar:
    CGF.EmitScalarInit(Init, nullptr,
                       CGF.MakeAddrLValue(NewPtr, AllocType, Alignment), false);
    return;
  case TEK_Complex:
    CGF.EmitComplexExprIntoLValue(Init, CGF.MakeAddrLValue(NewPtr, AllocType,
                                                           Alignment),
                                  /*isInit*/ true);
    return;
  case TEK_Aggregate: {
    AggValueSlot Slot
      = AggValueSlot::forAddr(NewPtr, Alignment, AllocType.getQualifiers(),
                              AggValueSlot::IsDestructed,
                              AggValueSlot::DoesNotNeedGCBarriers,
                              AggValueSlot::IsNotAliased);
    CGF.EmitAggExpr(Init, Slot);
    return;
  }
  }
  llvm_unreachable("bad evaluation kind");
}
void CodeGenFunction::EmitNewArrayInitializer(
    const CXXNewExpr *E, QualType ElementType, llvm::Type *ElementTy,
    llvm::Value *BeginPtr, llvm::Value *NumElements,
    llvm::Value *AllocSizeWithoutCookie) {
  // If we have a type with trivial initialization and no initializer,
  // there's nothing to do.
  if (!E->hasInitializer())
    return;

  llvm::Value *CurPtr = BeginPtr;

  unsigned InitListElements = 0;

  const Expr *Init = E->getInitializer();
  llvm::AllocaInst *EndOfInit = nullptr;
  QualType::DestructionKind DtorKind = ElementType.isDestructedType();
  EHScopeStack::stable_iterator Cleanup;
  llvm::Instruction *CleanupDominator = nullptr;

  // If the initializer is an initializer list, first do the explicit elements.
  if (const InitListExpr *ILE = dyn_cast<InitListExpr>(Init)) {
    InitListElements = ILE->getNumInits();

    // If this is a multi-dimensional array new, we will initialize multiple
    // elements with each init list element.
    QualType AllocType = E->getAllocatedType();
    if (const ConstantArrayType *CAT = dyn_cast_or_null<ConstantArrayType>(
            AllocType->getAsArrayTypeUnsafe())) {
      unsigned AS = CurPtr->getType()->getPointerAddressSpace();
      ElementTy = ConvertTypeForMem(AllocType);
      llvm::Type *AllocPtrTy = ElementTy->getPointerTo(AS);
      CurPtr = Builder.CreateBitCast(CurPtr, AllocPtrTy);
      InitListElements *= getContext().getConstantArrayElementCount(CAT);
    }

    // Enter a partial-destruction Cleanup if necessary.
    if (needsEHCleanup(DtorKind)) {
      // In principle we could tell the Cleanup where we are more
      // directly, but the control flow can get so varied here that it
      // would actually be quite complex.  Therefore we go through an
      // alloca.
      EndOfInit = CreateTempAlloca(BeginPtr->getType(), "array.init.end");
      CleanupDominator = Builder.CreateStore(BeginPtr, EndOfInit);
      pushIrregularPartialArrayCleanup(BeginPtr, EndOfInit, ElementType,
                                       getDestroyer(DtorKind));
      Cleanup = EHStack.stable_begin();
    }

    for (unsigned i = 0, e = ILE->getNumInits(); i != e; ++i) {
      // Tell the cleanup that it needs to destroy up to this
      // element.  TODO: some of these stores can be trivially
      // observed to be unnecessary.
      if (EndOfInit)
        Builder.CreateStore(Builder.CreateBitCast(CurPtr, BeginPtr->getType()),
                            EndOfInit);
      // FIXME: If the last initializer is an incomplete initializer list for
      // an array, and we have an array filler, we can fold together the two
      // initialization loops.
      StoreAnyExprIntoOneUnit(*this, ILE->getInit(i),
                              ILE->getInit(i)->getType(), CurPtr);
      CurPtr = Builder.CreateConstInBoundsGEP1_32(ElementTy, CurPtr, 1,
                                                  "array.exp.next");
    }

    // The remaining elements are filled with the array filler expression.
    Init = ILE->getArrayFiller();

    // Extract the initializer for the individual array elements by pulling
    // out the array filler from all the nested initializer lists. This avoids
    // generating a nested loop for the initialization.
    while (Init && Init->getType()->isConstantArrayType()) {
      auto *SubILE = dyn_cast<InitListExpr>(Init);
      if (!SubILE)
        break;
      assert(SubILE->getNumInits() == 0 && "explicit inits in array filler?");
      Init = SubILE->getArrayFiller();
    }

    // Switch back to initializing one base element at a time.
    CurPtr = Builder.CreateBitCast(CurPtr, BeginPtr->getType());
  }

  // Attempt to perform zero-initialization using memset.
  auto TryMemsetInitialization = [&]() -> bool {
    // FIXME: If the type is a pointer-to-data-member under the Itanium ABI,
    // we can initialize with a memset to -1.
    if (!CGM.getTypes().isZeroInitializable(ElementType))
      return false;

    // Optimization: since zero initialization will just set the memory
    // to all zeroes, generate a single memset to do it in one shot.

    // Subtract out the size of any elements we've already initialized.
    auto *RemainingSize = AllocSizeWithoutCookie;
    if (InitListElements) {
      // We know this can't overflow; we check this when doing the allocation.
      auto *InitializedSize = llvm::ConstantInt::get(
          RemainingSize->getType(),
          getContext().getTypeSizeInChars(ElementType).getQuantity() *
              InitListElements);
      RemainingSize = Builder.CreateSub(RemainingSize, InitializedSize);
    }

    // Create the memset.
    CharUnits Alignment = getContext().getTypeAlignInChars(ElementType);
    Builder.CreateMemSet(CurPtr, Builder.getInt8(0), RemainingSize,
                         Alignment.getQuantity(), false);
    return true;
  };
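  // Illustrative example (not from the original source): for
  // 'new int[n]{1, 2, 3}', the three explicit initializers are stored one
  // unit at a time above; the remaining n-3 elements are then zeroed by a
  // single memset of AllocSizeWithoutCookie minus 3*sizeof(int) bytes.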
  // If all elements have already been initialized, skip any further
  // initialization.
  llvm::ConstantInt *ConstNum = dyn_cast<llvm::ConstantInt>(NumElements);
  if (ConstNum && ConstNum->getZExtValue() <= InitListElements) {
    // If there was a Cleanup, deactivate it.
    if (CleanupDominator)
      DeactivateCleanupBlock(Cleanup, CleanupDominator);
    return;
  }

  assert(Init && "have trailing elements to initialize but no initializer");

  // If this is a constructor call, try to optimize it out, and failing that
  // emit a single loop to initialize all remaining elements.
  if (const CXXConstructExpr *CCE = dyn_cast<CXXConstructExpr>(Init)) {
    CXXConstructorDecl *Ctor = CCE->getConstructor();
    if (Ctor->isTrivial()) {
      // If the new-expression did not specify value-initialization, then there
      // is no initialization.
      if (!CCE->requiresZeroInitialization() || Ctor->getParent()->isEmpty())
        return;

      if (TryMemsetInitialization())
        return;
    }

    // Store the new Cleanup position for irregular Cleanups.
    //
    // FIXME: Share this cleanup with the constructor call emission rather than
    // having it create a cleanup of its own.
    if (EndOfInit) Builder.CreateStore(CurPtr, EndOfInit);

    // Emit a constructor call loop to initialize the remaining elements.
    if (InitListElements)
      NumElements = Builder.CreateSub(
          NumElements,
          llvm::ConstantInt::get(NumElements->getType(), InitListElements));
    EmitCXXAggrConstructorCall(Ctor, NumElements, CurPtr, CCE,
                               CCE->requiresZeroInitialization());
    return;
  }

  // If this is value-initialization, we can usually use memset.
  ImplicitValueInitExpr IVIE(ElementType);
  if (isa<ImplicitValueInitExpr>(Init)) {
    if (TryMemsetInitialization())
      return;

    // Switch to an ImplicitValueInitExpr for the element type. This handles
    // only one case: multidimensional array new of pointers to members. In
    // all other cases, we already have an initializer for the array element.
    Init = &IVIE;
  }

  // At this point we should have found an initializer for the individual
  // elements of the array.
  assert(getContext().hasSameUnqualifiedType(ElementType, Init->getType()) &&
         "got wrong type of element to initialize");

  // If we have an empty initializer list, we can usually use memset.
  if (auto *ILE = dyn_cast<InitListExpr>(Init))
    if (ILE->getNumInits() == 0 && TryMemsetInitialization())
      return;

  // If we have a struct whose every field is value-initialized, we can
  // usually use memset.
  if (auto *ILE = dyn_cast<InitListExpr>(Init)) {
    if (const RecordType *RType = ILE->getType()->getAs<RecordType>()) {
      if (RType->getDecl()->isStruct()) {
        unsigned NumFields = 0;
        for (auto *Field : RType->getDecl()->fields())
          if (!Field->isUnnamedBitfield())
            ++NumFields;
        if (ILE->getNumInits() == NumFields)
          for (unsigned i = 0, e = ILE->getNumInits(); i != e; ++i)
            if (!isa<ImplicitValueInitExpr>(ILE->getInit(i)))
              --NumFields;
        if (ILE->getNumInits() == NumFields && TryMemsetInitialization())
          return;
      }
    }
  }

  // Create the loop blocks.
  llvm::BasicBlock *EntryBB = Builder.GetInsertBlock();
  llvm::BasicBlock *LoopBB = createBasicBlock("new.loop");
  llvm::BasicBlock *ContBB = createBasicBlock("new.loop.end");

  // Find the end of the array, hoisted out of the loop.
  llvm::Value *EndPtr =
    Builder.CreateInBoundsGEP(BeginPtr, NumElements, "array.end");

  // If the number of elements isn't constant, we have to now check if there is
  // anything left to initialize.
  if (!ConstNum) {
    llvm::Value *IsEmpty = Builder.CreateICmpEQ(CurPtr, EndPtr,
                                                "array.isempty");
    Builder.CreateCondBr(IsEmpty, ContBB, LoopBB);
  }

  // Enter the loop.
  EmitBlock(LoopBB);

  // Set up the current-element phi.
  llvm::PHINode *CurPtrPhi =
    Builder.CreatePHI(CurPtr->getType(), 2, "array.cur");
  CurPtrPhi->addIncoming(CurPtr, EntryBB);
  CurPtr = CurPtrPhi;

  // Store the new Cleanup position for irregular Cleanups.
  if (EndOfInit) Builder.CreateStore(CurPtr, EndOfInit);

  // Enter a partial-destruction Cleanup if necessary.
  if (!CleanupDominator && needsEHCleanup(DtorKind)) {
    pushRegularPartialArrayCleanup(BeginPtr, CurPtr, ElementType,
                                   getDestroyer(DtorKind));
    Cleanup = EHStack.stable_begin();
    CleanupDominator = Builder.CreateUnreachable();
  }

  // Emit the initializer into this element.
  StoreAnyExprIntoOneUnit(*this, Init, Init->getType(), CurPtr);

  // Leave the Cleanup if we entered one.
  if (CleanupDominator) {
    DeactivateCleanupBlock(Cleanup, CleanupDominator);
    CleanupDominator->eraseFromParent();
  }

  // Advance to the next element by adjusting the pointer type as necessary.
  llvm::Value *NextPtr =
      Builder.CreateConstInBoundsGEP1_32(ElementTy, CurPtr, 1, "array.next");

  // Check whether we've gotten to the end of the array and, if so,
  // exit the loop.
  llvm::Value *IsEnd = Builder.CreateICmpEQ(NextPtr, EndPtr, "array.atend");
  Builder.CreateCondBr(IsEnd, ContBB, LoopBB);
  CurPtrPhi->addIncoming(NextPtr, Builder.GetInsertBlock());

  EmitBlock(ContBB);
}
static void EmitNewInitializer(CodeGenFunction &CGF, const CXXNewExpr *E,
                               QualType ElementType, llvm::Type *ElementTy,
                               llvm::Value *NewPtr, llvm::Value *NumElements,
                               llvm::Value *AllocSizeWithoutCookie) {
  ApplyDebugLocation DL(CGF, E);
  if (E->isArray())
    CGF.EmitNewArrayInitializer(E, ElementType, ElementTy, NewPtr, NumElements,
                                AllocSizeWithoutCookie);
  else if (const Expr *Init = E->getInitializer())
    StoreAnyExprIntoOneUnit(CGF, Init, E->getAllocatedType(), NewPtr);
}

/// Emit a call to an operator new or operator delete function, as implicitly
/// created by new-expressions and delete-expressions.
static RValue EmitNewDeleteCall(CodeGenFunction &CGF,
                                const FunctionDecl *Callee,
                                const FunctionProtoType *CalleeType,
                                const CallArgList &Args) {
  llvm::Instruction *CallOrInvoke;
  llvm::Value *CalleeAddr = CGF.CGM.GetAddrOfFunction(Callee);
  RValue RV =
      CGF.EmitCall(CGF.CGM.getTypes().arrangeFreeFunctionCall(
                       Args, CalleeType, /*chainCall=*/false),
                   CalleeAddr, ReturnValueSlot(), Args, Callee, &CallOrInvoke);

  /// C++1y [expr.new]p10: [In a new-expression,] an implementation is
  /// allowed to omit a call to a replaceable global allocation function.
  ///
  /// We model such elidable calls with the 'builtin' attribute.
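
  // Illustrative example: on a typical Itanium target, a call to the
  // replaceable '::operator new(size_t)' is emitted roughly as
  //   %ptr = call i8* @_Znwm(i64 %size)   ; fn attrs include 'builtin'
  // which lets LLVM elide matched allocation/deallocation pairs.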
  llvm::Function *Fn = dyn_cast<llvm::Function>(CalleeAddr);
  if (Callee->isReplaceableGlobalAllocationFunction() &&
      Fn && Fn->hasFnAttribute(llvm::Attribute::NoBuiltin)) {
    // FIXME: Add addAttribute to CallSite.
    if (llvm::CallInst *CI = dyn_cast<llvm::CallInst>(CallOrInvoke))
      CI->addAttribute(llvm::AttributeSet::FunctionIndex,
                       llvm::Attribute::Builtin);
    else if (llvm::InvokeInst *II = dyn_cast<llvm::InvokeInst>(CallOrInvoke))
      II->addAttribute(llvm::AttributeSet::FunctionIndex,
                       llvm::Attribute::Builtin);
    else
      llvm_unreachable("unexpected kind of call instruction");
  }

  return RV;
}
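
/// Note (illustrative): this entry point backs Clang's
/// __builtin_operator_new and __builtin_operator_delete builtins, e.g.
///   void *p = __builtin_operator_new(32);
///   __builtin_operator_delete(p);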
RValue CodeGenFunction::EmitBuiltinNewDeleteCall(const FunctionProtoType *Type,
                                                 const Expr *Arg,
                                                 bool IsDelete) {
  CallArgList Args;
  const Stmt *ArgS = Arg;
  EmitCallArgs(Args, *Type->param_type_begin(),
               ConstExprIterator(&ArgS), ConstExprIterator(&ArgS + 1));

  // Find the allocation or deallocation function that we're calling.
  ASTContext &Ctx = getContext();
  DeclarationName Name = Ctx.DeclarationNames
      .getCXXOperatorName(IsDelete ? OO_Delete : OO_New);
  for (auto *Decl : Ctx.getTranslationUnitDecl()->lookup(Name))
    if (auto *FD = dyn_cast<FunctionDecl>(Decl))
      if (Ctx.hasSameType(FD->getType(), QualType(Type, 0)))
        return EmitNewDeleteCall(*this, FD, Type, Args);
  llvm_unreachable("predeclared global operator new/delete is missing");
}

namespace {
/// A cleanup to call the given 'operator delete' function upon
/// abnormal exit from a new expression.
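///
/// For example (illustrative, with a hypothetical 'Arena' type): if the
/// constructor in 'new (arena) Widget(x)' throws, the matching placement
/// 'operator delete(void*, Arena&)' must be called with the same placement
/// argument, which this cleanup replays from the saved RValues.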
class CallDeleteDuringNew : public EHScopeStack::Cleanup {
  size_t NumPlacementArgs;
  const FunctionDecl *OperatorDelete;
  llvm::Value *Ptr;
  llvm::Value *AllocSize;

  RValue *getPlacementArgs() { return reinterpret_cast<RValue*>(this+1); }

public:
  static size_t getExtraSize(size_t NumPlacementArgs) {
    return NumPlacementArgs * sizeof(RValue);
  }

  CallDeleteDuringNew(size_t NumPlacementArgs,
                      const FunctionDecl *OperatorDelete,
                      llvm::Value *Ptr,
                      llvm::Value *AllocSize)
    : NumPlacementArgs(NumPlacementArgs), OperatorDelete(OperatorDelete),
      Ptr(Ptr), AllocSize(AllocSize) {}

  void setPlacementArg(unsigned I, RValue Arg) {
    assert(I < NumPlacementArgs && "index out of range");
    getPlacementArgs()[I] = Arg;
  }

  void Emit(CodeGenFunction &CGF, Flags flags) override {
    const FunctionProtoType *FPT =
        OperatorDelete->getType()->getAs<FunctionProtoType>();
    assert(FPT->getNumParams() == NumPlacementArgs + 1 ||
           (FPT->getNumParams() == 2 && NumPlacementArgs == 0));

    CallArgList DeleteArgs;

    // The first argument is always a void*.
    FunctionProtoType::param_type_iterator AI = FPT->param_type_begin();
    DeleteArgs.add(RValue::get(Ptr), *AI++);

    // A member 'operator delete' can take an extra 'size_t' argument.
    if (FPT->getNumParams() == NumPlacementArgs + 2)
      DeleteArgs.add(RValue::get(AllocSize), *AI++);

    // Pass the rest of the arguments, which must match exactly.
    for (unsigned I = 0; I != NumPlacementArgs; ++I)
      DeleteArgs.add(getPlacementArgs()[I], *AI++);

    // Call 'operator delete'.
    EmitNewDeleteCall(CGF, OperatorDelete, FPT, DeleteArgs);
  }
};

/// A cleanup to call the given 'operator delete' function upon
/// abnormal exit from a new expression when the new expression is
/// conditional.
class CallDeleteDuringConditionalNew : public EHScopeStack::Cleanup {
  size_t NumPlacementArgs;
  const FunctionDecl *OperatorDelete;
  DominatingValue<RValue>::saved_type Ptr;
  DominatingValue<RValue>::saved_type AllocSize;

  DominatingValue<RValue>::saved_type *getPlacementArgs() {
    return reinterpret_cast<DominatingValue<RValue>::saved_type*>(this+1);
  }

public:
  static size_t getExtraSize(size_t NumPlacementArgs) {
    return NumPlacementArgs * sizeof(DominatingValue<RValue>::saved_type);
  }

  CallDeleteDuringConditionalNew(size_t NumPlacementArgs,
                                 const FunctionDecl *OperatorDelete,
                                 DominatingValue<RValue>::saved_type Ptr,
                                 DominatingValue<RValue>::saved_type AllocSize)
    : NumPlacementArgs(NumPlacementArgs), OperatorDelete(OperatorDelete),
      Ptr(Ptr), AllocSize(AllocSize) {}

  void setPlacementArg(unsigned I, DominatingValue<RValue>::saved_type Arg) {
    assert(I < NumPlacementArgs && "index out of range");
    getPlacementArgs()[I] = Arg;
  }

  void Emit(CodeGenFunction &CGF, Flags flags) override {
    const FunctionProtoType *FPT =
        OperatorDelete->getType()->getAs<FunctionProtoType>();
    assert(FPT->getNumParams() == NumPlacementArgs + 1 ||
           (FPT->getNumParams() == 2 && NumPlacementArgs == 0));

    CallArgList DeleteArgs;

    // The first argument is always a void*.
    FunctionProtoType::param_type_iterator AI = FPT->param_type_begin();
    DeleteArgs.add(Ptr.restore(CGF), *AI++);

    // A member 'operator delete' can take an extra 'size_t' argument.
    if (FPT->getNumParams() == NumPlacementArgs + 2) {
      RValue RV = AllocSize.restore(CGF);
      DeleteArgs.add(RV, *AI++);
    }

    // Pass the rest of the arguments, which must match exactly.
    for (unsigned I = 0; I != NumPlacementArgs; ++I) {
      RValue RV = getPlacementArgs()[I].restore(CGF);
      DeleteArgs.add(RV, *AI++);
    }

    // Call 'operator delete'.
    EmitNewDeleteCall(CGF, OperatorDelete, FPT, DeleteArgs);
  }
};
}

/// Enter a cleanup to call 'operator delete' if the initializer in a
/// new-expression throws.
static void EnterNewDeleteCleanup(CodeGenFunction &CGF,
                                  const CXXNewExpr *E,
                                  llvm::Value *NewPtr,
                                  llvm::Value *AllocSize,
                                  const CallArgList &NewArgs) {
  // If we're not inside a conditional branch, then the cleanup will
  // dominate and we can do the easier (and more efficient) thing.
  if (!CGF.isInConditionalBranch()) {
    CallDeleteDuringNew *Cleanup = CGF.EHStack
        .pushCleanupWithExtra<CallDeleteDuringNew>(EHCleanup,
                                                   E->getNumPlacementArgs(),
                                                   E->getOperatorDelete(),
                                                   NewPtr, AllocSize);
    for (unsigned I = 0, N = E->getNumPlacementArgs(); I != N; ++I)
      Cleanup->setPlacementArg(I, NewArgs[I+1].RV);

    return;
  }

  // Otherwise, we need to save all this stuff.
  DominatingValue<RValue>::saved_type SavedNewPtr =
      DominatingValue<RValue>::save(CGF, RValue::get(NewPtr));
  DominatingValue<RValue>::saved_type SavedAllocSize =
      DominatingValue<RValue>::save(CGF, RValue::get(AllocSize));

  CallDeleteDuringConditionalNew *Cleanup = CGF.EHStack
      .pushCleanupWithExtra<CallDeleteDuringConditionalNew>(EHCleanup,
                                                E->getNumPlacementArgs(),
                                                E->getOperatorDelete(),
                                                SavedNewPtr,
                                                SavedAllocSize);
  for (unsigned I = 0, N = E->getNumPlacementArgs(); I != N; ++I)
    Cleanup->setPlacementArg(I,
                     DominatingValue<RValue>::save(CGF, NewArgs[I+1].RV));

  CGF.initFullExprCleanup();
}

llvm::Value *CodeGenFunction::EmitCXXNewExpr(const CXXNewExpr *E) {
  // The element type being allocated.
  QualType allocType = getContext().getBaseElementType(E->getAllocatedType());

  // 1. Build a call to the allocation function.
  FunctionDecl *allocator = E->getOperatorNew();
  const FunctionProtoType *allocatorType =
      allocator->getType()->castAs<FunctionProtoType>();

  CallArgList allocatorArgs;

  // The allocation size is the first argument.
  QualType sizeType = getContext().getSizeType();

  // If there is a brace-initializer, we cannot allocate fewer elements than
  // there are initializers.
  unsigned minElements = 0;
  if (E->isArray() && E->hasInitializer()) {
    if (const InitListExpr *ILE = dyn_cast<InitListExpr>(E->getInitializer()))
      minElements = ILE->getNumInits();
  }
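
  // Illustrative example: for 'new int[n]{1, 2, 3}', minElements is 3;
  // EmitCXXNewAllocSize folds this lower bound into the size computation so
  // a too-small runtime 'n' cannot lead to an under-sized allocation.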

  llvm::Value *numElements = nullptr;
  llvm::Value *allocSizeWithoutCookie = nullptr;
  llvm::Value *allocSize =
      EmitCXXNewAllocSize(*this, E, minElements, numElements,
                          allocSizeWithoutCookie);

  allocatorArgs.add(RValue::get(allocSize), sizeType);

  // We start at 1 here because the first argument (the allocation size)
  // has already been emitted.
  EmitCallArgs(allocatorArgs, allocatorType, E->placement_arg_begin(),
               E->placement_arg_end(), /*CalleeDecl=*/nullptr,
               /*ParamsToSkip=*/1);

  // Emit the allocation call. If the allocator is a global placement
  // operator, just "inline" it directly.
  RValue RV;
  if (allocator->isReservedGlobalPlacementOperator()) {
    assert(allocatorArgs.size() == 2);
    RV = allocatorArgs[1].RV;
    // TODO: kill any unnecessary computations done for the size
    // argument.
  } else {
    RV = EmitNewDeleteCall(*this, allocator, allocatorType, allocatorArgs);
  }

  // Emit a null check on the allocation result if the allocation
  // function is allowed to return null (because it has a non-throwing
  // exception spec or is the reserved placement new) and we have an
  // interesting initializer.
  bool nullCheck = E->shouldNullCheckAllocation(getContext()) &&
                   (!allocType.isPODType(getContext()) || E->hasInitializer());

  llvm::BasicBlock *nullCheckBB = nullptr;
  llvm::BasicBlock *contBB = nullptr;

  llvm::Value *allocation = RV.getScalarVal();
  unsigned AS = allocation->getType()->getPointerAddressSpace();

  // The null-check means that the initializer is conditionally
  // evaluated.
  ConditionalEvaluation conditional(*this);

  if (nullCheck) {
    conditional.begin(*this);

    nullCheckBB = Builder.GetInsertBlock();
    llvm::BasicBlock *notNullBB = createBasicBlock("new.notnull");
    contBB = createBasicBlock("new.cont");

    llvm::Value *isNull = Builder.CreateIsNull(allocation, "new.isnull");
    Builder.CreateCondBr(isNull, contBB, notNullBB);
    EmitBlock(notNullBB);
  }
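
  // Illustrative example: for 'new (std::nothrow) Widget(a)', the allocator
  // may return null, so the initializer runs only on the new.notnull path
  // and new.cont rejoins with a null result otherwise.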

  // If there's an operator delete, enter a cleanup to call it if an
  // exception is thrown.
  EHScopeStack::stable_iterator operatorDeleteCleanup;
  llvm::Instruction *cleanupDominator = nullptr;
  if (E->getOperatorDelete() &&
      !E->getOperatorDelete()->isReservedGlobalPlacementOperator()) {
    EnterNewDeleteCleanup(*this, E, allocation, allocSize, allocatorArgs);
    operatorDeleteCleanup = EHStack.stable_begin();
    cleanupDominator = Builder.CreateUnreachable();
  }

  assert((allocSize == allocSizeWithoutCookie) ==
         CalculateCookiePadding(*this, E).isZero());
  if (allocSize != allocSizeWithoutCookie) {
    assert(E->isArray());
    allocation = CGM.getCXXABI().InitializeArrayCookie(*this, allocation,
                                                       numElements,
                                                       E, allocType);
  }
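
  // Note (illustrative, ABI-dependent): in the Itanium C++ ABI the array
  // cookie typically stores the element count immediately before the first
  // element; InitializeArrayCookie writes it and returns the allocation
  // pointer advanced past the cookie so 'delete[]' can recover the count.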

  llvm::Type *elementTy = ConvertTypeForMem(allocType);
  llvm::Type *elementPtrTy = elementTy->getPointerTo(AS);
  llvm::Value *result = Builder.CreateBitCast(allocation, elementPtrTy);

  EmitNewInitializer(*this, E, allocType, elementTy, result, numElements,
                     allocSizeWithoutCookie);
  if (E->isArray()) {
    // NewPtr is a pointer to the base element type. If we're
    // allocating an array of arrays, we'll need to cast back to the
    // array pointer type.
    llvm::Type *resultType = ConvertTypeForMem(E->getType());
    if (result->getType() != resultType)
      result = Builder.CreateBitCast(result, resultType);
  }

  // Deactivate the 'operator delete' cleanup if we finished
  // initialization.
  if (operatorDeleteCleanup.isValid()) {
    DeactivateCleanupBlock(operatorDeleteCleanup, cleanupDominator);
    cleanupDominator->eraseFromParent();
  }

  if (nullCheck) {
    conditional.end(*this);

    llvm::BasicBlock *notNullBB = Builder.GetInsertBlock();
    EmitBlock(contBB);

    llvm::PHINode *PHI = Builder.CreatePHI(result->getType(), 2);
    PHI->addIncoming(result, notNullBB);
    PHI->addIncoming(llvm::Constant::getNullValue(result->getType()),
                     nullCheckBB);

    result = PHI;
  }

  return result;
}

void CodeGenFunction::EmitDeleteCall(const FunctionDecl *DeleteFD,
                                     llvm::Value *Ptr,
                                     QualType DeleteTy) {
  assert(DeleteFD->getOverloadedOperator() == OO_Delete);

  const FunctionProtoType *DeleteFTy =
      DeleteFD->getType()->getAs<FunctionProtoType>();

  CallArgList DeleteArgs;

  // Check if we need to pass the size to the delete operator.
  llvm::Value *Size = nullptr;
  QualType SizeTy;
  if (DeleteFTy->getNumParams() == 2) {
    SizeTy = DeleteFTy->getParamType(1);
    CharUnits DeleteTypeSize = getContext().getTypeSizeInChars(DeleteTy);
    Size = llvm::ConstantInt::get(ConvertType(SizeTy),
                                  DeleteTypeSize.getQuantity());
  }
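
  // Illustrative example: for 'delete p' where '*p' occupies 16 bytes and
  // the selected operator is 'operator delete(void*, size_t)', Size is the
  // constant 16.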

  QualType ArgTy = DeleteFTy->getParamType(0);
  llvm::Value *DeletePtr = Builder.CreateBitCast(Ptr, ConvertType(ArgTy));
  DeleteArgs.add(RValue::get(DeletePtr), ArgTy);

  if (Size)
    DeleteArgs.add(RValue::get(Size), SizeTy);

  // Emit the call to delete.
  EmitNewDeleteCall(*this, DeleteFD, DeleteFTy, DeleteArgs);
}

namespace {
/// Calls the given 'operator delete' on a single object.
struct CallObjectDelete : EHScopeStack::Cleanup {
  llvm::Value *Ptr;
  const FunctionDecl *OperatorDelete;
  QualType ElementType;

  CallObjectDelete(llvm::Value *Ptr,
                   const FunctionDecl *OperatorDelete,
                   QualType ElementType)
    : Ptr(Ptr), OperatorDelete(OperatorDelete), ElementType(ElementType) {}

  void Emit(CodeGenFunction &CGF, Flags flags) override {
    CGF.EmitDeleteCall(OperatorDelete, Ptr, ElementType);
  }
};
}

void
CodeGenFunction::pushCallObjectDeleteCleanup(const FunctionDecl *OperatorDelete,
                                             llvm::Value *CompletePtr,
                                             QualType ElementType) {
  EHStack.pushCleanup<CallObjectDelete>(NormalAndEHCleanup, CompletePtr,
                                        OperatorDelete, ElementType);
}

/// Emit the code for deleting a single object.
static void EmitObjectDelete(CodeGenFunction &CGF,
                             const CXXDeleteExpr *DE,
                             llvm::Value *Ptr,
                             QualType ElementType) {
  // Find the destructor for the type, if applicable. If the
  // destructor is virtual, we'll just emit the vcall and return.
  const CXXDestructorDecl *Dtor = nullptr;
  if (const RecordType *RT = ElementType->getAs<RecordType>()) {
    CXXRecordDecl *RD = cast<CXXRecordDecl>(RT->getDecl());
    if (RD->hasDefinition() && !RD->hasTrivialDestructor()) {
      Dtor = RD->getDestructor();

      if (Dtor->isVirtual()) {
        CGF.CGM.getCXXABI().emitVirtualObjectDelete(CGF, DE, Ptr, ElementType,
                                                    Dtor);
        return;
      }
    }
  }

  // Make sure that we call delete even if the dtor throws.
  // This doesn't have to be a conditional cleanup because we're going
  // to pop it off in a second.
  const FunctionDecl *OperatorDelete = DE->getOperatorDelete();
  CGF.EHStack.pushCleanup<CallObjectDelete>(NormalAndEHCleanup,
                                            Ptr, OperatorDelete, ElementType);

  if (Dtor)
    CGF.EmitCXXDestructorCall(Dtor, Dtor_Complete,
                              /*ForVirtualBase=*/false,
                              /*Delegating=*/false,
                              Ptr);
  else if (CGF.getLangOpts().ObjCAutoRefCount &&
           ElementType->isObjCLifetimeType()) {
    switch (ElementType.getObjCLifetime()) {
    case Qualifiers::OCL_None:
    case Qualifiers::OCL_ExplicitNone:
    case Qualifiers::OCL_Autoreleasing:
      break;

    case Qualifiers::OCL_Strong: {
      // Load the pointer value.
      llvm::Value *PtrValue =
          CGF.Builder.CreateLoad(Ptr, ElementType.isVolatileQualified());
      CGF.EmitARCRelease(PtrValue, ARCPreciseLifetime);
      break;
    }

    case Qualifiers::OCL_Weak:
      CGF.EmitARCDestroyWeak(Ptr);
      break;
    }
  }

  CGF.PopCleanupBlock();
}

namespace {
/// Calls the given 'operator delete' on an array of objects.
struct CallArrayDelete : EHScopeStack::Cleanup {
  llvm::Value *Ptr;
  const FunctionDecl *OperatorDelete;
  llvm::Value *NumElements;
  QualType ElementType;
  CharUnits CookieSize;

  CallArrayDelete(llvm::Value *Ptr,
                  const FunctionDecl *OperatorDelete,
                  llvm::Value *NumElements,
                  QualType ElementType,
                  CharUnits CookieSize)
    : Ptr(Ptr), OperatorDelete(OperatorDelete), NumElements(NumElements),
      ElementType(ElementType), CookieSize(CookieSize) {}

  void Emit(CodeGenFunction &CGF, Flags flags) override {
    const FunctionProtoType *DeleteFTy =
        OperatorDelete->getType()->getAs<FunctionProtoType>();
    assert(DeleteFTy->getNumParams() == 1 || DeleteFTy->getNumParams() == 2);

    CallArgList Args;

    // Pass the pointer as the first argument.
    QualType VoidPtrTy = DeleteFTy->getParamType(0);
    llvm::Value *DeletePtr
      = CGF.Builder.CreateBitCast(Ptr, CGF.ConvertType(VoidPtrTy));
    Args.add(RValue::get(DeletePtr), VoidPtrTy);

    // Pass the original requested size as the second argument.
    if (DeleteFTy->getNumParams() == 2) {
      QualType size_t = DeleteFTy->getParamType(1);
      llvm::IntegerType *SizeTy
        = cast<llvm::IntegerType>(CGF.ConvertType(size_t));

      CharUnits ElementTypeSize =
          CGF.CGM.getContext().getTypeSizeInChars(ElementType);

      // The size of an element, multiplied by the number of elements.
      llvm::Value *Size
        = llvm::ConstantInt::get(SizeTy, ElementTypeSize.getQuantity());
      if (NumElements)
        Size = CGF.Builder.CreateMul(Size, NumElements);

      // Plus the size of the cookie if applicable.
      if (!CookieSize.isZero()) {
        llvm::Value *CookieSizeV
          = llvm::ConstantInt::get(SizeTy, CookieSize.getQuantity());
        Size = CGF.Builder.CreateAdd(Size, CookieSizeV);
      }

      Args.add(RValue::get(Size), size_t);
    }
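
    // Illustrative arithmetic: deleting 10 elements of an 8-byte type with
    // an 8-byte cookie passes Size = 10 * 8 + 8 = 88.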

    // Emit the call to delete.
    EmitNewDeleteCall(CGF, OperatorDelete, DeleteFTy, Args);
  }
};
}

/// Emit the code for deleting an array of objects.
static void EmitArrayDelete(CodeGenFunction &CGF,
                            const CXXDeleteExpr *E,
                            llvm::Value *deletedPtr,
                            QualType elementType) {
  llvm::Value *numElements = nullptr;
  llvm::Value *allocatedPtr = nullptr;
  CharUnits cookieSize;
  CGF.CGM.getCXXABI().ReadArrayCookie(CGF, deletedPtr, E, elementType,
                                      numElements, allocatedPtr, cookieSize);

  assert(allocatedPtr && "ReadArrayCookie didn't set allocated pointer");

  // Make sure that we call delete even if one of the dtors throws.
  const FunctionDecl *operatorDelete = E->getOperatorDelete();
  CGF.EHStack.pushCleanup<CallArrayDelete>(NormalAndEHCleanup,
                                           allocatedPtr, operatorDelete,
                                           numElements, elementType,
                                           cookieSize);

  // Destroy the elements.
  if (QualType::DestructionKind dtorKind = elementType.isDestructedType()) {
    assert(numElements && "no element count for a type with a destructor!");

    llvm::Value *arrayEnd =
        CGF.Builder.CreateInBoundsGEP(deletedPtr, numElements, "delete.end");

    // Note that it is legal to allocate a zero-length array, and we
    // can never fold the check away because the length should always
    // come from a cookie.
    CGF.emitArrayDestroy(deletedPtr, arrayEnd, elementType,
                         CGF.getDestroyer(dtorKind),
                         /*checkZeroLength*/ true,
                         CGF.needsEHCleanup(dtorKind));
  }

  // Pop the cleanup block.
  CGF.PopCleanupBlock();
}

void CodeGenFunction::EmitCXXDeleteExpr(const CXXDeleteExpr *E) {
  const Expr *Arg = E->getArgument();
  llvm::Value *Ptr = EmitScalarExpr(Arg);

  // Null check the pointer.
  llvm::BasicBlock *DeleteNotNull = createBasicBlock("delete.notnull");
  llvm::BasicBlock *DeleteEnd = createBasicBlock("delete.end");

  llvm::Value *IsNull = Builder.CreateIsNull(Ptr, "isnull");

  Builder.CreateCondBr(IsNull, DeleteEnd, DeleteNotNull);
  EmitBlock(DeleteNotNull);

  // We might be deleting a pointer to array. If so, GEP down to the
  // first non-array element.
  // (this assumes that A(*)[3][7] is converted to [3 x [7 x %A]]*)
  QualType DeleteTy = Arg->getType()->getAs<PointerType>()->getPointeeType();
  if (DeleteTy->isConstantArrayType()) {
    llvm::Value *Zero = Builder.getInt32(0);
    SmallVector<llvm::Value*,8> GEP;

    GEP.push_back(Zero); // point at the outermost array

    // For each layer of array type we're pointing at:
    while (const ConstantArrayType *Arr
             = getContext().getAsConstantArrayType(DeleteTy)) {
      // 1. Unpeel the array type.
      DeleteTy = Arr->getElementType();

      // 2. GEP to the first element of the array.
      GEP.push_back(Zero);
    }

    Ptr = Builder.CreateInBoundsGEP(Ptr, GEP, "del.first");
  }
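
  // Illustrative example: for 'A (*p)[3][7]; delete [] p;' the GEP above
  // uses indices {0, 0, 0}, yielding a pointer to the first A, and DeleteTy
  // ends up as A.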

  assert(ConvertTypeForMem(DeleteTy) ==
         cast<llvm::PointerType>(Ptr->getType())->getElementType());

  if (E->isArrayForm()) {
    EmitArrayDelete(*this, E, Ptr, DeleteTy);
  } else {
    EmitObjectDelete(*this, E, Ptr, DeleteTy);
  }

  EmitBlock(DeleteEnd);
}

static bool isGLValueFromPointerDeref(const Expr *E) {
  E = E->IgnoreParens();

  if (const auto *CE = dyn_cast<CastExpr>(E)) {
    if (!CE->getSubExpr()->isGLValue())
      return false;
    return isGLValueFromPointerDeref(CE->getSubExpr());
  }

  if (const auto *OVE = dyn_cast<OpaqueValueExpr>(E))
    return isGLValueFromPointerDeref(OVE->getSourceExpr());

  if (const auto *BO = dyn_cast<BinaryOperator>(E))
    if (BO->getOpcode() == BO_Comma)
      return isGLValueFromPointerDeref(BO->getRHS());

  if (const auto *ACO = dyn_cast<AbstractConditionalOperator>(E))
    return isGLValueFromPointerDeref(ACO->getTrueExpr()) ||
           isGLValueFromPointerDeref(ACO->getFalseExpr());

  // C++11 [expr.sub]p1:
  //   The expression E1[E2] is identical (by definition) to *((E1)+(E2))
  if (isa<ArraySubscriptExpr>(E))
    return true;

  if (const auto *UO = dyn_cast<UnaryOperator>(E))
    if (UO->getOpcode() == UO_Deref)
      return true;

  return false;
}
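
// Illustrative examples: '*p', 'p[0]', '(f(), *p)', and 'b ? *p : *q' are
// all recognized as glvalues obtained from a pointer dereference, while a
// plain lvalue such as 'obj' is not.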

static llvm::Value *EmitTypeidFromVTable(CodeGenFunction &CGF, const Expr *E,
                                         llvm::Type *StdTypeInfoPtrTy) {
  // Get the vtable pointer.
  llvm::Value *ThisPtr = CGF.EmitLValue(E).getAddress();

  // C++ [expr.typeid]p2:
  //   If the glvalue expression is obtained by applying the unary * operator
  //   to a pointer and the pointer is a null pointer value, the typeid
  //   expression throws the std::bad_typeid exception.
  //
  // However, this paragraph's intent is not clear. We choose a very generous
  // interpretation which implores us to consider comma operators, conditional
  // operators, parentheses and other such constructs.
  QualType SrcRecordTy = E->getType();
  if (CGF.CGM.getCXXABI().shouldTypeidBeNullChecked(
          isGLValueFromPointerDeref(E), SrcRecordTy)) {
    llvm::BasicBlock *BadTypeidBlock =
        CGF.createBasicBlock("typeid.bad_typeid");
    llvm::BasicBlock *EndBlock = CGF.createBasicBlock("typeid.end");

    llvm::Value *IsNull = CGF.Builder.CreateIsNull(ThisPtr);
    CGF.Builder.CreateCondBr(IsNull, BadTypeidBlock, EndBlock);

    CGF.EmitBlock(BadTypeidBlock);
    CGF.CGM.getCXXABI().EmitBadTypeidCall(CGF);
    CGF.EmitBlock(EndBlock);
  }

  return CGF.CGM.getCXXABI().EmitTypeid(CGF, SrcRecordTy, ThisPtr,
                                        StdTypeInfoPtrTy);
}

llvm::Value *CodeGenFunction::EmitCXXTypeidExpr(const CXXTypeidExpr *E) {
  llvm::Type *StdTypeInfoPtrTy =
      ConvertType(E->getType())->getPointerTo();

  if (E->isTypeOperand()) {
    llvm::Constant *TypeInfo =
        CGM.GetAddrOfRTTIDescriptor(E->getTypeOperand(getContext()));
    return Builder.CreateBitCast(TypeInfo, StdTypeInfoPtrTy);
  }
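
  // Illustrative example: 'typeid(int)' lowers to a direct reference to the
  // RTTI descriptor; no operand is evaluated and no vtable load is needed.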

  // C++ [expr.typeid]p2:
  //   When typeid is applied to a glvalue expression whose type is a
  //   polymorphic class type, the result refers to a std::type_info object
  //   representing the type of the most derived object (that is, the dynamic
  //   type) to which the glvalue refers.
  if (E->isPotentiallyEvaluated())
    return EmitTypeidFromVTable(*this, E->getExprOperand(),
                                StdTypeInfoPtrTy);

  QualType OperandTy = E->getExprOperand()->getType();
  return Builder.CreateBitCast(CGM.GetAddrOfRTTIDescriptor(OperandTy),
                               StdTypeInfoPtrTy);
}

static llvm::Value *EmitDynamicCastToNull(CodeGenFunction &CGF,
                                          QualType DestTy) {
  llvm::Type *DestLTy = CGF.ConvertType(DestTy);
  if (DestTy->isPointerType())
    return llvm::Constant::getNullValue(DestLTy);

  /// C++ [expr.dynamic.cast]p9:
  ///   A failed cast to reference type throws std::bad_cast
  if (!CGF.CGM.getCXXABI().EmitBadCastCall(CGF))
    return nullptr;

  CGF.EmitBlock(CGF.createBasicBlock("dynamic_cast.end"));
  return llvm::UndefValue::get(DestLTy);
}
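
// Note (illustrative): this path handles dynamic_casts statically known to
// fail or yield null. A pointer cast folds to a null constant; a reference
// cast unconditionally emits the ABI's bad_cast handler.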

llvm::Value *CodeGenFunction::EmitDynamicCast(llvm::Value *Value,
                                              const CXXDynamicCastExpr *DCE) {
  QualType DestTy = DCE->getTypeAsWritten();

  if (DCE->isAlwaysNull())
    if (llvm::Value *T = EmitDynamicCastToNull(*this, DestTy))
      return T;

  QualType SrcTy = DCE->getSubExpr()->getType();

  // C++ [expr.dynamic.cast]p7:
  //   If T is "pointer to cv void," then the result is a pointer to the most
  //   derived object pointed to by v.
  const PointerType *DestPTy = DestTy->getAs<PointerType>();

  bool isDynamicCastToVoid;
  QualType SrcRecordTy;
  QualType DestRecordTy;
  if (DestPTy) {
    isDynamicCastToVoid = DestPTy->getPointeeType()->isVoidType();
    SrcRecordTy = SrcTy->castAs<PointerType>()->getPointeeType();
    DestRecordTy = DestPTy->getPointeeType();
  } else {
    isDynamicCastToVoid = false;
    SrcRecordTy = SrcTy;
    DestRecordTy = DestTy->castAs<ReferenceType>()->getPointeeType();
  }

  assert(SrcRecordTy->isRecordType() && "source type must be a record type!");

  // C++ [expr.dynamic.cast]p4:
  //   If the value of v is a null pointer value in the pointer case, the
  //   result is the null pointer value of type T.
  bool ShouldNullCheckSrcValue =
      CGM.getCXXABI().shouldDynamicCastCallBeNullChecked(SrcTy->isPointerType(),
                                                         SrcRecordTy);

  llvm::BasicBlock *CastNull = nullptr;
  llvm::BasicBlock *CastNotNull = nullptr;
  llvm::BasicBlock *CastEnd = createBasicBlock("dynamic_cast.end");

  if (ShouldNullCheckSrcValue) {
    CastNull = createBasicBlock("dynamic_cast.null");
    CastNotNull = createBasicBlock("dynamic_cast.notnull");

    llvm::Value *IsNull = Builder.CreateIsNull(Value);
    Builder.CreateCondBr(IsNull, CastNull, CastNotNull);
    EmitBlock(CastNotNull);
  }

  if (isDynamicCastToVoid) {
    Value = CGM.getCXXABI().EmitDynamicCastToVoid(*this, Value, SrcRecordTy,
                                                  DestTy);
  } else {
    assert(DestRecordTy->isRecordType() &&
           "destination type must be a record type!");
    Value = CGM.getCXXABI().EmitDynamicCastCall(*this, Value, SrcRecordTy,
                                                DestTy, DestRecordTy, CastEnd);
  }

  if (ShouldNullCheckSrcValue) {
    EmitBranch(CastEnd);
    EmitBlock(CastNull);
    EmitBranch(CastEnd);
  }

  EmitBlock(CastEnd);

  if (ShouldNullCheckSrcValue) {
    llvm::PHINode *PHI = Builder.CreatePHI(Value->getType(), 2);
    PHI->addIncoming(Value, CastNotNull);
    PHI->addIncoming(llvm::Constant::getNullValue(Value->getType()), CastNull);
    Value = PHI;
  }

  return Value;
}
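
// Note (illustrative): for a lambda such as '[x, &y] { ... }', the loop
// below initializes each field of the closure object from the corresponding
// capture in declaration order; a captured VLA bound is stored from its
// previously computed size value.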
void CodeGenFunction::EmitLambdaExpr(const LambdaExpr *E, AggValueSlot Slot) {
  RunCleanupsScope Scope(*this);
  LValue SlotLV =
      MakeAddrLValue(Slot.getAddr(), E->getType(), Slot.getAlignment());

  CXXRecordDecl::field_iterator CurField = E->getLambdaClass()->field_begin();
  for (LambdaExpr::capture_init_iterator i = E->capture_init_begin(),
                                         e = E->capture_init_end();
       i != e; ++i, ++CurField) {
    // Emit initialization
    LValue LV = EmitLValueForFieldInitialization(SlotLV, *CurField);
    if (CurField->hasCapturedVLAType()) {
      auto VAT = CurField->getCapturedVLAType();
      EmitStoreThroughLValue(RValue::get(VLASizeMap[VAT->getSizeExpr()]), LV);
    } else {
      ArrayRef<VarDecl *> ArrayIndexes;
      if (CurField->getType()->isArrayType())
        ArrayIndexes = E->getCaptureInitIndexVars(i);
      EmitInitializerForField(*CurField, LV, *i, ArrayIndexes);
    }
  }
}