CGExprCXX.cpp 78 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045104610471048104910501051105210531054105510561057105810591060106110621063106410651066106710681069107010711072107310741075107610771078107910801081108210831084108510861087108810891090109110921093109410951096109710981099110011011102110311041105110611071108110911101111111211131114111511161117111811191120112111221123112411251126112711281129113011311132113311341135113611371138113911401141114211431144114511461147114811491150115111521153115411551156115711581159116011611162116311641165116611671168116911701171117211731174117511761177117811791180118111821183118411851186118711881189119011911192119311941195119611971198119912001201120212031204120512061207120812091210121112121213121412151216121712181219122012211222122312241225122612271228122912301231123212331234123512361237123812391240124112421243124412451246124712481249125012511252125312541255125612571258125912601261126212631264126512661267126812691270127112721273127412751276127
71278127912801281128212831284128512861287128812891290129112921293129412951296129712981299130013011302130313041305130613071308130913101311131213131314131513161317131813191320132113221323132413251326132713281329133013311332133313341335133613371338133913401341134213431344134513461347134813491350135113521353135413551356135713581359136013611362136313641365136613671368136913701371137213731374137513761377137813791380138113821383138413851386138713881389139013911392139313941395139613971398139914001401140214031404140514061407140814091410141114121413141414151416141714181419142014211422142314241425142614271428142914301431143214331434143514361437143814391440144114421443144414451446144714481449145014511452145314541455145614571458145914601461146214631464146514661467146814691470147114721473147414751476147714781479148014811482148314841485148614871488148914901491149214931494149514961497149814991500150115021503150415051506150715081509151015111512151315141515151615171518151915201521152215231524152515261527152815291530153115321533153415351536153715381539154015411542154315441545154615471548154915501551155215531554155515561557155815591560156115621563156415651566156715681569157015711572157315741575157615771578157915801581158215831584158515861587158815891590159115921593159415951596159715981599160016011602160316041605160616071608160916101611161216131614161516161617161816191620162116221623162416251626162716281629163016311632163316341635163616371638163916401641164216431644164516461647164816491650165116521653165416551656165716581659166016611662166316641665166616671668166916701671167216731674167516761677167816791680168116821683168416851686168716881689169016911692169316941695169616971698169917001701170217031704170517061707170817091710171117121713171417151716171717181719172017211722172317241725172617271728172917301731173217331734173517361737173817391740174117421743174417451746174717481749175017511752175317541755175617571758175917601761176217631764176517661767176817691770177117721773177417751776177
717781779178017811782178317841785178617871788178917901791179217931794179517961797179817991800180118021803180418051806180718081809181018111812181318141815181618171818181918201821182218231824182518261827182818291830183118321833183418351836183718381839184018411842184318441845184618471848184918501851185218531854185518561857185818591860186118621863186418651866186718681869187018711872187318741875187618771878187918801881188218831884188518861887188818891890189118921893189418951896189718981899190019011902190319041905190619071908190919101911191219131914191519161917191819191920192119221923192419251926192719281929193019311932193319341935193619371938193919401941194219431944194519461947194819491950195119521953195419551956195719581959196019611962196319641965196619671968196919701971197219731974197519761977197819791980198119821983198419851986198719881989199019911992199319941995199619971998199920002001200220032004200520062007200820092010201120122013201420152016201720182019202020212022202320242025202620272028
  1. //===--- CGExprCXX.cpp - Emit LLVM Code for C++ expressions ---------------===//
  2. //
  3. // The LLVM Compiler Infrastructure
  4. //
  5. // This file is distributed under the University of Illinois Open Source
  6. // License. See LICENSE.TXT for details.
  7. //
  8. //===----------------------------------------------------------------------===//
  9. //
  10. // This contains code dealing with code generation of C++ expressions
  11. //
  12. //===----------------------------------------------------------------------===//
  13. #include "CodeGenFunction.h"
  14. #include "CGCUDARuntime.h"
  15. #include "CGHLSLRuntime.h" // HLSL Change
  16. #include "CGCXXABI.h"
  17. #include "CGDebugInfo.h"
  18. #include "CGObjCRuntime.h"
  19. #include "clang/CodeGen/CGFunctionInfo.h"
  20. #include "clang/Frontend/CodeGenOptions.h"
  21. #include "llvm/IR/CallSite.h"
  22. #include "llvm/IR/Intrinsics.h"
  23. using namespace clang;
  24. using namespace CodeGen;
/// Shared prologue for emitting a C++ member or operator call: runs the
/// object-pointer sanitizer check, pushes 'this' and any ABI implicit
/// parameter (e.g. a VTT) into \p Args, then emits the explicit arguments.
///
/// \param CE may be null (e.g. an implicit destructor call); in that case the
///        callee must take no explicit parameters.
/// \param argList the (possibly HLSL-rewritten) explicit argument expressions;
///        see the HLSL out-parameter conversion in the callers.
/// \returns the RequiredArgs describing how many arguments are non-variadic.
static RequiredArgs commonEmitCXXMemberOrOperatorCall(
    CodeGenFunction &CGF, const CXXMethodDecl *MD, llvm::Value *Callee,
    ReturnValueSlot ReturnValue, llvm::Value *This, llvm::Value *ImplicitParam,
    QualType ImplicitParamTy, const CallExpr *CE, CallArgList &Args,
    ArrayRef<const Stmt *> argList // HLSL Change - use updated argList for out parameter.
    ) {
  assert(CE == nullptr || isa<CXXMemberCallExpr>(CE) ||
         isa<CXXOperatorCallExpr>(CE));
  assert(MD->isInstance() &&
         "Trying to emit a member or operator call expr on a static method!");

  // C++11 [class.mfct.non-static]p2:
  //   If a non-static member function of a class X is called for an object
  //   that is not of type X, or of a type derived from X, the behavior is
  //   undefined.
  SourceLocation CallLoc;
  if (CE)
    CallLoc = CE->getExprLoc();
  CGF.EmitTypeCheck(
      isa<CXXConstructorDecl>(MD) ? CodeGenFunction::TCK_ConstructorCall
                                  : CodeGenFunction::TCK_MemberCall,
      CallLoc, This, CGF.getContext().getRecordType(MD->getParent()));

  // Push the this ptr.
  Args.add(RValue::get(This), MD->getThisType(CGF.getContext()));

  // If there is an implicit parameter (e.g. VTT), emit it.
  if (ImplicitParam) {
    Args.add(RValue::get(ImplicitParam), ImplicitParamTy);
  }

  // 'required' is computed after 'this'/implicit params so Args.size() counts
  // the prefix arguments that precede the prototype's own parameters.
  const FunctionProtoType *FPT = MD->getType()->castAs<FunctionProtoType>();
  RequiredArgs required = RequiredArgs::forPrototypePlus(FPT, Args.size());

  // And the rest of the call args.
  if (CE) {
    // Special case: skip first argument of CXXOperatorCall (it is "this").
    unsigned ArgsToSkip = isa<CXXOperatorCallExpr>(CE) ? 1 : 0;
    CGF.EmitCallArgs(Args, FPT,
                     argList.begin() + ArgsToSkip, // HLSL Change - use updated argList for out parameter.
                     argList.end(), // HLSL Change - use updated argList for out parameter.
                     CE->getDirectCallee());
  } else {
    assert(
        FPT->getNumParams() == 0 &&
        "No CallExpr specified for function with non-zero number of arguments");
  }
  return required;
}
  68. RValue CodeGenFunction::EmitCXXMemberOrOperatorCall(
  69. const CXXMethodDecl *MD, llvm::Value *Callee, ReturnValueSlot ReturnValue,
  70. llvm::Value *This, llvm::Value *ImplicitParam, QualType ImplicitParamTy,
  71. const CallExpr *CE) {
  72. const FunctionProtoType *FPT = MD->getType()->castAs<FunctionProtoType>();
  73. CallArgList Args;
  74. // HLSL Change Begins
  75. llvm::SmallVector<LValue, 8> castArgList;
  76. // The argList of the CallExpr, may be update for out parameter
  77. llvm::SmallVector<const Stmt *, 8> argList(CE->arg_begin(), CE->arg_end());
  78. ConstExprIterator argBegin = argList.data();
  79. ConstExprIterator argEnd = argList.data() + CE->getNumArgs();
  80. // out param conversion
  81. CodeGenFunction::HLSLOutParamScope OutParamScope(*this);
  82. auto MapTemp = [&](const VarDecl *LocalVD, llvm::Value *TmpArg) {
  83. OutParamScope.addTemp(LocalVD, TmpArg);
  84. };
  85. if (getLangOpts().HLSL) {
  86. if (const FunctionDecl *FD = CE->getDirectCallee())
  87. CGM.getHLSLRuntime().EmitHLSLOutParamConversionInit(*this, FD, CE,
  88. castArgList, argList, MapTemp);
  89. }
  90. // HLSL Change Ends
  91. RequiredArgs required = commonEmitCXXMemberOrOperatorCall(
  92. *this, MD, Callee, ReturnValue, This, ImplicitParam, ImplicitParamTy, CE,
  93. Args, argList); // HLSL Change - use updated argList.
  94. RValue CallVal = EmitCall(CGM.getTypes().arrangeCXXMethodCall(Args, FPT, required),
  95. Callee, ReturnValue, Args, MD);
  96. // HLSL Change Begins
  97. // out param conversion
  98. // conversion and copy back after the call
  99. if (getLangOpts().HLSL)
  100. CGM.getHLSLRuntime().EmitHLSLOutParamConversionCopyBack(*this, castArgList);
  101. // HLSL Change Ends
  102. return CallVal;
  103. }
  104. RValue CodeGenFunction::EmitCXXStructorCall(
  105. const CXXMethodDecl *MD, llvm::Value *Callee, ReturnValueSlot ReturnValue,
  106. llvm::Value *This, llvm::Value *ImplicitParam, QualType ImplicitParamTy,
  107. const CallExpr *CE, StructorType Type) {
  108. CallArgList Args;
  109. // HLSL Change Begins
  110. llvm::SmallVector<LValue, 8> castArgList;
  111. // The argList of the CallExpr, may be update for out parameter
  112. llvm::SmallVector<const Stmt *, 8> argList(CE->arg_begin(), CE->arg_end());
  113. ConstExprIterator argBegin = argList.data();
  114. ConstExprIterator argEnd = argList.data() + CE->getNumArgs();
  115. // out param conversion
  116. CodeGenFunction::HLSLOutParamScope OutParamScope(*this);
  117. auto MapTemp = [&](const VarDecl *LocalVD, llvm::Value *TmpArg) {
  118. OutParamScope.addTemp(LocalVD, TmpArg);
  119. };
  120. if (getLangOpts().HLSL) {
  121. if (const FunctionDecl *FD = CE->getDirectCallee())
  122. CGM.getHLSLRuntime().EmitHLSLOutParamConversionInit(*this, FD, CE,
  123. castArgList, argList, MapTemp);
  124. }
  125. // HLSL Change Ends
  126. commonEmitCXXMemberOrOperatorCall(*this, MD, Callee, ReturnValue, This,
  127. ImplicitParam, ImplicitParamTy, CE, Args,
  128. argList); // HLSL Change - use updated argList.
  129. RValue CallVal = EmitCall(CGM.getTypes().arrangeCXXStructorDeclaration(MD, Type),
  130. Callee, ReturnValue, Args, MD);
  131. // HLSL Change Begins
  132. // out param conversion
  133. // conversion and copy back after the call
  134. if (getLangOpts().HLSL)
  135. CGM.getHLSLRuntime().EmitHLSLOutParamConversionCopyBack(*this, castArgList);
  136. // HLSL Change Ends
  137. return CallVal;
  138. }
  139. static CXXRecordDecl *getCXXRecord(const Expr *E) {
  140. QualType T = E->getType();
  141. if (const PointerType *PTy = T->getAs<PointerType>())
  142. T = PTy->getPointeeType();
  143. const RecordType *Ty = T->castAs<RecordType>();
  144. return cast<CXXRecordDecl>(Ty->getDecl());
  145. }
  146. // Note: This function also emit constructor calls to support a MSVC
  147. // extensions allowing explicit constructor function call.
  148. RValue CodeGenFunction::EmitCXXMemberCallExpr(const CXXMemberCallExpr *CE,
  149. ReturnValueSlot ReturnValue) {
  150. const Expr *callee = CE->getCallee()->IgnoreParens();
  151. if (isa<BinaryOperator>(callee))
  152. return EmitCXXMemberPointerCallExpr(CE, ReturnValue);
  153. const MemberExpr *ME = cast<MemberExpr>(callee);
  154. const CXXMethodDecl *MD = cast<CXXMethodDecl>(ME->getMemberDecl());
  155. if (MD->isStatic()) {
  156. // The method is static, emit it as we would a regular call.
  157. llvm::Value *Callee = CGM.GetAddrOfFunction(MD);
  158. return EmitCall(getContext().getPointerType(MD->getType()), Callee, CE,
  159. ReturnValue);
  160. }
  161. bool HasQualifier = ME->hasQualifier();
  162. NestedNameSpecifier *Qualifier = HasQualifier ? ME->getQualifier() : nullptr;
  163. bool IsArrow = ME->isArrow();
  164. const Expr *Base = ME->getBase();
  165. return EmitCXXMemberOrOperatorMemberCallExpr(
  166. CE, MD, ReturnValue, HasQualifier, Qualifier, IsArrow, Base);
  167. }
/// Core lowering for instance-member and operator calls: handles the HLSL
/// subscript special cases, devirtualization, trivial special members, and
/// finally the computation of the callee and 'this' pointer.
RValue CodeGenFunction::EmitCXXMemberOrOperatorMemberCallExpr(
    const CallExpr *CE, const CXXMethodDecl *MD, ReturnValueSlot ReturnValue,
    bool HasQualifier, NestedNameSpecifier *Qualifier, bool IsArrow,
    const Expr *Base) {
  assert(isa<CXXMemberCallExpr>(CE) || isa<CXXOperatorCallExpr>(CE));
  // HLSL Change Begins
  // HLSL matrix subscript: mat[i] is lowered through the HLSL runtime rather
  // than emitted as a normal operator call.
  if (hlsl::IsHLSLMatType(Base->getType())) {
    if (const CXXOperatorCallExpr *opCall = dyn_cast<CXXOperatorCallExpr>(CE)) {
      assert(opCall->getOperator() == OverloadedOperatorKind::OO_Subscript &&
             "must be subscript");
      llvm::Value *This = nullptr;
      if (Base->getValueKind() != ExprValueKind::VK_RValue) {
        This = EmitLValue(Base).getAddress();
      } else {
        // Rvalue matrix: spill it into a temporary so it has an address.
        llvm::Value *Val = EmitScalarExpr(Base);
        This = Builder.CreateAlloca(Val->getType());
        CGM.getHLSLRuntime().EmitHLSLMatrixStore(*this, Val, This, Base->getType());
      }
      // For an operator call, arg 0 is the object itself; arg 1 is the index.
      llvm::Value *Idx = EmitScalarExpr(CE->getArg(1));
      llvm::Type *RetTy =
          ConvertType(getContext().getLValueReferenceType(CE->getType()));
      llvm::Value *matSub = CGM.getHLSLRuntime().EmitHLSLMatrixSubscript(
          *this, RetTy, This, Idx, Base->getType());
      return RValue::get(matSub);
    }
  }
  // HLSL vector subscript: vec[i] becomes a GEP on an addressable copy (or
  // the original storage) of the vector.
  if (hlsl::IsHLSLVecType(Base->getType())) {
    if (const CXXOperatorCallExpr *opCall = dyn_cast<CXXOperatorCallExpr>(CE)) {
      assert(opCall->getOperator() == OverloadedOperatorKind::OO_Subscript &&
             "must be subscript");
      llvm::Value *This = nullptr;
      if (Base->getValueKind() != ExprValueKind::VK_RValue) {
        LValue LV = EmitLValue(Base);
        if (LV.isSimple()) {
          // NOTE(review): Base is emitted twice on this path (LV above and
          // again here); presumably harmless for simple lvalues — confirm no
          // double-emitted side effects.
          This = EmitLValue(Base).getAddress();
          if (isa<ExtMatrixElementExpr>(Base)) {
            // Matrix-element lvalue: load and respill so the GEP below sees a
            // plain vector pointer.
            llvm::Value *Val = Builder.CreateLoad(This);
            This = Builder.CreateAlloca(Val->getType());
            Builder.CreateStore(Val, This);
          }
        } else {
          assert(LV.isExtVectorElt() && "must be ext vector here");
          // Swizzle lvalue (e.g. v.xz): gather the selected elements into a
          // contiguous temporary, element by element.
          This = LV.getExtVectorAddr();
          llvm::Constant *Elts = LV.getExtVectorElts();
          llvm::Type *Ty = ConvertType(LV.getType());
          llvm::Constant *zero = Builder.getInt32(0);
          llvm::Value *TmpThis = Builder.CreateAlloca(Ty);
          for (unsigned i = 0; i < Ty->getVectorNumElements(); i++) {
            llvm::Value *EltIdx = Elts->getAggregateElement(i);
            llvm::Value *EltGEP = Builder.CreateGEP(This, {zero, EltIdx});
            llvm::Value *TmpEltGEP =
                Builder.CreateGEP(TmpThis, {zero, Builder.getInt32(i)});
            llvm::Value *Elt = Builder.CreateLoad(EltGEP);
            Builder.CreateStore(Elt, TmpEltGEP);
          }
          This = TmpThis;
        }
      } else {
        // Rvalue vector: spill to a temporary for addressability.
        llvm::Value *Val = EmitScalarExpr(Base);
        This = Builder.CreateAlloca(Val->getType());
        Builder.CreateStore(Val, This);
      }
      // Detect an i1 element type; the result pointer is re-typed below
      // because the AST bool type is wider than i1.
      bool isBool = false;
      if (llvm::IntegerType *IT =
              dyn_cast<llvm::IntegerType>(This->getType()
                                              ->getPointerElementType()
                                              ->getVectorElementType())) {
        if (IT->getBitWidth() == 1) {
          isBool = true;
        }
      }
      llvm::Value *Idx = EmitScalarExpr(CE->getArg(1));
      llvm::Constant *zero = llvm::ConstantInt::get(Idx->getType(), 0);
      llvm::Value *Elt = Builder.CreateGEP(This, {zero, Idx});
      if (isBool) {
        // bool pointer is not i1 *; cast to a pointer to the AST bool width,
        // preserving the address space.
        llvm::Type *BoolTy = llvm::IntegerType::get(
            getLLVMContext(), getContext().getTypeSize(CE->getType()));
        Elt = Builder.CreateBitCast(
            Elt, llvm::PointerType::get(
                     BoolTy, Elt->getType()->getPointerAddressSpace()));
      }
      return RValue::get(Elt);
    }
  }
  // Input/OutputPatch subscript: a direct GEP into the patch storage.
  if (hlsl::IsHLSLOutputPatchType(Base->getType()) ||
      hlsl::IsHLSLInputPatchType(Base->getType())) {
    if (const CXXOperatorCallExpr *opCall = dyn_cast<CXXOperatorCallExpr>(CE)) {
      assert(opCall->getOperator() == OverloadedOperatorKind::OO_Subscript &&
             "must be subscript");
      llvm::Value *This = EmitLValue(Base).getAddress();
      llvm::Value *Idx = EmitScalarExpr(CE->getArg(1));
      llvm::Constant *zero = llvm::ConstantInt::get(Idx->getType(), 0);
      llvm::Value *Elt = Builder.CreateGEP(This, { zero, Idx });
      return RValue::get(Elt);
    }
  }
  // HLSL Change Ends

  // Compute the object pointer.
  bool CanUseVirtualCall = MD->isVirtual() && !HasQualifier;
  const CXXMethodDecl *DevirtualizedMethod = nullptr;
  if (CanUseVirtualCall && CanDevirtualizeMemberFunctionCall(Base, MD)) {
    const CXXRecordDecl *BestDynamicDecl = Base->getBestDynamicClassType();
    DevirtualizedMethod = MD->getCorrespondingMethodInClass(BestDynamicDecl);
    assert(DevirtualizedMethod);
    const CXXRecordDecl *DevirtualizedClass = DevirtualizedMethod->getParent();
    const Expr *Inner = Base->ignoreParenBaseCasts();
    if (DevirtualizedMethod->getReturnType().getCanonicalType() !=
        MD->getReturnType().getCanonicalType())
      // If the return types are not the same, this might be a case where more
      // code needs to run to compensate for it. For example, the derived
      // method might return a type that inherits form from the return
      // type of MD and has a prefix.
      // For now we just avoid devirtualizing these covariant cases.
      DevirtualizedMethod = nullptr;
    else if (getCXXRecord(Inner) == DevirtualizedClass)
      // If the class of the Inner expression is where the dynamic method
      // is defined, build the this pointer from it.
      Base = Inner;
    else if (getCXXRecord(Base) != DevirtualizedClass) {
      // If the method is defined in a class that is not the best dynamic
      // one or the one of the full expression, we would have to build
      // a derived-to-base cast to compute the correct this pointer, but
      // we don't have support for that yet, so do a virtual call.
      DevirtualizedMethod = nullptr;
    }
  }
  llvm::Value *This;
  if (IsArrow)
    This = EmitScalarExpr(Base);
  else
    This = EmitLValue(Base).getAddress();

  // Trivial special members need no real call; emit their effect directly.
  if (MD->isTrivial() || (MD->isDefaulted() && MD->getParent()->isUnion())) {
    if (isa<CXXDestructorDecl>(MD)) return RValue::get(nullptr);
    if (isa<CXXConstructorDecl>(MD) &&
        cast<CXXConstructorDecl>(MD)->isDefaultConstructor())
      return RValue::get(nullptr);
    if (!MD->getParent()->mayInsertExtraPadding()) {
      if (MD->isCopyAssignmentOperator() || MD->isMoveAssignmentOperator()) {
        // We don't like to generate the trivial copy/move assignment operator
        // when it isn't necessary; just produce the proper effect here.
        // Special case: skip first argument of CXXOperatorCall (it is "this").
        unsigned ArgsToSkip = isa<CXXOperatorCallExpr>(CE) ? 1 : 0;
        llvm::Value *RHS =
            EmitLValue(*(CE->arg_begin() + ArgsToSkip)).getAddress();
        EmitAggregateAssign(This, RHS, CE->getType());
        return RValue::get(This);
      }
      if (isa<CXXConstructorDecl>(MD) &&
          cast<CXXConstructorDecl>(MD)->isCopyOrMoveConstructor()) {
        // Trivial move and copy ctor are the same.
        assert(CE->getNumArgs() == 1 && "unexpected argcount for trivial ctor");
        llvm::Value *RHS = EmitLValue(*CE->arg_begin()).getAddress();
        EmitAggregateCopy(This, RHS, CE->arg_begin()->getType());
        return RValue::get(This);
      }
      llvm_unreachable("unknown trivial member function");
    }
  }

  // Compute the function type we're calling, using the structor-specific
  // arrangement for constructors/destructors.
  const CXXMethodDecl *CalleeDecl =
      DevirtualizedMethod ? DevirtualizedMethod : MD;
  const CGFunctionInfo *FInfo = nullptr;
  if (const auto *Dtor = dyn_cast<CXXDestructorDecl>(CalleeDecl))
    FInfo = &CGM.getTypes().arrangeCXXStructorDeclaration(
        Dtor, StructorType::Complete);
  else if (const auto *Ctor = dyn_cast<CXXConstructorDecl>(CalleeDecl))
    FInfo = &CGM.getTypes().arrangeCXXStructorDeclaration(
        Ctor, StructorType::Complete);
  else
    FInfo = &CGM.getTypes().arrangeCXXMethodDeclaration(CalleeDecl);
  llvm::FunctionType *Ty = CGM.getTypes().GetFunctionType(*FInfo);

  // C++ [class.virtual]p12:
  //   Explicit qualification with the scope operator (5.1) suppresses the
  //   virtual call mechanism.
  //
  // We also don't emit a virtual call if the base expression has a record type
  // because then we know what the type is.
  bool UseVirtualCall = CanUseVirtualCall && !DevirtualizedMethod;
  llvm::Value *Callee;
  if (const CXXDestructorDecl *Dtor = dyn_cast<CXXDestructorDecl>(MD)) {
    assert(CE->arg_begin() == CE->arg_end() &&
           "Destructor shouldn't have explicit parameters");
    assert(ReturnValue.isNull() && "Destructor shouldn't have return value");
    if (UseVirtualCall) {
      CGM.getCXXABI().EmitVirtualDestructorCall(
          *this, Dtor, Dtor_Complete, This, cast<CXXMemberCallExpr>(CE));
    } else {
      if (getLangOpts().AppleKext && MD->isVirtual() && HasQualifier)
        Callee = BuildAppleKextVirtualCall(MD, Qualifier, Ty);
      else if (!DevirtualizedMethod)
        Callee =
            CGM.getAddrOfCXXStructor(Dtor, StructorType::Complete, FInfo, Ty);
      else {
        const CXXDestructorDecl *DDtor =
            cast<CXXDestructorDecl>(DevirtualizedMethod);
        Callee = CGM.GetAddrOfFunction(GlobalDecl(DDtor, Dtor_Complete), Ty);
      }
      EmitCXXMemberOrOperatorCall(MD, Callee, ReturnValue, This,
                                  /*ImplicitParam=*/nullptr, QualType(), CE);
    }
    // Destructor calls produce no value.
    return RValue::get(nullptr);
  }
  if (const CXXConstructorDecl *Ctor = dyn_cast<CXXConstructorDecl>(MD)) {
    // Explicit constructor call (MSVC extension) targets the complete-object
    // constructor.
    Callee = CGM.GetAddrOfFunction(GlobalDecl(Ctor, Ctor_Complete), Ty);
  } else if (UseVirtualCall) {
    Callee = CGM.getCXXABI().getVirtualFunctionPointer(*this, MD, This, Ty,
                                                       CE->getLocStart());
  } else {
    if (SanOpts.has(SanitizerKind::CFINVCall) &&
        MD->getParent()->isDynamicClass()) {
      // CFI non-virtual-call check: validate the vtable pointer before the
      // direct call.
      llvm::Value *VTable = GetVTablePtr(This, Int8PtrTy);
      EmitVTablePtrCheckForCall(MD, VTable, CFITCK_NVCall, CE->getLocStart());
    }
    if (getLangOpts().AppleKext && MD->isVirtual() && HasQualifier)
      Callee = BuildAppleKextVirtualCall(MD, Qualifier, Ty);
    else if (!DevirtualizedMethod)
      Callee = CGM.GetAddrOfFunction(MD, Ty);
    else {
      Callee = CGM.GetAddrOfFunction(DevirtualizedMethod, Ty);
    }
  }
  if (MD->isVirtual()) {
    // Let the C++ ABI adjust the 'this' argument as needed for this call.
    This = CGM.getCXXABI().adjustThisArgumentForVirtualFunctionCall(
        *this, MD, This, UseVirtualCall);
  }
  return EmitCXXMemberOrOperatorCall(MD, Callee, ReturnValue, This,
                                     /*ImplicitParam=*/nullptr, QualType(), CE);
}
/// Emit a call through a pointer-to-member-function (obj.*p or obj->*p):
/// loads the callee via the C++ ABI, then emits 'this' plus the explicit
/// arguments as a regular method call.
RValue
CodeGenFunction::EmitCXXMemberPointerCallExpr(const CXXMemberCallExpr *E,
                                              ReturnValueSlot ReturnValue) {
  const BinaryOperator *BO =
      cast<BinaryOperator>(E->getCallee()->IgnoreParens());
  const Expr *BaseExpr = BO->getLHS();
  const Expr *MemFnExpr = BO->getRHS();

  const MemberPointerType *MPT =
      MemFnExpr->getType()->castAs<MemberPointerType>();
  const FunctionProtoType *FPT =
      MPT->getPointeeType()->castAs<FunctionProtoType>();
  const CXXRecordDecl *RD =
      cast<CXXRecordDecl>(MPT->getClass()->getAs<RecordType>()->getDecl());

  // Get the member function pointer.
  llvm::Value *MemFnPtr = EmitScalarExpr(MemFnExpr);

  // Emit the 'this' pointer. BO_PtrMemI is the '->*' form, whose base is
  // already a pointer; the '.*' form needs the lvalue address.
  llvm::Value *This;
  if (BO->getOpcode() == BO_PtrMemI)
    This = EmitScalarExpr(BaseExpr);
  else
    This = EmitLValue(BaseExpr).getAddress();

  EmitTypeCheck(TCK_MemberCall, E->getExprLoc(), This,
                QualType(MPT->getClass(), 0));

  // Ask the ABI to load the callee. Note that This is modified.
  llvm::Value *Callee =
      CGM.getCXXABI().EmitLoadOfMemberFunctionPointer(*this, BO, This, MemFnPtr, MPT);

  CallArgList Args;
  QualType ThisType =
      getContext().getPointerType(getContext().getTagDeclType(RD));

  // Push the this ptr.
  Args.add(RValue::get(This), ThisType);

  // 'this' is the one argument that precedes the prototype's parameters.
  RequiredArgs required = RequiredArgs::forPrototypePlus(FPT, 1);

  // And the rest of the call args
  EmitCallArgs(Args, FPT, E->arg_begin(), E->arg_end(), E->getDirectCallee());
  return EmitCall(CGM.getTypes().arrangeCXXMethodCall(Args, FPT, required),
                  Callee, ReturnValue, Args);
}
  434. RValue
  435. CodeGenFunction::EmitCXXOperatorMemberCallExpr(const CXXOperatorCallExpr *E,
  436. const CXXMethodDecl *MD,
  437. ReturnValueSlot ReturnValue) {
  438. assert(MD->isInstance() &&
  439. "Trying to emit a member call expr on a static method!");
  440. return EmitCXXMemberOrOperatorMemberCallExpr(
  441. E, MD, ReturnValue, /*HasQualifier=*/false, /*Qualifier=*/nullptr,
  442. /*IsArrow=*/false, E->getArg(0));
  443. }
  444. RValue CodeGenFunction::EmitCUDAKernelCallExpr(const CUDAKernelCallExpr *E,
  445. ReturnValueSlot ReturnValue) {
  446. return CGM.getCUDARuntime().EmitCUDAKernelCallExpr(*this, E, ReturnValue);
  447. }
  448. // HLSL Change Begins
  449. RValue CodeGenFunction::EmitHLSLBuiltinCallExpr(const FunctionDecl *FD,
  450. const CallExpr *E,
  451. ReturnValueSlot ReturnValue) {
  452. return CGM.getHLSLRuntime().EmitHLSLBuiltinCallExpr(*this, FD, E,
  453. ReturnValue);
  454. }
  455. // HLSL Change Ends
/// Zero-initialize the non-virtual portion of the given base-class subobject
/// at DestPtr, either by memset or — when a zero bit-pattern is not a valid
/// null value for the type — by memcpy from a global null-constant.
static void EmitNullBaseClassInitialization(CodeGenFunction &CGF,
                                            llvm::Value *DestPtr,
                                            const CXXRecordDecl *Base) {
  // Empty bases occupy no storage of their own; nothing to initialize.
  if (Base->isEmpty())
    return;

  DestPtr = CGF.EmitCastToVoidPtr(DestPtr);

  // Only the non-virtual size/alignment matter: virtual bases are laid out
  // (and initialized) by the most-derived object.
  const ASTRecordLayout &Layout = CGF.getContext().getASTRecordLayout(Base);
  CharUnits Size = Layout.getNonVirtualSize();
  CharUnits Align = Layout.getNonVirtualAlignment();

  llvm::Value *SizeVal = CGF.CGM.getSize(Size);

  // If the type contains a pointer to data member we can't memset it to zero.
  // Instead, create a null constant and copy it to the destination.
  // TODO: there are other patterns besides zero that we can usefully memset,
  // like -1, which happens to be the pattern used by member-pointers.
  // TODO: isZeroInitializable can be over-conservative in the case where a
  // virtual base contains a member pointer.
  if (!CGF.CGM.getTypes().isZeroInitializable(Base)) {
    llvm::Constant *NullConstant = CGF.CGM.EmitNullConstantForBase(Base);
    llvm::GlobalVariable *NullVariable =
        new llvm::GlobalVariable(CGF.CGM.getModule(), NullConstant->getType(),
                                 /*isConstant=*/true,
                                 llvm::GlobalVariable::PrivateLinkage,
                                 NullConstant, Twine());
    NullVariable->setAlignment(Align.getQuantity());
    llvm::Value *SrcPtr = CGF.EmitCastToVoidPtr(NullVariable);

    // Get and call the appropriate llvm.memcpy overload.
    CGF.Builder.CreateMemCpy(DestPtr, SrcPtr, SizeVal, Align.getQuantity());
    return;
  }

  // Otherwise, just memset the whole thing to zero. This is legal
  // because in LLVM, all default initializers (other than the ones we just
  // handled above) are guaranteed to have a bit pattern of all zeros.
  CGF.Builder.CreateMemSet(DestPtr, CGF.Builder.getInt8(0), SizeVal,
                           Align.getQuantity());
}
/// Emit a CXXConstructExpr into the given aggregate slot, handling required
/// zero-initialization, trivial default constructors, constructor elision,
/// and array construction.
void
CodeGenFunction::EmitCXXConstructExpr(const CXXConstructExpr *E,
                                      AggValueSlot Dest) {
  assert(!Dest.isIgnored() && "Must have a destination!");
  const CXXConstructorDecl *CD = E->getConstructor();

  // If we require zero initialization before (or instead of) calling the
  // constructor, as can be the case with a non-user-provided default
  // constructor, emit the zero initialization now, unless destination is
  // already zeroed.
  if (E->requiresZeroInitialization() && !Dest.isZeroed()) {
    switch (E->getConstructionKind()) {
    case CXXConstructExpr::CK_Delegating:
    case CXXConstructExpr::CK_Complete:
      EmitNullInitialization(Dest.getAddr(), E->getType());
      break;
    case CXXConstructExpr::CK_VirtualBase:
    case CXXConstructExpr::CK_NonVirtualBase:
      // Base-subobject construction only zeroes the base's own storage.
      EmitNullBaseClassInitialization(*this, Dest.getAddr(), CD->getParent());
      break;
    }
  }

  // If this is a call to a trivial default constructor, do nothing.
  if (CD->isTrivial() && CD->isDefaultConstructor())
    return;

  // Elide the constructor if we're constructing from a temporary.
  // The temporary check is required because Sema sets this on NRVO
  // returns.
  if (getLangOpts().ElideConstructors && E->isElidable()) {
    assert(getContext().hasSameUnqualifiedType(E->getType(),
                                               E->getArg(0)->getType()));
    if (E->getArg(0)->isTemporaryObject(getContext(), CD->getParent())) {
      // Construct directly from the source aggregate; no ctor call emitted.
      EmitAggExpr(E->getArg(0), Dest);
      return;
    }
  }

  if (const ConstantArrayType *arrayType
        = getContext().getAsConstantArrayType(E->getType())) {
    // Arrays construct each element via the aggregate-constructor loop.
    EmitCXXAggrConstructorCall(CD, arrayType, Dest.getAddr(), E);
  } else {
    CXXCtorType Type = Ctor_Complete;
    bool ForVirtualBase = false;
    bool Delegating = false;

    switch (E->getConstructionKind()) {
    case CXXConstructExpr::CK_Delegating:
      // We should be emitting a constructor; GlobalDecl will assert this
      Type = CurGD.getCtorType();
      Delegating = true;
      break;

    case CXXConstructExpr::CK_Complete:
      Type = Ctor_Complete;
      break;

    case CXXConstructExpr::CK_VirtualBase:
      ForVirtualBase = true;
      // fall-through
    case CXXConstructExpr::CK_NonVirtualBase:
      Type = Ctor_Base;
    }

    // Call the constructor.
    EmitCXXConstructorCall(CD, Type, ForVirtualBase, Delegating, Dest.getAddr(),
                           E);
  }
}
/// Emit a call to a (compiler-)synthesized copy constructor, copying from
/// \p Src into \p Dest. \p Exp must be (possibly wrapped in cleanups) a
/// CXXConstructExpr describing the copy.
void
CodeGenFunction::EmitSynthesizedCXXCopyCtor(llvm::Value *Dest,
                                            llvm::Value *Src,
                                            const Expr *Exp) {
  // Strip an ExprWithCleanups wrapper to reach the construct expression.
  if (const ExprWithCleanups *E = dyn_cast<ExprWithCleanups>(Exp))
    Exp = E->getSubExpr();
  assert(isa<CXXConstructExpr>(Exp) &&
         "EmitSynthesizedCXXCopyCtor - unknown copy ctor expr");
  const CXXConstructExpr* E = cast<CXXConstructExpr>(Exp);
  const CXXConstructorDecl *CD = E->getConstructor();
  // Scope any cleanups produced while emitting the copy to this call.
  RunCleanupsScope Scope(*this);

  // If we require zero initialization before (or instead of) calling the
  // constructor, as can be the case with a non-user-provided default
  // constructor, emit the zero initialization now.
  // FIXME. Do I still need this for a copy ctor synthesis?
  if (E->requiresZeroInitialization())
    EmitNullInitialization(Dest, E->getType());

  assert(!getContext().getAsConstantArrayType(E->getType())
         && "EmitSynthesizedCXXCopyCtor - Copied-in Array");

  EmitSynthesizedCXXCopyCtorCall(CD, Dest, Src, E);
}
  574. static CharUnits CalculateCookiePadding(CodeGenFunction &CGF,
  575. const CXXNewExpr *E) {
  576. if (!E->isArray())
  577. return CharUnits::Zero();
  578. // No cookie is required if the operator new[] being used is the
  579. // reserved placement operator new[].
  580. if (E->getOperatorNew()->isReservedGlobalPlacementOperator())
  581. return CharUnits::Zero();
  582. return CGF.CGM.getCXXABI().GetArrayCookieSize(E);
  583. }
/// Compute the total allocation size for a new-expression.
///
/// For a non-array new this is simply the size of the allocated type.  For
/// array new it is numElements * elementSize (+ array cookie, if any),
/// computed with full overflow checking: any overflow — including a negative
/// element count, a count too wide for size_t, or fewer elements than the
/// brace-initializer requires (\p minElements) — results in a size of all
/// ones (-1), which the allocation function is expected to reject.
///
/// \param minElements        minimum element count permitted (number of
///                           explicit initializers).
/// \param numElements [out]  the element count, coerced to size_t and scaled
///                           by any nested constant-array dimensions.
/// \param sizeWithoutCookie [out] the allocation size excluding the cookie.
/// \returns the full allocation size (including cookie) as a size_t value.
static llvm::Value *EmitCXXNewAllocSize(CodeGenFunction &CGF,
                                        const CXXNewExpr *e,
                                        unsigned minElements,
                                        llvm::Value *&numElements,
                                        llvm::Value *&sizeWithoutCookie) {
  QualType type = e->getAllocatedType();

  if (!e->isArray()) {
    // Scalar new: the size is just sizeof(type); no cookie, no checks.
    CharUnits typeSize = CGF.getContext().getTypeSizeInChars(type);
    sizeWithoutCookie
      = llvm::ConstantInt::get(CGF.SizeTy, typeSize.getQuantity());
    return sizeWithoutCookie;
  }

  // The width of size_t.
  unsigned sizeWidth = CGF.SizeTy->getBitWidth();

  // Figure out the cookie size.
  llvm::APInt cookieSize(sizeWidth,
                         CalculateCookiePadding(CGF, e).getQuantity());

  // Emit the array size expression.
  // We multiply the size of all dimensions for NumElements.
  // e.g for 'int[2][3]', ElemType is 'int' and NumElements is 6.
  numElements = CGF.EmitScalarExpr(e->getArraySize());
  assert(isa<llvm::IntegerType>(numElements->getType()));

  // The number of elements can be have an arbitrary integer type;
  // essentially, we need to multiply it by a constant factor, add a
  // cookie size, and verify that the result is representable as a
  // size_t.  That's just a gloss, though, and it's wrong in one
  // important way: if the count is negative, it's an error even if
  // the cookie size would bring the total size >= 0.
  bool isSigned
    = e->getArraySize()->getType()->isSignedIntegerOrEnumerationType();
  llvm::IntegerType *numElementsType
    = cast<llvm::IntegerType>(numElements->getType());
  unsigned numElementsWidth = numElementsType->getBitWidth();

  // Compute the constant factor.
  // Fold all nested constant-array dimensions (e.g. new int[n][2][3])
  // into a single multiplier, leaving 'type' as the base element type.
  llvm::APInt arraySizeMultiplier(sizeWidth, 1);
  while (const ConstantArrayType *CAT
           = CGF.getContext().getAsConstantArrayType(type)) {
    type = CAT->getElementType();
    arraySizeMultiplier *= CAT->getSize();
  }

  CharUnits typeSize = CGF.getContext().getTypeSizeInChars(type);
  llvm::APInt typeSizeMultiplier(sizeWidth, typeSize.getQuantity());
  typeSizeMultiplier *= arraySizeMultiplier;

  // This will be a size_t.
  llvm::Value *size;

  // If someone is doing 'new int[42]' there is no need to do a dynamic check.
  // Don't bloat the -O0 code.
  if (llvm::ConstantInt *numElementsC =
        dyn_cast<llvm::ConstantInt>(numElements)) {
    // Constant path: do all the overflow checking in APInt arithmetic at
    // compile time; no runtime checks are emitted.
    const llvm::APInt &count = numElementsC->getValue();

    bool hasAnyOverflow = false;

    // If 'count' was a negative number, it's an overflow.
    if (isSigned && count.isNegative())
      hasAnyOverflow = true;

    // We want to do all this arithmetic in size_t.  If numElements is
    // wider than that, check whether it's already too big, and if so,
    // overflow.
    else if (numElementsWidth > sizeWidth &&
             numElementsWidth - sizeWidth > count.countLeadingZeros())
      hasAnyOverflow = true;

    // Okay, compute a count at the right width.
    llvm::APInt adjustedCount = count.zextOrTrunc(sizeWidth);

    // If there is a brace-initializer, we cannot allocate fewer elements than
    // there are initializers. If we do, that's treated like an overflow.
    if (adjustedCount.ult(minElements))
      hasAnyOverflow = true;

    // Scale numElements by that.  This might overflow, but we don't
    // care because it only overflows if allocationSize does, too, and
    // if that overflows then we shouldn't use this.
    numElements = llvm::ConstantInt::get(CGF.SizeTy,
                                         adjustedCount * arraySizeMultiplier);

    // Compute the size before cookie, and track whether it overflowed.
    bool overflow;
    llvm::APInt allocationSize
      = adjustedCount.umul_ov(typeSizeMultiplier, overflow);
    hasAnyOverflow |= overflow;

    // Add in the cookie, and check whether it's overflowed.
    if (cookieSize != 0) {
      // Save the current size without a cookie.  This shouldn't be
      // used if there was overflow.
      sizeWithoutCookie = llvm::ConstantInt::get(CGF.SizeTy, allocationSize);

      allocationSize = allocationSize.uadd_ov(cookieSize, overflow);
      hasAnyOverflow |= overflow;
    }

    // On overflow, produce a -1 so operator new will fail.
    if (hasAnyOverflow) {
      size = llvm::Constant::getAllOnesValue(CGF.SizeTy);
    } else {
      size = llvm::ConstantInt::get(CGF.SizeTy, allocationSize);
    }

  // Otherwise, we might need to use the overflow intrinsics.
  } else {
    // There are up to five conditions we need to test for:
    // 1) if isSigned, we need to check whether numElements is negative;
    // 2) if numElementsWidth > sizeWidth, we need to check whether
    //   numElements is larger than something representable in size_t;
    // 3) if minElements > 0, we need to check whether numElements is smaller
    //    than that.
    // 4) we need to compute
    //      sizeWithoutCookie := numElements * typeSizeMultiplier
    //    and check whether it overflows; and
    // 5) if we need a cookie, we need to compute
    //      size := sizeWithoutCookie + cookieSize
    //    and check whether it overflows.

    llvm::Value *hasOverflow = nullptr;

    // If numElementsWidth > sizeWidth, then one way or another, we're
    // going to have to do a comparison for (2), and this happens to
    // take care of (1), too.
    if (numElementsWidth > sizeWidth) {
      llvm::APInt threshold(numElementsWidth, 1);
      threshold <<= sizeWidth;

      llvm::Value *thresholdV
        = llvm::ConstantInt::get(numElementsType, threshold);

      hasOverflow = CGF.Builder.CreateICmpUGE(numElements, thresholdV);
      numElements = CGF.Builder.CreateTrunc(numElements, CGF.SizeTy);

    // Otherwise, if we're signed, we want to sext up to size_t.
    } else if (isSigned) {
      if (numElementsWidth < sizeWidth)
        numElements = CGF.Builder.CreateSExt(numElements, CGF.SizeTy);

      // If there's a non-1 type size multiplier, then we can do the
      // signedness check at the same time as we do the multiply
      // because a negative number times anything will cause an
      // unsigned overflow.  Otherwise, we have to do it here. But at least
      // in this case, we can subsume the >= minElements check.
      if (typeSizeMultiplier == 1)
        hasOverflow = CGF.Builder.CreateICmpSLT(numElements,
                              llvm::ConstantInt::get(CGF.SizeTy, minElements));

    // Otherwise, zext up to size_t if necessary.
    } else if (numElementsWidth < sizeWidth) {
      numElements = CGF.Builder.CreateZExt(numElements, CGF.SizeTy);
    }

    assert(numElements->getType() == CGF.SizeTy);

    if (minElements) {
      // Don't allow allocation of fewer elements than we have initializers.
      if (!hasOverflow) {
        hasOverflow = CGF.Builder.CreateICmpULT(numElements,
                              llvm::ConstantInt::get(CGF.SizeTy, minElements));
      } else if (numElementsWidth > sizeWidth) {
        // The other existing overflow subsumes this check.
        // We do an unsigned comparison, since any signed value < -1 is
        // taken care of either above or below.
        hasOverflow = CGF.Builder.CreateOr(hasOverflow,
                          CGF.Builder.CreateICmpULT(numElements,
                              llvm::ConstantInt::get(CGF.SizeTy, minElements)));
      }
    }

    size = numElements;

    // Multiply by the type size if necessary.  This multiplier
    // includes all the factors for nested arrays.
    //
    // This step also causes numElements to be scaled up by the
    // nested-array factor if necessary.  Overflow on this computation
    // can be ignored because the result shouldn't be used if
    // allocation fails.
    if (typeSizeMultiplier != 1) {
      llvm::Value *umul_with_overflow
        = CGF.CGM.getIntrinsic(llvm::Intrinsic::umul_with_overflow, CGF.SizeTy);

      llvm::Value *tsmV =
        llvm::ConstantInt::get(CGF.SizeTy, typeSizeMultiplier);
      llvm::Value *result =
          CGF.Builder.CreateCall(umul_with_overflow, {size, tsmV});

      llvm::Value *overflowed = CGF.Builder.CreateExtractValue(result, 1);
      if (hasOverflow)
        hasOverflow = CGF.Builder.CreateOr(hasOverflow, overflowed);
      else
        hasOverflow = overflowed;
      size = CGF.Builder.CreateExtractValue(result, 0);

      // Also scale up numElements by the array size multiplier.
      if (arraySizeMultiplier != 1) {
        // If the base element type size is 1, then we can re-use the
        // multiply we just did.
        if (typeSize.isOne()) {
          assert(arraySizeMultiplier == typeSizeMultiplier);
          numElements = size;

        // Otherwise we need a separate multiply.
        } else {
          llvm::Value *asmV =
            llvm::ConstantInt::get(CGF.SizeTy, arraySizeMultiplier);
          numElements = CGF.Builder.CreateMul(numElements, asmV);
        }
      }
    } else {
      // numElements doesn't need to be scaled.
      assert(arraySizeMultiplier == 1);
    }

    // Add in the cookie size if necessary.
    if (cookieSize != 0) {
      sizeWithoutCookie = size;

      llvm::Value *uadd_with_overflow
        = CGF.CGM.getIntrinsic(llvm::Intrinsic::uadd_with_overflow, CGF.SizeTy);

      llvm::Value *cookieSizeV = llvm::ConstantInt::get(CGF.SizeTy, cookieSize);
      llvm::Value *result =
          CGF.Builder.CreateCall(uadd_with_overflow, {size, cookieSizeV});

      llvm::Value *overflowed = CGF.Builder.CreateExtractValue(result, 1);
      if (hasOverflow)
        hasOverflow = CGF.Builder.CreateOr(hasOverflow, overflowed);
      else
        hasOverflow = overflowed;
      size = CGF.Builder.CreateExtractValue(result, 0);
    }

    // If we had any possibility of dynamic overflow, make a select to
    // overwrite 'size' with an all-ones value, which should cause
    // operator new to throw.
    if (hasOverflow)
      size = CGF.Builder.CreateSelect(hasOverflow,
                                 llvm::Constant::getAllOnesValue(CGF.SizeTy),
                                      size);
  }

  if (cookieSize == 0)
    sizeWithoutCookie = size;
  else
    assert(sizeWithoutCookie && "didn't set sizeWithoutCookie?");

  return size;
}
  798. static void StoreAnyExprIntoOneUnit(CodeGenFunction &CGF, const Expr *Init,
  799. QualType AllocType, llvm::Value *NewPtr) {
  800. // FIXME: Refactor with EmitExprAsInit.
  801. CharUnits Alignment = CGF.getContext().getTypeAlignInChars(AllocType);
  802. switch (CGF.getEvaluationKind(AllocType)) {
  803. case TEK_Scalar:
  804. CGF.EmitScalarInit(Init, nullptr,
  805. CGF.MakeAddrLValue(NewPtr, AllocType, Alignment), false);
  806. return;
  807. case TEK_Complex:
  808. CGF.EmitComplexExprIntoLValue(Init, CGF.MakeAddrLValue(NewPtr, AllocType,
  809. Alignment),
  810. /*isInit*/ true);
  811. return;
  812. case TEK_Aggregate: {
  813. AggValueSlot Slot
  814. = AggValueSlot::forAddr(NewPtr, Alignment, AllocType.getQualifiers(),
  815. AggValueSlot::IsDestructed,
  816. AggValueSlot::DoesNotNeedGCBarriers,
  817. AggValueSlot::IsNotAliased);
  818. CGF.EmitAggExpr(Init, Slot);
  819. return;
  820. }
  821. }
  822. llvm_unreachable("bad evaluation kind");
  823. }
/// Emit the initializer for an array new-expression.
///
/// Handles, in order: the explicit elements of a brace-initializer (with a
/// partial-destruction EH cleanup so already-constructed elements get
/// destroyed if a later initializer throws), then the remaining elements via
/// the array filler — using a constructor loop, a single memset when the
/// elements are zero-initializable, or a generic element-at-a-time loop.
void CodeGenFunction::EmitNewArrayInitializer(
    const CXXNewExpr *E, QualType ElementType, llvm::Type *ElementTy,
    llvm::Value *BeginPtr, llvm::Value *NumElements,
    llvm::Value *AllocSizeWithoutCookie) {
  // If we have a type with trivial initialization and no initializer,
  // there's nothing to do.
  if (!E->hasInitializer())
    return;

  llvm::Value *CurPtr = BeginPtr;

  unsigned InitListElements = 0;

  const Expr *Init = E->getInitializer();
  llvm::AllocaInst *EndOfInit = nullptr;
  QualType::DestructionKind DtorKind = ElementType.isDestructedType();
  EHScopeStack::stable_iterator Cleanup;
  llvm::Instruction *CleanupDominator = nullptr;

  // If the initializer is an initializer list, first do the explicit elements.
  if (const InitListExpr *ILE = dyn_cast<InitListExpr>(Init)) {
    InitListElements = ILE->getNumInits();

    // If this is a multi-dimensional array new, we will initialize multiple
    // elements with each init list element.
    QualType AllocType = E->getAllocatedType();
    if (const ConstantArrayType *CAT = dyn_cast_or_null<ConstantArrayType>(
            AllocType->getAsArrayTypeUnsafe())) {
      unsigned AS = CurPtr->getType()->getPointerAddressSpace();
      ElementTy = ConvertTypeForMem(AllocType);
      llvm::Type *AllocPtrTy = ElementTy->getPointerTo(AS);
      CurPtr = Builder.CreateBitCast(CurPtr, AllocPtrTy);
      InitListElements *= getContext().getConstantArrayElementCount(CAT);
    }

    // Enter a partial-destruction Cleanup if necessary.
    if (needsEHCleanup(DtorKind)) {
      // In principle we could tell the Cleanup where we are more
      // directly, but the control flow can get so varied here that it
      // would actually be quite complex.  Therefore we go through an
      // alloca.
      EndOfInit = CreateTempAlloca(BeginPtr->getType(), "array.init.end");
      CleanupDominator = Builder.CreateStore(BeginPtr, EndOfInit);
      pushIrregularPartialArrayCleanup(BeginPtr, EndOfInit, ElementType,
                                       getDestroyer(DtorKind));
      Cleanup = EHStack.stable_begin();
    }

    for (unsigned i = 0, e = ILE->getNumInits(); i != e; ++i) {
      // Tell the cleanup that it needs to destroy up to this
      // element.  TODO: some of these stores can be trivially
      // observed to be unnecessary.
      if (EndOfInit)
        Builder.CreateStore(Builder.CreateBitCast(CurPtr, BeginPtr->getType()),
                            EndOfInit);
      // FIXME: If the last initializer is an incomplete initializer list for
      // an array, and we have an array filler, we can fold together the two
      // initialization loops.
      StoreAnyExprIntoOneUnit(*this, ILE->getInit(i),
                              ILE->getInit(i)->getType(), CurPtr);
      CurPtr = Builder.CreateConstInBoundsGEP1_32(ElementTy, CurPtr, 1,
                                                  "array.exp.next");
    }

    // The remaining elements are filled with the array filler expression.
    Init = ILE->getArrayFiller();

    // Extract the initializer for the individual array elements by pulling
    // out the array filler from all the nested initializer lists. This avoids
    // generating a nested loop for the initialization.
    while (Init && Init->getType()->isConstantArrayType()) {
      auto *SubILE = dyn_cast<InitListExpr>(Init);
      if (!SubILE)
        break;
      assert(SubILE->getNumInits() == 0 && "explicit inits in array filler?");
      Init = SubILE->getArrayFiller();
    }

    // Switch back to initializing one base element at a time.
    CurPtr = Builder.CreateBitCast(CurPtr, BeginPtr->getType());
  }

  // Attempt to perform zero-initialization using memset.
  auto TryMemsetInitialization = [&]() -> bool {
    // FIXME: If the type is a pointer-to-data-member under the Itanium ABI,
    // we can initialize with a memset to -1.
    if (!CGM.getTypes().isZeroInitializable(ElementType))
      return false;

    // Optimization: since zero initialization will just set the memory
    // to all zeroes, generate a single memset to do it in one shot.

    // Subtract out the size of any elements we've already initialized.
    auto *RemainingSize = AllocSizeWithoutCookie;
    if (InitListElements) {
      // We know this can't overflow; we check this when doing the allocation.
      auto *InitializedSize = llvm::ConstantInt::get(
          RemainingSize->getType(),
          getContext().getTypeSizeInChars(ElementType).getQuantity() *
              InitListElements);
      RemainingSize = Builder.CreateSub(RemainingSize, InitializedSize);
    }

    // Create the memset.
    CharUnits Alignment = getContext().getTypeAlignInChars(ElementType);
    Builder.CreateMemSet(CurPtr, Builder.getInt8(0), RemainingSize,
                         Alignment.getQuantity(), false);
    return true;
  };

  // If all elements have already been initialized, skip any further
  // initialization.
  llvm::ConstantInt *ConstNum = dyn_cast<llvm::ConstantInt>(NumElements);
  if (ConstNum && ConstNum->getZExtValue() <= InitListElements) {
    // If there was a Cleanup, deactivate it.
    if (CleanupDominator)
      DeactivateCleanupBlock(Cleanup, CleanupDominator);
    return;
  }

  assert(Init && "have trailing elements to initialize but no initializer");

  // If this is a constructor call, try to optimize it out, and failing that
  // emit a single loop to initialize all remaining elements.
  if (const CXXConstructExpr *CCE = dyn_cast<CXXConstructExpr>(Init)) {
    CXXConstructorDecl *Ctor = CCE->getConstructor();
    if (Ctor->isTrivial()) {
      // If new expression did not specify value-initialization, then there
      // is no initialization.
      if (!CCE->requiresZeroInitialization() || Ctor->getParent()->isEmpty())
        return;

      if (TryMemsetInitialization())
        return;
    }

    // Store the new Cleanup position for irregular Cleanups.
    //
    // FIXME: Share this cleanup with the constructor call emission rather than
    // having it create a cleanup of its own.
    if (EndOfInit) Builder.CreateStore(CurPtr, EndOfInit);

    // Emit a constructor call loop to initialize the remaining elements.
    if (InitListElements)
      NumElements = Builder.CreateSub(
          NumElements,
          llvm::ConstantInt::get(NumElements->getType(), InitListElements));
    EmitCXXAggrConstructorCall(Ctor, NumElements, CurPtr, CCE,
                               CCE->requiresZeroInitialization());
    return;
  }

  // If this is value-initialization, we can usually use memset.
  ImplicitValueInitExpr IVIE(ElementType);
  if (isa<ImplicitValueInitExpr>(Init)) {
    if (TryMemsetInitialization())
      return;

    // Switch to an ImplicitValueInitExpr for the element type. This handles
    // only one case: multidimensional array new of pointers to members. In
    // all other cases, we already have an initializer for the array element.
    Init = &IVIE;
  }

  // At this point we should have found an initializer for the individual
  // elements of the array.
  assert(getContext().hasSameUnqualifiedType(ElementType, Init->getType()) &&
         "got wrong type of element to initialize");

  // If we have an empty initializer list, we can usually use memset.
  if (auto *ILE = dyn_cast<InitListExpr>(Init))
    if (ILE->getNumInits() == 0 && TryMemsetInitialization())
      return;

  // If we have a struct whose every field is value-initialized, we can
  // usually use memset.
  if (auto *ILE = dyn_cast<InitListExpr>(Init)) {
    if (const RecordType *RType = ILE->getType()->getAs<RecordType>()) {
      if (RType->getDecl()->isStruct()) {
        // Count named fields, then verify every supplied init is an
        // implicit value-initialization.
        unsigned NumFields = 0;
        for (auto *Field : RType->getDecl()->fields())
          if (!Field->isUnnamedBitfield())
            ++NumFields;
        if (ILE->getNumInits() == NumFields)
          for (unsigned i = 0, e = ILE->getNumInits(); i != e; ++i)
            if (!isa<ImplicitValueInitExpr>(ILE->getInit(i)))
              --NumFields;
        if (ILE->getNumInits() == NumFields && TryMemsetInitialization())
          return;
      }
    }
  }

  // Create the loop blocks.
  llvm::BasicBlock *EntryBB = Builder.GetInsertBlock();
  llvm::BasicBlock *LoopBB = createBasicBlock("new.loop");
  llvm::BasicBlock *ContBB = createBasicBlock("new.loop.end");

  // Find the end of the array, hoisted out of the loop.
  llvm::Value *EndPtr =
      Builder.CreateInBoundsGEP(BeginPtr, NumElements, "array.end");

  // If the number of elements isn't constant, we have to now check if there is
  // anything left to initialize.
  if (!ConstNum) {
    llvm::Value *IsEmpty = Builder.CreateICmpEQ(CurPtr, EndPtr,
                                                "array.isempty");
    Builder.CreateCondBr(IsEmpty, ContBB, LoopBB);
  }

  // Enter the loop.
  EmitBlock(LoopBB);

  // Set up the current-element phi.
  llvm::PHINode *CurPtrPhi =
      Builder.CreatePHI(CurPtr->getType(), 2, "array.cur");
  CurPtrPhi->addIncoming(CurPtr, EntryBB);
  CurPtr = CurPtrPhi;

  // Store the new Cleanup position for irregular Cleanups.
  if (EndOfInit) Builder.CreateStore(CurPtr, EndOfInit);

  // Enter a partial-destruction Cleanup if necessary.
  if (!CleanupDominator && needsEHCleanup(DtorKind)) {
    pushRegularPartialArrayCleanup(BeginPtr, CurPtr, ElementType,
                                   getDestroyer(DtorKind));
    Cleanup = EHStack.stable_begin();
    // Placeholder instruction marking the cleanup's dominating point;
    // erased again below once the cleanup is deactivated.
    CleanupDominator = Builder.CreateUnreachable();
  }

  // Emit the initializer into this element.
  StoreAnyExprIntoOneUnit(*this, Init, Init->getType(), CurPtr);

  // Leave the Cleanup if we entered one.
  if (CleanupDominator) {
    DeactivateCleanupBlock(Cleanup, CleanupDominator);
    CleanupDominator->eraseFromParent();
  }

  // Advance to the next element by adjusting the pointer type as necessary.
  llvm::Value *NextPtr =
      Builder.CreateConstInBoundsGEP1_32(ElementTy, CurPtr, 1, "array.next");

  // Check whether we've gotten to the end of the array and, if so,
  // exit the loop.
  llvm::Value *IsEnd = Builder.CreateICmpEQ(NextPtr, EndPtr, "array.atend");
  Builder.CreateCondBr(IsEnd, ContBB, LoopBB);
  CurPtrPhi->addIncoming(NextPtr, Builder.GetInsertBlock());

  EmitBlock(ContBB);
}
  1038. static void EmitNewInitializer(CodeGenFunction &CGF, const CXXNewExpr *E,
  1039. QualType ElementType, llvm::Type *ElementTy,
  1040. llvm::Value *NewPtr, llvm::Value *NumElements,
  1041. llvm::Value *AllocSizeWithoutCookie) {
  1042. ApplyDebugLocation DL(CGF, E);
  1043. if (E->isArray())
  1044. CGF.EmitNewArrayInitializer(E, ElementType, ElementTy, NewPtr, NumElements,
  1045. AllocSizeWithoutCookie);
  1046. else if (const Expr *Init = E->getInitializer())
  1047. StoreAnyExprIntoOneUnit(CGF, Init, E->getAllocatedType(), NewPtr);
  1048. }
/// Emit a call to an operator new or operator delete function, as implicitly
/// created by new-expressions and delete-expressions.
///
/// If the callee is a replaceable global allocation function whose LLVM
/// declaration carries the NoBuiltin attribute, the emitted call site is
/// tagged with the Builtin attribute so the optimizer may elide it.
static RValue EmitNewDeleteCall(CodeGenFunction &CGF,
                                const FunctionDecl *Callee,
                                const FunctionProtoType *CalleeType,
                                const CallArgList &Args) {
  llvm::Instruction *CallOrInvoke;
  llvm::Value *CalleeAddr = CGF.CGM.GetAddrOfFunction(Callee);
  RValue RV =
      CGF.EmitCall(CGF.CGM.getTypes().arrangeFreeFunctionCall(
                       Args, CalleeType, /*chainCall=*/false),
                   CalleeAddr, ReturnValueSlot(), Args, Callee, &CallOrInvoke);

  /// C++1y [expr.new]p10:
  ///   [In a new-expression,] an implementation is allowed to omit a call
  ///   to a replaceable global allocation function.
  ///
  /// We model such elidable calls with the 'builtin' attribute.
  llvm::Function *Fn = dyn_cast<llvm::Function>(CalleeAddr);
  if (Callee->isReplaceableGlobalAllocationFunction() &&
      Fn && Fn->hasFnAttribute(llvm::Attribute::NoBuiltin)) {
    // FIXME: Add addAttribute to CallSite.
    // The call site may be either a call or an invoke; handle both.
    if (llvm::CallInst *CI = dyn_cast<llvm::CallInst>(CallOrInvoke))
      CI->addAttribute(llvm::AttributeSet::FunctionIndex,
                       llvm::Attribute::Builtin);
    else if (llvm::InvokeInst *II = dyn_cast<llvm::InvokeInst>(CallOrInvoke))
      II->addAttribute(llvm::AttributeSet::FunctionIndex,
                       llvm::Attribute::Builtin);
    else
      llvm_unreachable("unexpected kind of call instruction");
  }

  return RV;
}
/// Emit a call to the predeclared global operator new or operator delete
/// with the given prototype, passing \p Arg as the single argument.
///
/// The callee is found by looking up the operator name in the translation
/// unit and matching the exact function type \p Type; such a declaration is
/// guaranteed to exist (the lookup failing is unreachable).
RValue CodeGenFunction::EmitBuiltinNewDeleteCall(const FunctionProtoType *Type,
                                                 const Expr *Arg,
                                                 bool IsDelete) {
  CallArgList Args;
  const Stmt *ArgS = Arg;
  // Emit the single argument using the parameter type from the prototype.
  EmitCallArgs(Args, *Type->param_type_begin(),
               ConstExprIterator(&ArgS), ConstExprIterator(&ArgS + 1));

  // Find the allocation or deallocation function that we're calling.
  ASTContext &Ctx = getContext();
  DeclarationName Name = Ctx.DeclarationNames
      .getCXXOperatorName(IsDelete ? OO_Delete : OO_New);
  for (auto *Decl : Ctx.getTranslationUnitDecl()->lookup(Name))
    if (auto *FD = dyn_cast<FunctionDecl>(Decl))
      if (Ctx.hasSameType(FD->getType(), QualType(Type, 0)))
        return EmitNewDeleteCall(*this, cast<FunctionDecl>(Decl), Type, Args);
  llvm_unreachable("predeclared global operator new/delete is missing");
}
namespace {
  /// A cleanup to call the given 'operator delete' function upon
  /// abnormal exit from a new expression.
  ///
  /// The placement arguments are stored in trailing storage immediately
  /// after the object (see getExtraSize / getPlacementArgs), as supported
  /// by EHScopeStack's pushCleanupWithExtra mechanism.
  class CallDeleteDuringNew : public EHScopeStack::Cleanup {
    size_t NumPlacementArgs;
    const FunctionDecl *OperatorDelete;
    llvm::Value *Ptr;
    llvm::Value *AllocSize;

    // The placement args live directly after this object in memory.
    RValue *getPlacementArgs() { return reinterpret_cast<RValue*>(this+1); }

  public:
    /// Number of extra bytes to allocate after the object for the
    /// placement arguments.
    static size_t getExtraSize(size_t NumPlacementArgs) {
      return NumPlacementArgs * sizeof(RValue);
    }

    CallDeleteDuringNew(size_t NumPlacementArgs,
                        const FunctionDecl *OperatorDelete,
                        llvm::Value *Ptr,
                        llvm::Value *AllocSize)
      : NumPlacementArgs(NumPlacementArgs), OperatorDelete(OperatorDelete),
        Ptr(Ptr), AllocSize(AllocSize) {}

    /// Record the I'th placement argument in the trailing storage.
    void setPlacementArg(unsigned I, RValue Arg) {
      assert(I < NumPlacementArgs && "index out of range");
      getPlacementArgs()[I] = Arg;
    }

    void Emit(CodeGenFunction &CGF, Flags flags) override {
      const FunctionProtoType *FPT
        = OperatorDelete->getType()->getAs<FunctionProtoType>();
      assert(FPT->getNumParams() == NumPlacementArgs + 1 ||
             (FPT->getNumParams() == 2 && NumPlacementArgs == 0));

      CallArgList DeleteArgs;

      // The first argument is always a void*.
      FunctionProtoType::param_type_iterator AI = FPT->param_type_begin();
      DeleteArgs.add(RValue::get(Ptr), *AI++);

      // A member 'operator delete' can take an extra 'size_t' argument.
      if (FPT->getNumParams() == NumPlacementArgs + 2)
        DeleteArgs.add(RValue::get(AllocSize), *AI++);

      // Pass the rest of the arguments, which must match exactly.
      for (unsigned I = 0; I != NumPlacementArgs; ++I)
        DeleteArgs.add(getPlacementArgs()[I], *AI++);

      // Call 'operator delete'.
      EmitNewDeleteCall(CGF, OperatorDelete, FPT, DeleteArgs);
    }
  };

  /// A cleanup to call the given 'operator delete' function upon
  /// abnormal exit from a new expression when the new expression is
  /// conditional.
  ///
  /// Unlike CallDeleteDuringNew, the pointer, size, and placement
  /// arguments are kept as DominatingValue saved_types so they can be
  /// restored at the (possibly non-dominated) cleanup emission point.
  class CallDeleteDuringConditionalNew : public EHScopeStack::Cleanup {
    size_t NumPlacementArgs;
    const FunctionDecl *OperatorDelete;
    DominatingValue<RValue>::saved_type Ptr;
    DominatingValue<RValue>::saved_type AllocSize;

    // The saved placement args live directly after this object in memory.
    DominatingValue<RValue>::saved_type *getPlacementArgs() {
      return reinterpret_cast<DominatingValue<RValue>::saved_type*>(this+1);
    }

  public:
    /// Number of extra bytes to allocate after the object for the saved
    /// placement arguments.
    static size_t getExtraSize(size_t NumPlacementArgs) {
      return NumPlacementArgs * sizeof(DominatingValue<RValue>::saved_type);
    }

    CallDeleteDuringConditionalNew(size_t NumPlacementArgs,
                                   const FunctionDecl *OperatorDelete,
                                   DominatingValue<RValue>::saved_type Ptr,
                                   DominatingValue<RValue>::saved_type AllocSize)
      : NumPlacementArgs(NumPlacementArgs), OperatorDelete(OperatorDelete),
        Ptr(Ptr), AllocSize(AllocSize) {}

    /// Record the saved I'th placement argument in the trailing storage.
    void setPlacementArg(unsigned I, DominatingValue<RValue>::saved_type Arg) {
      assert(I < NumPlacementArgs && "index out of range");
      getPlacementArgs()[I] = Arg;
    }

    void Emit(CodeGenFunction &CGF, Flags flags) override {
      const FunctionProtoType *FPT
        = OperatorDelete->getType()->getAs<FunctionProtoType>();
      assert(FPT->getNumParams() == NumPlacementArgs + 1 ||
             (FPT->getNumParams() == 2 && NumPlacementArgs == 0));

      CallArgList DeleteArgs;

      // The first argument is always a void*.
      FunctionProtoType::param_type_iterator AI = FPT->param_type_begin();
      DeleteArgs.add(Ptr.restore(CGF), *AI++);

      // A member 'operator delete' can take an extra 'size_t' argument.
      if (FPT->getNumParams() == NumPlacementArgs + 2) {
        RValue RV = AllocSize.restore(CGF);
        DeleteArgs.add(RV, *AI++);
      }

      // Pass the rest of the arguments, which must match exactly.
      for (unsigned I = 0; I != NumPlacementArgs; ++I) {
        RValue RV = getPlacementArgs()[I].restore(CGF);
        DeleteArgs.add(RV, *AI++);
      }

      // Call 'operator delete'.
      EmitNewDeleteCall(CGF, OperatorDelete, FPT, DeleteArgs);
    }
  };
}
/// Enter a cleanup to call 'operator delete' if the initializer in a
/// new-expression throws.
///
/// Chooses between the simple cleanup (values dominate the cleanup point)
/// and the conditional variant (values must be saved and restored via
/// DominatingValue) depending on whether we are inside a conditional branch.
static void EnterNewDeleteCleanup(CodeGenFunction &CGF,
                                  const CXXNewExpr *E,
                                  llvm::Value *NewPtr,
                                  llvm::Value *AllocSize,
                                  const CallArgList &NewArgs) {
  // If we're not inside a conditional branch, then the cleanup will
  // dominate and we can do the easier (and more efficient) thing.
  if (!CGF.isInConditionalBranch()) {
    CallDeleteDuringNew *Cleanup = CGF.EHStack
      .pushCleanupWithExtra<CallDeleteDuringNew>(EHCleanup,
                                                 E->getNumPlacementArgs(),
                                                 E->getOperatorDelete(),
                                                 NewPtr, AllocSize);
    // NewArgs[0] is the allocation size; placement args follow it.
    for (unsigned I = 0, N = E->getNumPlacementArgs(); I != N; ++I)
      Cleanup->setPlacementArg(I, NewArgs[I+1].RV);

    return;
  }

  // Otherwise, we need to save all this stuff.
  DominatingValue<RValue>::saved_type SavedNewPtr =
    DominatingValue<RValue>::save(CGF, RValue::get(NewPtr));
  DominatingValue<RValue>::saved_type SavedAllocSize =
    DominatingValue<RValue>::save(CGF, RValue::get(AllocSize));

  CallDeleteDuringConditionalNew *Cleanup = CGF.EHStack
    .pushCleanupWithExtra<CallDeleteDuringConditionalNew>(EHCleanup,
                                                 E->getNumPlacementArgs(),
                                                 E->getOperatorDelete(),
                                                 SavedNewPtr,
                                                 SavedAllocSize);
  // NewArgs[0] is the allocation size; placement args follow it.
  for (unsigned I = 0, N = E->getNumPlacementArgs(); I != N; ++I)
    Cleanup->setPlacementArg(I,
                     DominatingValue<RValue>::save(CGF, NewArgs[I+1].RV));

  CGF.initFullExprCleanup();
}
/// Emit a new-expression.
///
/// Emits the allocation call (or reuses the pointer operand of a
/// reserved global placement new), optionally null-checks the result,
/// lets the ABI write an array cookie when one is needed, runs the
/// initializer under an 'operator delete' cleanup, and returns a value
/// of the expression's type.
llvm::Value *CodeGenFunction::EmitCXXNewExpr(const CXXNewExpr *E) {
  // The element type being allocated.
  QualType allocType = getContext().getBaseElementType(E->getAllocatedType());

  // 1. Build a call to the allocation function.
  FunctionDecl *allocator = E->getOperatorNew();
  const FunctionProtoType *allocatorType =
    allocator->getType()->castAs<FunctionProtoType>();

  CallArgList allocatorArgs;

  // The allocation size is the first argument.
  QualType sizeType = getContext().getSizeType();

  // If there is a brace-initializer, cannot allocate fewer elements than inits.
  unsigned minElements = 0;
  if (E->isArray() && E->hasInitializer()) {
    if (const InitListExpr *ILE = dyn_cast<InitListExpr>(E->getInitializer()))
      minElements = ILE->getNumInits();
  }

  // numElements / allocSizeWithoutCookie are only set for array news.
  llvm::Value *numElements = nullptr;
  llvm::Value *allocSizeWithoutCookie = nullptr;
  llvm::Value *allocSize =
    EmitCXXNewAllocSize(*this, E, minElements, numElements,
                        allocSizeWithoutCookie);

  allocatorArgs.add(RValue::get(allocSize), sizeType);

  // We start at 1 here because the first argument (the allocation size)
  // has already been emitted.
  EmitCallArgs(allocatorArgs, allocatorType, E->placement_arg_begin(),
               E->placement_arg_end(), /* CalleeDecl */ nullptr,
               /*ParamsToSkip*/ 1);

  // Emit the allocation call.  If the allocator is a global placement
  // operator, just "inline" it directly: its result is its pointer operand.
  RValue RV;
  if (allocator->isReservedGlobalPlacementOperator()) {
    assert(allocatorArgs.size() == 2);
    RV = allocatorArgs[1].RV;
    // TODO: kill any unnecessary computations done for the size
    // argument.
  } else {
    RV = EmitNewDeleteCall(*this, allocator, allocatorType, allocatorArgs);
  }

  // Emit a null check on the allocation result if the allocation
  // function is allowed to return null (because it has a non-throwing
  // exception spec or is the reserved placement new) and we have an
  // interesting initializer.
  bool nullCheck = E->shouldNullCheckAllocation(getContext()) &&
    (!allocType.isPODType(getContext()) || E->hasInitializer());

  llvm::BasicBlock *nullCheckBB = nullptr;
  llvm::BasicBlock *contBB = nullptr;

  llvm::Value *allocation = RV.getScalarVal();
  unsigned AS = allocation->getType()->getPointerAddressSpace();

  // The null-check means that the initializer is conditionally
  // evaluated.
  ConditionalEvaluation conditional(*this);

  if (nullCheck) {
    conditional.begin(*this);

    nullCheckBB = Builder.GetInsertBlock();
    llvm::BasicBlock *notNullBB = createBasicBlock("new.notnull");
    contBB = createBasicBlock("new.cont");

    // A null allocation skips straight to the continuation block.
    llvm::Value *isNull = Builder.CreateIsNull(allocation, "new.isnull");
    Builder.CreateCondBr(isNull, contBB, notNullBB);
    EmitBlock(notNullBB);
  }

  // If there's an operator delete, enter a cleanup to call it if an
  // exception is thrown.
  EHScopeStack::stable_iterator operatorDeleteCleanup;
  llvm::Instruction *cleanupDominator = nullptr;
  if (E->getOperatorDelete() &&
      !E->getOperatorDelete()->isReservedGlobalPlacementOperator()) {
    EnterNewDeleteCleanup(*this, E, allocation, allocSize, allocatorArgs);
    operatorDeleteCleanup = EHStack.stable_begin();
    // Placeholder marking the point that dominates the cleanup; erased
    // below once the cleanup has been deactivated.
    cleanupDominator = Builder.CreateUnreachable();
  }

  assert((allocSize == allocSizeWithoutCookie) ==
         CalculateCookiePadding(*this, E).isZero());
  if (allocSize != allocSizeWithoutCookie) {
    assert(E->isArray());
    // Write the array cookie and advance past it to the element storage.
    allocation = CGM.getCXXABI().InitializeArrayCookie(*this, allocation,
                                                       numElements,
                                                       E, allocType);
  }

  llvm::Type *elementTy = ConvertTypeForMem(allocType);
  llvm::Type *elementPtrTy = elementTy->getPointerTo(AS);
  llvm::Value *result = Builder.CreateBitCast(allocation, elementPtrTy);

  EmitNewInitializer(*this, E, allocType, elementTy, result, numElements,
                     allocSizeWithoutCookie);
  if (E->isArray()) {
    // NewPtr is a pointer to the base element type.  If we're
    // allocating an array of arrays, we'll need to cast back to the
    // array pointer type.
    llvm::Type *resultType = ConvertTypeForMem(E->getType());
    if (result->getType() != resultType)
      result = Builder.CreateBitCast(result, resultType);
  }

  // Deactivate the 'operator delete' cleanup if we finished
  // initialization.
  if (operatorDeleteCleanup.isValid()) {
    DeactivateCleanupBlock(operatorDeleteCleanup, cleanupDominator);
    cleanupDominator->eraseFromParent();
  }

  if (nullCheck) {
    conditional.end(*this);

    llvm::BasicBlock *notNullBB = Builder.GetInsertBlock();
    EmitBlock(contBB);

    // Merge the two paths: the null path contributes a null pointer of
    // the result type.
    llvm::PHINode *PHI = Builder.CreatePHI(result->getType(), 2);
    PHI->addIncoming(result, notNullBB);
    PHI->addIncoming(llvm::Constant::getNullValue(result->getType()),
                     nullCheckBB);

    result = PHI;
  }

  return result;
}
/// Emit a direct call to the given 'operator delete'.
///
/// \param DeleteFD the deallocation function; must be an operator delete.
/// \param Ptr the pointer being freed; bitcast to the function's first
///        parameter type before the call.
/// \param DeleteTy the type of the object being deleted, used to form
///        the constant size argument for a two-parameter (sized) delete.
void CodeGenFunction::EmitDeleteCall(const FunctionDecl *DeleteFD,
                                     llvm::Value *Ptr,
                                     QualType DeleteTy) {
  assert(DeleteFD->getOverloadedOperator() == OO_Delete);

  const FunctionProtoType *DeleteFTy =
    DeleteFD->getType()->getAs<FunctionProtoType>();

  CallArgList DeleteArgs;

  // Check if we need to pass the size to the delete operator.
  llvm::Value *Size = nullptr;
  QualType SizeTy;
  if (DeleteFTy->getNumParams() == 2) {
    // The size is known statically: sizeof(DeleteTy) in chars.
    SizeTy = DeleteFTy->getParamType(1);
    CharUnits DeleteTypeSize = getContext().getTypeSizeInChars(DeleteTy);
    Size = llvm::ConstantInt::get(ConvertType(SizeTy),
                                  DeleteTypeSize.getQuantity());
  }

  QualType ArgTy = DeleteFTy->getParamType(0);
  llvm::Value *DeletePtr = Builder.CreateBitCast(Ptr, ConvertType(ArgTy));
  DeleteArgs.add(RValue::get(DeletePtr), ArgTy);

  if (Size)
    DeleteArgs.add(RValue::get(Size), SizeTy);

  // Emit the call to delete.
  EmitNewDeleteCall(*this, DeleteFD, DeleteFTy, DeleteArgs);
}
  1357. namespace {
  1358. /// Calls the given 'operator delete' on a single object.
  1359. struct CallObjectDelete : EHScopeStack::Cleanup {
  1360. llvm::Value *Ptr;
  1361. const FunctionDecl *OperatorDelete;
  1362. QualType ElementType;
  1363. CallObjectDelete(llvm::Value *Ptr,
  1364. const FunctionDecl *OperatorDelete,
  1365. QualType ElementType)
  1366. : Ptr(Ptr), OperatorDelete(OperatorDelete), ElementType(ElementType) {}
  1367. void Emit(CodeGenFunction &CGF, Flags flags) override {
  1368. CGF.EmitDeleteCall(OperatorDelete, Ptr, ElementType);
  1369. }
  1370. };
  1371. }
/// Push a cleanup, active on both the normal and the exceptional path,
/// that calls \p OperatorDelete on \p CompletePtr (a pointer to the
/// complete object) for an object of static type \p ElementType.
void
CodeGenFunction::pushCallObjectDeleteCleanup(const FunctionDecl *OperatorDelete,
                                             llvm::Value *CompletePtr,
                                             QualType ElementType) {
  EHStack.pushCleanup<CallObjectDelete>(NormalAndEHCleanup, CompletePtr,
                                        OperatorDelete, ElementType);
}
/// Emit the code for deleting a single object.
///
/// The 'operator delete' cleanup is pushed *before* the destructor is
/// run, so the storage is still freed if the destructor throws; on the
/// normal path the delete call is emitted when the cleanup is popped at
/// the end.
static void EmitObjectDelete(CodeGenFunction &CGF,
                             const CXXDeleteExpr *DE,
                             llvm::Value *Ptr,
                             QualType ElementType) {
  // Find the destructor for the type, if applicable.  If the
  // destructor is virtual, we'll just emit the vcall and return.
  const CXXDestructorDecl *Dtor = nullptr;
  if (const RecordType *RT = ElementType->getAs<RecordType>()) {
    CXXRecordDecl *RD = cast<CXXRecordDecl>(RT->getDecl());
    if (RD->hasDefinition() && !RD->hasTrivialDestructor()) {
      Dtor = RD->getDestructor();

      if (Dtor->isVirtual()) {
        // The ABI's virtual-delete emission covers both destruction and
        // deallocation, so nothing further is emitted here.
        CGF.CGM.getCXXABI().emitVirtualObjectDelete(CGF, DE, Ptr, ElementType,
                                                    Dtor);
        return;
      }
    }
  }

  // Make sure that we call delete even if the dtor throws.
  // This doesn't have to be a conditional cleanup because we're going
  // to pop it off in a second.
  const FunctionDecl *OperatorDelete = DE->getOperatorDelete();
  CGF.EHStack.pushCleanup<CallObjectDelete>(NormalAndEHCleanup,
                                            Ptr, OperatorDelete, ElementType);

  if (Dtor)
    CGF.EmitCXXDestructorCall(Dtor, Dtor_Complete,
                              /*ForVirtualBase=*/false,
                              /*Delegating=*/false,
                              Ptr);
  else if (CGF.getLangOpts().ObjCAutoRefCount &&
           ElementType->isObjCLifetimeType()) {
    // Under ARC, releasing an ObjC lifetime-qualified pointee takes the
    // place of a destructor call.
    switch (ElementType.getObjCLifetime()) {
    // These lifetimes need no work before deallocation.
    case Qualifiers::OCL_None:
    case Qualifiers::OCL_ExplicitNone:
    case Qualifiers::OCL_Autoreleasing:
      break;

    case Qualifiers::OCL_Strong: {
      // Load the pointer value.
      llvm::Value *PtrValue = CGF.Builder.CreateLoad(Ptr,
                                             ElementType.isVolatileQualified());

      CGF.EmitARCRelease(PtrValue, ARCPreciseLifetime);
      break;
    }

    case Qualifiers::OCL_Weak:
      CGF.EmitARCDestroyWeak(Ptr);
      break;
    }
  }

  // Pop the cleanup, emitting the 'operator delete' call on this path.
  CGF.PopCleanupBlock();
}
namespace {
  /// Calls the given 'operator delete' on an array of objects.
  struct CallArrayDelete : EHScopeStack::Cleanup {
    llvm::Value *Ptr;                  // the allocated pointer (before cookie)
    const FunctionDecl *OperatorDelete;
    llvm::Value *NumElements;          // may be null if no count is available
    QualType ElementType;
    CharUnits CookieSize;              // zero if the allocation had no cookie

    CallArrayDelete(llvm::Value *Ptr,
                    const FunctionDecl *OperatorDelete,
                    llvm::Value *NumElements,
                    QualType ElementType,
                    CharUnits CookieSize)
      : Ptr(Ptr), OperatorDelete(OperatorDelete), NumElements(NumElements),
        ElementType(ElementType), CookieSize(CookieSize) {}

    /// Emit the call to 'operator delete', passing the size
    /// (elemSize * numElements + cookie) if the function is sized.
    void Emit(CodeGenFunction &CGF, Flags flags) override {
      const FunctionProtoType *DeleteFTy =
        OperatorDelete->getType()->getAs<FunctionProtoType>();
      // Expect either delete(void*) or sized delete(void*, size_t).
      assert(DeleteFTy->getNumParams() == 1 || DeleteFTy->getNumParams() == 2);

      CallArgList Args;

      // Pass the pointer as the first argument.
      QualType VoidPtrTy = DeleteFTy->getParamType(0);
      llvm::Value *DeletePtr
        = CGF.Builder.CreateBitCast(Ptr, CGF.ConvertType(VoidPtrTy));
      Args.add(RValue::get(DeletePtr), VoidPtrTy);

      // Pass the original requested size as the second argument.
      if (DeleteFTy->getNumParams() == 2) {
        QualType size_t = DeleteFTy->getParamType(1);
        llvm::IntegerType *SizeTy
          = cast<llvm::IntegerType>(CGF.ConvertType(size_t));

        CharUnits ElementTypeSize =
          CGF.CGM.getContext().getTypeSizeInChars(ElementType);

        // The size of an element, multiplied by the number of elements.
        llvm::Value *Size
          = llvm::ConstantInt::get(SizeTy, ElementTypeSize.getQuantity());
        if (NumElements)
          Size = CGF.Builder.CreateMul(Size, NumElements);

        // Plus the size of the cookie if applicable.
        if (!CookieSize.isZero()) {
          llvm::Value *CookieSizeV
            = llvm::ConstantInt::get(SizeTy, CookieSize.getQuantity());
          Size = CGF.Builder.CreateAdd(Size, CookieSizeV);
        }

        Args.add(RValue::get(Size), size_t);
      }

      // Emit the call to delete.
      EmitNewDeleteCall(CGF, OperatorDelete, DeleteFTy, Args);
    }
  };
}
/// Emit the code for deleting an array of objects.
///
/// Reads the ABI array cookie to recover the element count and the
/// original allocation pointer, pushes the 'operator delete' cleanup
/// (so it runs even if an element destructor throws), destroys the
/// elements if needed, then pops the cleanup to emit the delete call.
static void EmitArrayDelete(CodeGenFunction &CGF,
                            const CXXDeleteExpr *E,
                            llvm::Value *deletedPtr,
                            QualType elementType) {
  llvm::Value *numElements = nullptr;
  llvm::Value *allocatedPtr = nullptr;
  CharUnits cookieSize;
  CGF.CGM.getCXXABI().ReadArrayCookie(CGF, deletedPtr, E, elementType,
                                      numElements, allocatedPtr, cookieSize);

  assert(allocatedPtr && "ReadArrayCookie didn't set allocated pointer");

  // Make sure that we call delete even if one of the dtors throws.
  const FunctionDecl *operatorDelete = E->getOperatorDelete();
  CGF.EHStack.pushCleanup<CallArrayDelete>(NormalAndEHCleanup,
                                           allocatedPtr, operatorDelete,
                                           numElements, elementType,
                                           cookieSize);

  // Destroy the elements.
  if (QualType::DestructionKind dtorKind = elementType.isDestructedType()) {
    assert(numElements && "no element count for a type with a destructor!");

    llvm::Value *arrayEnd =
      CGF.Builder.CreateInBoundsGEP(deletedPtr, numElements, "delete.end");

    // Note that it is legal to allocate a zero-length array, and we
    // can never fold the check away because the length should always
    // come from a cookie.
    CGF.emitArrayDestroy(deletedPtr, arrayEnd, elementType,
                         CGF.getDestroyer(dtorKind),
                         /*checkZeroLength*/ true,
                         CGF.needsEHCleanup(dtorKind));
  }

  // Pop the cleanup block.
  CGF.PopCleanupBlock();
}
/// Emit a delete-expression.
///
/// Null-checks the operand (deleting a null pointer does nothing), peels
/// constant-array layers so the pointer refers to the first non-array
/// element, and dispatches to the array or single-object deletion path.
void CodeGenFunction::EmitCXXDeleteExpr(const CXXDeleteExpr *E) {
  const Expr *Arg = E->getArgument();
  llvm::Value *Ptr = EmitScalarExpr(Arg);

  // Null check the pointer.
  llvm::BasicBlock *DeleteNotNull = createBasicBlock("delete.notnull");
  llvm::BasicBlock *DeleteEnd = createBasicBlock("delete.end");

  llvm::Value *IsNull = Builder.CreateIsNull(Ptr, "isnull");

  Builder.CreateCondBr(IsNull, DeleteEnd, DeleteNotNull);
  EmitBlock(DeleteNotNull);

  // We might be deleting a pointer to array.  If so, GEP down to the
  // first non-array element.
  // (this assumes that A(*)[3][7] is converted to [3 x [7 x %A]]*)
  QualType DeleteTy = Arg->getType()->getAs<PointerType>()->getPointeeType();
  if (DeleteTy->isConstantArrayType()) {
    llvm::Value *Zero = Builder.getInt32(0);
    SmallVector<llvm::Value*,8> GEP;

    GEP.push_back(Zero); // point at the outermost array

    // For each layer of array type we're pointing at:
    while (const ConstantArrayType *Arr
             = getContext().getAsConstantArrayType(DeleteTy)) {
      // 1. Unpeel the array type.
      DeleteTy = Arr->getElementType();

      // 2. GEP to the first element of the array.
      GEP.push_back(Zero);
    }

    Ptr = Builder.CreateInBoundsGEP(Ptr, GEP, "del.first");
  }

  // By now Ptr should point at a single element of DeleteTy.
  assert(ConvertTypeForMem(DeleteTy) ==
         cast<llvm::PointerType>(Ptr->getType())->getElementType());

  if (E->isArrayForm()) {
    EmitArrayDelete(*this, E, Ptr, DeleteTy);
  } else {
    EmitObjectDelete(*this, E, Ptr, DeleteTy);
  }

  EmitBlock(DeleteEnd);
}
  1549. static bool isGLValueFromPointerDeref(const Expr *E) {
  1550. E = E->IgnoreParens();
  1551. if (const auto *CE = dyn_cast<CastExpr>(E)) {
  1552. if (!CE->getSubExpr()->isGLValue())
  1553. return false;
  1554. return isGLValueFromPointerDeref(CE->getSubExpr());
  1555. }
  1556. if (const auto *OVE = dyn_cast<OpaqueValueExpr>(E))
  1557. return isGLValueFromPointerDeref(OVE->getSourceExpr());
  1558. if (const auto *BO = dyn_cast<BinaryOperator>(E))
  1559. if (BO->getOpcode() == BO_Comma)
  1560. return isGLValueFromPointerDeref(BO->getRHS());
  1561. if (const auto *ACO = dyn_cast<AbstractConditionalOperator>(E))
  1562. return isGLValueFromPointerDeref(ACO->getTrueExpr()) ||
  1563. isGLValueFromPointerDeref(ACO->getFalseExpr());
  1564. // C++11 [expr.sub]p1:
  1565. // The expression E1[E2] is identical (by definition) to *((E1)+(E2))
  1566. if (isa<ArraySubscriptExpr>(E))
  1567. return true;
  1568. if (const auto *UO = dyn_cast<UnaryOperator>(E))
  1569. if (UO->getOpcode() == UO_Deref)
  1570. return true;
  1571. return false;
  1572. }
/// Emit typeid for a potentially-evaluated glvalue expression by asking
/// the C++ ABI for the dynamic type_info of the object, first emitting
/// a std::bad_typeid check when the ABI requires null-checking the
/// operand.
static llvm::Value *EmitTypeidFromVTable(CodeGenFunction &CGF, const Expr *E,
                                         llvm::Type *StdTypeInfoPtrTy) {
  // Get the vtable pointer.
  llvm::Value *ThisPtr = CGF.EmitLValue(E).getAddress();

  // C++ [expr.typeid]p2:
  //   If the glvalue expression is obtained by applying the unary * operator to
  //   a pointer and the pointer is a null pointer value, the typeid expression
  //   throws the std::bad_typeid exception.
  //
  //   However, this paragraph's intent is not clear.  We choose a very generous
  //   interpretation which implores us to consider comma operators, conditional
  //   operators, parentheses and other such constructs.
  QualType SrcRecordTy = E->getType();
  if (CGF.CGM.getCXXABI().shouldTypeidBeNullChecked(
          isGLValueFromPointerDeref(E), SrcRecordTy)) {
    llvm::BasicBlock *BadTypeidBlock =
        CGF.createBasicBlock("typeid.bad_typeid");
    llvm::BasicBlock *EndBlock = CGF.createBasicBlock("typeid.end");

    // On a null operand, branch to the bad_typeid path.
    llvm::Value *IsNull = CGF.Builder.CreateIsNull(ThisPtr);
    CGF.Builder.CreateCondBr(IsNull, BadTypeidBlock, EndBlock);

    CGF.EmitBlock(BadTypeidBlock);
    CGF.CGM.getCXXABI().EmitBadTypeidCall(CGF);
    CGF.EmitBlock(EndBlock);
  }

  return CGF.CGM.getCXXABI().EmitTypeid(CGF, SrcRecordTy, ThisPtr,
                                        StdTypeInfoPtrTy);
}
/// Emit a typeid expression.
///
/// A type operand or an unevaluated expression operand yields the static
/// RTTI descriptor directly; a potentially-evaluated glvalue requires
/// the dynamic lookup in EmitTypeidFromVTable.  The result is a pointer
/// of the expression's converted type (std::type_info const*).
llvm::Value *CodeGenFunction::EmitCXXTypeidExpr(const CXXTypeidExpr *E) {
  llvm::Type *StdTypeInfoPtrTy =
    ConvertType(E->getType())->getPointerTo();

  if (E->isTypeOperand()) {
    llvm::Constant *TypeInfo =
        CGM.GetAddrOfRTTIDescriptor(E->getTypeOperand(getContext()));
    return Builder.CreateBitCast(TypeInfo, StdTypeInfoPtrTy);
  }

  // C++ [expr.typeid]p2:
  //   When typeid is applied to a glvalue expression whose type is a
  //   polymorphic class type, the result refers to a std::type_info object
  //   representing the type of the most derived object (that is, the dynamic
  //   type) to which the glvalue refers.
  if (E->isPotentiallyEvaluated())
    return EmitTypeidFromVTable(*this, E->getExprOperand(),
                                StdTypeInfoPtrTy);

  // Otherwise the operand's static type is all we need.
  QualType OperandTy = E->getExprOperand()->getType();
  return Builder.CreateBitCast(CGM.GetAddrOfRTTIDescriptor(OperandTy),
                               StdTypeInfoPtrTy);
}
  1620. static llvm::Value *EmitDynamicCastToNull(CodeGenFunction &CGF,
  1621. QualType DestTy) {
  1622. llvm::Type *DestLTy = CGF.ConvertType(DestTy);
  1623. if (DestTy->isPointerType())
  1624. return llvm::Constant::getNullValue(DestLTy);
  1625. /// C++ [expr.dynamic.cast]p9:
  1626. /// A failed cast to reference type throws std::bad_cast
  1627. if (!CGF.CGM.getCXXABI().EmitBadCastCall(CGF))
  1628. return nullptr;
  1629. CGF.EmitBlock(CGF.createBasicBlock("dynamic_cast.end"));
  1630. return llvm::UndefValue::get(DestLTy);
  1631. }
/// Emit a dynamic_cast expression.
///
/// Handles the always-null case, dynamic_cast to void* (most-derived
/// object), and the general pointer/reference cast via the C++ ABI,
/// wrapping the cast in a null check on the source value when the ABI
/// requires one and merging the two paths with a PHI.
llvm::Value *CodeGenFunction::EmitDynamicCast(llvm::Value *Value,
                                              const CXXDynamicCastExpr *DCE) {
  QualType DestTy = DCE->getTypeAsWritten();

  if (DCE->isAlwaysNull())
    if (llvm::Value *T = EmitDynamicCastToNull(*this, DestTy))
      return T;

  QualType SrcTy = DCE->getSubExpr()->getType();

  // C++ [expr.dynamic.cast]p7:
  //   If T is "pointer to cv void," then the result is a pointer to the most
  //   derived object pointed to by v.
  const PointerType *DestPTy = DestTy->getAs<PointerType>();

  bool isDynamicCastToVoid;
  QualType SrcRecordTy;
  QualType DestRecordTy;
  if (DestPTy) {
    // Pointer destination: the source must also be a pointer.
    isDynamicCastToVoid = DestPTy->getPointeeType()->isVoidType();
    SrcRecordTy = SrcTy->castAs<PointerType>()->getPointeeType();
    DestRecordTy = DestPTy->getPointeeType();
  } else {
    // Reference destination: the source is the referred-to record type.
    isDynamicCastToVoid = false;
    SrcRecordTy = SrcTy;
    DestRecordTy = DestTy->castAs<ReferenceType>()->getPointeeType();
  }

  assert(SrcRecordTy->isRecordType() && "source type must be a record type!");

  // C++ [expr.dynamic.cast]p4:
  //   If the value of v is a null pointer value in the pointer case, the result
  //   is the null pointer value of type T.
  bool ShouldNullCheckSrcValue =
      CGM.getCXXABI().shouldDynamicCastCallBeNullChecked(SrcTy->isPointerType(),
                                                         SrcRecordTy);

  llvm::BasicBlock *CastNull = nullptr;
  llvm::BasicBlock *CastNotNull = nullptr;
  llvm::BasicBlock *CastEnd = createBasicBlock("dynamic_cast.end");

  if (ShouldNullCheckSrcValue) {
    CastNull = createBasicBlock("dynamic_cast.null");
    CastNotNull = createBasicBlock("dynamic_cast.notnull");

    llvm::Value *IsNull = Builder.CreateIsNull(Value);
    Builder.CreateCondBr(IsNull, CastNull, CastNotNull);
    EmitBlock(CastNotNull);
  }

  if (isDynamicCastToVoid) {
    Value = CGM.getCXXABI().EmitDynamicCastToVoid(*this, Value, SrcRecordTy,
                                                  DestTy);
  } else {
    assert(DestRecordTy->isRecordType() &&
           "destination type must be a record type!");
    Value = CGM.getCXXABI().EmitDynamicCastCall(*this, Value, SrcRecordTy,
                                                DestTy, DestRecordTy, CastEnd);
  }

  if (ShouldNullCheckSrcValue) {
    EmitBranch(CastEnd);

    // The null path contributes a null result.
    EmitBlock(CastNull);
    EmitBranch(CastEnd);
  }

  EmitBlock(CastEnd);

  if (ShouldNullCheckSrcValue) {
    // Merge the cast result with the null path's null pointer.
    llvm::PHINode *PHI = Builder.CreatePHI(Value->getType(), 2);
    PHI->addIncoming(Value, CastNotNull);
    PHI->addIncoming(llvm::Constant::getNullValue(Value->getType()), CastNull);

    Value = PHI;
  }

  return Value;
}
/// Emit a lambda expression by initializing the fields of its closure
/// object in the given aggregate slot, walking the capture initializers
/// in parallel with the closure class's fields.
void CodeGenFunction::EmitLambdaExpr(const LambdaExpr *E, AggValueSlot Slot) {
  RunCleanupsScope Scope(*this);

  LValue SlotLV =
      MakeAddrLValue(Slot.getAddr(), E->getType(), Slot.getAlignment());

  CXXRecordDecl::field_iterator CurField = E->getLambdaClass()->field_begin();
  for (LambdaExpr::capture_init_iterator i = E->capture_init_begin(),
                                         e = E->capture_init_end();
       i != e; ++i, ++CurField) {
    // Emit initialization
    LValue LV = EmitLValueForFieldInitialization(SlotLV, *CurField);
    if (CurField->hasCapturedVLAType()) {
      // A captured VLA type is initialized with the VLA's previously
      // computed size, looked up in VLASizeMap by its size expression.
      auto VAT = CurField->getCapturedVLAType();
      EmitStoreThroughLValue(RValue::get(VLASizeMap[VAT->getSizeExpr()]), LV);
    } else {
      // Array captures carry per-dimension index variables for the
      // element-wise copy.
      ArrayRef<VarDecl *> ArrayIndexes;
      if (CurField->getType()->isArrayType())
        ArrayIndexes = E->getCaptureInitIndexVars(i);
      EmitInitializerForField(*CurField, LV, *i, ArrayIndexes);
    }
  }
}