//===--- CGExpr.cpp - Emit LLVM Code from Expressions ---------------------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This contains code to emit Expr nodes as LLVM code.
//
//===----------------------------------------------------------------------===//

#include "CodeGenFunction.h"
#include "CGCXXABI.h"
#include "CGCall.h"
#include "CGDebugInfo.h"
#include "CGObjCRuntime.h"
#include "CGOpenMPRuntime.h"
#include "CGHLSLRuntime.h"         // HLSL Change
#include "dxc/HLSL/HLOperations.h" // HLSL Change
#include "dxc/DXIL/DxilUtil.h"     // HLSL Change
#include "dxc/DXIL/DxilResource.h" // HLSL Change
#include "CGRecordLayout.h"
#include "CodeGenModule.h"
#include "TargetInfo.h"
#include "clang/AST/ASTContext.h"
#include "clang/AST/Attr.h"
#include "clang/AST/DeclObjC.h"
#include "clang/Frontend/CodeGenOptions.h"
#include "llvm/ADT/Hashing.h"
#include "llvm/ADT/StringExtras.h"
#include "llvm/IR/DataLayout.h"
#include "llvm/IR/Intrinsics.h"
#include "llvm/IR/LLVMContext.h"
#include "llvm/IR/MDBuilder.h"
#include "llvm/Support/ConvertUTF.h"
#include "llvm/Support/MathExtras.h"

using namespace clang;
using namespace CodeGen;

//===--------------------------------------------------------------------===//
//                        Miscellaneous Helper Methods
//===--------------------------------------------------------------------===//

llvm::Value *CodeGenFunction::EmitCastToVoidPtr(llvm::Value *value) {
  unsigned addressSpace =
      cast<llvm::PointerType>(value->getType())->getAddressSpace();

  llvm::PointerType *destType = Int8PtrTy;
  if (addressSpace)
    destType = llvm::Type::getInt8PtrTy(getLLVMContext(), addressSpace);

  if (value->getType() == destType) return value;
  return Builder.CreateBitCast(value, destType);
}
/// CreateTempAlloca - This creates an alloca and inserts it into the entry
/// block.
llvm::AllocaInst *CodeGenFunction::CreateTempAlloca(llvm::Type *Ty,
                                                    const Twine &Name) {
  if (!Builder.isNamePreserving())
    return new llvm::AllocaInst(Ty, nullptr, "", AllocaInsertPt);
  return new llvm::AllocaInst(Ty, nullptr, Name, AllocaInsertPt);
}
void CodeGenFunction::InitTempAlloca(llvm::AllocaInst *Var,
                                     llvm::Value *Init) {
  auto *Store = new llvm::StoreInst(Init, Var);
  llvm::BasicBlock *Block = AllocaInsertPt->getParent();
  Block->getInstList().insertAfter(&*AllocaInsertPt, Store);
}

llvm::AllocaInst *CodeGenFunction::CreateIRTemp(QualType Ty,
                                                const Twine &Name) {
  llvm::AllocaInst *Alloc = CreateTempAlloca(ConvertType(Ty), Name);
  // FIXME: Should we prefer the preferred type alignment here?
  CharUnits Align = getContext().getTypeAlignInChars(Ty);
  Alloc->setAlignment(Align.getQuantity());
  return Alloc;
}

llvm::AllocaInst *CodeGenFunction::CreateMemTemp(QualType Ty,
                                                 const Twine &Name) {
  llvm::AllocaInst *Alloc = CreateTempAlloca(ConvertTypeForMem(Ty), Name);
  // FIXME: Should we prefer the preferred type alignment here?
  CharUnits Align = getContext().getTypeAlignInChars(Ty);
  Alloc->setAlignment(Align.getQuantity());
  return Alloc;
}

/// EvaluateExprAsBool - Perform the usual unary conversions on the specified
/// expression and compare the result against zero, returning an Int1Ty value.
llvm::Value *CodeGenFunction::EvaluateExprAsBool(const Expr *E) {
  PGO.setCurrentStmt(E);
  if (const MemberPointerType *MPT = E->getType()->getAs<MemberPointerType>()) {
    llvm::Value *MemPtr = EmitScalarExpr(E);
    return CGM.getCXXABI().EmitMemberPointerIsNotNull(*this, MemPtr, MPT);
  }

  QualType BoolTy = getContext().BoolTy;
  if (!E->getType()->isAnyComplexType())
    return EmitScalarConversion(EmitScalarExpr(E), E->getType(), BoolTy);

  return EmitComplexToScalarConversion(EmitComplexExpr(E), E->getType(), BoolTy);
}

/// EmitIgnoredExpr - Emit code to compute the specified expression,
/// ignoring the result.
void CodeGenFunction::EmitIgnoredExpr(const Expr *E) {
  if (E->isRValue())
    return (void) EmitAnyExpr(E, AggValueSlot::ignored(), true);

  // Just emit it as an l-value and drop the result.
  EmitLValue(E);
}

/// EmitAnyExpr - Emit code to compute the specified expression which
/// can have any type. The result is returned as an RValue struct.
/// If this is an aggregate expression, AggSlot indicates where the
/// result should be returned.
RValue CodeGenFunction::EmitAnyExpr(const Expr *E,
                                    AggValueSlot aggSlot,
                                    bool ignoreResult) {
  switch (getEvaluationKind(E->getType())) {
  case TEK_Scalar:
    return RValue::get(EmitScalarExpr(E, ignoreResult));
  case TEK_Complex:
    return RValue::getComplex(EmitComplexExpr(E, ignoreResult, ignoreResult));
  case TEK_Aggregate:
    if (!ignoreResult && aggSlot.isIgnored())
      aggSlot = CreateAggTemp(E->getType(), "agg-temp");
    EmitAggExpr(E, aggSlot);
    return aggSlot.asRValue();
  }
  llvm_unreachable("bad evaluation kind");
}
/// EmitAnyExprToTemp - Similarly to EmitAnyExpr(), however, the result will
/// always be accessible even if no aggregate location is provided.
RValue CodeGenFunction::EmitAnyExprToTemp(const Expr *E) {
  AggValueSlot AggSlot = AggValueSlot::ignored();

  if (hasAggregateEvaluationKind(E->getType()))
    AggSlot = CreateAggTemp(E->getType(), "agg.tmp");
  return EmitAnyExpr(E, AggSlot);
}
/// EmitAnyExprToMem - Evaluate an expression into a given memory
/// location.
void CodeGenFunction::EmitAnyExprToMem(const Expr *E,
                                       llvm::Value *Location,
                                       Qualifiers Quals,
                                       bool IsInit) {
  // FIXME: This function should take an LValue as an argument.
  switch (getEvaluationKind(E->getType())) {
  case TEK_Complex:
    EmitComplexExprIntoLValue(E,
                              MakeNaturalAlignAddrLValue(Location, E->getType()),
                              /*isInit*/ false);
    return;

  case TEK_Aggregate: {
    CharUnits Alignment = getContext().getTypeAlignInChars(E->getType());
    EmitAggExpr(E, AggValueSlot::forAddr(Location, Alignment, Quals,
                                         AggValueSlot::IsDestructed_t(IsInit),
                                         AggValueSlot::DoesNotNeedGCBarriers,
                                         AggValueSlot::IsAliased_t(!IsInit)));
    return;
  }

  case TEK_Scalar: {
    RValue RV = RValue::get(EmitScalarExpr(E, /*Ignore*/ false));
    LValue LV = MakeAddrLValue(Location, E->getType());
    EmitStoreThroughLValue(RV, LV);
    return;
  }
  }
  llvm_unreachable("bad evaluation kind");
}
static void
pushTemporaryCleanup(CodeGenFunction &CGF, const MaterializeTemporaryExpr *M,
                     const Expr *E, llvm::Value *ReferenceTemporary) {
  // Objective-C++ ARC:
  //   If we are binding a reference to a temporary that has ownership, we
  //   need to perform retain/release operations on the temporary.
  //
  // FIXME: This should be looking at E, not M.
  if (CGF.getLangOpts().ObjCAutoRefCount &&
      M->getType()->isObjCLifetimeType()) {
    QualType ObjCARCReferenceLifetimeType = M->getType();
    switch (Qualifiers::ObjCLifetime Lifetime =
                ObjCARCReferenceLifetimeType.getObjCLifetime()) {
    case Qualifiers::OCL_None:
    case Qualifiers::OCL_ExplicitNone:
      // Carry on to normal cleanup handling.
      break;

    case Qualifiers::OCL_Autoreleasing:
      // Nothing to do; cleaned up by an autorelease pool.
      return;

    case Qualifiers::OCL_Strong:
    case Qualifiers::OCL_Weak:
      switch (StorageDuration Duration = M->getStorageDuration()) {
      case SD_Static:
        // Note: we intentionally do not register a cleanup to release
        // the object on program termination.
        return;

      case SD_Thread:
        // FIXME: We should probably register a cleanup in this case.
        return;

      case SD_Automatic:
      case SD_FullExpression:
        CodeGenFunction::Destroyer *Destroy;
        CleanupKind CleanupKind;
        if (Lifetime == Qualifiers::OCL_Strong) {
          const ValueDecl *VD = M->getExtendingDecl();
          bool Precise =
              VD && isa<VarDecl>(VD) && VD->hasAttr<ObjCPreciseLifetimeAttr>();
          CleanupKind = CGF.getARCCleanupKind();
          Destroy = Precise ? &CodeGenFunction::destroyARCStrongPrecise
                            : &CodeGenFunction::destroyARCStrongImprecise;
        } else {
          // __weak objects always get EH cleanups; otherwise, exceptions
          // could cause really nasty crashes instead of mere leaks.
          CleanupKind = NormalAndEHCleanup;
          Destroy = &CodeGenFunction::destroyARCWeak;
        }
        if (Duration == SD_FullExpression)
          CGF.pushDestroy(CleanupKind, ReferenceTemporary,
                          ObjCARCReferenceLifetimeType, *Destroy,
                          CleanupKind & EHCleanup);
        else
          CGF.pushLifetimeExtendedDestroy(CleanupKind, ReferenceTemporary,
                                          ObjCARCReferenceLifetimeType,
                                          *Destroy, CleanupKind & EHCleanup);
        return;

      case SD_Dynamic:
        llvm_unreachable("temporary cannot have dynamic storage duration");
      }
      llvm_unreachable("unknown storage duration");
    }
  }

  CXXDestructorDecl *ReferenceTemporaryDtor = nullptr;
  if (const RecordType *RT =
          E->getType()->getBaseElementTypeUnsafe()->getAs<RecordType>()) {
    // Get the destructor for the reference temporary.
    auto *ClassDecl = cast<CXXRecordDecl>(RT->getDecl());
    if (!ClassDecl->hasTrivialDestructor())
      ReferenceTemporaryDtor = ClassDecl->getDestructor();
  }

  if (!ReferenceTemporaryDtor)
    return;

  // Call the destructor for the temporary.
  switch (M->getStorageDuration()) {
  case SD_Static:
  case SD_Thread: {
    llvm::Constant *CleanupFn;
    llvm::Constant *CleanupArg;
    if (E->getType()->isArrayType()) {
      CleanupFn = CodeGenFunction(CGF.CGM).generateDestroyHelper(
          cast<llvm::Constant>(ReferenceTemporary), E->getType(),
          CodeGenFunction::destroyCXXObject, CGF.getLangOpts().Exceptions,
          dyn_cast_or_null<VarDecl>(M->getExtendingDecl()));
      CleanupArg = llvm::Constant::getNullValue(CGF.Int8PtrTy);
    } else {
      CleanupFn = CGF.CGM.getAddrOfCXXStructor(ReferenceTemporaryDtor,
                                               StructorType::Complete);
      CleanupArg = cast<llvm::Constant>(ReferenceTemporary);
    }
    CGF.CGM.getCXXABI().registerGlobalDtor(
        CGF, *cast<VarDecl>(M->getExtendingDecl()), CleanupFn, CleanupArg);
    break;
  }

  case SD_FullExpression:
    CGF.pushDestroy(NormalAndEHCleanup, ReferenceTemporary, E->getType(),
                    CodeGenFunction::destroyCXXObject,
                    CGF.getLangOpts().Exceptions);
    break;

  case SD_Automatic:
    CGF.pushLifetimeExtendedDestroy(NormalAndEHCleanup,
                                    ReferenceTemporary, E->getType(),
                                    CodeGenFunction::destroyCXXObject,
                                    CGF.getLangOpts().Exceptions);
    break;

  case SD_Dynamic:
    llvm_unreachable("temporary cannot have dynamic storage duration");
  }
}
static llvm::Value *
createReferenceTemporary(CodeGenFunction &CGF,
                         const MaterializeTemporaryExpr *M, const Expr *Inner) {
  switch (M->getStorageDuration()) {
  case SD_FullExpression:
  case SD_Automatic: {
    // If we have a constant temporary array or record try to promote it into a
    // constant global under the same rules a normal constant would've been
    // promoted. This is easier on the optimizer and generally emits fewer
    // instructions.
    QualType Ty = Inner->getType();
    if (CGF.CGM.getCodeGenOpts().MergeAllConstants &&
        (Ty->isArrayType() || Ty->isRecordType()) &&
        CGF.CGM.isTypeConstant(Ty, true))
      if (llvm::Constant *Init = CGF.CGM.EmitConstantExpr(Inner, Ty, &CGF)) {
        auto *GV = new llvm::GlobalVariable(
            CGF.CGM.getModule(), Init->getType(), /*isConstant=*/true,
            llvm::GlobalValue::PrivateLinkage, Init, ".ref.tmp");
        GV->setAlignment(
            CGF.getContext().getTypeAlignInChars(Ty).getQuantity());
        // FIXME: Should we put the new global into a COMDAT?
        return GV;
      }
    return CGF.CreateMemTemp(Ty, "ref.tmp");
  }
  case SD_Thread:
  case SD_Static:
    return CGF.CGM.GetAddrOfGlobalTemporary(M, Inner);

  case SD_Dynamic:
    llvm_unreachable("temporary can't have dynamic storage duration");
  }
  llvm_unreachable("unknown storage duration");
}
LValue CodeGenFunction::
EmitMaterializeTemporaryExpr(const MaterializeTemporaryExpr *M) {
  const Expr *E = M->GetTemporaryExpr();

  // FIXME: ideally this would use EmitAnyExprToMem, however, we cannot do so
  // as that will cause the lifetime adjustment to be lost for ARC
  if (getLangOpts().ObjCAutoRefCount &&
      M->getType()->isObjCLifetimeType() &&
      M->getType().getObjCLifetime() != Qualifiers::OCL_None &&
      M->getType().getObjCLifetime() != Qualifiers::OCL_ExplicitNone) {
    llvm::Value *Object = createReferenceTemporary(*this, M, E);
    if (auto *Var = dyn_cast<llvm::GlobalVariable>(Object)) {
      Object = llvm::ConstantExpr::getBitCast(
          Var, ConvertTypeForMem(E->getType())->getPointerTo());
      // We should not have emitted the initializer for this temporary as a
      // constant.
      assert(!Var->hasInitializer());
      Var->setInitializer(CGM.EmitNullConstant(E->getType()));
    }
    LValue RefTempDst = MakeAddrLValue(Object, M->getType());

    switch (getEvaluationKind(E->getType())) {
    default: llvm_unreachable("expected scalar or aggregate expression");
    case TEK_Scalar:
      EmitScalarInit(E, M->getExtendingDecl(), RefTempDst, false);
      break;
    case TEK_Aggregate: {
      CharUnits Alignment = getContext().getTypeAlignInChars(E->getType());
      EmitAggExpr(E, AggValueSlot::forAddr(Object, Alignment,
                                           E->getType().getQualifiers(),
                                           AggValueSlot::IsDestructed,
                                           AggValueSlot::DoesNotNeedGCBarriers,
                                           AggValueSlot::IsNotAliased));
      break;
    }
    }

    pushTemporaryCleanup(*this, M, E, Object);
    return RefTempDst;
  }

  SmallVector<const Expr *, 2> CommaLHSs;
  SmallVector<SubobjectAdjustment, 2> Adjustments;
  E = E->skipRValueSubobjectAdjustments(CommaLHSs, Adjustments);

  for (const auto &Ignored : CommaLHSs)
    EmitIgnoredExpr(Ignored);

  if (const auto *opaque = dyn_cast<OpaqueValueExpr>(E)) {
    if (opaque->getType()->isRecordType()) {
      assert(Adjustments.empty());
      return EmitOpaqueValueLValue(opaque);
    }
  }

  // Create and initialize the reference temporary.
  llvm::Value *Object = createReferenceTemporary(*this, M, E);
  if (auto *Var = dyn_cast<llvm::GlobalVariable>(Object)) {
    Object = llvm::ConstantExpr::getBitCast(
        Var, ConvertTypeForMem(E->getType())->getPointerTo());
    // If the temporary is a global and has a constant initializer or is a
    // constant temporary that we promoted to a global, we may have already
    // initialized it.
    if (!Var->hasInitializer()) {
      Var->setInitializer(CGM.EmitNullConstant(E->getType()));
      EmitAnyExprToMem(E, Object, Qualifiers(), /*IsInit*/true);
    }
  } else {
    EmitAnyExprToMem(E, Object, Qualifiers(), /*IsInit*/true);
  }
  pushTemporaryCleanup(*this, M, E, Object);

  // Perform derived-to-base casts and/or field accesses, to get from the
  // temporary object we created (and, potentially, for which we extended
  // the lifetime) to the subobject we're binding the reference to.
  for (unsigned I = Adjustments.size(); I != 0; --I) {
    SubobjectAdjustment &Adjustment = Adjustments[I-1];
    switch (Adjustment.Kind) {
    case SubobjectAdjustment::DerivedToBaseAdjustment:
      Object =
          GetAddressOfBaseClass(Object, Adjustment.DerivedToBase.DerivedClass,
                                Adjustment.DerivedToBase.BasePath->path_begin(),
                                Adjustment.DerivedToBase.BasePath->path_end(),
                                /*NullCheckValue=*/ false, E->getExprLoc());
      break;

    case SubobjectAdjustment::FieldAdjustment: {
      LValue LV = MakeAddrLValue(Object, E->getType());
      LV = EmitLValueForField(LV, Adjustment.Field);
      assert(LV.isSimple() &&
             "materialized temporary field is not a simple lvalue");
      Object = LV.getAddress();
      break;
    }

    case SubobjectAdjustment::MemberPointerAdjustment: {
      llvm::Value *Ptr = EmitScalarExpr(Adjustment.Ptr.RHS);
      Object = CGM.getCXXABI().EmitMemberDataPointerAddress(
          *this, E, Object, Ptr, Adjustment.Ptr.MPT);
      break;
    }
    }
  }

  return MakeAddrLValue(Object, M->getType());
}
RValue
CodeGenFunction::EmitReferenceBindingToExpr(const Expr *E) {
  // Emit the expression as an lvalue.
  LValue LV = EmitLValue(E);
  assert(LV.isSimple());
  llvm::Value *Value = LV.getAddress();

  if (sanitizePerformTypeCheck() && !E->getType()->isFunctionType()) {
    // C++11 [dcl.ref]p5 (as amended by core issue 453):
    //   If a glvalue to which a reference is directly bound designates neither
    //   an existing object or function of an appropriate type nor a region of
    //   storage of suitable size and alignment to contain an object of the
    //   reference's type, the behavior is undefined.
    QualType Ty = E->getType();
    EmitTypeCheck(TCK_ReferenceBinding, E->getExprLoc(), Value, Ty);
  }

  return RValue::get(Value);
}

/// getAccessedFieldNo - Given an encoded value and a result number, return the
/// input field number being accessed.
unsigned CodeGenFunction::getAccessedFieldNo(unsigned Idx,
                                             const llvm::Constant *Elts) {
  return cast<llvm::ConstantInt>(Elts->getAggregateElement(Idx))
      ->getZExtValue();
}

/// Emit the hash_16_bytes function from include/llvm/ADT/Hashing.h.
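/// (This is the Murmur-inspired mixing step also used by CityHash's
/// Hash128to64: multiply by a large odd constant, then fold the high bits
/// back in with a 47-bit shift, twice.)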
static llvm::Value *emitHash16Bytes(CGBuilderTy &Builder, llvm::Value *Low,
                                    llvm::Value *High) {
  llvm::Value *KMul = Builder.getInt64(0x9ddfea08eb382d69ULL);
  llvm::Value *K47 = Builder.getInt64(47);
  llvm::Value *A0 = Builder.CreateMul(Builder.CreateXor(Low, High), KMul);
  llvm::Value *A1 = Builder.CreateXor(Builder.CreateLShr(A0, K47), A0);
  llvm::Value *B0 = Builder.CreateMul(Builder.CreateXor(High, A1), KMul);
  llvm::Value *B1 = Builder.CreateXor(Builder.CreateLShr(B0, K47), B0);
  return Builder.CreateMul(B1, KMul);
}

bool CodeGenFunction::sanitizePerformTypeCheck() const {
  return SanOpts.has(SanitizerKind::Null) |
         SanOpts.has(SanitizerKind::Alignment) |
         SanOpts.has(SanitizerKind::ObjectSize) |
         SanOpts.has(SanitizerKind::Vptr);
}
void CodeGenFunction::EmitTypeCheck(TypeCheckKind TCK, SourceLocation Loc,
                                    llvm::Value *Address, QualType Ty,
                                    CharUnits Alignment, bool SkipNullCheck) {
  if (!sanitizePerformTypeCheck())
    return;

  // Don't check pointers outside the default address space. The null check
  // isn't correct, the object-size check isn't supported by LLVM, and we can't
  // communicate the addresses to the runtime handler for the vptr check.
  if (Address->getType()->getPointerAddressSpace())
    return;

  SanitizerScope SanScope(this);

  SmallVector<std::pair<llvm::Value *, SanitizerMask>, 3> Checks;
  llvm::BasicBlock *Done = nullptr;

  bool AllowNullPointers = TCK == TCK_DowncastPointer || TCK == TCK_Upcast ||
                           TCK == TCK_UpcastToVirtualBase;
  if ((SanOpts.has(SanitizerKind::Null) || AllowNullPointers) &&
      !SkipNullCheck) {
    // The glvalue must not be an empty glvalue.
    llvm::Value *IsNonNull = Builder.CreateICmpNE(
        Address, llvm::Constant::getNullValue(Address->getType()));

    if (AllowNullPointers) {
      // When performing pointer casts, it's OK if the value is null.
      // Skip the remaining checks in that case.
      Done = createBasicBlock("null");
      llvm::BasicBlock *Rest = createBasicBlock("not.null");
      Builder.CreateCondBr(IsNonNull, Rest, Done);
      EmitBlock(Rest);
    } else {
      Checks.push_back(std::make_pair(IsNonNull, SanitizerKind::Null));
    }
  }

  if (SanOpts.has(SanitizerKind::ObjectSize) && !Ty->isIncompleteType()) {
    uint64_t Size = getContext().getTypeSizeInChars(Ty).getQuantity();

    // The glvalue must refer to a large enough storage region.
    // FIXME: If Address Sanitizer is enabled, insert dynamic instrumentation
    //        to check this.
    // FIXME: Get object address space
    llvm::Type *Tys[2] = { IntPtrTy, Int8PtrTy };
    llvm::Value *F = CGM.getIntrinsic(llvm::Intrinsic::objectsize, Tys);
    llvm::Value *Min = Builder.getFalse();
    llvm::Value *CastAddr = Builder.CreateBitCast(Address, Int8PtrTy);
    llvm::Value *LargeEnough =
        Builder.CreateICmpUGE(Builder.CreateCall(F, {CastAddr, Min}),
                              llvm::ConstantInt::get(IntPtrTy, Size));
    Checks.push_back(std::make_pair(LargeEnough, SanitizerKind::ObjectSize));
  }

  uint64_t AlignVal = 0;

  if (SanOpts.has(SanitizerKind::Alignment)) {
    AlignVal = Alignment.getQuantity();
    if (!Ty->isIncompleteType() && !AlignVal)
      AlignVal = getContext().getTypeAlignInChars(Ty).getQuantity();

    // The glvalue must be suitably aligned.
    if (AlignVal) {
      llvm::Value *Align =
          Builder.CreateAnd(Builder.CreatePtrToInt(Address, IntPtrTy),
                            llvm::ConstantInt::get(IntPtrTy, AlignVal - 1));
      llvm::Value *Aligned =
          Builder.CreateICmpEQ(Align, llvm::ConstantInt::get(IntPtrTy, 0));
      Checks.push_back(std::make_pair(Aligned, SanitizerKind::Alignment));
    }
  }

  if (Checks.size() > 0) {
    llvm::Constant *StaticData[] = {
      EmitCheckSourceLocation(Loc),
      EmitCheckTypeDescriptor(Ty),
      llvm::ConstantInt::get(SizeTy, AlignVal),
      llvm::ConstantInt::get(Int8Ty, TCK)
    };
    EmitCheck(Checks, "type_mismatch", StaticData, Address);
  }

  // If possible, check that the vptr indicates that there is a subobject of
  // type Ty at offset zero within this object.
  //
  // C++11 [basic.life]p5,6:
  //   [For storage which does not refer to an object within its lifetime]
  //   The program has undefined behavior if:
  //    -- the [pointer or glvalue] is used to access a non-static data member
  //       or call a non-static member function
  CXXRecordDecl *RD = Ty->getAsCXXRecordDecl();
  if (SanOpts.has(SanitizerKind::Vptr) &&
      (TCK == TCK_MemberAccess || TCK == TCK_MemberCall ||
       TCK == TCK_DowncastPointer || TCK == TCK_DowncastReference ||
       TCK == TCK_UpcastToVirtualBase) &&
      RD && RD->hasDefinition() && RD->isDynamicClass()) {
    // Compute a hash of the mangled name of the type.
    //
    // FIXME: This is not guaranteed to be deterministic! Move to a
    //        fingerprinting mechanism once LLVM provides one. For the time
    //        being the implementation happens to be deterministic.
    SmallString<64> MangledName;
    llvm::raw_svector_ostream Out(MangledName);
    CGM.getCXXABI().getMangleContext().mangleCXXRTTI(Ty.getUnqualifiedType(),
                                                     Out);

    // Blacklist based on the mangled type.
    if (!CGM.getContext().getSanitizerBlacklist().isBlacklistedType(
            Out.str())) {
      llvm::hash_code TypeHash = hash_value(Out.str());

      // Load the vptr, and compute hash_16_bytes(TypeHash, vptr).
      llvm::Value *Low = llvm::ConstantInt::get(Int64Ty, TypeHash);
      llvm::Type *VPtrTy = llvm::PointerType::get(IntPtrTy, 0);
      llvm::Value *VPtrAddr = Builder.CreateBitCast(Address, VPtrTy);
      llvm::Value *VPtrVal = Builder.CreateLoad(VPtrAddr);
      llvm::Value *High = Builder.CreateZExt(VPtrVal, Int64Ty);

      llvm::Value *Hash = emitHash16Bytes(Builder, Low, High);
      Hash = Builder.CreateTrunc(Hash, IntPtrTy);

      // Look the hash up in our cache.
      const int CacheSize = 128;
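      // (The cache is a direct-mapped table of IntPtrTy slots; the slot
      // index is Hash & (CacheSize - 1), computed below.)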
      llvm::Type *HashTable = llvm::ArrayType::get(IntPtrTy, CacheSize);
      llvm::Value *Cache = CGM.CreateRuntimeVariable(HashTable,
                                                     "__ubsan_vptr_type_cache");
      llvm::Value *Slot = Builder.CreateAnd(Hash,
                                            llvm::ConstantInt::get(IntPtrTy,
                                                                   CacheSize-1));
      llvm::Value *Indices[] = { Builder.getInt32(0), Slot };
      llvm::Value *CacheVal =
          Builder.CreateLoad(Builder.CreateInBoundsGEP(Cache, Indices));

      // If the hash isn't in the cache, call a runtime handler to perform the
      // hard work of checking whether the vptr is for an object of the right
      // type. This will either fill in the cache and return, or produce a
      // diagnostic.
      llvm::Value *EqualHash = Builder.CreateICmpEQ(CacheVal, Hash);
      llvm::Constant *StaticData[] = {
        EmitCheckSourceLocation(Loc),
        EmitCheckTypeDescriptor(Ty),
        CGM.GetAddrOfRTTIDescriptor(Ty.getUnqualifiedType()),
        llvm::ConstantInt::get(Int8Ty, TCK)
      };
      llvm::Value *DynamicData[] = { Address, Hash };
      EmitCheck(std::make_pair(EqualHash, SanitizerKind::Vptr),
                "dynamic_type_cache_miss", StaticData, DynamicData);
    }
  }

  if (Done) {
    Builder.CreateBr(Done);
    EmitBlock(Done);
  }
}
/// Determine whether this expression refers to a flexible array member in a
/// struct. We disable array bounds checks for such members.
static bool isFlexibleArrayMemberExpr(const Expr *E) {
  // For compatibility with existing code, we treat arrays of length 0 or
  // 1 as flexible array members.
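  // (e.g. the pre-C99 idiom 'struct S { int len; char buf[1]; };', where
  // 'buf' is deliberately indexed past its declared bound)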
  const ArrayType *AT = E->getType()->castAsArrayTypeUnsafe();
  if (const auto *CAT = dyn_cast<ConstantArrayType>(AT)) {
    if (CAT->getSize().ugt(1))
      return false;
  } else if (!isa<IncompleteArrayType>(AT))
    return false;

  E = E->IgnoreParens();

  // A flexible array member must be the last member in the class.
  if (const auto *ME = dyn_cast<MemberExpr>(E)) {
    // FIXME: If the base type of the member expr is not FD->getParent(),
    // this should not be treated as a flexible array member access.
    if (const auto *FD = dyn_cast<FieldDecl>(ME->getMemberDecl())) {
      RecordDecl::field_iterator FI(
          DeclContext::decl_iterator(const_cast<FieldDecl *>(FD)));
      return ++FI == FD->getParent()->field_end();
    }
  }

  return false;
}

/// If Base is known to point to the start of an array, return the length of
/// that array. Return 0 if the length cannot be determined.
static llvm::Value *getArrayIndexingBound(
    CodeGenFunction &CGF, const Expr *Base, QualType &IndexedType) {
  // For the vector indexing extension, the bound is the number of elements.
  if (const VectorType *VT = Base->getType()->getAs<VectorType>()) {
    IndexedType = Base->getType();
    return CGF.Builder.getInt32(VT->getNumElements());
  }

  Base = Base->IgnoreParens();

  if (const auto *CE = dyn_cast<CastExpr>(Base)) {
    if (CE->getCastKind() == CK_ArrayToPointerDecay &&
        !isFlexibleArrayMemberExpr(CE->getSubExpr())) {
      IndexedType = CE->getSubExpr()->getType();
      const ArrayType *AT = IndexedType->castAsArrayTypeUnsafe();
      if (const auto *CAT = dyn_cast<ConstantArrayType>(AT))
        return CGF.Builder.getInt(CAT->getSize());
      else if (const auto *VAT = dyn_cast<VariableArrayType>(AT))
        return CGF.getVLASize(VAT).first;
    }
  }

  return nullptr;
}
void CodeGenFunction::EmitBoundsCheck(const Expr *E, const Expr *Base,
                                      llvm::Value *Index, QualType IndexType,
                                      bool Accessed) {
  assert(SanOpts.has(SanitizerKind::ArrayBounds) &&
         "should not be called unless adding bounds checks");
  SanitizerScope SanScope(this);

  QualType IndexedType;
  llvm::Value *Bound = getArrayIndexingBound(*this, Base, IndexedType);
  if (!Bound)
    return;

  bool IndexSigned = IndexType->isSignedIntegerOrEnumerationType();
  llvm::Value *IndexVal = Builder.CreateIntCast(Index, SizeTy, IndexSigned);
  llvm::Value *BoundVal = Builder.CreateIntCast(Bound, SizeTy, false);

  llvm::Constant *StaticData[] = {
    EmitCheckSourceLocation(E->getExprLoc()),
    EmitCheckTypeDescriptor(IndexedType),
    EmitCheckTypeDescriptor(IndexType)
  };
  llvm::Value *Check = Accessed ? Builder.CreateICmpULT(IndexVal, BoundVal)
                                : Builder.CreateICmpULE(IndexVal, BoundVal);
  EmitCheck(std::make_pair(Check, SanitizerKind::ArrayBounds), "out_of_bounds",
            StaticData, Index);
}

CodeGenFunction::ComplexPairTy CodeGenFunction::
EmitComplexPrePostIncDec(const UnaryOperator *E, LValue LV,
                         bool isInc, bool isPre) {
  ComplexPairTy InVal = EmitLoadOfComplex(LV, E->getExprLoc());

  llvm::Value *NextVal;
  if (isa<llvm::IntegerType>(InVal.first->getType())) {
    uint64_t AmountVal = isInc ? 1 : -1;
    NextVal = llvm::ConstantInt::get(InVal.first->getType(), AmountVal, true);

    // Add the inc/dec to the real part.
    NextVal = Builder.CreateAdd(InVal.first, NextVal, isInc ? "inc" : "dec");
  } else {
    QualType ElemTy = E->getType()->getAs<ComplexType>()->getElementType();
    llvm::APFloat FVal(getContext().getFloatTypeSemantics(ElemTy), 1);
    if (!isInc)
      FVal.changeSign();
    NextVal = llvm::ConstantFP::get(getLLVMContext(), FVal);

    // Add the inc/dec to the real part.
    NextVal = Builder.CreateFAdd(InVal.first, NextVal, isInc ? "inc" : "dec");
  }

  ComplexPairTy IncVal(NextVal, InVal.second);

  // Store the updated result through the lvalue.
  EmitStoreOfComplex(IncVal, LV, /*init*/ false);

  // If this is a postinc, return the value read from memory, otherwise use the
  // updated value.
  return isPre ? IncVal : InVal;
}
//===----------------------------------------------------------------------===//
//                         LValue Expression Emission
//===----------------------------------------------------------------------===//

RValue CodeGenFunction::GetUndefRValue(QualType Ty) {
  if (Ty->isVoidType())
    return RValue::get(nullptr);

  switch (getEvaluationKind(Ty)) {
  case TEK_Complex: {
    llvm::Type *EltTy =
        ConvertType(Ty->castAs<ComplexType>()->getElementType());
    llvm::Value *U = llvm::UndefValue::get(EltTy);
    return RValue::getComplex(std::make_pair(U, U));
  }

  // If this is a use of an undefined aggregate type, the aggregate must have an
  // identifiable address. Just because the contents of the value are undefined
  // doesn't mean that the address can't be taken and compared.
  case TEK_Aggregate: {
    llvm::Value *DestPtr = CreateMemTemp(Ty, "undef.agg.tmp");
    return RValue::getAggregate(DestPtr);
  }

  case TEK_Scalar:
    return RValue::get(llvm::UndefValue::get(ConvertType(Ty)));
  }
  llvm_unreachable("bad evaluation kind");
}

RValue CodeGenFunction::EmitUnsupportedRValue(const Expr *E,
                                              const char *Name) {
  ErrorUnsupported(E, Name);
  return GetUndefRValue(E->getType());
}

LValue CodeGenFunction::EmitUnsupportedLValue(const Expr *E,
                                              const char *Name) {
  ErrorUnsupported(E, Name);
  llvm::Type *Ty = llvm::PointerType::getUnqual(ConvertType(E->getType()));
  return MakeAddrLValue(llvm::UndefValue::get(Ty), E->getType());
}

LValue CodeGenFunction::EmitCheckedLValue(const Expr *E, TypeCheckKind TCK) {
  LValue LV;
  if (SanOpts.has(SanitizerKind::ArrayBounds) && isa<ArraySubscriptExpr>(E))
    LV = EmitArraySubscriptExpr(cast<ArraySubscriptExpr>(E), /*Accessed*/true);
  else
    LV = EmitLValue(E);
  if (!isa<DeclRefExpr>(E) && !LV.isBitField() && LV.isSimple())
    EmitTypeCheck(TCK, E->getExprLoc(), LV.getAddress(),
                  E->getType(), LV.getAlignment());
  return LV;
}
/// EmitLValue - Emit code to compute a designator that specifies the location
/// of the expression.
///
/// This can return one of two things: a simple address or a bitfield reference.
/// In either case, the LLVM Value* in the LValue structure is guaranteed to be
/// an LLVM pointer type.
///
/// If this returns a bitfield reference, nothing about the pointee type of the
/// LLVM value is known: For example, it may not be a pointer to an integer.
///
/// If this returns a normal address, and if the lvalue's C type is fixed size,
/// this method guarantees that the returned pointer type will point to an LLVM
/// type of the same size of the lvalue's type. If the lvalue has a variable
/// length type, this is not possible.
///
LValue CodeGenFunction::EmitLValue(const Expr *E) {
  ApplyDebugLocation DL(*this, E);
  switch (E->getStmtClass()) {
  default: return EmitUnsupportedLValue(E, "l-value expression");

  case Expr::ObjCPropertyRefExprClass:
    llvm_unreachable("cannot emit a property reference directly");

  case Expr::ObjCSelectorExprClass:
    return EmitObjCSelectorLValue(cast<ObjCSelectorExpr>(E));
  case Expr::ObjCIsaExprClass:
    return EmitObjCIsaExpr(cast<ObjCIsaExpr>(E));
  case Expr::BinaryOperatorClass:
    return EmitBinaryOperatorLValue(cast<BinaryOperator>(E));
  case Expr::CompoundAssignOperatorClass: {
    QualType Ty = E->getType();
    if (const AtomicType *AT = Ty->getAs<AtomicType>())
      Ty = AT->getValueType();
    if (!Ty->isAnyComplexType())
      return EmitCompoundAssignmentLValue(cast<CompoundAssignOperator>(E));
    return EmitComplexCompoundAssignmentLValue(cast<CompoundAssignOperator>(E));
  }
  case Expr::CallExprClass:
  case Expr::CXXMemberCallExprClass:
  case Expr::CXXOperatorCallExprClass:
  case Expr::UserDefinedLiteralClass:
    return EmitCallExprLValue(cast<CallExpr>(E));
  case Expr::VAArgExprClass:
    return EmitVAArgExprLValue(cast<VAArgExpr>(E));
  case Expr::DeclRefExprClass:
    return EmitDeclRefLValue(cast<DeclRefExpr>(E));
  case Expr::ParenExprClass:
    return EmitLValue(cast<ParenExpr>(E)->getSubExpr());
  case Expr::GenericSelectionExprClass:
    return EmitLValue(cast<GenericSelectionExpr>(E)->getResultExpr());
  case Expr::PredefinedExprClass:
    return EmitPredefinedLValue(cast<PredefinedExpr>(E));
  case Expr::StringLiteralClass:
    return EmitStringLiteralLValue(cast<StringLiteral>(E));
  case Expr::ObjCEncodeExprClass:
    return EmitObjCEncodeExprLValue(cast<ObjCEncodeExpr>(E));
  case Expr::PseudoObjectExprClass:
    return EmitPseudoObjectLValue(cast<PseudoObjectExpr>(E));
  case Expr::InitListExprClass:
    return EmitInitListLValue(cast<InitListExpr>(E));

  case Expr::CXXTemporaryObjectExprClass:
  case Expr::CXXConstructExprClass:
    return EmitCXXConstructLValue(cast<CXXConstructExpr>(E));
  case Expr::CXXBindTemporaryExprClass:
    return EmitCXXBindTemporaryLValue(cast<CXXBindTemporaryExpr>(E));
  case Expr::CXXUuidofExprClass:
    return EmitCXXUuidofLValue(cast<CXXUuidofExpr>(E));
  case Expr::LambdaExprClass:
    return EmitLambdaLValue(cast<LambdaExpr>(E));

  case Expr::ExprWithCleanupsClass: {
    const auto *cleanups = cast<ExprWithCleanups>(E);
    enterFullExpression(cleanups);
    RunCleanupsScope Scope(*this);
    return EmitLValue(cleanups->getSubExpr());
  }

  case Expr::CXXDefaultArgExprClass:
    return EmitLValue(cast<CXXDefaultArgExpr>(E)->getExpr());
  case Expr::CXXDefaultInitExprClass: {
    CXXDefaultInitExprScope Scope(*this);
    return EmitLValue(cast<CXXDefaultInitExpr>(E)->getExpr());
  }
  case Expr::CXXTypeidExprClass:
    return EmitCXXTypeidLValue(cast<CXXTypeidExpr>(E));

  case Expr::ObjCMessageExprClass:
    return EmitObjCMessageExprLValue(cast<ObjCMessageExpr>(E));
  case Expr::ObjCIvarRefExprClass:
    return EmitObjCIvarRefLValue(cast<ObjCIvarRefExpr>(E));
  case Expr::StmtExprClass:
    return EmitStmtExprLValue(cast<StmtExpr>(E));
  case Expr::UnaryOperatorClass:
    return EmitUnaryOpLValue(cast<UnaryOperator>(E));
  case Expr::ArraySubscriptExprClass:
    return EmitArraySubscriptExpr(cast<ArraySubscriptExpr>(E));
  case Expr::ExtVectorElementExprClass:
    return EmitExtVectorElementExpr(cast<ExtVectorElementExpr>(E));
  // HLSL Change Starts
  case Expr::ExtMatrixElementExprClass:
    return EmitExtMatrixElementExpr(cast<ExtMatrixElementExpr>(E));
  case Expr::HLSLVectorElementExprClass:
    return EmitHLSLVectorElementExpr(cast<HLSLVectorElementExpr>(E));
  case Expr::CXXThisExprClass:
    return MakeAddrLValue(LoadCXXThis(), E->getType());
  // HLSL Change Ends
  case Expr::MemberExprClass:
    return EmitMemberExpr(cast<MemberExpr>(E));
  case Expr::CompoundLiteralExprClass:
    return EmitCompoundLiteralLValue(cast<CompoundLiteralExpr>(E));
  case Expr::ConditionalOperatorClass:
    return EmitConditionalOperatorLValue(cast<ConditionalOperator>(E));
  case Expr::BinaryConditionalOperatorClass:
    return EmitConditionalOperatorLValue(cast<BinaryConditionalOperator>(E));
  case Expr::ChooseExprClass:
    return EmitLValue(cast<ChooseExpr>(E)->getChosenSubExpr());
  case Expr::OpaqueValueExprClass:
    return EmitOpaqueValueLValue(cast<OpaqueValueExpr>(E));
  case Expr::SubstNonTypeTemplateParmExprClass:
    return EmitLValue(cast<SubstNonTypeTemplateParmExpr>(E)->getReplacement());
  case Expr::ImplicitCastExprClass:
  case Expr::CStyleCastExprClass:
  case Expr::CXXFunctionalCastExprClass:
  case Expr::CXXStaticCastExprClass:
  case Expr::CXXDynamicCastExprClass:
  case Expr::CXXReinterpretCastExprClass:
  case Expr::CXXConstCastExprClass:
  case Expr::ObjCBridgedCastExprClass:
    return EmitCastLValue(cast<CastExpr>(E));

  case Expr::MaterializeTemporaryExprClass:
    return EmitMaterializeTemporaryExpr(cast<MaterializeTemporaryExpr>(E));
  }
}
/// Given an object of the given canonical type, can we safely copy a
/// value out of it based on its initializer?
static bool isConstantEmittableObjectType(QualType type) {
  assert(type.isCanonical());
  assert(!type->isReferenceType());

  // Must be const-qualified but non-volatile.
  Qualifiers qs = type.getLocalQualifiers();
  if (!qs.hasConst() || qs.hasVolatile()) return false;

  // Otherwise, all object types satisfy this except C++ classes with
  // mutable subobjects or non-trivial copy/destroy behavior.
  if (const auto *RT = dyn_cast<RecordType>(type))
    if (const auto *RD = dyn_cast<CXXRecordDecl>(RT->getDecl()))
      if (RD->hasMutableFields() || !RD->isTrivial())
        return false;

  return true;
}

/// Can we constant-emit a load of a reference to a variable of the
/// given type? This is different from predicates like
/// Decl::isUsableInConstantExpressions because we do want it to apply
/// in situations that don't necessarily satisfy the language's rules
/// for this (e.g. C++'s ODR-use rules). For example, we want to able
/// to do this with const float variables even if those variables
/// aren't marked 'constexpr'.
enum ConstantEmissionKind {
  CEK_None,
  CEK_AsReferenceOnly,
  CEK_AsValueOrReference,
  CEK_AsValueOnly
};
static ConstantEmissionKind checkVarTypeForConstantEmission(QualType type) {
  type = type.getCanonicalType();
  if (const auto *ref = dyn_cast<ReferenceType>(type)) {
    if (isConstantEmittableObjectType(ref->getPointeeType()))
      return CEK_AsValueOrReference;
    return CEK_AsReferenceOnly;
  }
  if (isConstantEmittableObjectType(type))
    return CEK_AsValueOnly;
  return CEK_None;
}

/// Try to emit a reference to the given value without producing it as
/// an l-value. This is actually more than an optimization: we can't
/// produce an l-value for variables that we never actually captured
/// in a block or lambda, which means const int variables or constexpr
/// literals or similar.
CodeGenFunction::ConstantEmission
CodeGenFunction::tryEmitAsConstant(DeclRefExpr *refExpr) {
  ValueDecl *value = refExpr->getDecl();

  // The value needs to be an enum constant or a constant variable.
  ConstantEmissionKind CEK;
  if (isa<ParmVarDecl>(value)) {
    CEK = CEK_None;
  } else if (auto *var = dyn_cast<VarDecl>(value)) {
    CEK = checkVarTypeForConstantEmission(var->getType());
  } else if (isa<EnumConstantDecl>(value)) {
    CEK = CEK_AsValueOnly;
  } else {
    CEK = CEK_None;
  }
  if (CEK == CEK_None) return ConstantEmission();

  Expr::EvalResult result;
  bool resultIsReference;
  QualType resultType;

  // It's best to evaluate all the way as an r-value if that's permitted.
  if (CEK != CEK_AsReferenceOnly &&
      refExpr->EvaluateAsRValue(result, getContext())) {
    resultIsReference = false;
    resultType = refExpr->getType();

  // Otherwise, try to evaluate as an l-value.
  } else if (CEK != CEK_AsValueOnly &&
             refExpr->EvaluateAsLValue(result, getContext())) {
    resultIsReference = true;
    resultType = value->getType();

  // Failure.
  } else {
    return ConstantEmission();
  }

  // In any case, if the initializer has side-effects, abandon ship.
  if (result.HasSideEffects)
    return ConstantEmission();

  // Emit as a constant.
  llvm::Constant *C = CGM.EmitConstantValue(result.Val, resultType, this);

  // Make sure we emit a debug reference to the global variable.
  // This should probably fire even for
  if (isa<VarDecl>(value)) {
    if (!getContext().DeclMustBeEmitted(cast<VarDecl>(value)))
      EmitDeclRefExprDbgValue(refExpr, C);
  } else {
    assert(isa<EnumConstantDecl>(value));
    EmitDeclRefExprDbgValue(refExpr, C);
  }

  // If we emitted a reference constant, we need to dereference that.
  if (resultIsReference)
    return ConstantEmission::forReference(C);

  return ConstantEmission::forValue(C);
}

llvm::Value *CodeGenFunction::EmitLoadOfScalar(LValue lvalue,
                                               SourceLocation Loc) {
  return EmitLoadOfScalar(lvalue.getAddress(), lvalue.isVolatile(),
                          lvalue.getAlignment().getQuantity(),
                          lvalue.getType(), Loc, lvalue.getTBAAInfo(),
                          lvalue.getTBAABaseType(), lvalue.getTBAAOffset());
}

static bool hasBooleanRepresentation(QualType Ty) {
  if (Ty->isBooleanType())
    return true;

  if (const EnumType *ET = Ty->getAs<EnumType>())
    return ET->getDecl()->getIntegerType()->isBooleanType();

  if (const AtomicType *AT = Ty->getAs<AtomicType>())
    return hasBooleanRepresentation(AT->getValueType());

  return false;
}

// HLSL Change Begin.
static bool hasBooleanScalarOrVectorRepresentation(QualType Ty) {
  if (hlsl::IsHLSLVecType(Ty))
    return hasBooleanRepresentation(hlsl::GetElementTypeOrType(Ty));
  return hasBooleanRepresentation(Ty);
}
// HLSL Change End.

static bool getRangeForType(CodeGenFunction &CGF, QualType Ty,
                            llvm::APInt &Min, llvm::APInt &End,
                            bool StrictEnums) {
  const EnumType *ET = Ty->getAs<EnumType>();
  bool IsRegularCPlusPlusEnum = CGF.getLangOpts().CPlusPlus && StrictEnums &&
                                ET && !ET->getDecl()->isFixed();
  bool IsBool = hasBooleanRepresentation(Ty);
  if (!IsBool && !IsRegularCPlusPlusEnum)
    return false;

  if (IsBool) {
    Min = llvm::APInt(CGF.getContext().getTypeSize(Ty), 0);
    End = llvm::APInt(CGF.getContext().getTypeSize(Ty), 2);
  } else {
    const EnumDecl *ED = ET->getDecl();
    llvm::Type *LTy = CGF.ConvertTypeForMem(ED->getIntegerType());
    unsigned Bitwidth = LTy->getScalarSizeInBits();
    unsigned NumNegativeBits = ED->getNumNegativeBits();
    unsigned NumPositiveBits = ED->getNumPositiveBits();

    if (NumNegativeBits) {
      unsigned NumBits = std::max(NumNegativeBits, NumPositiveBits + 1);
      assert(NumBits <= Bitwidth);
      End = llvm::APInt(Bitwidth, 1) << (NumBits - 1);
      Min = -End;
    } else {
      assert(NumPositiveBits <= Bitwidth);
      End = llvm::APInt(Bitwidth, 1) << NumPositiveBits;
      Min = llvm::APInt(Bitwidth, 0);
    }
  }
  return true;
}
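
// For illustration: with StrictEnums, a load of 'bool' gets the range
// [0, 2), and an unscoped C++ enum whose enumerators need three positive
// bits and none negative gets [0, 8) in its integer type's bit width.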
llvm::MDNode *CodeGenFunction::getRangeForLoadFromType(QualType Ty) {
  llvm::APInt Min, End;
  if (!getRangeForType(*this, Ty, Min, End,
                       CGM.getCodeGenOpts().StrictEnums))
    return nullptr;

  llvm::MDBuilder MDHelper(getLLVMContext());
  return MDHelper.createRange(Min, End);
}

llvm::Value *CodeGenFunction::EmitLoadOfScalar(llvm::Value *Addr, bool Volatile,
                                               unsigned Alignment, QualType Ty,
                                               SourceLocation Loc,
                                               llvm::MDNode *TBAAInfo,
                                               QualType TBAABaseType,
                                               uint64_t TBAAOffset) {
  // For better performance, handle vector loads differently.
  if (Ty->isVectorType()) {
    llvm::Value *V;
    const llvm::Type *EltTy =
        cast<llvm::PointerType>(Addr->getType())->getElementType();

    const auto *VTy = cast<llvm::VectorType>(EltTy);

    // Handle vectors of size 3 like size 4 for better performance.
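    // For example, loading a <3 x float> at %p is emitted roughly as:
    //   %castToVec4 = bitcast <3 x float>* %p to <4 x float>*
    //   %loadVec4   = load <4 x float>, <4 x float>* %castToVec4
    //   %extractVec = shufflevector <4 x float> %loadVec4, <4 x float> undef,
    //                               <3 x i32> <i32 0, i32 1, i32 2>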
    if (VTy->getNumElements() == 3) {
      // Bitcast to vec4 type.
      llvm::VectorType *vec4Ty = llvm::VectorType::get(VTy->getElementType(),
                                                       4);
      llvm::PointerType *ptVec4Ty =
          llvm::PointerType::get(vec4Ty,
                                 (cast<llvm::PointerType>(
                                      Addr->getType()))->getAddressSpace());
      llvm::Value *Cast = Builder.CreateBitCast(Addr, ptVec4Ty,
                                                "castToVec4");
      // Now load value.
      llvm::Value *LoadVal = Builder.CreateLoad(Cast, Volatile, "loadVec4");

      // Shuffle vector to get vec3.
      llvm::Constant *Mask[] = {
        llvm::ConstantInt::get(llvm::Type::getInt32Ty(getLLVMContext()), 0),
        llvm::ConstantInt::get(llvm::Type::getInt32Ty(getLLVMContext()), 1),
        llvm::ConstantInt::get(llvm::Type::getInt32Ty(getLLVMContext()), 2)
      };

      llvm::Value *MaskV = llvm::ConstantVector::get(Mask);
      V = Builder.CreateShuffleVector(LoadVal,
                                      llvm::UndefValue::get(vec4Ty),
                                      MaskV, "extractVec");
      return EmitFromMemory(V, Ty);
    }
  }

  // Atomic operations have to be done on integral types.
  if (Ty->isAtomicType() || typeIsSuitableForInlineAtomic(Ty, Volatile)) {
    LValue lvalue = LValue::MakeAddr(Addr, Ty,
                                     CharUnits::fromQuantity(Alignment),
                                     getContext(), TBAAInfo);
    return EmitAtomicLoad(lvalue, Loc).getScalarVal();
  }

  // HLSL Change Begins
  if (hlsl::IsHLSLMatType(Ty)) {
    // Use matrix load to keep major info.
    return CGM.getHLSLRuntime().EmitHLSLMatrixLoad(*this, Addr, Ty);
  }
  // HLSL Change Ends

  llvm::LoadInst *Load = Builder.CreateLoad(Addr);
  if (Volatile)
    Load->setVolatile(true);
  if (Alignment)
    Load->setAlignment(Alignment);
  if (TBAAInfo) {
    llvm::MDNode *TBAAPath = CGM.getTBAAStructTagInfo(TBAABaseType, TBAAInfo,
                                                      TBAAOffset);
    if (TBAAPath)
      CGM.DecorateInstruction(Load, TBAAPath, false/*ConvertTypeToTag*/);
  }

  bool NeedsBoolCheck =
      SanOpts.has(SanitizerKind::Bool) && hasBooleanRepresentation(Ty);
  bool NeedsEnumCheck =
      SanOpts.has(SanitizerKind::Enum) && Ty->getAs<EnumType>();
  if (NeedsBoolCheck || NeedsEnumCheck) {
    SanitizerScope SanScope(this);
    llvm::APInt Min, End;
    if (getRangeForType(*this, Ty, Min, End, true)) {
      --End;
      llvm::Value *Check;
      if (!Min)
        Check = Builder.CreateICmpULE(
            Load, llvm::ConstantInt::get(getLLVMContext(), End));
      else {
        llvm::Value *Upper = Builder.CreateICmpSLE(
            Load, llvm::ConstantInt::get(getLLVMContext(), End));
        llvm::Value *Lower = Builder.CreateICmpSGE(
            Load, llvm::ConstantInt::get(getLLVMContext(), Min));
        Check = Builder.CreateAnd(Upper, Lower);
      }
      llvm::Constant *StaticArgs[] = {
        EmitCheckSourceLocation(Loc),
        EmitCheckTypeDescriptor(Ty)
      };
      SanitizerMask Kind =
          NeedsEnumCheck ? SanitizerKind::Enum : SanitizerKind::Bool;
      EmitCheck(std::make_pair(Check, Kind), "load_invalid_value", StaticArgs,
                EmitCheckValue(Load));
    }
  } else if (CGM.getCodeGenOpts().OptimizationLevel > 0)
    if (llvm::MDNode *RangeInfo = getRangeForLoadFromType(Ty))
      Load->setMetadata(llvm::LLVMContext::MD_range, RangeInfo);

  return EmitFromMemory(Load, Ty);
}

llvm::Value *CodeGenFunction::EmitToMemory(llvm::Value *Value, QualType Ty) {
  // HLSL Change Begin.
  // Bool scalars and vectors have a different representation in memory than
  // in registers.
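  // (For example, an HLSL bool is i1 in registers but i32 in memory, so
  // stores zero-extend and loads compare against zero.)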
  if (hasBooleanScalarOrVectorRepresentation(Ty)) {
    if (Value->getType()->getScalarType()->isIntegerTy(1))
      return Builder.CreateZExt(Value, ConvertTypeForMem(Ty), "frombool");
  }
  // HLSL Change End.

  return Value;
}

llvm::Value *CodeGenFunction::EmitFromMemory(llvm::Value *Value, QualType Ty) {
  // HLSL Change Begin.
  // Bool scalars and vectors have a different representation in memory than
  // in registers.
  if (hasBooleanScalarOrVectorRepresentation(Ty)) {
    // Use ne v, 0 to convert to i1 instead of trunc.
    return Builder.CreateICmpNE(
        Value, llvm::ConstantVector::getNullValue(Value->getType()), "tobool");
  }
  // HLSL Change End.

  return Value;
}

void CodeGenFunction::EmitStoreOfScalar(llvm::Value *Value, llvm::Value *Addr,
                                        bool Volatile, unsigned Alignment,
                                        QualType Ty, llvm::MDNode *TBAAInfo,
                                        bool isInit, QualType TBAABaseType,
                                        uint64_t TBAAOffset) {
  // Handle vectors differently to get better performance.
  if (Ty->isVectorType()) {
    llvm::Type *SrcTy = Value->getType();
    auto *VecTy = cast<llvm::VectorType>(SrcTy);
    // Handle vec3 special.
    if (VecTy->getNumElements() == 3) {
      llvm::LLVMContext &VMContext = getLLVMContext();

      // Our source is a vec3, do a shuffle vector to make it a vec4.
      SmallVector<llvm::Constant*, 4> Mask;
      Mask.push_back(llvm::ConstantInt::get(llvm::Type::getInt32Ty(VMContext),
                                            0));
      Mask.push_back(llvm::ConstantInt::get(llvm::Type::getInt32Ty(VMContext),
                                            1));
      Mask.push_back(llvm::ConstantInt::get(llvm::Type::getInt32Ty(VMContext),
                                            2));
      Mask.push_back(llvm::UndefValue::get(llvm::Type::getInt32Ty(VMContext)));

      llvm::Value *MaskV = llvm::ConstantVector::get(Mask);
      Value = Builder.CreateShuffleVector(Value,
                                          llvm::UndefValue::get(VecTy),
                                          MaskV, "extractVec");
      SrcTy = llvm::VectorType::get(VecTy->getElementType(), 4);
    }
    auto *DstPtr = cast<llvm::PointerType>(Addr->getType());
    if (DstPtr->getElementType() != SrcTy) {
      llvm::Type *MemTy =
          llvm::PointerType::get(SrcTy, DstPtr->getAddressSpace());
      Addr = Builder.CreateBitCast(Addr, MemTy, "storetmp");
    }
  }

  Value = EmitToMemory(Value, Ty);

  if (Ty->isAtomicType() ||
      (!isInit && typeIsSuitableForInlineAtomic(Ty, Volatile))) {
    EmitAtomicStore(RValue::get(Value),
                    LValue::MakeAddr(Addr, Ty,
                                     CharUnits::fromQuantity(Alignment),
                                     getContext(), TBAAInfo),
                    isInit);
    return;
  }

  // HLSL Change Begins
  if (hlsl::IsHLSLMatType(Ty)) {
    // Use matrix store to keep major info.
    CGM.getHLSLRuntime().EmitHLSLMatrixStore(*this, Value, Addr, Ty);
    return;
  }
  // HLSL Change Ends

  llvm::StoreInst *Store = Builder.CreateStore(Value, Addr, Volatile);
  if (Alignment)
    Store->setAlignment(Alignment);
  if (TBAAInfo) {
    llvm::MDNode *TBAAPath = CGM.getTBAAStructTagInfo(TBAABaseType, TBAAInfo,
                                                      TBAAOffset);
    if (TBAAPath)
      CGM.DecorateInstruction(Store, TBAAPath, false/*ConvertTypeToTag*/);
  }
}

void CodeGenFunction::EmitStoreOfScalar(llvm::Value *value, LValue lvalue,
                                        bool isInit) {
  EmitStoreOfScalar(value, lvalue.getAddress(), lvalue.isVolatile(),
                    lvalue.getAlignment().getQuantity(), lvalue.getType(),
                    lvalue.getTBAAInfo(), isInit, lvalue.getTBAABaseType(),
                    lvalue.getTBAAOffset());
}

// HLSL Change Begin - find immediate value for literal.
static llvm::Value *GetStoredValue(llvm::Value *Ptr) {
  llvm::Value *V = nullptr;
  for (llvm::User *U : Ptr->users()) {
    if (llvm::StoreInst *ST = dyn_cast<llvm::StoreInst>(U)) {
      if (V) {
        // More than one store; skip.
        V = nullptr;
        break;
      }
      V = ST->getValueOperand();
    }
  }
  return V;
}
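
// For illustration: an HLSL 'literal int' or 'literal float' temporary that
// is materialized with exactly one store can have that stored constant
// forwarded directly, so no load needs to be emitted for it.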

static bool IsLiteralType(QualType QT) {
  if (const BuiltinType *BTy = QT->getAs<BuiltinType>()) {
    if (BTy->getKind() == BuiltinType::LitFloat ||
        BTy->getKind() == BuiltinType::LitInt)
      return true;
  }
  return false;
}
// HLSL Change End.

/// EmitLoadOfLValue - Given an expression that represents a value lvalue, this
/// method emits the address of the lvalue, then loads the result as an rvalue,
/// returning the rvalue.
RValue CodeGenFunction::EmitLoadOfLValue(LValue LV, SourceLocation Loc) {
  if (LV.isObjCWeak()) {
    // load of a __weak object.
    llvm::Value *AddrWeakObj = LV.getAddress();
    return RValue::get(CGM.getObjCRuntime().EmitObjCWeakRead(*this,
                                                             AddrWeakObj));
  }
  if (LV.getQuals().getObjCLifetime() == Qualifiers::OCL_Weak) {
    llvm::Value *Object = EmitARCLoadWeakRetained(LV.getAddress());
    Object = EmitObjCConsumeObject(LV.getType(), Object);
    return RValue::get(Object);
  }

  if (LV.isSimple()) {
    assert(!LV.getType()->isFunctionType());

    // HLSL Change Begin - find immediate value for literal.
    if (IsLiteralType(LV.getType())) {
      // The value must be stored only once.
      // Scan all uses to find it.
      llvm::Value *Ptr = LV.getAddress();
      if (llvm::Value *V = GetStoredValue(Ptr)) {
        return RValue::get(V);
      }
    }
    if (hlsl::IsHLSLAggregateType(LV.getType())) {
      // We cannot load the value because we don't expect to ever have
      // user-defined struct or array-typed llvm registers, only pointers
      // to them. To preserve the snapshot semantics of LValue loads, we
      // copy the value to a temporary and return a pointer to it.
      llvm::Value *Alloca = CreateMemTemp(LV.getType(), "rval");
      auto CharSizeAlignPair = getContext().getTypeInfoInChars(LV.getType());
      Builder.CreateMemCpy(Alloca, LV.getAddress(),
          static_cast<uint64_t>(CharSizeAlignPair.first.getQuantity()),
          static_cast<unsigned>(CharSizeAlignPair.second.getQuantity()));
      return RValue::get(Alloca);
    }
    // HLSL Change End.

    // Everything needs a load.
    return RValue::get(EmitLoadOfScalar(LV, Loc));
  }

  if (LV.isVectorElt()) {
    // HLSL Change Begin - find immediate value for literal.
    if (IsLiteralType(LV.getType())) {
      // The value must be stored only once.
      // Scan all uses to find it.
      llvm::Value *Ptr = LV.getAddress();
      if (llvm::Value *V = GetStoredValue(Ptr)) {
        return RValue::get(Builder.CreateExtractElement(V,
                               LV.getVectorIdx(), "vecext"));
      }
    }
    // HLSL Change End.
    llvm::LoadInst *Load = Builder.CreateLoad(LV.getVectorAddr(),
                                              LV.isVolatileQualified());
    Load->setAlignment(LV.getAlignment().getQuantity());
    return RValue::get(Builder.CreateExtractElement(Load, LV.getVectorIdx(),
                                                    "vecext"));
  }

  // If this is a reference to a subset of the elements of a vector, either
  // shuffle the input or extract/insert them as appropriate.
  if (LV.isExtVectorElt())
    return EmitLoadOfExtVectorElementLValue(LV);

  // Global Register variables always invoke intrinsics
  if (LV.isGlobalReg())
    return EmitLoadOfGlobalRegLValue(LV);

  // HLSL Change Starts
  if (LV.isExtMatrixElt())
    return EmitLoadOfExtMatrixElementLValue(LV);
  // HLSL Change Ends

  assert(LV.isBitField() && "Unknown LValue type!");
  return EmitLoadOfBitfieldLValue(LV);
}

RValue CodeGenFunction::EmitLoadOfBitfieldLValue(LValue LV) {
  const CGBitFieldInfo &Info = LV.getBitFieldInfo();
  CharUnits Align = LV.getAlignment().alignmentAtOffset(Info.StorageOffset);

  // Get the output type.
  llvm::Type *ResLTy = ConvertType(LV.getType());

  llvm::Value *Ptr = LV.getBitFieldAddr();
  llvm::Value *Val = Builder.CreateAlignedLoad(Ptr, Align.getQuantity(),
                                               LV.isVolatileQualified(),
                                               "bf.load");
  if (Info.IsSigned) {
    assert(static_cast<unsigned>(Info.Offset + Info.Size) <= Info.StorageSize);
    unsigned HighBits = Info.StorageSize - Info.Offset - Info.Size;
    if (HighBits)
      Val = Builder.CreateShl(Val, HighBits, "bf.shl");
    if (Info.Offset + HighBits)
      Val = Builder.CreateAShr(Val, Info.Offset + HighBits, "bf.ashr");
  } else {
    if (Info.Offset)
      Val = Builder.CreateLShr(Val, Info.Offset, "bf.lshr");
    if (static_cast<unsigned>(Info.Offset) + Info.Size < Info.StorageSize)
      Val = Builder.CreateAnd(Val, llvm::APInt::getLowBitsSet(Info.StorageSize,
                                                              Info.Size),
                              "bf.clear");
  }
  Val = Builder.CreateIntCast(Val, ResLTy, Info.IsSigned, "bf.cast");

  return RValue::get(Val);
}

// If this is a reference to a subset of the elements of a vector, create an
// appropriate shufflevector.
RValue CodeGenFunction::EmitLoadOfExtVectorElementLValue(LValue LV) {
  llvm::LoadInst *Load = Builder.CreateLoad(LV.getExtVectorAddr(),
                                            LV.isVolatileQualified());
  Load->setAlignment(LV.getAlignment().getQuantity());
  llvm::Value *Vec = Load;
  Vec = EmitFromMemory(Vec, LV.getType()); // HLSL Change

  const llvm::Constant *Elts = LV.getExtVectorElts();

  // If the result of the expression is a non-vector type, we must be
  // extracting a single element. Just codegen as an extractelement.
  const VectorType *ExprVT = LV.getType()->getAs<VectorType>();
  // HLSL Change Starts
  if (ExprVT == nullptr && getContext().getLangOpts().HLSL)
    ExprVT =
        hlsl::ConvertHLSLVecMatTypeToExtVectorType(getContext(), LV.getType());
  // HLSL Change Ends

  // HLSL Change Begin - find immediate value for literal.
  QualType QT = LV.getType();
  if (ExprVT) {
    QT = ExprVT->getElementType();
  }
  if (IsLiteralType(QT)) {
    // The value must be stored only once.
    // Scan all uses to find it.
    llvm::Value *Ptr = LV.getExtVectorAddr();
    if (llvm::Value *V = GetStoredValue(Ptr)) {
      Vec = V;
    }
  }
  // HLSL Change End.

  if (!ExprVT) {
    unsigned InIdx = getAccessedFieldNo(0, Elts);
    llvm::Value *Elt = llvm::ConstantInt::get(SizeTy, InIdx);
    return RValue::get(Builder.CreateExtractElement(Vec, Elt));
  }

  // Always use shuffle vector to try to retain the original program structure
  unsigned NumResultElts = ExprVT->getNumElements();

  SmallVector<llvm::Constant*, 4> Mask;
  for (unsigned i = 0; i != NumResultElts; ++i)
    Mask.push_back(Builder.getInt32(getAccessedFieldNo(i, Elts)));

  llvm::Value *MaskV = llvm::ConstantVector::get(Mask);
  Vec = Builder.CreateShuffleVector(Vec, llvm::UndefValue::get(Vec->getType()),
                                    MaskV);
  return RValue::get(Vec);
}

/// @brief Generates lvalue for partial ext_vector access.
llvm::Value *CodeGenFunction::EmitExtVectorElementLValue(LValue LV) {
  llvm::Value *VectorAddress = LV.getExtVectorAddr();
  const VectorType *ExprVT = LV.getType()->getAs<VectorType>();
  QualType EQT = ExprVT->getElementType();
  llvm::Type *VectorElementTy = CGM.getTypes().ConvertType(EQT);
  llvm::Type *VectorElementPtrToTy = VectorElementTy->getPointerTo();

  llvm::Value *CastToPointerElement =
      Builder.CreateBitCast(VectorAddress,
                            VectorElementPtrToTy, "conv.ptr.element");

  const llvm::Constant *Elts = LV.getExtVectorElts();
  unsigned ix = getAccessedFieldNo(0, Elts);

  llvm::Value *VectorBasePtrPlusIx =
      Builder.CreateInBoundsGEP(CastToPointerElement,
                                llvm::ConstantInt::get(SizeTy, ix), "add.ptr");

  return VectorBasePtrPlusIx;
}

/// @brief Loads of global named registers are always calls to intrinsics.
RValue CodeGenFunction::EmitLoadOfGlobalRegLValue(LValue LV) {
  assert((LV.getType()->isIntegerType() || LV.getType()->isPointerType()) &&
         "Bad type for register variable");
  llvm::MDNode *RegName = cast<llvm::MDNode>(
      cast<llvm::MetadataAsValue>(LV.getGlobalReg())->getMetadata());

  // We accept integer and pointer types only
  llvm::Type *OrigTy = CGM.getTypes().ConvertType(LV.getType());
  llvm::Type *Ty = OrigTy;
  if (OrigTy->isPointerTy())
    Ty = CGM.getTypes().getDataLayout().getIntPtrType(OrigTy);
  llvm::Type *Types[] = { Ty };

  llvm::Value *F = CGM.getIntrinsic(llvm::Intrinsic::read_register, Types);
  llvm::Value *Call = Builder.CreateCall(
      F, llvm::MetadataAsValue::get(Ty->getContext(), RegName));
  if (OrigTy->isPointerTy())
    Call = Builder.CreateIntToPtr(Call, OrigTy);
  return RValue::get(Call);
}

// HLSL Change Starts
RValue CodeGenFunction::EmitLoadOfExtMatrixElementLValue(LValue LV) {
  // TODO: Matrix swizzle members - emit
  return RValue();
}
// HLSL Change Ends

/// EmitStoreThroughLValue - Store the specified rvalue into the specified
/// lvalue, where both are guaranteed to have the same type, and that type
/// is 'Ty'.
void CodeGenFunction::EmitStoreThroughLValue(RValue Src, LValue Dst,
                                             bool isInit) {
  if (!Dst.isSimple()) {
    if (Dst.isVectorElt()) {
      // Read/modify/write the vector, inserting the new element.
      llvm::LoadInst *Load = Builder.CreateLoad(Dst.getVectorAddr(),
                                                Dst.isVolatileQualified());
      Load->setAlignment(Dst.getAlignment().getQuantity());
      llvm::Value *Vec = Load;
      Vec = Builder.CreateInsertElement(Vec, Src.getScalarVal(),
                                        Dst.getVectorIdx(), "vecins");
      llvm::StoreInst *Store = Builder.CreateStore(Vec, Dst.getVectorAddr(),
                                                   Dst.isVolatileQualified());
      Store->setAlignment(Dst.getAlignment().getQuantity());
      return;
    }

    // If this is an update of extended vector elements, insert them as
    // appropriate.
    if (Dst.isExtVectorElt())
      return EmitStoreThroughExtVectorComponentLValue(Src, Dst);

    if (Dst.isGlobalReg())
      return EmitStoreThroughGlobalRegLValue(Src, Dst);

    assert(Dst.isBitField() && "Unknown LValue type");
    return EmitStoreThroughBitfieldLValue(Src, Dst);
  }

  // There's special magic for assigning into an ARC-qualified l-value.
  if (Qualifiers::ObjCLifetime Lifetime = Dst.getQuals().getObjCLifetime()) {
    switch (Lifetime) {
    case Qualifiers::OCL_None:
      llvm_unreachable("present but none");

    case Qualifiers::OCL_ExplicitNone:
      // nothing special
      break;

    case Qualifiers::OCL_Strong:
      EmitARCStoreStrong(Dst, Src.getScalarVal(), /*ignore*/ true);
      return;

    case Qualifiers::OCL_Weak:
      EmitARCStoreWeak(Dst.getAddress(), Src.getScalarVal(), /*ignore*/ true);
      return;

    case Qualifiers::OCL_Autoreleasing:
      Src = RValue::get(EmitObjCExtendObjectLifetime(Dst.getType(),
                                                     Src.getScalarVal()));
      // fall into the normal path
      break;
    }
  }

  if (Dst.isObjCWeak() && !Dst.isNonGC()) {
    // load of a __weak object.
    llvm::Value *LvalueDst = Dst.getAddress();
    llvm::Value *src = Src.getScalarVal();
    CGM.getObjCRuntime().EmitObjCWeakAssign(*this, src, LvalueDst);
    return;
  }

  if (Dst.isObjCStrong() && !Dst.isNonGC()) {
    // load of a __strong object.
    llvm::Value *LvalueDst = Dst.getAddress();
    llvm::Value *src = Src.getScalarVal();
    if (Dst.isObjCIvar()) {
      assert(Dst.getBaseIvarExp() && "BaseIvarExp is NULL");
      llvm::Type *ResultType = ConvertType(getContext().LongTy);
      llvm::Value *RHS = EmitScalarExpr(Dst.getBaseIvarExp());
      llvm::Value *dst = RHS;
      RHS = Builder.CreatePtrToInt(RHS, ResultType, "sub.ptr.rhs.cast");
      llvm::Value *LHS =
          Builder.CreatePtrToInt(LvalueDst, ResultType, "sub.ptr.lhs.cast");
      llvm::Value *BytesBetween = Builder.CreateSub(LHS, RHS, "ivar.offset");
      CGM.getObjCRuntime().EmitObjCIvarAssign(*this, src, dst,
                                              BytesBetween);
    } else if (Dst.isGlobalObjCRef()) {
      CGM.getObjCRuntime().EmitObjCGlobalAssign(*this, src, LvalueDst,
                                                Dst.isThreadLocalRef());
    }
    else
      CGM.getObjCRuntime().EmitObjCStrongCastAssign(*this, src, LvalueDst);
    return;
  }

  assert(Src.isScalar() && "Can't emit an agg store with this method");
  EmitStoreOfScalar(Src.getScalarVal(), Dst, isInit);
}

void CodeGenFunction::EmitStoreThroughBitfieldLValue(RValue Src, LValue Dst,
                                                     llvm::Value **Result) {
  const CGBitFieldInfo &Info = Dst.getBitFieldInfo();
  CharUnits Align = Dst.getAlignment().alignmentAtOffset(Info.StorageOffset);
  llvm::Type *ResLTy = ConvertTypeForMem(Dst.getType());
  llvm::Value *Ptr = Dst.getBitFieldAddr();

  // Get the source value, truncated to the width of the bit-field.
  llvm::Value *SrcVal = Src.getScalarVal();

  // Cast the source to the storage type and shift it into place.
  SrcVal = Builder.CreateIntCast(SrcVal,
                                 Ptr->getType()->getPointerElementType(),
                                 /*IsSigned=*/false);
  llvm::Value *MaskedVal = SrcVal;

  // See if there are other bits in the bitfield's storage we'll need to load
  // and mask together with source before storing.
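  // For example, writing V into a bit-field with Offset == 2 and Size == 5
  // inside a 32-bit unit computes, in effect:
  //   New = (Old & ~0x7C) | ((V & 0x1F) << 2)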
  if (Info.StorageSize != Info.Size) {
    assert(Info.StorageSize > Info.Size && "Invalid bitfield size.");
    llvm::Value *Val = Builder.CreateAlignedLoad(Ptr, Align.getQuantity(),
                                                 Dst.isVolatileQualified(),
                                                 "bf.load");

    // Mask the source value as needed.
    if (!hasBooleanRepresentation(Dst.getType()))
      SrcVal = Builder.CreateAnd(SrcVal,
                                 llvm::APInt::getLowBitsSet(Info.StorageSize,
                                                            Info.Size),
                                 "bf.value");
    MaskedVal = SrcVal;
    if (Info.Offset)
      SrcVal = Builder.CreateShl(SrcVal, Info.Offset, "bf.shl");

    // Mask out the original value.
    Val = Builder.CreateAnd(Val,
                            ~llvm::APInt::getBitsSet(Info.StorageSize,
                                                     Info.Offset,
                                                     Info.Offset + Info.Size),
                            "bf.clear");

    // Or together the unchanged values and the source value.
    SrcVal = Builder.CreateOr(Val, SrcVal, "bf.set");
  } else {
    assert(Info.Offset == 0);
  }

  // Write the new value back out.
  Builder.CreateAlignedStore(SrcVal, Ptr, Align.getQuantity(),
                             Dst.isVolatileQualified());

  // Return the new value of the bit-field, if requested.
  if (Result) {
    llvm::Value *ResultVal = MaskedVal;

    // Sign extend the value if needed.
    if (Info.IsSigned) {
      assert(Info.Size <= Info.StorageSize);
      unsigned HighBits = Info.StorageSize - Info.Size;
      if (HighBits) {
        ResultVal = Builder.CreateShl(ResultVal, HighBits, "bf.result.shl");
        ResultVal = Builder.CreateAShr(ResultVal, HighBits, "bf.result.ashr");
      }
    }

    ResultVal = Builder.CreateIntCast(ResultVal, ResLTy, Info.IsSigned,
                                      "bf.result.cast");
    *Result = EmitFromMemory(ResultVal, Dst.getType());
  }
}

// HLSL Change - begin
static bool IsHLSubscriptOfTypedBuffer(llvm::Value *V) {
  llvm::CallInst *CI = nullptr;
  llvm::Function *F = nullptr;
  if ((CI = dyn_cast<llvm::CallInst>(V)) &&
      (F = CI->getCalledFunction()) &&
      hlsl::GetHLOpcodeGroup(F) == hlsl::HLOpcodeGroup::HLSubscript) {
    for (llvm::Value *arg : CI->arg_operands()) {
      llvm::Type *Ty = arg->getType();
      if (Ty->isPointerTy()) {
        std::pair<bool, hlsl::DxilResourceProperties> Result =
            hlsl::dxilutil::GetHLSLResourceProperties(
                Ty->getPointerElementType());
        if (Result.first && Result.second.isUAV() &&
            // These are the types of buffers that are typed.
            (hlsl::DxilResource::IsAnyTexture(
                 Result.second.getResourceKind()) ||
             Result.second.getResourceKind() ==
                 hlsl::DXIL::ResourceKind::TypedBuffer)) {
          return true;
        }
      }
    }
  }
  return false;
}
// HLSL Change - end

void CodeGenFunction::EmitStoreThroughExtVectorComponentLValue(RValue Src,
                                                               LValue Dst) {
  // This access turns into a read/modify/write of the vector. Load the input
  // value now.
  llvm::LoadInst *Load = Builder.CreateLoad(Dst.getExtVectorAddr(),
                                            Dst.isVolatileQualified());
  Load->setAlignment(Dst.getAlignment().getQuantity());
  llvm::Value *Vec = Load;
  const llvm::Constant *Elts = Dst.getExtVectorElts();

  llvm::Value *SrcVal = Src.getScalarVal();

  // HLSL Change Starts
  SrcVal = EmitToMemory(SrcVal, Dst.getType());
  const VectorType *VTy = Dst.getType()->getAs<VectorType>();
  if (VTy == nullptr && getContext().getLangOpts().HLSL)
    VTy =
        hlsl::ConvertHLSLVecMatTypeToExtVectorType(getContext(), Dst.getType());

  llvm::Value *VecDstPtr = Dst.getExtVectorAddr();
  llvm::Value *Zero = Builder.getInt32(0);
  if (VTy) {
    llvm::Type *VecTy = VecDstPtr->getType()->getPointerElementType();
    unsigned NumSrcElts = VTy->getNumElements();
    if (VecTy->getVectorNumElements() == NumSrcElts) {
      // Full vector write, create one store.
      for (unsigned i = 0; i < VecTy->getVectorNumElements(); i++) {
        if (llvm::Constant *Elt = Elts->getAggregateElement(i)) {
          llvm::Value *SrcElt = Builder.CreateExtractElement(SrcVal, i);
          Vec = Builder.CreateInsertElement(Vec, SrcElt, Elt);
        }
      }
      Builder.CreateStore(Vec, VecDstPtr);
    } else {
      // If the vector pointer comes from subscripting a typed rw buffer
      // (Buffer<>, Texture*<>, etc.), insert the elements from the load.
      //
      // This is to avoid the final DXIL producing a load+store for each
      // component later down the line, as there's no clean way to associate
      // the geps+store with each other.
      if (IsHLSubscriptOfTypedBuffer(VecDstPtr)) {
        llvm::Value *vec = Load;
        for (unsigned i = 0; i < VecTy->getVectorNumElements(); i++) {
          if (llvm::Constant *Elt = Elts->getAggregateElement(i)) {
            llvm::Value *SrcElt = Builder.CreateExtractElement(SrcVal, i);
            vec = Builder.CreateInsertElement(vec, SrcElt, Elt);
          }
        }
        Builder.CreateStore(vec, VecDstPtr);
      }
      // Otherwise just do a gep + store for each component that we're
      // writing to.
      else {
        for (unsigned i = 0; i < VecTy->getVectorNumElements(); i++) {
          if (llvm::Constant *Elt = Elts->getAggregateElement(i)) {
            llvm::Value *EltGEP = Builder.CreateGEP(VecDstPtr, {Zero, Elt});
            llvm::Value *SrcElt = Builder.CreateExtractElement(SrcVal, i);
            Builder.CreateStore(SrcElt, EltGEP);
          }
        }
      }
    }
  } else {
    // If the Src is a scalar (not a vector) it must be updating one element.
    llvm::Value *EltGEP = Builder.CreateGEP(
        VecDstPtr, {Zero, Elts->getAggregateElement((unsigned)0)});
    Builder.CreateStore(SrcVal, EltGEP);
  }
  return;
  // HLSL Change Ends

  if (VTy) { // HLSL Change
    unsigned NumSrcElts = VTy->getNumElements();
    unsigned NumDstElts =
        cast<llvm::VectorType>(Vec->getType())->getNumElements();
    if (NumDstElts == NumSrcElts) {
      // Use a shuffle vector when the src and destination have the same
      // number of elements, and restore the vector mask since it is on the
      // side it will be stored.
      SmallVector<llvm::Constant*, 4> Mask(NumDstElts);
      for (unsigned i = 0; i != NumSrcElts; ++i)
        Mask[getAccessedFieldNo(i, Elts)] = Builder.getInt32(i);

      llvm::Value *MaskV = llvm::ConstantVector::get(Mask);
      Vec = Builder.CreateShuffleVector(SrcVal,
                                        llvm::UndefValue::get(Vec->getType()),
                                        MaskV);
    } else if (NumDstElts > NumSrcElts) {
      // Extend the source vector to the same length and then shuffle it
      // into the destination.
      // FIXME: since we're shuffling with undef, can we just use the indices
      // into that? This could be simpler.
      SmallVector<llvm::Constant*, 4> ExtMask;
      for (unsigned i = 0; i != NumSrcElts; ++i)
        ExtMask.push_back(Builder.getInt32(i));
      ExtMask.resize(NumDstElts, llvm::UndefValue::get(Int32Ty));
      llvm::Value *ExtMaskV = llvm::ConstantVector::get(ExtMask);
      llvm::Value *ExtSrcVal =
          Builder.CreateShuffleVector(SrcVal,
                                      llvm::UndefValue::get(SrcVal->getType()),
                                      ExtMaskV);
      // build identity
      SmallVector<llvm::Constant*, 4> Mask;
      for (unsigned i = 0; i != NumDstElts; ++i)
        Mask.push_back(Builder.getInt32(i));

      // When the vector size is odd and .odd or .hi is used, the last element
      // of the Elts constant array will be one past the size of the vector.
      // Ignore the last element here, if it is greater than the mask size.
      if (getAccessedFieldNo(NumSrcElts - 1, Elts) == Mask.size())
        NumSrcElts--;

      // modify when what gets shuffled in
      for (unsigned i = 0; i != NumSrcElts; ++i)
        Mask[getAccessedFieldNo(i, Elts)] = Builder.getInt32(i + NumDstElts);
      llvm::Value *MaskV = llvm::ConstantVector::get(Mask);
      Vec = Builder.CreateShuffleVector(Vec, ExtSrcVal, MaskV);
    } else {
      // We should never shorten the vector
      llvm_unreachable("unexpected shorten vector length");
    }
  } else {
    // If the Src is a scalar (not a vector) it must be updating one element.
    unsigned InIdx = getAccessedFieldNo(0, Elts);
    llvm::Value *Elt = llvm::ConstantInt::get(SizeTy, InIdx);
    Vec = Builder.CreateInsertElement(Vec, SrcVal, Elt);
  }

  llvm::StoreInst *Store = Builder.CreateStore(Vec, Dst.getExtVectorAddr(),
                                               Dst.isVolatileQualified());
  Store->setAlignment(Dst.getAlignment().getQuantity());
}

/// @brief Stores of global named registers are always calls to intrinsics.
void CodeGenFunction::EmitStoreThroughGlobalRegLValue(RValue Src, LValue Dst) {
  assert((Dst.getType()->isIntegerType() || Dst.getType()->isPointerType()) &&
         "Bad type for register variable");
  llvm::MDNode *RegName = cast<llvm::MDNode>(
      cast<llvm::MetadataAsValue>(Dst.getGlobalReg())->getMetadata());
  assert(RegName && "Register LValue is not metadata");

  // We accept integer and pointer types only
  llvm::Type *OrigTy = CGM.getTypes().ConvertType(Dst.getType());
  llvm::Type *Ty = OrigTy;
  if (OrigTy->isPointerTy())
    Ty = CGM.getTypes().getDataLayout().getIntPtrType(OrigTy);
  llvm::Type *Types[] = { Ty };

  llvm::Value *F = CGM.getIntrinsic(llvm::Intrinsic::write_register, Types);
  llvm::Value *Value = Src.getScalarVal();
  if (OrigTy->isPointerTy())
    Value = Builder.CreatePtrToInt(Value, Ty);
  Builder.CreateCall(
      F, {llvm::MetadataAsValue::get(Ty->getContext(), RegName), Value});
}

// setObjCGCLValueClass - sets class of the lvalue for the purpose of
// generating write-barrier APIs. It is currently a global, ivar,
// or neither.
static void setObjCGCLValueClass(const ASTContext &Ctx, const Expr *E,
                                 LValue &LV,
                                 bool IsMemberAccess=false) {
  if (Ctx.getLangOpts().getGC() == LangOptions::NonGC)
    return;

  if (isa<ObjCIvarRefExpr>(E)) {
    QualType ExpTy = E->getType();
    if (IsMemberAccess && ExpTy->isPointerType()) {
      // If ivar is a structure pointer, assigning to a field of
      // this struct follows gcc's behavior and makes it a non-ivar
      // write-barrier conservatively.
      ExpTy = ExpTy->getAs<PointerType>()->getPointeeType();
      if (ExpTy->isRecordType()) {
        LV.setObjCIvar(false);
        return;
      }
    }
    LV.setObjCIvar(true);
    auto *Exp = cast<ObjCIvarRefExpr>(const_cast<Expr *>(E));
    LV.setBaseIvarExp(Exp->getBase());
    LV.setObjCArray(E->getType()->isArrayType());
    return;
  }

  if (const auto *Exp = dyn_cast<DeclRefExpr>(E)) {
    if (const auto *VD = dyn_cast<VarDecl>(Exp->getDecl())) {
      if (VD->hasGlobalStorage()) {
        LV.setGlobalObjCRef(true);
        LV.setThreadLocalRef(VD->getTLSKind() != VarDecl::TLS_None);
      }
    }
    LV.setObjCArray(E->getType()->isArrayType());
    return;
  }

  if (const auto *Exp = dyn_cast<UnaryOperator>(E)) {
    setObjCGCLValueClass(Ctx, Exp->getSubExpr(), LV, IsMemberAccess);
    return;
  }

  if (const auto *Exp = dyn_cast<ParenExpr>(E)) {
    setObjCGCLValueClass(Ctx, Exp->getSubExpr(), LV, IsMemberAccess);
    if (LV.isObjCIvar()) {
      // If cast is to a structure pointer, follow gcc's behavior and make it
      // a non-ivar write-barrier.
      QualType ExpTy = E->getType();
      if (ExpTy->isPointerType())
        ExpTy = ExpTy->getAs<PointerType>()->getPointeeType();
      if (ExpTy->isRecordType())
        LV.setObjCIvar(false);
    }
    return;
  }

  if (const auto *Exp = dyn_cast<GenericSelectionExpr>(E)) {
    setObjCGCLValueClass(Ctx, Exp->getResultExpr(), LV);
    return;
  }

  if (const auto *Exp = dyn_cast<ImplicitCastExpr>(E)) {
    setObjCGCLValueClass(Ctx, Exp->getSubExpr(), LV, IsMemberAccess);
    return;
  }

  if (const auto *Exp = dyn_cast<CStyleCastExpr>(E)) {
    setObjCGCLValueClass(Ctx, Exp->getSubExpr(), LV, IsMemberAccess);
    return;
  }

  if (const auto *Exp = dyn_cast<ObjCBridgedCastExpr>(E)) {
    setObjCGCLValueClass(Ctx, Exp->getSubExpr(), LV, IsMemberAccess);
    return;
  }

  if (const auto *Exp = dyn_cast<ArraySubscriptExpr>(E)) {
    setObjCGCLValueClass(Ctx, Exp->getBase(), LV);
    if (LV.isObjCIvar() && !LV.isObjCArray())
      // Using array syntax to assign to what an ivar points to is not the
      // same as assigning to the ivar itself. {id *Names;} Names[i] = 0;
      LV.setObjCIvar(false);
    else if (LV.isGlobalObjCRef() && !LV.isObjCArray())
      // Using array syntax to assign to what a global points to is not the
      // same as assigning to the global itself. {id *G;} G[i] = 0;
      LV.setGlobalObjCRef(false);
    return;
  }

  if (const auto *Exp = dyn_cast<MemberExpr>(E)) {
    setObjCGCLValueClass(Ctx, Exp->getBase(), LV, true);
    // We don't know if member is an 'ivar', but this flag is looked at
    // only in the context of LV.isObjCIvar().
    LV.setObjCArray(E->getType()->isArrayType());
    return;
  }
}

static llvm::Value *
EmitBitCastOfLValueToProperType(CodeGenFunction &CGF,
                                llvm::Value *V, llvm::Type *IRType,
                                StringRef Name = StringRef()) {
  unsigned AS = cast<llvm::PointerType>(V->getType())->getAddressSpace();
  return CGF.Builder.CreateBitCast(V, IRType->getPointerTo(AS), Name);
}

static LValue EmitThreadPrivateVarDeclLValue(
    CodeGenFunction &CGF, const VarDecl *VD, QualType T, llvm::Value *V,
    llvm::Type *RealVarTy, CharUnits Alignment, SourceLocation Loc) {
  V = CGF.CGM.getOpenMPRuntime().getAddrOfThreadPrivate(CGF, VD, V, Loc);
  V = EmitBitCastOfLValueToProperType(CGF, V, RealVarTy);
  return CGF.MakeAddrLValue(V, T, Alignment);
}

static LValue EmitGlobalVarDeclLValue(CodeGenFunction &CGF,
                                      const Expr *E, const VarDecl *VD) {
  QualType T = E->getType();

  // If it's thread_local, emit a call to its wrapper function instead.
  if (VD->getTLSKind() == VarDecl::TLS_Dynamic &&
      CGF.CGM.getCXXABI().usesThreadWrapperFunction())
    return CGF.CGM.getCXXABI().EmitThreadLocalVarDeclLValue(CGF, VD, T);

  llvm::Value *V = CGF.CGM.GetAddrOfGlobalVar(VD);
  llvm::Type *RealVarTy = CGF.getTypes().ConvertTypeForMem(VD->getType());
  V = EmitBitCastOfLValueToProperType(CGF, V, RealVarTy);
  CharUnits Alignment = CGF.getContext().getDeclAlign(VD);
  LValue LV;
  // Emit reference to the private copy of the variable if it is an OpenMP
  // threadprivate variable.
  if (CGF.getLangOpts().OpenMP && VD->hasAttr<OMPThreadPrivateDeclAttr>())
    return EmitThreadPrivateVarDeclLValue(CGF, VD, T, V, RealVarTy, Alignment,
                                          E->getExprLoc());
  if (VD->getType()->isReferenceType()) {
    llvm::LoadInst *LI = CGF.Builder.CreateLoad(V);
    LI->setAlignment(Alignment.getQuantity());
    V = LI;
    LV = CGF.MakeNaturalAlignAddrLValue(V, T);
  } else {
    LV = CGF.MakeAddrLValue(V, T, Alignment);
  }
  setObjCGCLValueClass(CGF.getContext(), E, LV);
  return LV;
}

static LValue EmitFunctionDeclLValue(CodeGenFunction &CGF,
                                     const Expr *E, const FunctionDecl *FD) {
  llvm::Value *V = CGF.CGM.GetAddrOfFunction(FD);
  if (!FD->hasPrototype()) {
    if (const FunctionProtoType *Proto =
            FD->getType()->getAs<FunctionProtoType>()) {
      // Ugly case: for a K&R-style definition, the type of the definition
      // isn't the same as the type of a use. Correct for this with a
      // bitcast.
      QualType NoProtoType =
          CGF.getContext().getFunctionNoProtoType(Proto->getReturnType());
      NoProtoType = CGF.getContext().getPointerType(NoProtoType);
      V = CGF.Builder.CreateBitCast(V, CGF.ConvertType(NoProtoType));
    }
  }
  CharUnits Alignment = CGF.getContext().getDeclAlign(FD);
  return CGF.MakeAddrLValue(V, E->getType(), Alignment);
}

static LValue EmitCapturedFieldLValue(CodeGenFunction &CGF, const FieldDecl *FD,
                                      llvm::Value *ThisValue) {
  QualType TagType = CGF.getContext().getTagDeclType(FD->getParent());
  LValue LV = CGF.MakeNaturalAlignAddrLValue(ThisValue, TagType);
  return CGF.EmitLValueForField(LV, FD);
}

/// Named Registers are named metadata pointing to the register name
/// which will be read from/written to as an argument to the intrinsic
/// @llvm.read/write_register.
/// So far, only the name is being passed down, but other options such as
/// register type, allocation type or even optimization options could be
/// passed down via the metadata node.
static LValue EmitGlobalNamedRegister(const VarDecl *VD,
                                      CodeGenModule &CGM,
                                      CharUnits Alignment) {
  SmallString<64> Name("llvm.named.register.");
  AsmLabelAttr *Asm = VD->getAttr<AsmLabelAttr>();
  assert(Asm->getLabel().size() < 64 - Name.size() &&
         "Register name too big");
  Name.append(Asm->getLabel());
  llvm::NamedMDNode *M =
      CGM.getModule().getOrInsertNamedMetadata(Name);
  if (M->getNumOperands() == 0) {
    llvm::MDString *Str = llvm::MDString::get(CGM.getLLVMContext(),
                                              Asm->getLabel());
    llvm::Metadata *Ops[] = {Str};
    M->addOperand(llvm::MDNode::get(CGM.getLLVMContext(), Ops));
  }
  return LValue::MakeGlobalReg(
      llvm::MetadataAsValue::get(CGM.getLLVMContext(), M->getOperand(0)),
      VD->getType(), Alignment);
}

LValue CodeGenFunction::EmitDeclRefLValue(const DeclRefExpr *E) {
  const NamedDecl *ND = E->getDecl();
  CharUnits Alignment = getContext().getDeclAlign(ND);
  QualType T = E->getType();

  if (const auto *VD = dyn_cast<VarDecl>(ND)) {
    // Global Named registers access via intrinsics only
    if (VD->getStorageClass() == SC_Register &&
        VD->hasAttr<AsmLabelAttr>() && !VD->isLocalVarDecl())
      return EmitGlobalNamedRegister(VD, CGM, Alignment);

    // A DeclRefExpr for a reference initialized by a constant expression can
    // appear without being odr-used. Directly emit the constant initializer.
    const Expr *Init = VD->getAnyInitializer(VD);
    if (Init && !isa<ParmVarDecl>(VD) && VD->getType()->isReferenceType() &&
        VD->isUsableInConstantExpressions(getContext()) &&
        VD->checkInitIsICE()) {
      llvm::Constant *Val =
          CGM.EmitConstantValue(*VD->evaluateValue(), VD->getType(), this);
      assert(Val && "failed to emit reference constant expression");
      // FIXME: Eventually we will want to emit vector element references.
      return MakeAddrLValue(Val, T, Alignment);
    }

    // Check for captured variables.
    if (E->refersToEnclosingVariableOrCapture()) {
      if (auto *FD = LambdaCaptureFields.lookup(VD))
        return EmitCapturedFieldLValue(*this, FD, CXXABIThisValue);
      else if (CapturedStmtInfo) {
        if (auto *V = LocalDeclMap.lookup(VD))
          return MakeAddrLValue(V, T, Alignment);
        else
          return EmitCapturedFieldLValue(*this, CapturedStmtInfo->lookup(VD),
                                         CapturedStmtInfo->getContextValue());
      }
      assert(isa<BlockDecl>(CurCodeDecl));
      return MakeAddrLValue(GetAddrOfBlockDecl(VD, VD->hasAttr<BlocksAttr>()),
                            T, Alignment);
    }
  }

  // FIXME: We should be able to assert this for FunctionDecls as well!
  // FIXME: We should be able to assert this for all DeclRefExprs, not just
  // those with a valid source location.
  assert((ND->isUsed(false) || !isa<VarDecl>(ND) ||
          !E->getLocation().isValid()) &&
         "Should not use decl without marking it used!");

  if (ND->hasAttr<WeakRefAttr>()) {
    const auto *VD = cast<ValueDecl>(ND);
    llvm::Constant *Aliasee = CGM.GetWeakRefReference(VD);
    return MakeAddrLValue(Aliasee, T, Alignment);
  }

  if (const auto *VD = dyn_cast<VarDecl>(ND)) {
    // Check if this is a global variable.
    if (VD->hasLinkage() || VD->isStaticDataMember())
      return EmitGlobalVarDeclLValue(*this, E, VD);

    bool isBlockVariable = VD->hasAttr<BlocksAttr>();

    llvm::Value *V = LocalDeclMap.lookup(VD);
    if (!V && VD->isStaticLocal())
      V = CGM.getOrCreateStaticVarDecl(
          *VD, CGM.getLLVMLinkageVarDefinition(VD, /*isConstant=*/false));

    // Check if variable is threadprivate.
    if (V && getLangOpts().OpenMP && VD->hasAttr<OMPThreadPrivateDeclAttr>())
      return EmitThreadPrivateVarDeclLValue(
          *this, VD, T, V, getTypes().ConvertTypeForMem(VD->getType()),
          Alignment, E->getExprLoc());

    assert(V && "DeclRefExpr not entered in LocalDeclMap?");

    if (isBlockVariable)
      V = BuildBlockByrefAddress(V, VD);

    LValue LV;
    // HLSL Change Begins
    if (getLangOpts().HLSL) {
      // In HLSL, reference types are only used for out parameters.
      // No pointer-of-pointer temp alloca is created for them,
      // so always use V directly.
      LV = MakeAddrLValue(V, T, Alignment);
    } else
    // HLSL Change Ends
    if (VD->getType()->isReferenceType()) {
      llvm::LoadInst *LI = Builder.CreateLoad(V);
      LI->setAlignment(Alignment.getQuantity());
      V = LI;
      LV = MakeNaturalAlignAddrLValue(V, T);
    } else {
      LV = MakeAddrLValue(V, T, Alignment);
    }

    bool isLocalStorage = VD->hasLocalStorage();

    bool NonGCable = isLocalStorage &&
                     !VD->getType()->isReferenceType() &&
                     !isBlockVariable;
    if (NonGCable) {
      LV.getQuals().removeObjCGCAttr();
      LV.setNonGC(true);
    }

    bool isImpreciseLifetime =
        (isLocalStorage && !VD->hasAttr<ObjCPreciseLifetimeAttr>());
    if (isImpreciseLifetime)
      LV.setARCPreciseLifetime(ARCImpreciseLifetime);
    setObjCGCLValueClass(getContext(), E, LV);
    return LV;
  }

  if (const auto *FD = dyn_cast<FunctionDecl>(ND))
    return EmitFunctionDeclLValue(*this, E, FD);

  llvm_unreachable("Unhandled DeclRefExpr");
}

LValue CodeGenFunction::EmitUnaryOpLValue(const UnaryOperator *E) {
  // __extension__ doesn't affect lvalue-ness.
  if (E->getOpcode() == UO_Extension)
    return EmitLValue(E->getSubExpr());

  QualType ExprTy = getContext().getCanonicalType(E->getSubExpr()->getType());
  switch (E->getOpcode()) {
  default: llvm_unreachable("Unknown unary operator lvalue!");
  case UO_Deref: {
    QualType T = E->getSubExpr()->getType()->getPointeeType();
    assert(!T.isNull() && "CodeGenFunction::EmitUnaryOpLValue: Illegal type");

    LValue LV = MakeNaturalAlignAddrLValue(EmitScalarExpr(E->getSubExpr()), T);
    LV.getQuals().setAddressSpace(ExprTy.getAddressSpace());

    // We should not generate __weak write barrier on indirect reference
    // of a pointer to object; as in void foo (__weak id *param); *param = 0;
    // But, we continue to generate __strong write barrier on indirect write
    // into a pointer to object.
    if (getLangOpts().ObjC1 &&
        getLangOpts().getGC() != LangOptions::NonGC &&
        LV.isObjCWeak())
      LV.setNonGC(!E->isOBJCGCCandidate(getContext()));
    return LV;
  }
  case UO_Real:
  case UO_Imag: {
    LValue LV = EmitLValue(E->getSubExpr());
    assert(LV.isSimple() && "real/imag on non-ordinary l-value");
    llvm::Value *Addr = LV.getAddress();

    // __real is valid on scalars. This is a faster way of testing that.
    // __imag can only produce an rvalue on scalars.
    if (E->getOpcode() == UO_Real &&
        !cast<llvm::PointerType>(Addr->getType())
            ->getElementType()->isStructTy()) {
      assert(E->getSubExpr()->getType()->isArithmeticType());
      return LV;
    }

    assert(E->getSubExpr()->getType()->isAnyComplexType());

    unsigned Idx = E->getOpcode() == UO_Imag;
    return MakeAddrLValue(
        Builder.CreateStructGEP(nullptr, LV.getAddress(), Idx, "idx"), ExprTy);
  }
  case UO_PreInc:
  case UO_PreDec: {
    LValue LV = EmitLValue(E->getSubExpr());
    bool isInc = E->getOpcode() == UO_PreInc;

    if (E->getType()->isAnyComplexType())
      EmitComplexPrePostIncDec(E, LV, isInc, true/*isPre*/);
    else
      EmitScalarPrePostIncDec(E, LV, isInc, true/*isPre*/);
    return LV;
  }
  }
}

LValue CodeGenFunction::EmitStringLiteralLValue(const StringLiteral *E) {
  return MakeAddrLValue(CGM.GetAddrOfConstantStringFromLiteral(E),
                        E->getType());
}

LValue CodeGenFunction::EmitObjCEncodeExprLValue(const ObjCEncodeExpr *E) {
  return MakeAddrLValue(CGM.GetAddrOfConstantStringFromObjCEncode(E),
                        E->getType());
}

LValue CodeGenFunction::EmitPredefinedLValue(const PredefinedExpr *E) {
  auto SL = E->getFunctionName();
  assert(SL != nullptr && "No StringLiteral name in PredefinedExpr");
  StringRef FnName = CurFn->getName();
  if (FnName.startswith("\01"))
    FnName = FnName.substr(1);
  StringRef NameItems[] = {
      PredefinedExpr::getIdentTypeName(E->getIdentType()), FnName};
  std::string GVName = llvm::join(NameItems, NameItems + 2, ".");
  if (CurCodeDecl && isa<BlockDecl>(CurCodeDecl)) {
    auto C = CGM.GetAddrOfConstantCString(FnName, GVName.c_str(), 1);
    return MakeAddrLValue(C, E->getType());
  }
  auto C = CGM.GetAddrOfConstantStringFromLiteral(SL, GVName);
  return MakeAddrLValue(C, E->getType());
}

/// Emit a type description suitable for use by a runtime sanitizer library.
/// The format of a type descriptor is
///
/// \code
///   { i16 TypeKind, i16 TypeInfo }
/// \endcode
///
/// followed by an array of i8 containing the type name. TypeKind is 0 for an
/// integer, 1 for a floating point value, and -1 for anything else.
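///
/// For example, a 32-bit signed 'int' gets TypeKind == 0 and
/// TypeInfo == (Log2_32(32) << 1) | 1 == 11.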
llvm::Constant *CodeGenFunction::EmitCheckTypeDescriptor(QualType T) {
  // Only emit each type's descriptor once.
  if (llvm::Constant *C = CGM.getTypeDescriptorFromMap(T))
    return C;

  uint16_t TypeKind = -1;
  uint16_t TypeInfo = 0;

  if (T->isIntegerType()) {
    TypeKind = 0;
    TypeInfo = (llvm::Log2_32(getContext().getTypeSize(T)) << 1) |
               (T->isSignedIntegerType() ? 1 : 0);
  } else if (T->isFloatingType()) {
    TypeKind = 1;
    TypeInfo = getContext().getTypeSize(T);
  }

  // Format the type name as if for a diagnostic, including quotes and
  // optionally an 'aka'.
  SmallString<32> Buffer;
  CGM.getDiags().ConvertArgToString(DiagnosticsEngine::ak_qualtype,
                                    (intptr_t)T.getAsOpaquePtr(),
                                    StringRef(), StringRef(), None, Buffer,
                                    None);

  llvm::Constant *Components[] = {
    Builder.getInt16(TypeKind), Builder.getInt16(TypeInfo),
    llvm::ConstantDataArray::getString(getLLVMContext(), Buffer)
  };
  llvm::Constant *Descriptor = llvm::ConstantStruct::getAnon(Components);

  auto *GV = new llvm::GlobalVariable(
      CGM.getModule(), Descriptor->getType(),
      /*isConstant=*/true, llvm::GlobalVariable::PrivateLinkage, Descriptor);
  GV->setUnnamedAddr(true);
  CGM.getSanitizerMetadata()->disableSanitizerForGlobal(GV);

  // Remember the descriptor for this type.
  CGM.setTypeDescriptorInMap(T, GV);

  return GV;
}

llvm::Value *CodeGenFunction::EmitCheckValue(llvm::Value *V) {
  llvm::Type *TargetTy = IntPtrTy;

  // Floating-point types which fit into intptr_t are bitcast to integers
  // and then passed directly (after zero-extension, if necessary).
  if (V->getType()->isFloatingPointTy()) {
    unsigned Bits = V->getType()->getPrimitiveSizeInBits();
    if (Bits <= TargetTy->getIntegerBitWidth())
      V = Builder.CreateBitCast(V, llvm::Type::getIntNTy(getLLVMContext(),
                                                         Bits));
  }

  // Integers which fit in intptr_t are zero-extended and passed directly.
  if (V->getType()->isIntegerTy() &&
      V->getType()->getIntegerBitWidth() <= TargetTy->getIntegerBitWidth())
    return Builder.CreateZExt(V, TargetTy);

  // Pointers are passed directly, everything else is passed by address.
  if (!V->getType()->isPointerTy()) {
    llvm::Value *Ptr = CreateTempAlloca(V->getType());
    Builder.CreateStore(V, Ptr);
    V = Ptr;
  }
  return Builder.CreatePtrToInt(V, TargetTy);
}

/// \brief Emit a representation of a SourceLocation for passing to a handler
/// in a sanitizer runtime library. The format for this data is:
/// \code
///   struct SourceLocation {
///     const char *Filename;
///     int32_t Line, Column;
///   };
/// \endcode
/// For an invalid SourceLocation, the Filename pointer is null.
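///
/// For example (illustrative), a valid location at line 3, column 9 of a file
/// whose name was emitted into the ".src" global becomes roughly the constant
/// { i8* @.src, i32 3, i32 9 }.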
llvm::Constant *CodeGenFunction::EmitCheckSourceLocation(SourceLocation Loc) {
  llvm::Constant *Filename;
  int Line, Column;

  PresumedLoc PLoc = getContext().getSourceManager().getPresumedLoc(Loc);
  if (PLoc.isValid()) {
    auto FilenameGV = CGM.GetAddrOfConstantCString(PLoc.getFilename(), ".src");
    CGM.getSanitizerMetadata()->disableSanitizerForGlobal(FilenameGV);
    Filename = FilenameGV;
    Line = PLoc.getLine();
    Column = PLoc.getColumn();
  } else {
    Filename = llvm::Constant::getNullValue(Int8PtrTy);
    Line = Column = 0;
  }

  llvm::Constant *Data[] = {Filename, Builder.getInt32(Line),
                            Builder.getInt32(Column)};

  return llvm::ConstantStruct::getAnon(Data);
}

namespace {
/// \brief Specify under what conditions this check can be recovered.
enum class CheckRecoverableKind {
  /// Always terminate program execution if this check fails.
  Unrecoverable,
  /// The check supports recovery; the runtime has both fatal (noreturn) and
  /// non-fatal handlers for it.
  Recoverable,
  /// The runtime conditionally aborts, so recovery must always be supported.
  AlwaysRecoverable
};
}

static CheckRecoverableKind getRecoverableKind(SanitizerMask Kind) {
  assert(llvm::countPopulation(Kind) == 1);
  switch (Kind) {
  case SanitizerKind::Vptr:
    return CheckRecoverableKind::AlwaysRecoverable;
  case SanitizerKind::Return:
  case SanitizerKind::Unreachable:
    return CheckRecoverableKind::Unrecoverable;
  default:
    return CheckRecoverableKind::Recoverable;
  }
}

static void emitCheckHandlerCall(CodeGenFunction &CGF,
                                 llvm::FunctionType *FnType,
                                 ArrayRef<llvm::Value *> FnArgs,
                                 StringRef CheckName,
                                 CheckRecoverableKind RecoverKind, bool IsFatal,
                                 llvm::BasicBlock *ContBB) {
  assert(IsFatal || RecoverKind != CheckRecoverableKind::Unrecoverable);
  bool NeedsAbortSuffix =
      IsFatal && RecoverKind != CheckRecoverableKind::Unrecoverable;
  std::string FnName = ("__ubsan_handle_" + CheckName +
                        (NeedsAbortSuffix ? "_abort" : "")).str();
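  // For example (illustrative): CheckName "add_overflow" with IsFatal set on a
  // Recoverable check produces the handler name
  // "__ubsan_handle_add_overflow_abort".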
  bool MayReturn =
      !IsFatal || RecoverKind == CheckRecoverableKind::AlwaysRecoverable;

  llvm::AttrBuilder B;
  if (!MayReturn) {
    B.addAttribute(llvm::Attribute::NoReturn)
        .addAttribute(llvm::Attribute::NoUnwind);
  }
  B.addAttribute(llvm::Attribute::UWTable);

  llvm::Value *Fn = CGF.CGM.CreateRuntimeFunction(
      FnType, FnName,
      llvm::AttributeSet::get(CGF.getLLVMContext(),
                              llvm::AttributeSet::FunctionIndex, B));
  llvm::CallInst *HandlerCall = CGF.EmitNounwindRuntimeCall(Fn, FnArgs);
  if (!MayReturn) {
    HandlerCall->setDoesNotReturn();
    CGF.Builder.CreateUnreachable();
  } else {
    CGF.Builder.CreateBr(ContBB);
  }
}

void CodeGenFunction::EmitCheck(
    ArrayRef<std::pair<llvm::Value *, SanitizerMask>> Checked,
    StringRef CheckName, ArrayRef<llvm::Constant *> StaticArgs,
    ArrayRef<llvm::Value *> DynamicArgs) {
  assert(IsSanitizerScope);
  assert(Checked.size() > 0);

  llvm::Value *FatalCond = nullptr;
  llvm::Value *RecoverableCond = nullptr;
  llvm::Value *TrapCond = nullptr;
  for (int i = 0, n = Checked.size(); i < n; ++i) {
    llvm::Value *Check = Checked[i].first;
    // -fsanitize-trap= overrides -fsanitize-recover=.
    llvm::Value *&Cond =
        CGM.getCodeGenOpts().SanitizeTrap.has(Checked[i].second)
            ? TrapCond
            : CGM.getCodeGenOpts().SanitizeRecover.has(Checked[i].second)
                  ? RecoverableCond
                  : FatalCond;
    Cond = Cond ? Builder.CreateAnd(Cond, Check) : Check;
  }

  if (TrapCond)
    EmitTrapCheck(TrapCond);
  if (!FatalCond && !RecoverableCond)
    return;

  llvm::Value *JointCond;
  if (FatalCond && RecoverableCond)
    JointCond = Builder.CreateAnd(FatalCond, RecoverableCond);
  else
    JointCond = FatalCond ? FatalCond : RecoverableCond;
  assert(JointCond);

  CheckRecoverableKind RecoverKind = getRecoverableKind(Checked[0].second);
  assert(SanOpts.has(Checked[0].second));
#ifndef NDEBUG
  for (int i = 1, n = Checked.size(); i < n; ++i) {
    assert(RecoverKind == getRecoverableKind(Checked[i].second) &&
           "All recoverable kinds in a single check must be the same!");
    assert(SanOpts.has(Checked[i].second));
  }
#endif

  llvm::BasicBlock *Cont = createBasicBlock("cont");
  llvm::BasicBlock *Handlers = createBasicBlock("handler." + CheckName);
  llvm::Instruction *Branch = Builder.CreateCondBr(JointCond, Cont, Handlers);
  // Give a hint that we very much don't expect to execute the handler.
  // The value is chosen to match UR_NONTAKEN_WEIGHT; see
  // BranchProbabilityInfo.cpp.
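  // Concretely, (1U << 20) - 1 == 1048575, so the weights below make the
  // handler edge roughly a million times colder than the continuation edge.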
  llvm::MDBuilder MDHelper(getLLVMContext());
  llvm::MDNode *Node = MDHelper.createBranchWeights((1U << 20) - 1, 1);
  Branch->setMetadata(llvm::LLVMContext::MD_prof, Node);
  EmitBlock(Handlers);

  // Emit handler arguments and create handler function type.
  llvm::Constant *Info = llvm::ConstantStruct::getAnon(StaticArgs);
  auto *InfoPtr =
      new llvm::GlobalVariable(CGM.getModule(), Info->getType(), false,
                               llvm::GlobalVariable::PrivateLinkage, Info);
  InfoPtr->setUnnamedAddr(true);
  CGM.getSanitizerMetadata()->disableSanitizerForGlobal(InfoPtr);

  SmallVector<llvm::Value *, 4> Args;
  SmallVector<llvm::Type *, 4> ArgTypes;
  Args.reserve(DynamicArgs.size() + 1);
  ArgTypes.reserve(DynamicArgs.size() + 1);

  // Handler functions take an i8* pointing to the (handler-specific) static
  // information block, followed by a sequence of intptr_t arguments
  // representing operand values.
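  // An illustrative handler prototype on a 64-bit target would therefore be:
  //   void @__ubsan_handle_<check>(i8* %static_info, i64 %arg0, i64 %arg1, ...)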
  Args.push_back(Builder.CreateBitCast(InfoPtr, Int8PtrTy));
  ArgTypes.push_back(Int8PtrTy);
  for (size_t i = 0, n = DynamicArgs.size(); i != n; ++i) {
    Args.push_back(EmitCheckValue(DynamicArgs[i]));
    ArgTypes.push_back(IntPtrTy);
  }

  llvm::FunctionType *FnType =
      llvm::FunctionType::get(CGM.VoidTy, ArgTypes, false);

  if (!FatalCond || !RecoverableCond) {
    // Simple case: we need to generate a single handler call, either
    // fatal, or non-fatal.
    emitCheckHandlerCall(*this, FnType, Args, CheckName, RecoverKind,
                         (FatalCond != nullptr), Cont);
  } else {
    // Emit two handler calls: one for the set of unrecoverable checks,
    // another for the recoverable ones.
    llvm::BasicBlock *NonFatalHandlerBB =
        createBasicBlock("non_fatal." + CheckName);
    llvm::BasicBlock *FatalHandlerBB = createBasicBlock("fatal." + CheckName);
    Builder.CreateCondBr(FatalCond, NonFatalHandlerBB, FatalHandlerBB);
    EmitBlock(FatalHandlerBB);
    emitCheckHandlerCall(*this, FnType, Args, CheckName, RecoverKind, true,
                         NonFatalHandlerBB);
    EmitBlock(NonFatalHandlerBB);
    emitCheckHandlerCall(*this, FnType, Args, CheckName, RecoverKind, false,
                         Cont);
  }

  EmitBlock(Cont);
}

void CodeGenFunction::EmitTrapCheck(llvm::Value *Checked) {
  llvm::BasicBlock *Cont = createBasicBlock("cont");

  // If we're optimizing, collapse all calls to trap down to just one per
  // function to save on code size.
  if (!CGM.getCodeGenOpts().OptimizationLevel || !TrapBB) {
    TrapBB = createBasicBlock("trap");
    Builder.CreateCondBr(Checked, Cont, TrapBB);
    EmitBlock(TrapBB);
    llvm::CallInst *TrapCall = EmitTrapCall(llvm::Intrinsic::trap);
    TrapCall->setDoesNotReturn();
    TrapCall->setDoesNotThrow();
    Builder.CreateUnreachable();
  } else {
    Builder.CreateCondBr(Checked, Cont, TrapBB);
  }

  EmitBlock(Cont);
}

llvm::CallInst *CodeGenFunction::EmitTrapCall(llvm::Intrinsic::ID IntrID) {
  llvm::CallInst *TrapCall = Builder.CreateCall(CGM.getIntrinsic(IntrID));

  if (!CGM.getCodeGenOpts().TrapFuncName.empty())
    TrapCall->addAttribute(llvm::AttributeSet::FunctionIndex,
                           "trap-func-name",
                           CGM.getCodeGenOpts().TrapFuncName);

  return TrapCall;
}

/// isSimpleArrayDecayOperand - If the specified expr is a simple decay from an
/// array to pointer, return the array subexpression.
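///
/// For example (illustrative), given "int A[10];", the base of "A[i]" is an
/// ImplicitCastExpr with CK_ArrayToPointerDecay wrapping the DeclRefExpr for
/// A, and this function returns that DeclRefExpr.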
static const Expr *isSimpleArrayDecayOperand(const Expr *E) {
  // If this isn't just an array->pointer decay, bail out.
  const auto *CE = dyn_cast<CastExpr>(E);
  if (!CE || CE->getCastKind() != CK_ArrayToPointerDecay)
    return nullptr;

  // If this is a decay from a variable-width array, bail out.
  const Expr *SubExpr = CE->getSubExpr();
  if (SubExpr->getType()->isVariableArrayType())
    return nullptr;

  return SubExpr;
}

LValue CodeGenFunction::EmitArraySubscriptExpr(const ArraySubscriptExpr *E,
                                               bool Accessed) {
  // The index must always be an integer, which is not an aggregate. Emit it.
  llvm::Value *Idx = EmitScalarExpr(E->getIdx());
  QualType IdxTy = E->getIdx()->getType();
  bool IdxSigned = IdxTy->isSignedIntegerOrEnumerationType();

  if (SanOpts.has(SanitizerKind::ArrayBounds))
    EmitBoundsCheck(E, E->getBase(), Idx, IdxTy, Accessed);

  // If the base is a vector type, then we are forming a vector element lvalue
  // with this subscript.
  if (E->getBase()->getType()->isVectorType() &&
      !isa<ExtVectorElementExpr>(E->getBase())) {
    // Emit the vector as an lvalue to get its address.
    LValue LHS = EmitLValue(E->getBase());
    assert(LHS.isSimple() && "Can only subscript lvalue vectors here!");
    return LValue::MakeVectorElt(LHS.getAddress(), Idx,
                                 E->getBase()->getType(), LHS.getAlignment());
  }

  // Extend or truncate the index type to 32 or 64 bits.
  if (Idx->getType() != IntPtrTy)
    Idx = Builder.CreateIntCast(Idx, IntPtrTy, IdxSigned, "idxprom");

  // HLSL Change Starts
  const Expr *Array = isSimpleArrayDecayOperand(E->getBase());
  assert((!getLangOpts().HLSL || nullptr == Array) &&
         "else array decay snuck in AST for HLSL");
  // HLSL Change Ends

  // We know that the pointer points to a type of the correct size, unless the
  // size is a VLA or Objective-C interface.
  llvm::Value *Address = nullptr;
  CharUnits ArrayAlignment;
  if (isa<ExtVectorElementExpr>(E->getBase())) {
    LValue LV = EmitLValue(E->getBase());
    Address = EmitExtVectorElementLValue(LV);
    Address = Builder.CreateInBoundsGEP(Address, Idx, "arrayidx");
    const VectorType *ExprVT = LV.getType()->getAs<VectorType>();
    QualType EQT = ExprVT->getElementType();
    return MakeAddrLValue(Address, EQT,
                          getContext().getTypeAlignInChars(EQT));
  } else if (const VariableArrayType *vla =
                 getContext().getAsVariableArrayType(E->getType())) {
    // The base must be a pointer, which is not an aggregate. Emit
    // it. It needs to be emitted first in case it's what captures
    // the VLA bounds.
    Address = EmitScalarExpr(E->getBase());

    // The element count here is the total number of non-VLA elements.
    llvm::Value *numElements = getVLASize(vla).first;

    // Effectively, the multiply by the VLA size is part of the GEP.
    // GEP indexes are signed, and scaling an index isn't permitted to
    // signed-overflow, so we use the same semantics for our explicit
    // multiply. We suppress this if overflow is not undefined behavior.
    if (getLangOpts().isSignedOverflowDefined()) {
      Idx = Builder.CreateMul(Idx, numElements);
      Address = Builder.CreateGEP(Address, Idx, "arrayidx");
    } else {
      Idx = Builder.CreateNSWMul(Idx, numElements);
      Address = Builder.CreateInBoundsGEP(Address, Idx, "arrayidx");
    }
  } else if (const ObjCObjectType *OIT =
                 E->getType()->getAs<ObjCObjectType>()) {
    // Indexing over an interface, as in "NSString *P; P[4];"
    llvm::Value *InterfaceSize =
        llvm::ConstantInt::get(Idx->getType(),
            getContext().getTypeSizeInChars(OIT).getQuantity());

    Idx = Builder.CreateMul(Idx, InterfaceSize);

    // The base must be a pointer, which is not an aggregate. Emit it.
    llvm::Value *Base = EmitScalarExpr(E->getBase());
    Address = EmitCastToVoidPtr(Base);
    Address = Builder.CreateGEP(Address, Idx, "arrayidx");
    Address = Builder.CreateBitCast(Address, Base->getType());
  } else if (!getLangOpts().HLSL && Array) { // HLSL Change - no array-to-pointer decay for HLSL
    // If this is A[i] where A is an array, the frontend will have decayed the
    // base to be a ArrayToPointerDecay implicit cast. While correct, it is
    // inefficient at -O0 to emit a "gep A, 0, 0" when codegen'ing it, then a
    // "gep x, i" here. Emit one "gep A, 0, i".
    assert(Array->getType()->isArrayType() &&
           "Array to pointer decay must have array source type!");

    LValue ArrayLV;
    // For simple multidimensional array indexing, set the 'accessed' flag for
    // better bounds-checking of the base expression.
    if (const auto *ASE = dyn_cast<ArraySubscriptExpr>(Array))
      ArrayLV = EmitArraySubscriptExpr(ASE, /*Accessed*/ true);
    else
      ArrayLV = EmitLValue(Array);
    llvm::Value *ArrayPtr = ArrayLV.getAddress();
    llvm::Value *Zero = llvm::ConstantInt::get(Int32Ty, 0);
    llvm::Value *Args[] = { Zero, Idx };

    // Propagate the alignment from the array itself to the result.
    ArrayAlignment = ArrayLV.getAlignment();

    if (getLangOpts().isSignedOverflowDefined())
      Address = Builder.CreateGEP(ArrayPtr, Args, "arrayidx");
    else
      Address = Builder.CreateInBoundsGEP(ArrayPtr, Args, "arrayidx");
  } else {
    // HLSL Change Starts
    const ArrayType *AT = dyn_cast<ArrayType>(
        E->getBase()->getType()->getCanonicalTypeUnqualified());
    if (getContext().getLangOpts().HLSL && AT) {
      LValue ArrayLV;
      // For simple multidimensional array indexing, set the 'accessed' flag
      // for better bounds-checking of the base expression.
      if (const auto *ASE = dyn_cast<ArraySubscriptExpr>(E->getBase()))
        ArrayLV = EmitArraySubscriptExpr(ASE, /*Accessed*/ true);
      else
        ArrayLV = EmitLValue(E->getBase());
      llvm::Value *ArrayPtr = ArrayLV.getAddress();
      llvm::Value *Zero = llvm::ConstantInt::get(Int32Ty, 0);
      llvm::Value *Args[] = { Zero, Idx };

      // Propagate the alignment from the array itself to the result.
      ArrayAlignment = ArrayLV.getAlignment();

      if (getLangOpts().isSignedOverflowDefined())
        Address = Builder.CreateGEP(ArrayPtr, Args, "arrayidx");
      else
        Address = Builder.CreateInBoundsGEP(ArrayPtr, Args, "arrayidx");
    } else {
      // HLSL Change Ends
      // The base must be a pointer, which is not an aggregate. Emit it.
      llvm::Value *Base = EmitScalarExpr(E->getBase());
      if (getLangOpts().isSignedOverflowDefined())
        Address = Builder.CreateGEP(Base, Idx, "arrayidx");
      else
        Address = Builder.CreateInBoundsGEP(Base, Idx, "arrayidx");
    } // HLSL Change
  }
  QualType T = E->getBase()->getType()->getPointeeType();
  // HLSL Change Starts
  if (getContext().getLangOpts().HLSL && T.isNull()) {
    T = QualType(E->getBase()->getType()->getArrayElementTypeNoTypeQual(), 0);
  }
  // HLSL Change Ends
  assert(!T.isNull() &&
         "CodeGenFunction::EmitArraySubscriptExpr(): Illegal base type");

  // Limit the alignment to that of the result type.
  LValue LV;
  if (!ArrayAlignment.isZero()) {
    CharUnits Align = getContext().getTypeAlignInChars(T);
    ArrayAlignment = std::min(Align, ArrayAlignment);
    LV = MakeAddrLValue(Address, T, ArrayAlignment);
  } else {
    LV = MakeNaturalAlignAddrLValue(Address, T);
  }

  LV.getQuals().setAddressSpace(E->getBase()->getType().getAddressSpace());

  if (getLangOpts().ObjC1 &&
      getLangOpts().getGC() != LangOptions::NonGC) {
    LV.setNonGC(!E->isOBJCGCCandidate(getContext()));
    setObjCGCLValueClass(getContext(), E, LV);
  }
  return LV;
}

static llvm::Constant *
GenerateConstantVector(CGBuilderTy &Builder, SmallVectorImpl<unsigned> &Elts) {
  SmallVector<llvm::Constant *, 4> CElts;
  for (unsigned i = 0, e = Elts.size(); i != e; ++i)
    CElts.push_back(Builder.getInt32(Elts[i]));

  return llvm::ConstantVector::get(CElts);
}

LValue CodeGenFunction::
EmitExtVectorElementExpr(const ExtVectorElementExpr *E) {
  // Emit the base vector as an l-value.
  LValue Base;

  // ExtVectorElementExpr's base can either be a vector or pointer to vector.
  if (E->isArrow()) {
    // If it is a pointer to a vector, emit the address and form an lvalue with
    // it.
    llvm::Value *Ptr = EmitScalarExpr(E->getBase());
    const PointerType *PT = E->getBase()->getType()->getAs<PointerType>();
    Base = MakeAddrLValue(Ptr, PT->getPointeeType());
    Base.getQuals().removeObjCGCAttr();
  } else if (E->getBase()->isGLValue()) {
    // Otherwise, if the base is an lvalue (as in the case of foo.x.x),
    // emit the base as an lvalue.
    assert(E->getBase()->getType()->isVectorType());
    Base = EmitLValue(E->getBase());
  } else {
    // Otherwise, the base is a normal rvalue (as in (V+V).x), emit it as such.
    assert(E->getBase()->getType()->isVectorType() &&
           "Result must be a vector");
    llvm::Value *Vec = EmitScalarExpr(E->getBase());

    // Store the vector to memory (because LValue wants an address).
    llvm::Value *VecMem = CreateMemTemp(E->getBase()->getType());
    Builder.CreateStore(Vec, VecMem);
    Base = MakeAddrLValue(VecMem, E->getBase()->getType());
  }

  QualType type =
      E->getType().withCVRQualifiers(Base.getQuals().getCVRQualifiers());

  // Encode the element access list into a vector of unsigned indices.
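  // For example (illustrative), a swizzle such as "V.yx" encodes as {1, 0}.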
  SmallVector<unsigned, 4> Indices;
  E->getEncodedElementAccess(Indices);

  if (Base.isSimple()) {
    llvm::Constant *CV = GenerateConstantVector(Builder, Indices);
    return LValue::MakeExtVectorElt(Base.getAddress(), CV, type,
                                    Base.getAlignment());
  }
  assert(Base.isExtVectorElt() && "Can only subscript lvalue vec elts here!");

  llvm::Constant *BaseElts = Base.getExtVectorElts();
  SmallVector<llvm::Constant *, 4> CElts;

  for (unsigned i = 0, e = Indices.size(); i != e; ++i)
    CElts.push_back(BaseElts->getAggregateElement(Indices[i]));
  llvm::Constant *CV = llvm::ConstantVector::get(CElts);
  return LValue::MakeExtVectorElt(Base.getExtVectorAddr(), CV, type,
                                  Base.getAlignment());
}

// HLSL Change Starts
LValue
CodeGenFunction::EmitExtMatrixElementExpr(const ExtMatrixElementExpr *E) {
  LValue Base;
  assert(!E->isArrow() && "ExtMatrixElementExpr's base will not be Arrow");
  if (E->getBase()->isGLValue()) {
    // If the base is an lvalue (as in the case of foo.x.x),
    // emit the base as an lvalue.
    const Expr *base = E->getBase();
    assert(hlsl::IsHLSLMatType(base->getType()));
    Base = EmitLValue(base);
  } else {
    // Otherwise, the base is a normal rvalue (as in (V+V).x), emit it as such.
    assert(hlsl::IsHLSLMatType(E->getBase()->getType()) &&
           "Base must be a matrix");
    llvm::Value *Vec = EmitScalarExpr(E->getBase());
    // Store the matrix to memory (because LValue wants an address).
    llvm::Value *VecMem = CreateMemTemp(E->getBase()->getType());
    CGM.getHLSLRuntime().EmitHLSLMatrixStore(*this, Vec, VecMem,
                                             E->getBase()->getType());
    Base = MakeAddrLValue(VecMem, E->getBase()->getType());
  }

  // Encode the element access list into a vector of unsigned indices.
  SmallVector<unsigned, 4> Indices;
  E->getEncodedElementAccess(Indices);

  llvm::Type *ResultTy =
      ConvertType(getContext().getLValueReferenceType(E->getType()));
  llvm::Value *matBase = nullptr;
  llvm::Constant *CV = nullptr;
  if (Base.isSimple()) {
    SmallVector<llvm::Constant *, 4> CElts;
    for (unsigned i = 0, e = Indices.size(); i != e; ++i)
      CElts.push_back(Builder.getInt32(Indices[i]));
    CV = llvm::ConstantVector::get(CElts);
    matBase = Base.getAddress();
  } else {
    assert(Base.isExtVectorElt() && "Can only subscript lvalue vec elts here!");
    llvm::Constant *BaseElts = Base.getExtVectorElts();
    SmallVector<llvm::Constant *, 4> CElts;
    for (unsigned i = 0, e = Indices.size(); i != e; ++i)
      CElts.push_back(BaseElts->getAggregateElement(Indices[i]));
    CV = llvm::ConstantVector::get(CElts);
    matBase = Base.getExtVectorAddr();
  }
  llvm::Value *Result = CGM.getHLSLRuntime().EmitHLSLMatrixElement(
      *this, ResultTy, {matBase, CV}, E->getBase()->getType());
  return MakeAddrLValue(Result, E->getType());
}

LValue
CodeGenFunction::EmitHLSLVectorElementExpr(const HLSLVectorElementExpr *E) {
  // Emit the base vector as an l-value.
  // This clones EmitExtVectorElementExpr for now.
  // TODO: reconcile the differences between ExtVector and HLSL vector.
  LValue Base;

  // ExtVectorElementExpr's base can either be a vector or pointer to vector.
  if (E->isArrow()) {
    // If it is a pointer to a vector, emit the address and form an lvalue with
    // it.
    assert(!getLangOpts().HLSL && "this will not happen for HLSL");
  } else if (E->getBase()->isGLValue()) {
    // Otherwise, if the base is an lvalue (as in the case of foo.x.x),
    // emit the base as an lvalue.
    const Expr *base = E->getBase();
    if (const ImplicitCastExpr *ICE = dyn_cast<ImplicitCastExpr>(base)) {
      if (ICE->getCastKind() == CastKind::CK_HLSLVectorSplat &&
          E->getNumElements() == 1) {
        // For a pattern like:
        //   static bool t;
        //   t.x = bool(a);
        // just ignore the .x and treat it like t = bool(a);
        return EmitLValue(ICE->getSubExpr());
      }
    }
    assert(hlsl::IsHLSLVecType(base->getType()));
    Base = EmitLValue(base);
  } else {
    // Otherwise, the base is a normal rvalue (as in (V+V).x), emit it as such.
    assert(hlsl::IsHLSLVecType(E->getBase()->getType()) &&
           "Result must be a vector");
    llvm::Value *Vec = EmitScalarExpr(E->getBase());
    Vec = EmitToMemory(Vec, E->getBase()->getType());

    // Store the vector to memory (because LValue wants an address).
    llvm::Value *VecMemPtr = CreateMemTemp(E->getBase()->getType());
    Builder.CreateStore(Vec, VecMemPtr);
    Base = MakeAddrLValue(VecMemPtr, E->getBase()->getType());
  }

  QualType type =
      E->getType().withCVRQualifiers(Base.getQuals().getCVRQualifiers());

  // Encode the element access list into a vector of unsigned indices.
  SmallVector<unsigned, 4> Indices;
  E->getEncodedElementAccess(Indices);

  if (Base.isSimple()) {
    llvm::Constant *CV = GenerateConstantVector(Builder, Indices);
    return LValue::MakeExtVectorElt(Base.getAddress(), CV, type,
                                    Base.getAlignment());
  }
  assert(Base.isExtVectorElt() && "Can only subscript lvalue vec elts here!");

  llvm::Constant *BaseElts = Base.getExtVectorElts();
  SmallVector<llvm::Constant *, 4> CElts;

  for (unsigned i = 0, e = Indices.size(); i != e; ++i)
    CElts.push_back(BaseElts->getAggregateElement(Indices[i]));
  llvm::Constant *CV = llvm::ConstantVector::get(CElts);
  return LValue::MakeExtVectorElt(Base.getExtVectorAddr(), CV, type,
                                  Base.getAlignment());
}
// HLSL Change Ends

LValue CodeGenFunction::EmitMemberExpr(const MemberExpr *E) {
  Expr *BaseExpr = E->getBase();

  // If this is s.x, emit s as an lvalue. If it is s->x, emit s as a scalar.
  LValue BaseLV;
  if (E->isArrow()) {
    llvm::Value *Ptr = EmitScalarExpr(BaseExpr);
    QualType PtrTy = BaseExpr->getType()->getPointeeType();
    EmitTypeCheck(TCK_MemberAccess, E->getExprLoc(), Ptr, PtrTy);
    BaseLV = MakeNaturalAlignAddrLValue(Ptr, PtrTy);
  } else
    BaseLV = EmitCheckedLValue(BaseExpr, TCK_MemberAccess);

  NamedDecl *ND = E->getMemberDecl();
  if (auto *Field = dyn_cast<FieldDecl>(ND)) {
    LValue LV = EmitLValueForField(BaseLV, Field);
    setObjCGCLValueClass(getContext(), E, LV);
    return LV;
  }

  if (auto *VD = dyn_cast<VarDecl>(ND))
    return EmitGlobalVarDeclLValue(*this, E, VD);

  if (const auto *FD = dyn_cast<FunctionDecl>(ND))
    return EmitFunctionDeclLValue(*this, E, FD);

  llvm_unreachable("Unhandled member declaration!");
}

/// Given that we are currently emitting a lambda, emit an l-value for
/// one of its members.
LValue CodeGenFunction::EmitLValueForLambdaField(const FieldDecl *Field) {
  assert(cast<CXXMethodDecl>(CurCodeDecl)->getParent()->isLambda());
  assert(cast<CXXMethodDecl>(CurCodeDecl)->getParent() == Field->getParent());
  QualType LambdaTagType =
      getContext().getTagDeclType(Field->getParent());
  LValue LambdaLV = MakeNaturalAlignAddrLValue(CXXABIThisValue, LambdaTagType);
  return EmitLValueForField(LambdaLV, Field);
}

LValue CodeGenFunction::EmitLValueForField(LValue base,
                                           const FieldDecl *field) {
  if (field->isBitField()) {
    const CGRecordLayout &RL =
        CGM.getTypes().getCGRecordLayout(field->getParent());
    const CGBitFieldInfo &Info = RL.getBitFieldInfo(field);
    llvm::Value *Addr = base.getAddress();
    unsigned Idx = RL.getLLVMFieldNo(field);
    if (Idx != 0)
      // For structs, we GEP to the field that the record layout suggests.
      Addr = Builder.CreateStructGEP(nullptr, Addr, Idx, field->getName());
    // Get the access type.
    llvm::Type *PtrTy = llvm::Type::getIntNPtrTy(
        getLLVMContext(), Info.StorageSize,
        CGM.getContext().getTargetAddressSpace(base.getType()));
    if (Addr->getType() != PtrTy)
      Addr = Builder.CreateBitCast(Addr, PtrTy);

    QualType fieldType =
        field->getType().withCVRQualifiers(base.getVRQualifiers());
    return LValue::MakeBitfield(Addr, Info, fieldType, base.getAlignment());
  }
  const RecordDecl *rec = field->getParent();
  QualType type = field->getType();
  CharUnits alignment = getContext().getDeclAlign(field);

  // FIXME: It should be impossible to have an LValue without alignment for a
  // complete type.
  if (!base.getAlignment().isZero())
    alignment = std::min(alignment, base.getAlignment());

  bool mayAlias = rec->hasAttr<MayAliasAttr>();

  llvm::Value *addr = base.getAddress();
  unsigned cvr = base.getVRQualifiers();
  bool TBAAPath = CGM.getCodeGenOpts().StructPathTBAA;
  if (rec->isUnion()) {
    // For unions, there is no pointer adjustment.
    assert(!type->isReferenceType() && "union has reference member");
    // TODO: handle path-aware TBAA for union.
    TBAAPath = false;
  } else {
    // For structs, we GEP to the field that the record layout suggests.
    unsigned idx = CGM.getTypes().getCGRecordLayout(rec).getLLVMFieldNo(field);
    addr = Builder.CreateStructGEP(nullptr, addr, idx, field->getName());

    // If this is a reference field, load the reference right now.
    if (const ReferenceType *refType = type->getAs<ReferenceType>()) {
      llvm::LoadInst *load = Builder.CreateLoad(addr, "ref");
      if (cvr & Qualifiers::Volatile) load->setVolatile(true);
      load->setAlignment(alignment.getQuantity());

      // Loading the reference will disable path-aware TBAA.
      TBAAPath = false;
      if (CGM.shouldUseTBAA()) {
        llvm::MDNode *tbaa;
        if (mayAlias)
          tbaa = CGM.getTBAAInfo(getContext().CharTy);
        else
          tbaa = CGM.getTBAAInfo(type);
        if (tbaa)
          CGM.DecorateInstruction(load, tbaa);
      }

      addr = load;
      mayAlias = false;
      type = refType->getPointeeType();
      if (type->isIncompleteType())
        alignment = CharUnits();
      else
        alignment = getContext().getTypeAlignInChars(type);
      cvr = 0; // qualifiers don't recursively apply to the referenced type
    }
  }
  // Make sure that the address is pointing to the right type. This is critical
  // for both unions and structs. A union needs a bitcast, a struct element
  // will need a bitcast if the LLVM type laid out doesn't match the desired
  // type.
  addr = EmitBitCastOfLValueToProperType(*this, addr,
                                         CGM.getTypes().ConvertTypeForMem(type),
                                         field->getName());

  if (field->hasAttr<AnnotateAttr>())
    addr = EmitFieldAnnotations(field, addr);

  LValue LV = MakeAddrLValue(addr, type, alignment);
  LV.getQuals().addCVRQualifiers(cvr);
  if (TBAAPath) {
    const ASTRecordLayout &Layout =
        getContext().getASTRecordLayout(field->getParent());
    // Set the base type to be the base type of the base LValue and
    // update the offset to be relative to the base type.
    LV.setTBAABaseType(mayAlias ? getContext().CharTy : base.getTBAABaseType());
    LV.setTBAAOffset(mayAlias ? 0 : base.getTBAAOffset() +
                     Layout.getFieldOffset(field->getFieldIndex()) /
                         getContext().getCharWidth());
  }

  // __weak attribute on a field is ignored.
  if (LV.getQuals().getObjCGCAttr() == Qualifiers::Weak)
    LV.getQuals().removeObjCGCAttr();

  // Fields of may_alias structs act like 'char' for TBAA purposes.
  // FIXME: this should get propagated down through anonymous structs
  // and unions.
  if (mayAlias && LV.getTBAAInfo())
    LV.setTBAAInfo(CGM.getTBAAInfo(getContext().CharTy));

  return LV;
}

LValue
CodeGenFunction::EmitLValueForFieldInitialization(LValue Base,
                                                  const FieldDecl *Field) {
  QualType FieldType = Field->getType();

  if (!FieldType->isReferenceType())
    return EmitLValueForField(Base, Field);

  const CGRecordLayout &RL =
      CGM.getTypes().getCGRecordLayout(Field->getParent());
  unsigned idx = RL.getLLVMFieldNo(Field);
  llvm::Value *V = Builder.CreateStructGEP(nullptr, Base.getAddress(), idx);
  assert(!FieldType.getObjCGCAttr() && "fields cannot have GC attrs");

  // Make sure that the address is pointing to the right type. This is critical
  // for both unions and structs. A union needs a bitcast, a struct element
  // will need a bitcast if the LLVM type laid out doesn't match the desired
  // type.
  llvm::Type *llvmType = ConvertTypeForMem(FieldType);
  V = EmitBitCastOfLValueToProperType(*this, V, llvmType, Field->getName());

  CharUnits Alignment = getContext().getDeclAlign(Field);

  // FIXME: It should be impossible to have an LValue without alignment for a
  // complete type.
  if (!Base.getAlignment().isZero())
    Alignment = std::min(Alignment, Base.getAlignment());

  return MakeAddrLValue(V, FieldType, Alignment);
}

LValue
CodeGenFunction::EmitCompoundLiteralLValue(const CompoundLiteralExpr *E) {
  if (E->isFileScope()) {
    llvm::Value *GlobalPtr = CGM.GetAddrOfConstantCompoundLiteral(E);
    return MakeAddrLValue(GlobalPtr, E->getType());
  }
  if (E->getType()->isVariablyModifiedType())
    // make sure to emit the VLA size.
    EmitVariablyModifiedType(E->getType());

  llvm::Value *DeclPtr = CreateMemTemp(E->getType(), ".compoundliteral");
  const Expr *InitExpr = E->getInitializer();
  LValue Result = MakeAddrLValue(DeclPtr, E->getType());

  EmitAnyExprToMem(InitExpr, DeclPtr, E->getType().getQualifiers(),
                   /*Init*/ true);

  return Result;
}

LValue CodeGenFunction::EmitInitListLValue(const InitListExpr *E) {
  if (!E->isGLValue())
    // Initializing an aggregate temporary in C++11: T{...}.
    return EmitAggExprToLValue(E);

  // An lvalue initializer list must be initializing a reference.
  assert(E->getNumInits() == 1 && "reference init with multiple values");
  return EmitLValue(E->getInit(0));
}

/// Emit the operand of a glvalue conditional operator. This is either a
/// glvalue or a (possibly-parenthesized) throw-expression. If this is a throw,
/// no LValue is returned and the current block has been terminated.
static Optional<LValue> EmitLValueOrThrowExpression(CodeGenFunction &CGF,
                                                    const Expr *Operand) {
  if (auto *ThrowExpr = dyn_cast<CXXThrowExpr>(Operand->IgnoreParens())) {
    CGF.EmitCXXThrowExpr(ThrowExpr, /*KeepInsertionPoint*/false);
    return None;
  }

  return CGF.EmitLValue(Operand);
}

LValue CodeGenFunction::
EmitConditionalOperatorLValue(const AbstractConditionalOperator *expr) {
  if (!expr->isGLValue()) {
    // ?: here should be an aggregate.
    assert(hasAggregateEvaluationKind(expr->getType()) &&
           "Unexpected conditional operator!");
    return EmitAggExprToLValue(expr);
  }

  OpaqueValueMapping binding(*this, expr);

  const Expr *condExpr = expr->getCond();
  bool CondExprBool;
  if (ConstantFoldsToSimpleInteger(condExpr, CondExprBool)) {
    const Expr *live = expr->getTrueExpr(), *dead = expr->getFalseExpr();
    if (!CondExprBool) std::swap(live, dead);

    if (!ContainsLabel(dead)) {
      // If the true case is live, we need to track its region.
      if (CondExprBool)
        incrementProfileCounter(expr);
      return EmitLValue(live);
    }
  }

  llvm::BasicBlock *lhsBlock = createBasicBlock("cond.true");
  llvm::BasicBlock *rhsBlock = createBasicBlock("cond.false");
  llvm::BasicBlock *contBlock = createBasicBlock("cond.end");

  ConditionalEvaluation eval(*this);
  EmitBranchOnBoolExpr(condExpr, lhsBlock, rhsBlock, getProfileCount(expr));

  // Any temporaries created here are conditional.
  EmitBlock(lhsBlock);
  incrementProfileCounter(expr);
  eval.begin(*this);
  Optional<LValue> lhs =
      EmitLValueOrThrowExpression(*this, expr->getTrueExpr());
  eval.end(*this);

  if (lhs && !lhs->isSimple())
    return EmitUnsupportedLValue(expr, "conditional operator");

  lhsBlock = Builder.GetInsertBlock();
  if (lhs)
    Builder.CreateBr(contBlock);

  // Any temporaries created here are conditional.
  EmitBlock(rhsBlock);
  eval.begin(*this);
  Optional<LValue> rhs =
      EmitLValueOrThrowExpression(*this, expr->getFalseExpr());
  eval.end(*this);
  if (rhs && !rhs->isSimple())
    return EmitUnsupportedLValue(expr, "conditional operator");
  rhsBlock = Builder.GetInsertBlock();

  EmitBlock(contBlock);

  if (lhs && rhs) {
    llvm::PHINode *phi = Builder.CreatePHI(lhs->getAddress()->getType(),
                                           2, "cond-lvalue");
    phi->addIncoming(lhs->getAddress(), lhsBlock);
    phi->addIncoming(rhs->getAddress(), rhsBlock);
    return MakeAddrLValue(phi, expr->getType());
  } else {
    assert((lhs || rhs) &&
           "both operands of glvalue conditional are throw-expressions?");
    return lhs ? *lhs : *rhs;
  }
}

/// EmitCastLValue - Casts are never lvalues unless that cast is to a reference
/// type. If the cast is to a reference, we can have the usual lvalue result;
/// otherwise, if a cast is needed by the code generator in an lvalue context,
/// it must mean that we need the address of an aggregate in order to access
/// one of its members. This can happen for all the reasons that casts are
/// permitted with aggregate result, including noop aggregate casts and casts
/// from scalar to union.
LValue CodeGenFunction::EmitCastLValue(const CastExpr *E) {
  // HLSL Change Begins
  if (hlsl::IsHLSLMatType(E->getType()) ||
      hlsl::IsHLSLMatType(E->getSubExpr()->getType())) {
    LValue LV = EmitLValue(E->getSubExpr());
    QualType ToType = getContext().getLValueReferenceType(E->getType());
    llvm::Value *FromValue = LV.getAddress();
    llvm::Type *FromTy = FromValue->getType();
    llvm::Type *RetTy = ConvertType(ToType);
    // If the type is unchanged (LValueToRValue and CStyleCast may take this
    // path), just return the lvalue.
    if (FromTy == RetTy) {
      return LV;
    }
    // If only the address space changed, add an address space cast.
    if (FromTy->getPointerAddressSpace() != RetTy->getPointerAddressSpace()) {
      llvm::Type *ConvertedFromTy = llvm::PointerType::get(
          FromTy->getPointerElementType(), RetTy->getPointerAddressSpace());
      assert(ConvertedFromTy == RetTy &&
             "otherwise, more than just address space changing in one step");
      llvm::Value *cast =
          Builder.CreateAddrSpaceCast(FromValue, ConvertedFromTy);
      return MakeAddrLValue(cast, ToType);
    }
    llvm::Value *cast = CGM.getHLSLRuntime().EmitHLSLMatrixOperationCall(
        *this, E, RetTy, { LV.getAddress() });
    return MakeAddrLValue(cast, ToType);
  }
  // HLSL Change Ends
  switch (E->getCastKind()) {
  case CK_ToVoid:
  case CK_BitCast:
  case CK_ArrayToPointerDecay:
  case CK_FunctionToPointerDecay:
  case CK_NullToMemberPointer:
  case CK_NullToPointer:
  case CK_IntegralToPointer:
  case CK_PointerToIntegral:
  case CK_PointerToBoolean:
  case CK_VectorSplat:
  case CK_IntegralCast:
  case CK_IntegralToBoolean:
  case CK_IntegralToFloating:
  case CK_FloatingToIntegral:
  case CK_FloatingToBoolean:
  case CK_FloatingCast:
  case CK_FloatingRealToComplex:
  case CK_FloatingComplexToReal:
  case CK_FloatingComplexToBoolean:
  case CK_FloatingComplexCast:
  case CK_FloatingComplexToIntegralComplex:
  case CK_IntegralRealToComplex:
  case CK_IntegralComplexToReal:
  case CK_IntegralComplexToBoolean:
  case CK_IntegralComplexCast:
  case CK_IntegralComplexToFloatingComplex:
  case CK_DerivedToBaseMemberPointer:
  case CK_BaseToDerivedMemberPointer:
  case CK_MemberPointerToBoolean:
  case CK_ReinterpretMemberPointer:
  case CK_AnyPointerToBlockPointerCast:
  case CK_ARCProduceObject:
  case CK_ARCConsumeObject:
  case CK_ARCReclaimReturnedObject:
  case CK_ARCExtendBlockObject:
  case CK_CopyAndAutoreleaseBlockObject:
  case CK_AddressSpaceConversion:
    return EmitUnsupportedLValue(E, "unexpected cast lvalue");

  case CK_Dependent:
    llvm_unreachable("dependent cast kind in IR gen!");

  case CK_BuiltinFnToFnPtr:
    llvm_unreachable("builtin functions are handled elsewhere");

  // These are never l-values; just use the aggregate emission code.
  case CK_NonAtomicToAtomic:
  case CK_AtomicToNonAtomic:
    return EmitAggExprToLValue(E);

  case CK_Dynamic: {
    LValue LV = EmitLValue(E->getSubExpr());
    llvm::Value *V = LV.getAddress();
    const auto *DCE = cast<CXXDynamicCastExpr>(E);
    return MakeAddrLValue(EmitDynamicCast(V, DCE), E->getType());
  }

  case CK_ConstructorConversion:
  case CK_UserDefinedConversion:
  case CK_CPointerToObjCPointerCast:
  case CK_BlockPointerToObjCPointerCast:
  case CK_NoOp:
  case CK_LValueToRValue:
    return EmitLValue(E->getSubExpr());
  case CK_UncheckedDerivedToBase:
  case CK_DerivedToBase: {
    const RecordType *DerivedClassTy =
        E->getSubExpr()->getType()->getAs<RecordType>();
    auto *DerivedClassDecl = cast<CXXRecordDecl>(DerivedClassTy->getDecl());

    LValue LV = EmitLValue(E->getSubExpr());
    llvm::Value *This = LV.getAddress();

    // Perform the derived-to-base conversion.
    llvm::Value *Base = GetAddressOfBaseClass(
        This, DerivedClassDecl, E->path_begin(), E->path_end(),
        /*NullCheckValue=*/false, E->getExprLoc());

    return MakeAddrLValue(Base, E->getType());
  }
  case CK_ToUnion:
    return EmitAggExprToLValue(E);
  case CK_BaseToDerived: {
    const RecordType *DerivedClassTy = E->getType()->getAs<RecordType>();
    auto *DerivedClassDecl = cast<CXXRecordDecl>(DerivedClassTy->getDecl());

    LValue LV = EmitLValue(E->getSubExpr());

    // Perform the base-to-derived conversion.
    llvm::Value *Derived =
        GetAddressOfDerivedClass(LV.getAddress(), DerivedClassDecl,
                                 E->path_begin(), E->path_end(),
                                 /*NullCheckValue=*/false);

    // C++11 [expr.static.cast]p2: Behavior is undefined if a downcast is
    // performed and the object is not of the derived type.
    if (sanitizePerformTypeCheck())
      EmitTypeCheck(TCK_DowncastReference, E->getExprLoc(),
                    Derived, E->getType());

    if (SanOpts.has(SanitizerKind::CFIDerivedCast))
      EmitVTablePtrCheckForCast(E->getType(), Derived, /*MayBeNull=*/false,
                                CFITCK_DerivedCast, E->getLocStart());

    return MakeAddrLValue(Derived, E->getType());
  }
  case CK_LValueBitCast: {
    // This must be a reinterpret_cast (or c-style equivalent).
    const auto *CE = cast<ExplicitCastExpr>(E);

    LValue LV = EmitLValue(E->getSubExpr());
    llvm::Value *V = Builder.CreateBitCast(LV.getAddress(),
                                           ConvertType(CE->getTypeAsWritten()));

    if (SanOpts.has(SanitizerKind::CFIUnrelatedCast))
      EmitVTablePtrCheckForCast(E->getType(), V, /*MayBeNull=*/false,
                                CFITCK_UnrelatedCast, E->getLocStart());

    return MakeAddrLValue(V, E->getType());
  }
  case CK_ObjCObjectLValueCast: {
    LValue LV = EmitLValue(E->getSubExpr());
    QualType ToType = getContext().getLValueReferenceType(E->getType());
    llvm::Value *V = Builder.CreateBitCast(LV.getAddress(),
                                           ConvertType(ToType));
    return MakeAddrLValue(V, E->getType());
  }
  // HLSL Change Starts
  case CK_HLSLVectorSplat: {
    LValue LV = EmitLValue(E->getSubExpr());
    llvm::Value *LVal = nullptr;
    if (LV.isSimple())
      LVal = LV.getAddress();
    else if (LV.isExtVectorElt()) {
      llvm::Constant *VecElts = LV.getExtVectorElts();
      LVal = Builder.CreateGEP(
          LV.getExtVectorAddr(),
          {Builder.getInt32(0), VecElts->getAggregateElement((unsigned)0)});
    } else
      // TODO: make sure all cases are supported.
      assert(0 && "unimplemented case");
    QualType ToType = getContext().getLValueReferenceType(E->getType());
    // Bitcast to the target type.
    llvm::Type *ResultType = ConvertType(ToType);
    llvm::Value *bitcast = Builder.CreateBitCast(LVal, ResultType);
    return MakeAddrLValue(bitcast, ToType);
  }
  case CK_HLSLVectorTruncationCast: {
    LValue LV = EmitLValue(E->getSubExpr());
    QualType ToType = getContext().getLValueReferenceType(E->getType());
    // Bitcast to the target type.
    llvm::Type *ResultType = ConvertType(ToType);
    llvm::Value *bitcast = Builder.CreateBitCast(LV.getAddress(), ResultType);
    return MakeAddrLValue(bitcast, ToType);
  }
  case CK_HLSLVectorToScalarCast: {
    LValue LV = EmitLValue(E->getSubExpr());
    QualType ToType = getContext().getLValueReferenceType(E->getType());
    llvm::ConstantInt *idxZero =
        llvm::ConstantInt::get(llvm::Type::getInt32Ty(getLLVMContext()), 0);
    llvm::Value *GEP =
        Builder.CreateInBoundsGEP(LV.getAddress(), {idxZero, idxZero});
    return MakeAddrLValue(GEP, ToType);
  }
  case CK_HLSLCC_IntegralToFloating:
  case CK_HLSLCC_FloatingToIntegral: {
    LValue LV = EmitLValue(E->getSubExpr());
    QualType ToType = getContext().getLValueReferenceType(E->getType());
    // Bitcast to the target type.
    llvm::Type *ResultType = ConvertType(ToType);
    llvm::Value *bitcast = Builder.CreateBitCast(LV.getAddress(), ResultType);
    return MakeAddrLValue(bitcast, ToType);
  }
  case CK_FlatConversion: {
    // Just bitcast.
    QualType ToType = getContext().getLValueReferenceType(E->getType());
    LValue LV = EmitLValue(E->getSubExpr());
    llvm::Value *This = LV.getAddress();
    // Bitcast to the target type.
    llvm::Type *ResultType = ConvertType(ToType);
    // Make sure we generate an Instruction rather than a folded constant
    // operator, to keep lowering simple.
    bool originAllowFolding = Builder.AllowFolding;
    Builder.AllowFolding = false;
    llvm::Value *bitcast = Builder.CreateBitCast(This, ResultType);
    Builder.AllowFolding = originAllowFolding;
    return MakeAddrLValue(bitcast, ToType);
  }
  case CK_HLSLDerivedToBase: {
    // HLSL only has single inheritance, so this conversion is just a GEP
    // through nested leading members.
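    // Illustrative layout: if "struct D : B" lowers to
    // %struct.D = { %struct.B, ... }, then converting a D lvalue to B is
    // "gep %d, 0, 0", with one extra zero index per additional level of
    // inheritance depth.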
    QualType ToType = getContext().getLValueReferenceType(E->getType());
    LValue LV = EmitLValue(E->getSubExpr());
    llvm::Value *This = LV.getAddress();
    // GEP to the target type.
    llvm::Type *ResultType = ConvertType(ToType);
    unsigned level = 0;
    llvm::Type *ToTy = ResultType->getPointerElementType();
    llvm::Type *FromTy = This->getType()->getPointerElementType();
    // For an empty struct, just bitcast.
    if (FromTy->getStructNumElements() == 0) {
      llvm::Value *bitcast = Builder.CreateBitCast(This, ResultType);
      return MakeAddrLValue(bitcast, ToType);
    }
    while (ToTy != FromTy) {
      FromTy = FromTy->getStructElementType(0);
      ++level;
    }
    llvm::Value *zeroIdx = Builder.getInt32(0);
    SmallVector<llvm::Value *, 2> IdxList(level + 1, zeroIdx);
    llvm::Value *GEP = Builder.CreateInBoundsGEP(This, IdxList);
    return MakeAddrLValue(GEP, ToType);
  }
  case CK_HLSLMatrixSplat:
  case CK_HLSLMatrixToScalarCast:
  case CK_HLSLMatrixTruncationCast:
  case CK_HLSLMatrixToVectorCast:
    // Matrices should be handled above.
  case CK_HLSLVectorToMatrixCast:
  case CK_HLSLCC_IntegralCast:
  case CK_HLSLCC_IntegralToBoolean:
  case CK_HLSLCC_FloatingToBoolean:
  case CK_HLSLCC_FloatingCast:
    llvm_unreachable("Unhandled HLSL lvalue cast");
  // HLSL Change Ends

  case CK_ZeroToOCLEvent:
    llvm_unreachable("NULL to OpenCL event lvalue cast is not valid");
  }

  llvm_unreachable("Unhandled lvalue cast kind?");
}

LValue CodeGenFunction::EmitOpaqueValueLValue(const OpaqueValueExpr *e) {
  assert(OpaqueValueMappingData::shouldBindAsLValue(e));
  return getOpaqueLValueMapping(e);
}

RValue CodeGenFunction::EmitRValueForField(LValue LV,
                                           const FieldDecl *FD,
                                           SourceLocation Loc) {
  QualType FT = FD->getType();
  LValue FieldLV = EmitLValueForField(LV, FD);
  switch (getEvaluationKind(FT)) {
  case TEK_Complex:
    return RValue::getComplex(EmitLoadOfComplex(FieldLV, Loc));
  case TEK_Aggregate:
    return FieldLV.asAggregateRValue();
  case TEK_Scalar:
    return EmitLoadOfLValue(FieldLV, Loc);
  }
  llvm_unreachable("bad evaluation kind");
}

//===--------------------------------------------------------------------===//
//                             Expression Emission
//===--------------------------------------------------------------------===//

RValue CodeGenFunction::EmitCallExpr(const CallExpr *E,
                                     ReturnValueSlot ReturnValue) {
  // Builtins never have block type.
  if (E->getCallee()->getType()->isBlockPointerType())
    return EmitBlockCallExpr(E, ReturnValue);

  if (const auto *CE = dyn_cast<CXXMemberCallExpr>(E))
    return EmitCXXMemberCallExpr(CE, ReturnValue);

  if (const auto *CE = dyn_cast<CUDAKernelCallExpr>(E))
    return EmitCUDAKernelCallExpr(CE, ReturnValue);

  const Decl *TargetDecl = E->getCalleeDecl();
  if (const FunctionDecl *FD = dyn_cast_or_null<FunctionDecl>(TargetDecl)) {
    if (unsigned builtinID = FD->getBuiltinID())
      return EmitBuiltinExpr(FD, builtinID, E, ReturnValue);
    // HLSL Change Starts
    if (getLangOpts().HLSL) {
      if (const NamespaceDecl *ns = dyn_cast<NamespaceDecl>(FD->getParent())) {
        if (ns->getName() == "hlsl") {
          // Do HLSL intrinsic generation.
          return EmitHLSLBuiltinCallExpr(FD, E, ReturnValue);
        }
      }
    }
    // HLSL Change End
  }

  if (const auto *CE = dyn_cast<CXXOperatorCallExpr>(E))
    if (const CXXMethodDecl *MD = dyn_cast_or_null<CXXMethodDecl>(TargetDecl))
      return EmitCXXOperatorMemberCallExpr(CE, MD, ReturnValue);

  if (const auto *PseudoDtor =
          dyn_cast<CXXPseudoDestructorExpr>(E->getCallee()->IgnoreParens())) {
    QualType DestroyedType = PseudoDtor->getDestroyedType();
    if (getLangOpts().ObjCAutoRefCount &&
        DestroyedType->isObjCLifetimeType() &&
        (DestroyedType.getObjCLifetime() == Qualifiers::OCL_Strong ||
         DestroyedType.getObjCLifetime() == Qualifiers::OCL_Weak)) {
      // Automatic Reference Counting:
      //   If the pseudo-expression names a retainable object with weak or
      //   strong lifetime, the object shall be released.
      Expr *BaseExpr = PseudoDtor->getBase();
      llvm::Value *BaseValue = nullptr;
      Qualifiers BaseQuals;

      // If this is s.x, emit s as an lvalue. If it is s->x, emit s as a scalar.
      if (PseudoDtor->isArrow()) {
        BaseValue = EmitScalarExpr(BaseExpr);
        const PointerType *PTy = BaseExpr->getType()->getAs<PointerType>();
        BaseQuals = PTy->getPointeeType().getQualifiers();
      } else {
        LValue BaseLV = EmitLValue(BaseExpr);
        BaseValue = BaseLV.getAddress();
        QualType BaseTy = BaseExpr->getType();
        BaseQuals = BaseTy.getQualifiers();
      }

      switch (PseudoDtor->getDestroyedType().getObjCLifetime()) {
      case Qualifiers::OCL_None:
      case Qualifiers::OCL_ExplicitNone:
      case Qualifiers::OCL_Autoreleasing:
        break;

      case Qualifiers::OCL_Strong:
        EmitARCRelease(Builder.CreateLoad(BaseValue,
                          PseudoDtor->getDestroyedType().isVolatileQualified()),
                       ARCPreciseLifetime);
        break;

      case Qualifiers::OCL_Weak:
        EmitARCDestroyWeak(BaseValue);
        break;
      }
    } else {
      // C++ [expr.pseudo]p1:
      //   The result shall only be used as the operand for the function call
      //   operator (), and the result of such a call has type void. The only
      //   effect is the evaluation of the postfix-expression before the dot or
      //   arrow.
      EmitScalarExpr(E->getCallee());
    }

    return RValue::get(nullptr);
  }

  llvm::Value *Callee = EmitScalarExpr(E->getCallee());
  return EmitCall(E->getCallee()->getType(), Callee, E, ReturnValue,
                  TargetDecl);
}

LValue CodeGenFunction::EmitBinaryOperatorLValue(const BinaryOperator *E) {
  // Comma expressions just emit their LHS then their RHS as an l-value.
  if (E->getOpcode() == BO_Comma) {
    EmitIgnoredExpr(E->getLHS());
    EnsureInsertPoint();
    return EmitLValue(E->getRHS());
  }

  if (E->getOpcode() == BO_PtrMemD ||
      E->getOpcode() == BO_PtrMemI)
    return EmitPointerToDataMemberBinaryExpr(E);

  assert(E->getOpcode() == BO_Assign && "unexpected binary l-value");

  // Note that in all of these cases, __block variables need the RHS
  // evaluated first just in case the variable gets moved by the RHS.

  switch (getEvaluationKind(E->getType())) {
  case TEK_Scalar: {
    switch (E->getLHS()->getType().getObjCLifetime()) {
    case Qualifiers::OCL_Strong:
      return EmitARCStoreStrong(E, /*ignored*/ false).first;

    case Qualifiers::OCL_Autoreleasing:
      return EmitARCStoreAutoreleasing(E).first;

    // No reason to do any of these differently.
    case Qualifiers::OCL_None:
    case Qualifiers::OCL_ExplicitNone:
    case Qualifiers::OCL_Weak:
      break;
    }

    RValue RV = EmitAnyExpr(E->getRHS());
    LValue LV = EmitCheckedLValue(E->getLHS(), TCK_Store);
    EmitStoreThroughLValue(RV, LV);
    return LV;
  }

  case TEK_Complex:
    return EmitComplexAssignmentLValue(E);

  case TEK_Aggregate:
    return EmitAggExprToLValue(E);
  }
  llvm_unreachable("bad evaluation kind");
}

LValue CodeGenFunction::EmitCallExprLValue(const CallExpr *E) {
  RValue RV = EmitCallExpr(E);

  if (!RV.isScalar())
    return MakeAddrLValue(RV.getAggregateAddr(), E->getType());

  assert(E->getCallReturnType(getContext())->isReferenceType() &&
         "Can't have a scalar return unless the return type is a "
         "reference type!");

  return MakeAddrLValue(RV.getScalarVal(), E->getType());
}

LValue CodeGenFunction::EmitVAArgExprLValue(const VAArgExpr *E) {
  // FIXME: This shouldn't require another copy.
  return EmitAggExprToLValue(E);
}

LValue CodeGenFunction::EmitCXXConstructLValue(const CXXConstructExpr *E) {
  assert(E->getType()->getAsCXXRecordDecl()->hasTrivialDestructor()
         && "binding l-value to type which needs a temporary");
  AggValueSlot Slot = CreateAggTemp(E->getType());
  EmitCXXConstructExpr(E, Slot);
  return MakeAddrLValue(Slot.getAddr(), E->getType());
}

LValue
CodeGenFunction::EmitCXXTypeidLValue(const CXXTypeidExpr *E) {
  return MakeAddrLValue(EmitCXXTypeidExpr(E), E->getType());
}

llvm::Value *CodeGenFunction::EmitCXXUuidofExpr(const CXXUuidofExpr *E) {
  return Builder.CreateBitCast(CGM.GetAddrOfUuidDescriptor(E),
                               ConvertType(E->getType())->getPointerTo());
}

LValue CodeGenFunction::EmitCXXUuidofLValue(const CXXUuidofExpr *E) {
  return MakeAddrLValue(EmitCXXUuidofExpr(E), E->getType());
}

  3319. LValue
  3320. CodeGenFunction::EmitCXXBindTemporaryLValue(const CXXBindTemporaryExpr *E) {
  3321. AggValueSlot Slot = CreateAggTemp(E->getType(), "temp.lvalue");
  3322. Slot.setExternallyDestructed();
  3323. EmitAggExpr(E->getSubExpr(), Slot);
  3324. EmitCXXTemporary(E->getTemporary(), E->getType(), Slot.getAddr());
  3325. return MakeAddrLValue(Slot.getAddr(), E->getType());
  3326. }
LValue
CodeGenFunction::EmitLambdaLValue(const LambdaExpr *E) {
  AggValueSlot Slot = CreateAggTemp(E->getType(), "temp.lvalue");
  EmitLambdaExpr(E, Slot);
  return MakeAddrLValue(Slot.getAddr(), E->getType());
}

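// The Objective-C++ analogue of EmitCallExprLValue: a message send is an
// l-value when the method returns a C++ reference, e.g.
// '[obj sharedCounter] = 0' with '- (int &)sharedCounter;'.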
LValue CodeGenFunction::EmitObjCMessageExprLValue(const ObjCMessageExpr *E) {
  RValue RV = EmitObjCMessageExpr(E);

  if (!RV.isScalar())
    return MakeAddrLValue(RV.getAggregateAddr(), E->getType());

  assert(E->getMethodDecl()->getReturnType()->isReferenceType() &&
         "Can't have a scalar return unless the return type is a "
         "reference type!");

  return MakeAddrLValue(RV.getScalarVal(), E->getType());
}

LValue CodeGenFunction::EmitObjCSelectorLValue(const ObjCSelectorExpr *E) {
  llvm::Value *V =
      CGM.getObjCRuntime().GetSelector(*this, E->getSelector(), true);
  return MakeAddrLValue(V, E->getType());
}

llvm::Value *CodeGenFunction::EmitIvarOffset(const ObjCInterfaceDecl *Interface,
                                             const ObjCIvarDecl *Ivar) {
  return CGM.getObjCRuntime().EmitIvarOffset(*this, Interface, Ivar);
}

LValue CodeGenFunction::EmitLValueForIvar(QualType ObjectTy,
                                          llvm::Value *BaseValue,
                                          const ObjCIvarDecl *Ivar,
                                          unsigned CVRQualifiers) {
  return CGM.getObjCRuntime().EmitObjCValueForIvar(*this, ObjectTy, BaseValue,
                                                   Ivar, CVRQualifiers);
}

LValue CodeGenFunction::EmitObjCIvarRefLValue(const ObjCIvarRefExpr *E) {
  // FIXME: A lot of the code below could be shared with EmitMemberExpr.
  llvm::Value *BaseValue = nullptr;
  const Expr *BaseExpr = E->getBase();
  Qualifiers BaseQuals;
  QualType ObjectTy;
  if (E->isArrow()) {
    BaseValue = EmitScalarExpr(BaseExpr);
    ObjectTy = BaseExpr->getType()->getPointeeType();
    BaseQuals = ObjectTy.getQualifiers();
  } else {
    LValue BaseLV = EmitLValue(BaseExpr);
    // FIXME: this isn't right for bitfields.
    BaseValue = BaseLV.getAddress();
    ObjectTy = BaseExpr->getType();
    BaseQuals = ObjectTy.getQualifiers();
  }

  LValue LV =
      EmitLValueForIvar(ObjectTy, BaseValue, E->getDecl(),
                        BaseQuals.getCVRQualifiers());
  setObjCGCLValueClass(getContext(), E, LV);
  return LV;
}

LValue CodeGenFunction::EmitStmtExprLValue(const StmtExpr *E) {
  // Can only get an l-value for a statement expression returning an
  // aggregate type.
  RValue RV = EmitAnyExprToTemp(E);
  return MakeAddrLValue(RV.getAggregateAddr(), E->getType());
}

RValue CodeGenFunction::EmitCall(QualType CalleeType, llvm::Value *Callee,
                                 const CallExpr *E, ReturnValueSlot ReturnValue,
                                 const Decl *TargetDecl, llvm::Value *Chain) {
  // Get the actual function type. The callee type will always be a pointer to
  // function type or a block pointer type.
  assert(CalleeType->isFunctionPointerType() &&
         "Call must have function pointer type!");

  CalleeType = getContext().getCanonicalType(CalleeType);

  const auto *FnType =
      cast<FunctionType>(cast<PointerType>(CalleeType)->getPointeeType());

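  // With -fsanitize=function, indirect call sites verify the callee's type at
  // run time. Instrumented functions are emitted with prefix data laid out
  // (roughly) as a packed struct immediately before the entry point:
  //
  //   { i32 <signature>, i8* <RTTI for the function type> }  <entry point>
  //
  // The block below loads the signature word; only if it matches (i.e. the
  // callee was instrumented) does it compare the RTTI pointer and report a
  // function_type_mismatch on disagreement.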
  if (getLangOpts().CPlusPlus && SanOpts.has(SanitizerKind::Function) &&
      (!TargetDecl || !isa<FunctionDecl>(TargetDecl))) {
    if (llvm::Constant *PrefixSig =
            CGM.getTargetCodeGenInfo().getUBSanFunctionSignature(CGM)) {
      SanitizerScope SanScope(this);
      llvm::Constant *FTRTTIConst =
          CGM.GetAddrOfRTTIDescriptor(QualType(FnType, 0), /*ForEH=*/true);
      llvm::Type *PrefixStructTyElems[] = {
        PrefixSig->getType(),
        FTRTTIConst->getType()
      };
      llvm::StructType *PrefixStructTy = llvm::StructType::get(
          CGM.getLLVMContext(), PrefixStructTyElems, /*isPacked=*/true);

      llvm::Value *CalleePrefixStruct = Builder.CreateBitCast(
          Callee, llvm::PointerType::getUnqual(PrefixStructTy));
      llvm::Value *CalleeSigPtr =
          Builder.CreateConstGEP2_32(PrefixStructTy, CalleePrefixStruct, 0, 0);
      llvm::Value *CalleeSig = Builder.CreateLoad(CalleeSigPtr);
      llvm::Value *CalleeSigMatch = Builder.CreateICmpEQ(CalleeSig, PrefixSig);

      llvm::BasicBlock *Cont = createBasicBlock("cont");
      llvm::BasicBlock *TypeCheck = createBasicBlock("typecheck");
      Builder.CreateCondBr(CalleeSigMatch, TypeCheck, Cont);

      EmitBlock(TypeCheck);
      llvm::Value *CalleeRTTIPtr =
          Builder.CreateConstGEP2_32(PrefixStructTy, CalleePrefixStruct, 0, 1);
      llvm::Value *CalleeRTTI = Builder.CreateLoad(CalleeRTTIPtr);
      llvm::Value *CalleeRTTIMatch =
          Builder.CreateICmpEQ(CalleeRTTI, FTRTTIConst);
      llvm::Constant *StaticData[] = {
        EmitCheckSourceLocation(E->getLocStart()),
        EmitCheckTypeDescriptor(CalleeType)
      };
      EmitCheck(std::make_pair(CalleeRTTIMatch, SanitizerKind::Function),
                "function_type_mismatch", StaticData, Callee);

      Builder.CreateBr(Cont);
      EmitBlock(Cont);
    }
  }

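  // HLSL out parameters use copy-in/copy-out semantics. Illustrative HLSL
  // (hypothetical source):
  //
  //   void addOne(inout float v) { v += 1; }
  //   ...
  //   addOne(buf[i]);
  //
  // The conversion-init below rewrites the argument into a fresh temporary
  // initialized from 'buf[i]' and records the pairing in castArgList, so the
  // copy-back after the call can write the updated value to 'buf[i]'.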
  // HLSL Change Begins
  llvm::SmallVector<LValue, 8> castArgList;
  llvm::SmallVector<LValue, 8> lifetimeCleanupList;
  // The argument list of the CallExpr; it may be updated for out parameters.
  llvm::SmallVector<const Stmt *, 8> argList(E->arg_begin(), E->arg_end());
  ConstExprIterator argBegin = argList.data();
  ConstExprIterator argEnd = argList.data() + E->getNumArgs();

  // Out-parameter conversion.
  CodeGenFunction::HLSLOutParamScope OutParamScope(*this);
  auto MapTemp = [&](const VarDecl *LocalVD, llvm::Value *TmpArg) {
    OutParamScope.addTemp(LocalVD, TmpArg);
  };
  if (getLangOpts().HLSL) {
    if (const FunctionDecl *FD = E->getDirectCallee())
      CGM.getHLSLRuntime().EmitHLSLOutParamConversionInit(
          *this, FD, E, castArgList, argList, lifetimeCleanupList, MapTemp);
  }
  // HLSL Change Ends

  CallArgList Args;
  if (Chain)
    Args.add(RValue::get(Builder.CreateBitCast(Chain, CGM.VoidPtrTy)),
             CGM.getContext().VoidPtrTy);
  EmitCallArgs(Args, dyn_cast<FunctionProtoType>(FnType), argBegin,
               argEnd, // HLSL Change - use updated argList
               E->getDirectCallee(), /*ParamsToSkip*/ 0);

  const CGFunctionInfo &FnInfo = CGM.getTypes().arrangeFreeFunctionCall(
      Args, FnType, /*isChainCall=*/Chain);

  // C99 6.5.2.2p6:
  //   If the expression that denotes the called function has a type
  //   that does not include a prototype, [the default argument
  //   promotions are performed]. If the number of arguments does not
  //   equal the number of parameters, the behavior is undefined. If
  //   the function is defined with a type that includes a prototype,
  //   and either the prototype ends with an ellipsis (, ...) or the
  //   types of the arguments after promotion are not compatible with
  //   the types of the parameters, the behavior is undefined. If the
  //   function is defined with a type that does not include a
  //   prototype, and the types of the arguments after promotion are
  //   not compatible with those of the parameters after promotion,
  //   the behavior is undefined [except in some trivial cases].
  //
  // That is, in the general case, we should assume that a call
  // through an unprototyped function type works like a *non-variadic*
  // call. The way we make this work is to cast to the exact type
  // of the promoted arguments.
  //
  // Chain calls use this same code path to add the invisible chain parameter
  // to the function type.
  if (isa<FunctionNoProtoType>(FnType) || Chain) {
    llvm::Type *CalleeTy = getTypes().GetFunctionType(FnInfo);
    CalleeTy = CalleeTy->getPointerTo();
    Callee = Builder.CreateBitCast(Callee, CalleeTy, "callee.knr.cast");
  }

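  // E.g. given the unprototyped C declaration 'int f();', a call 'f(1, 2.0f)'
  // promotes the float argument to double, and the cast above lets the call
  // be emitted against the exact promoted signature 'i32 (i32, double)'.
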
  RValue CallVal = EmitCall(FnInfo, Callee, ReturnValue, Args, TargetDecl);

  // HLSL Change Begins
  // Out-parameter conversion: perform any needed conversions and copy the
  // values back after the call.
  if (getLangOpts().HLSL)
    CGM.getHLSLRuntime().EmitHLSLOutParamConversionCopyBack(
        *this, castArgList, lifetimeCleanupList);
  // HLSL Change Ends

  return CallVal;
}

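// Emits '.*' and '->*' applied to a pointer to data member. For example,
// given 'int S::*pm = &S::x;', the expression 's.*pm' computes the member's
// address from the object's base address plus the value carried by 'pm'
// (under the Itanium ABI, a byte offset); the ABI-specific arithmetic is
// done by EmitMemberDataPointerAddress.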
LValue CodeGenFunction::
EmitPointerToDataMemberBinaryExpr(const BinaryOperator *E) {
  llvm::Value *BaseV;
  if (E->getOpcode() == BO_PtrMemI)
    BaseV = EmitScalarExpr(E->getLHS());
  else
    BaseV = EmitLValue(E->getLHS()).getAddress();

  llvm::Value *OffsetV = EmitScalarExpr(E->getRHS());

  const MemberPointerType *MPT
    = E->getRHS()->getType()->getAs<MemberPointerType>();

  llvm::Value *AddV = CGM.getCXXABI().EmitMemberDataPointerAddress(
      *this, E, BaseV, OffsetV, MPT);

  return MakeAddrLValue(AddV, MPT->getPointeeType());
}

/// Given the address of a temporary variable, produce an r-value of
/// its type.
RValue CodeGenFunction::convertTempToRValue(llvm::Value *addr,
                                            QualType type,
                                            SourceLocation loc) {
  LValue lvalue = MakeNaturalAlignAddrLValue(addr, type);
  switch (getEvaluationKind(type)) {
  case TEK_Complex:
    return RValue::getComplex(EmitLoadOfComplex(lvalue, loc));
  case TEK_Aggregate:
    return lvalue.asAggregateRValue();
  case TEK_Scalar:
    return RValue::get(EmitLoadOfScalar(lvalue, loc));
  }
  llvm_unreachable("bad evaluation kind");
}

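// Attaches '!fpmath' metadata to a floating-point instruction, recording the
// maximum permitted error in ULPs. For example, SetFPAccuracy(V, 2.5f) marks
// V with '!fpmath !{float 2.500000e+00}', which is how OpenCL's relaxed
// precision requirement for single-precision divide is expressed.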
void CodeGenFunction::SetFPAccuracy(llvm::Value *Val, float Accuracy) {
  assert(Val->getType()->isFPOrFPVectorTy());
  if (Accuracy == 0.0 || !isa<llvm::Instruction>(Val))
    return;

  llvm::MDBuilder MDHelper(getLLVMContext());
  llvm::MDNode *Node = MDHelper.createFPMath(Accuracy);
  cast<llvm::Instruction>(Val)->setMetadata(llvm::LLVMContext::MD_fpmath, Node);
}

namespace {
  struct LValueOrRValue {
    LValue LV;
    RValue RV;
  };
}

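// A PseudoObjectExpr pairs a syntactic form with the semantic expressions
// that implement it, linked through OpaqueValueExprs. For example, the
// Objective-C property increment 'obj.count += 1' desugars roughly to:
//
//   OVE0 = obj
//   OVE1 = [OVE0 count]
//   [OVE0 setCount: OVE1 + 1]
//
// This walks the semantic expressions in order, binding each opaque value to
// the value of its source expression, and yields the value of the result
// expression (if any).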
static LValueOrRValue emitPseudoObjectExpr(CodeGenFunction &CGF,
                                           const PseudoObjectExpr *E,
                                           bool forLValue,
                                           AggValueSlot slot) {
  SmallVector<CodeGenFunction::OpaqueValueMappingData, 4> opaques;

  // Find the result expression, if any.
  const Expr *resultExpr = E->getResultExpr();
  LValueOrRValue result;

  for (PseudoObjectExpr::const_semantics_iterator
         i = E->semantics_begin(), e = E->semantics_end(); i != e; ++i) {
    const Expr *semantic = *i;

    // If this semantic expression is an opaque value, bind it
    // to the result of its source expression.
    if (const auto *ov = dyn_cast<OpaqueValueExpr>(semantic)) {

      // If this is the result expression, we may need to evaluate
      // directly into the slot.
      typedef CodeGenFunction::OpaqueValueMappingData OVMA;
      OVMA opaqueData;
      if (ov == resultExpr && ov->isRValue() && !forLValue &&
          CodeGenFunction::hasAggregateEvaluationKind(ov->getType())) {
        CGF.EmitAggExpr(ov->getSourceExpr(), slot);

        LValue LV = CGF.MakeAddrLValue(slot.getAddr(), ov->getType());
        opaqueData = OVMA::bind(CGF, ov, LV);
        result.RV = slot.asRValue();

      // Otherwise, emit as normal.
      } else {
        opaqueData = OVMA::bind(CGF, ov, ov->getSourceExpr());

        // If this is the result, also evaluate the result now.
        if (ov == resultExpr) {
          if (forLValue)
            result.LV = CGF.EmitLValue(ov);
          else
            result.RV = CGF.EmitAnyExpr(ov, slot);
        }
      }

      opaques.push_back(opaqueData);

    // Otherwise, if the expression is the result, evaluate it
    // and remember the result.
    } else if (semantic == resultExpr) {
      if (forLValue)
        result.LV = CGF.EmitLValue(semantic);
      else
        result.RV = CGF.EmitAnyExpr(semantic, slot);

    // Otherwise, evaluate the expression in an ignored context.
    } else {
      CGF.EmitIgnoredExpr(semantic);
    }
  }

  // Unbind all the opaques now.
  for (unsigned i = 0, e = opaques.size(); i != e; ++i)
    opaques[i].unbind(CGF);

  return result;
}

RValue CodeGenFunction::EmitPseudoObjectRValue(const PseudoObjectExpr *E,
                                               AggValueSlot slot) {
  return emitPseudoObjectExpr(*this, E, false, slot).RV;
}

LValue CodeGenFunction::EmitPseudoObjectLValue(const PseudoObjectExpr *E) {
  return emitPseudoObjectExpr(*this, E, true, AggValueSlot::ignored()).LV;
}