//===----- CGHLSLMS.cpp - Interface to HLSL Runtime ----------------===//
///////////////////////////////////////////////////////////////////////////////
//                                                                           //
// CGHLSLMS.cpp                                                              //
// Copyright (C) Microsoft Corporation. All rights reserved.                 //
// This file is distributed under the University of Illinois Open Source     //
// License. See LICENSE.TXT for details.                                     //
//                                                                           //
// This provides a class for HLSL code generation.                           //
//                                                                           //
///////////////////////////////////////////////////////////////////////////////
#include "CGHLSLRuntime.h"
#include "CodeGenFunction.h"
#include "CodeGenModule.h"
#include "CGRecordLayout.h"
#include "dxc/HlslIntrinsicOp.h"
#include "dxc/HLSL/HLMatrixLowerHelper.h"
#include "dxc/HLSL/HLModule.h"
#include "dxc/HLSL/HLOperations.h"
#include "dxc/HLSL/DXILOperations.h"
#include "dxc/HLSL/DxilTypeSystem.h"
#include "clang/AST/DeclTemplate.h"
#include "clang/AST/HlslTypes.h"
#include "clang/Frontend/CodeGenOptions.h"
#include "llvm/IR/Constants.h"
#include "llvm/IR/IRBuilder.h"
#include "llvm/IR/GetElementPtrTypeIterator.h"
#include <memory>
#include <unordered_map>
#include <unordered_set>
#include "dxc/HLSL/DxilRootSignature.h"
#include "dxc/HLSL/DxilCBuffer.h"
#include "clang/Parse/ParseHLSL.h"   // root sig would be in Parser if part of lang
#include "dxc/Support/WinIncludes.h" // stream support
#include "dxc/dxcapi.h"              // stream support
#include "dxc/HLSL/HLSLExtensionsCodegenHelper.h"

using namespace clang;
using namespace CodeGen;
using namespace hlsl;
using namespace llvm;
using std::unique_ptr;

static const bool KeepUndefinedTrue =
    true; // Keep interpolation mode undefined if not set explicitly.

namespace {

/// Use this class to represent HLSL cbuffer in high-level DXIL.
class HLCBuffer : public DxilCBuffer {
public:
  HLCBuffer() = default;
  virtual ~HLCBuffer() = default;

  void AddConst(std::unique_ptr<DxilResourceBase> &pItem);
  std::vector<std::unique_ptr<DxilResourceBase>> &GetConstants();

private:
  std::vector<std::unique_ptr<DxilResourceBase>>
      constants; // constants inside const buffer
};

//------------------------------------------------------------------------------
//
// HLCBuffer methods.
//
void HLCBuffer::AddConst(std::unique_ptr<DxilResourceBase> &pItem) {
  pItem->SetID(constants.size());
  constants.push_back(std::move(pItem));
}

std::vector<std::unique_ptr<DxilResourceBase>> &HLCBuffer::GetConstants() {
  return constants;
}

class CGMSHLSLRuntime : public CGHLSLRuntime {
private:
  /// Convenience reference to LLVM Context
  llvm::LLVMContext &Context;
  /// Convenience reference to the current module
  llvm::Module &TheModule;

  HLModule *m_pHLModule;
  llvm::Type *CBufferType;
  uint32_t globalCBIndex;
  // TODO: figure out how minprec works.
  llvm::DataLayout legacyLayout;
  // Decl map to constant id for the program.
  llvm::DenseMap<HLSLBufferDecl *, uint32_t> constantBufMap;
  bool m_bDebugInfo;

  HLCBuffer &GetGlobalCBuffer() {
    return *static_cast<HLCBuffer *>(&(m_pHLModule->GetCBuffer(globalCBIndex)));
  }
  void AddConstant(VarDecl *constDecl, HLCBuffer &CB);
  uint32_t AddSampler(VarDecl *samplerDecl);
  uint32_t AddUAVSRV(VarDecl *decl, hlsl::DxilResourceBase::Class resClass);
  uint32_t AddCBuffer(HLSLBufferDecl *D);
  hlsl::DxilResourceBase::Class TypeToClass(clang::QualType Ty);

  // Save EntryFunc so we don't need to find it by its original name.
  llvm::Function *EntryFunc;
  // Map to save patch constant functions.
  StringMap<Function *> patchConstantFunctionMap;
  bool IsPatchConstantFunction(const Function *F);
  // List of functions with clip planes.
  std::vector<Function *> clipPlaneFuncList;
  std::unordered_map<Value *, DebugLoc> debugInfoMap;

  Value *EmitHLSLMatrixLoad(CGBuilderTy &Builder, Value *Ptr, QualType Ty);
  void EmitHLSLMatrixStore(CGBuilderTy &Builder, Value *Val, Value *DestPtr,
                           QualType Ty);

  // Flatten val into scalar values and push them into elts and eltTys.
  void FlattenValToInitList(CodeGenFunction &CGF, SmallVector<Value *, 4> &elts,
                            SmallVector<QualType, 4> &eltTys, QualType Ty,
                            Value *val);
  // Push every value of an InitListExpr into EltValList and EltTyList.
  void ScanInitList(CodeGenFunction &CGF, InitListExpr *E,
                    SmallVector<Value *, 4> &EltValList,
                    SmallVector<QualType, 4> &EltTyList);
  // Only scan the init list to get the element size.
  unsigned ScanInitList(InitListExpr *E);

  void FlattenAggregatePtrToGepList(CodeGenFunction &CGF, Value *Ptr,
                                    SmallVector<Value *, 4> &idxList,
                                    clang::QualType Type, llvm::Type *Ty,
                                    SmallVector<Value *, 4> &GepList,
                                    SmallVector<QualType, 4> &EltTyList);
  void LoadFlattenedGepList(CodeGenFunction &CGF, ArrayRef<Value *> GepList,
                            ArrayRef<QualType> EltTyList,
                            SmallVector<Value *, 4> &EltList);
  void StoreFlattenedGepList(CodeGenFunction &CGF, ArrayRef<Value *> GepList,
                             ArrayRef<QualType> GepTyList,
                             ArrayRef<Value *> EltValList,
                             ArrayRef<QualType> SrcTyList);

  void EmitHLSLAggregateCopy(CodeGenFunction &CGF, llvm::Value *SrcPtr,
                             llvm::Value *DestPtr,
                             SmallVector<Value *, 4> &idxList,
                             clang::QualType Type, llvm::Type *Ty);
  void EmitHLSLAggregateStore(CodeGenFunction &CGF, llvm::Value *Val,
                              llvm::Value *DestPtr,
                              SmallVector<Value *, 4> &idxList,
                              clang::QualType Type, llvm::Type *Ty);
  void EmitHLSLFlatConversionToAggregate(CodeGenFunction &CGF, Value *SrcVal,
                                         llvm::Value *DestPtr,
                                         SmallVector<Value *, 4> &idxList,
                                         QualType Type, QualType SrcType,
                                         llvm::Type *Ty);

  void EmitHLSLRootSignature(CodeGenFunction &CGF, HLSLRootSignatureAttr *RSA,
                             llvm::Function *Fn);

  void CheckParameterAnnotation(SourceLocation SLoc,
                                const DxilParameterAnnotation &paramInfo,
                                bool isPatchConstantFunction);
  void CheckParameterAnnotation(SourceLocation SLoc,
                                DxilParamInputQual paramQual,
                                llvm::StringRef semFullName,
                                bool isPatchConstantFunction);

  void SetEntryFunction();
  SourceLocation SetSemantic(const NamedDecl *decl,
                             DxilParameterAnnotation &paramInfo);

  hlsl::InterpolationMode GetInterpMode(const Decl *decl, CompType compType,
                                        bool bKeepUndefined);
  hlsl::CompType GetCompType(const BuiltinType *BT);

  // Save the intrinsic opcode for each intrinsic function.
  std::unordered_map<Function *, unsigned> m_IntrinsicMap;
  void AddHLSLIntrinsicOpcodeToFunction(Function *, unsigned opcode);

  // Type annotation related.
  unsigned ConstructStructAnnotation(DxilStructAnnotation *annotation,
                                     const RecordDecl *RD,
                                     DxilTypeSystem &dxilTypeSys);
  unsigned AddTypeAnnotation(QualType Ty, DxilTypeSystem &dxilTypeSys,
                             unsigned &arrayEltSize);

  std::unordered_map<Constant *, DxilFieldAnnotation> m_ConstVarAnnotationMap;

public:
  CGMSHLSLRuntime(CodeGenModule &CGM);

  bool IsHlslObjectType(llvm::Type *Ty) override;

  /// Add a resource to the program.
  void addResource(Decl *D) override;
  void FinishCodeGen() override;

  Value *EmitHLSLInitListExpr(CodeGenFunction &CGF, InitListExpr *E,
                              Value *DestPtr) override;
  QualType UpdateHLSLIncompleteArrayType(VarDecl &D) override;
  RValue EmitHLSLBuiltinCallExpr(CodeGenFunction &CGF, const FunctionDecl *FD,
                                 const CallExpr *E,
                                 ReturnValueSlot ReturnValue) override;

  void EmitHLSLOutParamConversionInit(
      CodeGenFunction &CGF, const FunctionDecl *FD, const CallExpr *E,
      llvm::SmallVector<LValue, 8> &castArgList,
      llvm::SmallVector<const Stmt *, 8> &argList,
      const std::function<void(const VarDecl *, llvm::Value *)> &TmpArgMap)
      override;
  void EmitHLSLOutParamConversionCopyBack(
      CodeGenFunction &CGF, llvm::SmallVector<LValue, 8> &castArgList) override;

  Value *EmitHLSLMatrixOperationCall(CodeGenFunction &CGF, const clang::Expr *E,
                                     llvm::Type *RetType,
                                     ArrayRef<Value *> paramList) override;
  void EmitHLSLDiscard(CodeGenFunction &CGF) override;
  Value *EmitHLSLMatrixSubscript(CodeGenFunction &CGF, llvm::Type *RetType,
                                 Value *Ptr, Value *Idx, QualType Ty) override;
  Value *EmitHLSLMatrixElement(CodeGenFunction &CGF, llvm::Type *RetType,
                               ArrayRef<Value *> paramList,
                               QualType Ty) override;
  Value *EmitHLSLMatrixLoad(CodeGenFunction &CGF, Value *Ptr,
                            QualType Ty) override;
  void EmitHLSLMatrixStore(CodeGenFunction &CGF, Value *Val, Value *DestPtr,
                           QualType Ty) override;

  void EmitHLSLAggregateCopy(CodeGenFunction &CGF, llvm::Value *SrcPtr,
                             llvm::Value *DestPtr,
                             clang::QualType Ty) override;
  void EmitHLSLAggregateStore(CodeGenFunction &CGF, llvm::Value *Val,
                              llvm::Value *DestPtr,
                              clang::QualType Ty) override;
  void EmitHLSLFlatConversionToAggregate(CodeGenFunction &CGF, Value *Val,
                                         Value *DestPtr, QualType Ty,
                                         QualType SrcTy) override;
  Value *EmitHLSLLiteralCast(CodeGenFunction &CGF, Value *Src, QualType SrcType,
                             QualType DstType) override;
  void EmitHLSLFlatConversionAggregateCopy(CodeGenFunction &CGF,
                                           llvm::Value *SrcPtr,
                                           clang::QualType SrcTy,
                                           llvm::Value *DestPtr,
                                           clang::QualType DestTy) override;

  void AddHLSLFunctionInfo(llvm::Function *, const FunctionDecl *FD) override;
  void EmitHLSLFunctionProlog(llvm::Function *, const FunctionDecl *FD) override;

  void AddControlFlowHint(CodeGenFunction &CGF, const Stmt &S,
                          llvm::TerminatorInst *TI,
                          ArrayRef<const Attr *> Attrs) override;
  void FinishAutoVar(CodeGenFunction &CGF, const VarDecl &D,
                     llvm::Value *V) override;

  /// Get or add a constant buffer to the program.
  HLCBuffer &GetOrCreateCBuffer(HLSLBufferDecl *D);
};

} // namespace

//------------------------------------------------------------------------------
//
// CGMSHLSLRuntime methods.
//
CGMSHLSLRuntime::CGMSHLSLRuntime(CodeGenModule &CGM)
    : CGHLSLRuntime(CGM), Context(CGM.getLLVMContext()), EntryFunc(nullptr),
      TheModule(CGM.getModule()),
      legacyLayout(HLModule::GetLegacyDataLayoutDesc()),
      CBufferType(
          llvm::StructType::create(TheModule.getContext(), "ConstantBuffer")) {
  const hlsl::ShaderModel *SM =
      hlsl::ShaderModel::GetByName(CGM.getCodeGenOpts().HLSLProfile.c_str());
  // Only accept a valid 6.0 shader model.
  if (!SM->IsValid() || SM->GetMajor() != 6 || SM->GetMinor() != 0) {
    DiagnosticsEngine &Diags = CGM.getDiags();
    unsigned DiagID =
        Diags.getCustomDiagID(DiagnosticsEngine::Error, "invalid profile %0");
    Diags.Report(DiagID) << CGM.getCodeGenOpts().HLSLProfile;
  }
  // TODO: add AllResourceBound.
  if (CGM.getCodeGenOpts().HLSLAvoidControlFlow &&
      !CGM.getCodeGenOpts().HLSLAllResourcesBound) {
    if (SM->GetMajor() >= 5 && SM->GetMinor() >= 1) {
      DiagnosticsEngine &Diags = CGM.getDiags();
      unsigned DiagID =
          Diags.getCustomDiagID(DiagnosticsEngine::Error,
                                "Gfa option cannot be used in SM_5_1+ unless "
                                "all_resources_bound flag is specified");
      Diags.Report(DiagID);
    }
  }
  // Create HLModule.
  const bool skipInit = true;
  m_pHLModule = &TheModule.GetOrCreateHLModule(skipInit);
  // Set options.
  HLOptions opts;
  opts.bIEEEStrict = CGM.getCodeGenOpts().UnsafeFPMath;
  opts.bDefaultRowMajor = CGM.getCodeGenOpts().HLSLDefaultRowMajor;
  opts.bDisableOptimizations = CGM.getCodeGenOpts().DisableLLVMOpts;
  opts.bLegacyCBufferLoad = !CGM.getCodeGenOpts().HLSLNotUseLegacyCBufLoad;
  opts.bAllResourcesBound = CGM.getCodeGenOpts().HLSLAllResourcesBound;
  m_pHLModule->SetHLOptions(opts);
  m_bDebugInfo =
      CGM.getCodeGenOpts().getDebugInfo() == CodeGenOptions::FullDebugInfo;
  // Set the profile.
  m_pHLModule->SetShaderModel(SM);
  // Set the entry name.
  m_pHLModule->SetEntryFunctionName(CGM.getCodeGenOpts().HLSLEntryFunction);
  // Add the global constant buffer ($Globals).
  unique_ptr<HLCBuffer> CB = std::make_unique<HLCBuffer>();
  std::string globalCBName = "$Globals";
  CB->SetGlobalSymbol(nullptr);
  CB->SetGlobalName(globalCBName);
  globalCBIndex = m_pHLModule->GetCBuffers().size();
  CB->SetID(globalCBIndex);
  CB->SetRangeSize(1);
  CB->SetLowerBound(UINT_MAX);
  DXVERIFY_NOMSG(globalCBIndex == m_pHLModule->AddCBuffer(std::move(CB)));
}

bool CGMSHLSLRuntime::IsHlslObjectType(llvm::Type *Ty) {
  return HLModule::IsHLSLObjectType(Ty);
}

void CGMSHLSLRuntime::AddHLSLIntrinsicOpcodeToFunction(Function *F,
                                                       unsigned opcode) {
  m_IntrinsicMap[F] = opcode;
}

void CGMSHLSLRuntime::CheckParameterAnnotation(
    SourceLocation SLoc, const DxilParameterAnnotation &paramInfo,
    bool isPatchConstantFunction) {
  if (!paramInfo.HasSemanticString()) {
    return;
  }
  llvm::StringRef semFullName = paramInfo.GetSemanticStringRef();
  DxilParamInputQual paramQual = paramInfo.GetParamInputQual();
  if (paramQual == DxilParamInputQual::Inout) {
    // An inout parameter is checked as both an input and an output.
    CheckParameterAnnotation(SLoc, DxilParamInputQual::In, semFullName,
                             isPatchConstantFunction);
    CheckParameterAnnotation(SLoc, DxilParamInputQual::Out, semFullName,
                             isPatchConstantFunction);
    return;
  }
  CheckParameterAnnotation(SLoc, paramQual, semFullName,
                           isPatchConstantFunction);
}

void CGMSHLSLRuntime::CheckParameterAnnotation(SourceLocation SLoc,
                                               DxilParamInputQual paramQual,
                                               llvm::StringRef semFullName,
                                               bool isPatchConstantFunction) {
  const ShaderModel *SM = m_pHLModule->GetShaderModel();
  DXIL::SigPointKind sigPoint =
      SigPointFromInputQual(paramQual, SM->GetKind(), isPatchConstantFunction);
  llvm::StringRef semName;
  unsigned semIndex;
  Semantic::DecomposeNameAndIndex(semFullName, &semName, &semIndex);
  const Semantic *pSemantic =
      Semantic::GetByName(semName, sigPoint, SM->GetMajor(), SM->GetMinor());
  if (pSemantic->IsInvalid()) {
    DiagnosticsEngine &Diags = CGM.getDiags();
    unsigned DiagID = Diags.getCustomDiagID(DiagnosticsEngine::Error,
                                            "invalid semantic '%0' for %1");
    Diags.Report(SLoc, DiagID)
        << semName << m_pHLModule->GetShaderModel()->GetKindName();
  }
}

SourceLocation
CGMSHLSLRuntime::SetSemantic(const NamedDecl *decl,
                             DxilParameterAnnotation &paramInfo) {
  for (const hlsl::UnusualAnnotation *it : decl->getUnusualAnnotations()) {
    switch (it->getKind()) {
    case hlsl::UnusualAnnotation::UA_SemanticDecl: {
      const hlsl::SemanticDecl *sd = cast<hlsl::SemanticDecl>(it);
      paramInfo.SetSemanticString(sd->SemanticName);
      return it->Loc;
    }
    }
  }
  return SourceLocation();
}

static bool HasTessFactorSemantic(const ValueDecl *decl) {
  for (const hlsl::UnusualAnnotation *it : decl->getUnusualAnnotations()) {
    switch (it->getKind()) {
    case hlsl::UnusualAnnotation::UA_SemanticDecl: {
      const hlsl::SemanticDecl *sd = cast<hlsl::SemanticDecl>(it);
      const Semantic *pSemantic = Semantic::GetByName(sd->SemanticName);
      if (pSemantic && pSemantic->GetKind() == Semantic::Kind::TessFactor)
        return true;
    }
    }
  }
  return false;
}

static bool HasTessFactorSemanticRecurse(const ValueDecl *decl, QualType Ty) {
  if (Ty->isBuiltinType() || hlsl::IsHLSLVecMatType(Ty))
    return false;

  if (const RecordType *RT = Ty->getAsStructureType()) {
    RecordDecl *RD = RT->getDecl();
    for (FieldDecl *fieldDecl : RD->fields()) {
      if (HasTessFactorSemanticRecurse(fieldDecl, fieldDecl->getType()))
        return true;
    }
    return false;
  }

  if (const clang::ArrayType *arrayTy = Ty->getAsArrayTypeUnsafe())
    return HasTessFactorSemantic(decl);

  return false;
}

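// Note (added for clarity): a function is treated as a patch constant function
// candidate when a TessFactor system-value semantic is found on its return
// type or on one of its "out" parameters; the recursion above walks struct
// fields and arrays to find such a semantic.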
// TODO: get from type annotation.
static bool IsPatchConstantFunctionDecl(const FunctionDecl *FD) {
  if (!FD->getReturnType()->isVoidType()) {
    // Try to find TessFactor in return type.
    if (HasTessFactorSemanticRecurse(FD, FD->getReturnType()))
      return true;
  }
  // Try to find TessFactor in out param.
  for (ParmVarDecl *param : FD->params()) {
    if (param->hasAttr<HLSLOutAttr>()) {
      if (HasTessFactorSemanticRecurse(param, param->getType()))
        return true;
    }
  }
  return false;
}

static DXIL::TessellatorDomain StringToDomain(StringRef domain) {
  if (domain == "isoline")
    return DXIL::TessellatorDomain::IsoLine;
  if (domain == "tri")
    return DXIL::TessellatorDomain::Tri;
  if (domain == "quad")
    return DXIL::TessellatorDomain::Quad;
  return DXIL::TessellatorDomain::Undefined;
}

static DXIL::TessellatorPartitioning StringToPartitioning(StringRef partition) {
  if (partition == "integer")
    return DXIL::TessellatorPartitioning::Integer;
  if (partition == "pow2")
    return DXIL::TessellatorPartitioning::Pow2;
  if (partition == "fractional_even")
    return DXIL::TessellatorPartitioning::FractionalEven;
  if (partition == "fractional_odd")
    return DXIL::TessellatorPartitioning::FractionalOdd;
  return DXIL::TessellatorPartitioning::Undefined;
}

static DXIL::TessellatorOutputPrimitive
StringToTessOutputPrimitive(StringRef primitive) {
  if (primitive == "point")
    return DXIL::TessellatorOutputPrimitive::Point;
  if (primitive == "line")
    return DXIL::TessellatorOutputPrimitive::Line;
  if (primitive == "triangle_cw")
    return DXIL::TessellatorOutputPrimitive::TriangleCW;
  if (primitive == "triangle_ccw")
    return DXIL::TessellatorOutputPrimitive::TriangleCCW;
  return DXIL::TessellatorOutputPrimitive::Undefined;
}

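// Note (added for clarity): the helpers below implement the legacy cbuffer
// packing rules used by this codegen. AlignTo8Bytes rounds a 4-byte-aligned
// offset up to the next 8-byte boundary when the element type is 64-bit
// (double or 64-bit integer); for example, an offset of 20 becomes 24, while
// 16 or 24 pass through unchanged.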
static unsigned AlignTo8Bytes(unsigned offset, bool b8BytesAlign) {
  DXASSERT((offset & 0x3) == 0, "offset should be divisible by 4");
  if (!b8BytesAlign)
    return offset;
  else if ((offset & 0x7) == 0)
    return offset;
  else
    return offset + 4;
}

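// Note (added for clarity): AlignBaseOffset enforces the 16-byte register rule
// of the legacy cbuffer layout: when the current offset is not
// register-aligned, a field that would not fit in the rest of the register
// (remainder + size > 16), an array, or a matrix that occupies more than one
// register row is pushed to the next 16-byte boundary. For example, a float4
// at offset 4 is moved to offset 16, while a float2 at offset 4 stays at 4.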
static unsigned AlignBaseOffset(unsigned baseOffset, unsigned size,
                                QualType Ty, bool bDefaultRowMajor) {
  bool b8BytesAlign = false;
  if (Ty->isBuiltinType()) {
    const clang::BuiltinType *BT = Ty->getAs<clang::BuiltinType>();
    if (BT->getKind() == clang::BuiltinType::Kind::Double ||
        BT->getKind() == clang::BuiltinType::Kind::LongLong)
      b8BytesAlign = true;
  }
  if (unsigned remainder = (baseOffset & 0xf)) {
    // Align to 4 x 4 bytes.
    unsigned aligned = baseOffset - remainder + 16;
    // If the field cannot fit in the rest of the register, it needs alignment.
    bool bNeedAlign = (remainder + size) > 16;
    // Arrays always start aligned.
    bNeedAlign |= Ty->isArrayType();
    if (IsHLSLMatType(Ty)) {
      bool bColMajor = !bDefaultRowMajor;
      if (const AttributedType *AT = dyn_cast<AttributedType>(Ty)) {
        switch (AT->getAttrKind()) {
        case AttributedType::Kind::attr_hlsl_column_major:
          bColMajor = true;
          break;
        case AttributedType::Kind::attr_hlsl_row_major:
          bColMajor = false;
          break;
        default:
          // Do nothing.
          break;
        }
      }
      unsigned row, col;
      hlsl::GetHLSLMatRowColCount(Ty, row, col);
      // A matrix that spans more than one register row must start aligned.
      bNeedAlign |= bColMajor && col > 1;
      bNeedAlign |= !bColMajor && row > 1;
    }
    if (bNeedAlign)
      return AlignTo8Bytes(aligned, b8BytesAlign);
    else
      return AlignTo8Bytes(baseOffset, b8BytesAlign);
  } else
    return baseOffset;
}

static unsigned AlignBaseOffset(QualType Ty, unsigned baseOffset,
                                bool bDefaultRowMajor,
                                CodeGen::CodeGenModule &CGM,
                                llvm::DataLayout &layout) {
  QualType paramTy = Ty.getCanonicalType();
  if (const ReferenceType *RefType = dyn_cast<ReferenceType>(paramTy))
    paramTy = RefType->getPointeeType();
  // Get size.
  llvm::Type *Type = CGM.getTypes().ConvertType(paramTy);
  unsigned size = layout.getTypeAllocSize(Type);
  return AlignBaseOffset(baseOffset, size, paramTy, bDefaultRowMajor);
}

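// Note (added for clarity): in the legacy layout every matrix register row
// (column for column-major) starts on a 16-byte boundary (32 bytes when a row
// holds three or four 64-bit elements), and only the last row occupies its
// actual size. For example, the code below yields 16 * 3 + 16 = 64 bytes for a
// row_major float4x4 and 16 * 2 + 12 = 44 bytes for a row_major float3x3.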
static unsigned GetMatrixSizeInCB(QualType Ty, bool defaultRowMajor,
                                  bool b64Bit) {
  bool bColMajor = !defaultRowMajor;
  if (const AttributedType *AT = dyn_cast<AttributedType>(Ty)) {
    switch (AT->getAttrKind()) {
    case AttributedType::Kind::attr_hlsl_column_major:
      bColMajor = true;
      break;
    case AttributedType::Kind::attr_hlsl_row_major:
      bColMajor = false;
      break;
    default:
      // Do nothing.
      break;
    }
  }
  unsigned row, col;
  hlsl::GetHLSLMatRowColCount(Ty, row, col);
  unsigned EltSize = b64Bit ? 8 : 4;
  // Align to 4 x 4 bytes.
  unsigned alignment = 4 * 4;
  if (bColMajor) {
    unsigned rowSize = EltSize * row;
    // 3x64bit or 4x64bit columns align to 32 bytes.
    if (rowSize > alignment)
      alignment <<= 1;
    return alignment * (col - 1) + row * EltSize;
  } else {
    unsigned rowSize = EltSize * col;
    // 3x64bit or 4x64bit rows align to 32 bytes.
    if (rowSize > alignment)
      alignment <<= 1;
    return alignment * (row - 1) + col * EltSize;
  }
}

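// Note (added for clarity): BuiltinTyToCompTy maps clang builtin kinds to DXIL
// component types. Minimum-precision types (Min12Int, Min10Float) lower to the
// 16-bit kinds, bool becomes I1, and the snorm/unorm flags only change the
// resulting kind for the floating-point cases.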
static CompType::Kind BuiltinTyToCompTy(const BuiltinType *BTy, bool bSNorm,
                                        bool bUNorm) {
  CompType::Kind kind = CompType::Kind::Invalid;
  switch (BTy->getKind()) {
  case BuiltinType::UInt:
    kind = CompType::Kind::U32;
    break;
  case BuiltinType::UShort:
    kind = CompType::Kind::U16;
    break;
  case BuiltinType::ULongLong:
    kind = CompType::Kind::U64;
    break;
  case BuiltinType::Int:
    kind = CompType::Kind::I32;
    break;
  case BuiltinType::Min12Int:
  case BuiltinType::Short:
    kind = CompType::Kind::I16;
    break;
  case BuiltinType::LongLong:
    kind = CompType::Kind::I64;
    break;
  case BuiltinType::Min10Float:
  case BuiltinType::Half:
    if (bSNorm)
      kind = CompType::Kind::SNormF16;
    else if (bUNorm)
      kind = CompType::Kind::UNormF16;
    else
      kind = CompType::Kind::F16;
    break;
  case BuiltinType::Float:
    if (bSNorm)
      kind = CompType::Kind::SNormF32;
    else if (bUNorm)
      kind = CompType::Kind::UNormF32;
    else
      kind = CompType::Kind::F32;
    break;
  case BuiltinType::Double:
    if (bSNorm)
      kind = CompType::Kind::SNormF64;
    else if (bUNorm)
      kind = CompType::Kind::UNormF64;
    else
      kind = CompType::Kind::F64;
    break;
  case BuiltinType::Bool:
    kind = CompType::Kind::I1;
    break;
  }
  return kind;
}

static void
ConstructFieldAttributedAnnotation(DxilFieldAnnotation &fieldAnnotation,
                                   QualType fieldTy, bool bDefaultRowMajor) {
  QualType Ty = fieldTy;
  if (Ty->isReferenceType())
    Ty = Ty.getNonReferenceType();

  // Get element type.
  if (Ty->isArrayType()) {
    while (isa<clang::ArrayType>(Ty)) {
      const clang::ArrayType *ATy = dyn_cast<clang::ArrayType>(Ty);
      Ty = ATy->getElementType();
    }
  }

  QualType EltTy = Ty;
  if (hlsl::IsHLSLMatType(Ty)) {
    DxilMatrixAnnotation Matrix;
    Matrix.Orientation = bDefaultRowMajor ? MatrixOrientation::RowMajor
                                          : MatrixOrientation::ColumnMajor;
    if (const AttributedType *AT = dyn_cast<AttributedType>(Ty)) {
      switch (AT->getAttrKind()) {
      case AttributedType::Kind::attr_hlsl_column_major:
        Matrix.Orientation = MatrixOrientation::ColumnMajor;
        break;
      case AttributedType::Kind::attr_hlsl_row_major:
        Matrix.Orientation = MatrixOrientation::RowMajor;
        break;
      default:
        // Do nothing.
        break;
      }
    }
    unsigned row, col;
    hlsl::GetHLSLMatRowColCount(Ty, row, col);
    Matrix.Cols = col;
    Matrix.Rows = row;
    fieldAnnotation.SetMatrixAnnotation(Matrix);
    EltTy = hlsl::GetHLSLMatElementType(Ty);
  }

  if (hlsl::IsHLSLVecType(Ty))
    EltTy = hlsl::GetHLSLVecElementType(Ty);

  bool bSNorm = false;
  bool bUNorm = false;
  if (const AttributedType *AT = dyn_cast<AttributedType>(Ty)) {
    switch (AT->getAttrKind()) {
    case AttributedType::Kind::attr_hlsl_snorm:
      bSNorm = true;
      break;
    case AttributedType::Kind::attr_hlsl_unorm:
      bUNorm = true;
      break;
    default:
      // Do nothing.
      break;
    }
  }

  if (EltTy->isBuiltinType()) {
    const BuiltinType *BTy = EltTy->getAs<BuiltinType>();
    CompType::Kind kind = BuiltinTyToCompTy(BTy, bSNorm, bUNorm);
    fieldAnnotation.SetCompType(kind);
  } else {
    DXASSERT(!bSNorm && !bUNorm,
             "snorm/unorm on invalid type, validate at handleHLSLTypeAttr");
  }
}

static void ConstructFieldInterpolation(DxilFieldAnnotation &fieldAnnotation,
                                        FieldDecl *fieldDecl) {
  // Keep the interpolation mode undefined here unless an attribute sets it.
  InterpolationMode InterpMode = {fieldDecl->hasAttr<HLSLNoInterpolationAttr>(),
                                  fieldDecl->hasAttr<HLSLLinearAttr>(),
                                  fieldDecl->hasAttr<HLSLNoPerspectiveAttr>(),
                                  fieldDecl->hasAttr<HLSLCentroidAttr>(),
                                  fieldDecl->hasAttr<HLSLSampleAttr>()};
  if (InterpMode.GetKind() != InterpolationMode::Kind::Undefined)
    fieldAnnotation.SetInterpolationMode(InterpMode);
}

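// Note (added for clarity): ConstructStructAnnotation walks base classes and
// then fields, computing a legacy cbuffer offset for each and recording it in
// the struct's DxilStructAnnotation. A packoffset annotation overrides the
// computed offset: the register index (Subcomponent) is scaled by four
// components, the component offset is added, and the total is converted to
// bytes; packoffset(c2.y) appears to resolve to (2 * 4 + 1) * 4 = 36 bytes.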
unsigned
CGMSHLSLRuntime::ConstructStructAnnotation(DxilStructAnnotation *annotation,
                                           const RecordDecl *RD,
                                           DxilTypeSystem &dxilTypeSys) {
  unsigned fieldIdx = 0;
  unsigned offset = 0;
  bool bDefaultRowMajor = m_pHLModule->GetHLOptions().bDefaultRowMajor;

  if (const CXXRecordDecl *CXXRD = dyn_cast<CXXRecordDecl>(RD)) {
    if (CXXRD->getNumBases()) {
      // Add bases as fields.
      for (const auto &I : CXXRD->bases()) {
        const CXXRecordDecl *BaseDecl =
            cast<CXXRecordDecl>(I.getType()->castAs<RecordType>()->getDecl());
        std::string fieldSemName = "";
        QualType parentTy = QualType(BaseDecl->getTypeForDecl(), 0);
        // Align the offset.
        offset = AlignBaseOffset(parentTy, offset, bDefaultRowMajor, CGM,
                                 legacyLayout);
        unsigned CBufferOffset = offset;
        unsigned arrayEltSize = 0;
        // Process the base so its size is available.
        unsigned size = AddTypeAnnotation(parentTy, dxilTypeSys, arrayEltSize);
        // Update the offset.
        offset += size;
        if (size > 0) {
          DxilFieldAnnotation &fieldAnnotation =
              annotation->GetFieldAnnotation(fieldIdx++);
          fieldAnnotation.SetCBufferOffset(CBufferOffset);
          fieldAnnotation.SetFieldName(BaseDecl->getNameAsString());
        }
      }
    }
  }

  for (auto fieldDecl : RD->fields()) {
    std::string fieldSemName = "";
    QualType fieldTy = fieldDecl->getType();
    // Align the offset.
    offset =
        AlignBaseOffset(fieldTy, offset, bDefaultRowMajor, CGM, legacyLayout);
    unsigned CBufferOffset = offset;
    bool userOffset = false;
    // Try to get info from the fieldDecl.
    for (const hlsl::UnusualAnnotation *it :
         fieldDecl->getUnusualAnnotations()) {
      switch (it->getKind()) {
      case hlsl::UnusualAnnotation::UA_SemanticDecl: {
        const hlsl::SemanticDecl *sd = cast<hlsl::SemanticDecl>(it);
        fieldSemName = sd->SemanticName;
      } break;
      case hlsl::UnusualAnnotation::UA_ConstantPacking: {
        const hlsl::ConstantPacking *cp = cast<hlsl::ConstantPacking>(it);
        CBufferOffset = cp->Subcomponent << 2;
        CBufferOffset += cp->ComponentOffset;
        // Change components to bytes.
        CBufferOffset <<= 2;
        userOffset = true;
      } break;
      case hlsl::UnusualAnnotation::UA_RegisterAssignment: {
        // Register assignment only works on global constants.
        DiagnosticsEngine &Diags = CGM.getDiags();
        unsigned DiagID = Diags.getCustomDiagID(
            DiagnosticsEngine::Error,
            "location semantics cannot be specified on members.");
        Diags.Report(it->Loc, DiagID);
        return 0;
      } break;
      default:
        llvm_unreachable("only semantic for input/output");
        break;
      }
    }
    unsigned arrayEltSize = 0;
    // Process the field so its size is available.
    unsigned size =
        AddTypeAnnotation(fieldDecl->getType(), dxilTypeSys, arrayEltSize);
    // Update the offset.
    offset += size;

    DxilFieldAnnotation &fieldAnnotation =
        annotation->GetFieldAnnotation(fieldIdx++);
    ConstructFieldAttributedAnnotation(fieldAnnotation, fieldTy,
                                       bDefaultRowMajor);
    ConstructFieldInterpolation(fieldAnnotation, fieldDecl);
    if (fieldDecl->hasAttr<HLSLPreciseAttr>())
      fieldAnnotation.SetPrecise();
    fieldAnnotation.SetCBufferOffset(CBufferOffset);
    fieldAnnotation.SetFieldName(fieldDecl->getName());
    if (!fieldSemName.empty())
      fieldAnnotation.SetSemanticString(fieldSemName);
  }

  annotation->SetCBufferSize(offset);
  if (offset == 0) {
    annotation->MarkEmptyStruct();
  }
  return offset;
}

static bool IsElementInputOutputType(QualType Ty) {
  return Ty->isBuiltinType() || hlsl::IsHLSLVecMatType(Ty);
}

// Return the size for constant buffer of each decl.
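// Note (added for clarity): AddTypeAnnotation recurses through patch, stream,
// resource, record, and array types, creating DxilStructAnnotations as needed.
// For arrays, every element except the last is padded to a 16-byte multiple,
// so the size is alignedSize * (arraySize - 1) + elementSize; a float2[3], for
// instance, occupies 16 * 2 + 8 = 40 bytes.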
unsigned CGMSHLSLRuntime::AddTypeAnnotation(QualType Ty,
                                            DxilTypeSystem &dxilTypeSys,
                                            unsigned &arrayEltSize) {
  QualType paramTy = Ty.getCanonicalType();
  if (const ReferenceType *RefType = dyn_cast<ReferenceType>(paramTy))
    paramTy = RefType->getPointeeType();

  // Get the size.
  llvm::Type *Type = CGM.getTypes().ConvertType(paramTy);
  unsigned size = legacyLayout.getTypeAllocSize(Type);
  if (IsHLSLMatType(Ty)) {
    unsigned col, row;
    llvm::Type *EltTy = HLMatrixLower::GetMatrixInfo(Type, col, row);
    bool b64Bit = legacyLayout.getTypeAllocSize(EltTy) == 8;
    size = GetMatrixSizeInCB(Ty, m_pHLModule->GetHLOptions().bDefaultRowMajor,
                             b64Bit);
  }
  // Skip element types.
  if (IsElementInputOutputType(paramTy))
    return size;
  else if (IsHLSLStreamOutputType(Ty)) {
    return AddTypeAnnotation(GetHLSLOutputPatchElementType(Ty), dxilTypeSys,
                             arrayEltSize);
  } else if (IsHLSLInputPatchType(Ty))
    return AddTypeAnnotation(GetHLSLInputPatchElementType(Ty), dxilTypeSys,
                             arrayEltSize);
  else if (IsHLSLOutputPatchType(Ty))
    return AddTypeAnnotation(GetHLSLOutputPatchElementType(Ty), dxilTypeSys,
                             arrayEltSize);
  else if (const RecordType *RT = paramTy->getAsStructureType()) {
    RecordDecl *RD = RT->getDecl();
    llvm::StructType *ST = CGM.getTypes().ConvertRecordDeclType(RD);
    // Skip if already created.
    if (DxilStructAnnotation *annotation =
            dxilTypeSys.GetStructAnnotation(ST)) {
      unsigned structSize = annotation->GetCBufferSize();
      return structSize;
    }
    DxilStructAnnotation *annotation = dxilTypeSys.AddStructAnnotation(ST);
    return ConstructStructAnnotation(annotation, RD, dxilTypeSys);
  } else if (const RecordType *RT = dyn_cast<RecordType>(paramTy)) {
    // For the 'this' pointer.
    RecordDecl *RD = RT->getDecl();
    llvm::StructType *ST = CGM.getTypes().ConvertRecordDeclType(RD);
    // Skip if already created.
    if (DxilStructAnnotation *annotation =
            dxilTypeSys.GetStructAnnotation(ST)) {
      unsigned structSize = annotation->GetCBufferSize();
      return structSize;
    }
    DxilStructAnnotation *annotation = dxilTypeSys.AddStructAnnotation(ST);
    return ConstructStructAnnotation(annotation, RD, dxilTypeSys);
  } else if (IsHLSLResouceType(Ty))
    return AddTypeAnnotation(GetHLSLResourceResultType(Ty), dxilTypeSys,
                             arrayEltSize);
  else {
    unsigned arraySize = 0;
    QualType arrayElementTy = Ty;
    if (Ty->isConstantArrayType()) {
      const ConstantArrayType *arrayTy =
          CGM.getContext().getAsConstantArrayType(Ty);
      DXASSERT(arrayTy != nullptr, "Must array type here");
      arraySize = arrayTy->getSize().getLimitedValue();
      arrayElementTy = arrayTy->getElementType();
    } else if (Ty->isIncompleteArrayType()) {
      const IncompleteArrayType *arrayTy =
          CGM.getContext().getAsIncompleteArrayType(Ty);
      arrayElementTy = arrayTy->getElementType();
    } else
      DXASSERT(0, "Must array type here");

    unsigned elementSize =
        AddTypeAnnotation(arrayElementTy, dxilTypeSys, arrayEltSize);
    // Only set arrayEltSize once.
    if (arrayEltSize == 0)
      arrayEltSize = elementSize;
    // Align to 4 x 4 bytes.
    unsigned alignedSize = (elementSize + 15) & 0xfffffff0;
    return alignedSize * (arraySize - 1) + elementSize;
  }
}

static DxilResource::Kind KeywordToKind(StringRef keyword) {
  // TODO: refactor for faster search (switch by 1/2/3 first letters, then
  // compare).
  if (keyword == "Texture1D" || keyword == "RWTexture1D" ||
      keyword == "RasterizerOrderedTexture1D")
    return DxilResource::Kind::Texture1D;
  if (keyword == "Texture2D" || keyword == "RWTexture2D" ||
      keyword == "RasterizerOrderedTexture2D")
    return DxilResource::Kind::Texture2D;
  if (keyword == "Texture2DMS" || keyword == "RWTexture2DMS")
    return DxilResource::Kind::Texture2DMS;
  if (keyword == "Texture3D" || keyword == "RWTexture3D" ||
      keyword == "RasterizerOrderedTexture3D")
    return DxilResource::Kind::Texture3D;
  if (keyword == "TextureCube" || keyword == "RWTextureCube")
    return DxilResource::Kind::TextureCube;
  if (keyword == "Texture1DArray" || keyword == "RWTexture1DArray" ||
      keyword == "RasterizerOrderedTexture1DArray")
    return DxilResource::Kind::Texture1DArray;
  if (keyword == "Texture2DArray" || keyword == "RWTexture2DArray" ||
      keyword == "RasterizerOrderedTexture2DArray")
    return DxilResource::Kind::Texture2DArray;
  if (keyword == "Texture2DMSArray" || keyword == "RWTexture2DMSArray")
    return DxilResource::Kind::Texture2DMSArray;
  if (keyword == "TextureCubeArray" || keyword == "RWTextureCubeArray")
    return DxilResource::Kind::TextureCubeArray;
  if (keyword == "ByteAddressBuffer" || keyword == "RWByteAddressBuffer" ||
      keyword == "RasterizerOrderedByteAddressBuffer")
    return DxilResource::Kind::RawBuffer;
  if (keyword == "StructuredBuffer" || keyword == "RWStructuredBuffer" ||
      keyword == "RasterizerOrderedStructuredBuffer")
    return DxilResource::Kind::StructuredBuffer;
  if (keyword == "AppendStructuredBuffer" ||
      keyword == "ConsumeStructuredBuffer")
    return DxilResource::Kind::StructuredBuffer;
  // TODO: this is not efficient.
  bool isBuffer = keyword == "Buffer";
  isBuffer |= keyword == "RWBuffer";
  isBuffer |= keyword == "RasterizerOrderedBuffer";
  if (isBuffer)
    return DxilResource::Kind::TypedBuffer;
  return DxilResource::Kind::Invalid;
}

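// Note (added for clarity): AddHLSLFunctionInfo has two paths. Intrinsic
// functions get their opcode recorded, the HL prefix attribute, an optional
// lowering strategy, and (for methods on resource types) a resource type
// annotation, then return early. All other functions are checked against the
// entry name and the patch-constant-function heuristic before shader
// properties are collected.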
  833. void CGMSHLSLRuntime::AddHLSLFunctionInfo(Function *F, const FunctionDecl *FD) {
  834. // Add hlsl intrinsic attr
  835. unsigned intrinsicOpcode;
  836. StringRef intrinsicGroup;
  837. if (hlsl::GetIntrinsicOp(FD, intrinsicOpcode, intrinsicGroup)) {
  838. AddHLSLIntrinsicOpcodeToFunction(F, intrinsicOpcode);
  839. F->addFnAttr(hlsl::HLPrefix, intrinsicGroup);
  840. // Save resource type annotation.
  841. if (const CXXMethodDecl *MD = dyn_cast<CXXMethodDecl>(FD)) {
  842. const CXXRecordDecl *RD = MD->getParent();
  843. // For nested case like sample_slice_type.
  844. if (const CXXRecordDecl *PRD = dyn_cast<CXXRecordDecl>(RD->getDeclContext())) {
  845. RD = PRD;
  846. }
  847. QualType recordTy = MD->getASTContext().getRecordType(RD);
  848. hlsl::DxilResourceBase::Class resClass = TypeToClass(recordTy);
  849. llvm::Type *Ty = F->getFunctionType()->params()[0]->getPointerElementType();
  850. // Add resource type annotation.
  851. switch (resClass) {
  852. case DXIL::ResourceClass::Sampler:
  853. m_pHLModule->AddResourceTypeAnnotation(Ty, DXIL::ResourceClass::Sampler,
  854. DXIL::ResourceKind::Sampler);
  855. break;
  856. case DXIL::ResourceClass::UAV:
  857. case DXIL::ResourceClass::SRV: {
  858. hlsl::DxilResource::Kind kind = KeywordToKind(RD->getName());
  859. m_pHLModule->AddResourceTypeAnnotation(Ty, resClass, kind);
  860. } break;
  861. }
  862. }
  863. StringRef lower;
  864. if (hlsl::GetIntrinsicLowering(FD, lower))
  865. hlsl::SetHLLowerStrategy(F, lower);
    // No need to add a FunctionQual for intrinsic functions.
  867. return;
  868. }
  869. // Set entry function
  870. const std::string &entryName = m_pHLModule->GetEntryFunctionName();
  871. bool isEntry = FD->getNameAsString() == entryName;
  872. if (isEntry)
  873. EntryFunc = F;
  874. std::unique_ptr<HLFunctionProps> funcProps = std::make_unique<HLFunctionProps>();
  875. // Save patch constant function to patchConstantFunctionMap.
  876. bool isPatchConstantFunction = false;
  877. if (IsPatchConstantFunctionDecl(FD)) {
  878. isPatchConstantFunction = true;
  879. if (patchConstantFunctionMap.count(FD->getName()) == 0)
  880. patchConstantFunctionMap[FD->getName()] = F;
  881. else {
      // TODO: This is not the same as how fxc handles patch constant
      // functions. This will fail if more than one function with the same name
      // has an SV_TessFactor semantic. Fxc simply selects the last defined
      // function with the matching name when it is referenced by the
      // patchconstantfunc attribute of the hull shader currently being
      // compiled.
      // Report error.
  887. DiagnosticsEngine &Diags = CGM.getDiags();
  888. unsigned DiagID =
  889. Diags.getCustomDiagID(DiagnosticsEngine::Error,
  890. "Multiple definitions for patchconstantfunc.");
  891. Diags.Report(FD->getLocation(), DiagID);
  892. return;
  893. }
  894. for (Argument &arg : F->getArgumentList()) {
  895. const ParmVarDecl *parmDecl = FD->getParamDecl(arg.getArgNo());
  896. QualType Ty = parmDecl->getType();
  897. if (IsHLSLOutputPatchType(Ty)) {
  898. funcProps->ShaderProps.HS.outputControlPoints =
  899. GetHLSLOutputPatchCount(parmDecl->getType());
  900. } else if (IsHLSLInputPatchType(Ty)) {
  901. funcProps->ShaderProps.HS.inputControlPoints =
  902. GetHLSLInputPatchCount(parmDecl->getType());
  903. }
  904. }
  905. }
  906. const ShaderModel *SM = m_pHLModule->GetShaderModel();
  907. // TODO: how to know VS/PS?
  908. funcProps->shaderKind = DXIL::ShaderKind::Invalid;
  909. DiagnosticsEngine &Diags = CGM.getDiags();
  910. // Geometry shader.
  911. bool isGS = false;
  912. if (const HLSLMaxVertexCountAttr *Attr =
  913. FD->getAttr<HLSLMaxVertexCountAttr>()) {
  914. isGS = true;
  915. funcProps->shaderKind = DXIL::ShaderKind::Geometry;
  916. funcProps->ShaderProps.GS.maxVertexCount = Attr->getCount();
  917. funcProps->ShaderProps.GS.inputPrimitive = DXIL::InputPrimitive::Undefined;
  918. if (isEntry && !SM->IsGS()) {
  919. unsigned DiagID = Diags.getCustomDiagID(DiagnosticsEngine::Error,
  920. "attribute maxvertexcount only valid for GS.");
  921. Diags.Report(Attr->getLocation(), DiagID);
  922. return;
  923. }
  924. }
  925. if (const HLSLInstanceAttr *Attr = FD->getAttr<HLSLInstanceAttr>()) {
  926. unsigned instanceCount = Attr->getCount();
  927. funcProps->ShaderProps.GS.instanceCount = instanceCount;
  928. if (isEntry && !SM->IsGS()) {
      unsigned DiagID = Diags.getCustomDiagID(
          DiagnosticsEngine::Error, "attribute instance only valid for GS.");
  931. Diags.Report(Attr->getLocation(), DiagID);
  932. return;
  933. }
  934. }
  935. else {
  936. // Set default instance count.
  937. if (isGS)
  938. funcProps->ShaderProps.GS.instanceCount = 1;
  939. }
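  // A geometry-shader entry point exercising the attributes handled above
  // might look like this (hypothetical HLSL; VSOut/GSOut are placeholders):
  //   [maxvertexcount(3)]
  //   [instance(2)]
  //   void main(triangle VSOut input[3], inout TriangleStream<GSOut> stream);
  // maxvertexcount(3) sets GS.maxVertexCount and instance(2) sets
  // GS.instanceCount; without [instance], instanceCount defaults to 1.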
  // Compute shader.
  941. bool isCS = false;
  942. if (const HLSLNumThreadsAttr *Attr = FD->getAttr<HLSLNumThreadsAttr>()) {
  943. isCS = true;
  944. funcProps->shaderKind = DXIL::ShaderKind::Compute;
  945. funcProps->ShaderProps.CS.numThreads[0] = Attr->getX();
  946. funcProps->ShaderProps.CS.numThreads[1] = Attr->getY();
  947. funcProps->ShaderProps.CS.numThreads[2] = Attr->getZ();
  948. if (isEntry && !SM->IsCS()) {
  949. unsigned DiagID = Diags.getCustomDiagID(
  950. DiagnosticsEngine::Error, "attribute numthreads only valid for CS.");
  951. Diags.Report(Attr->getLocation(), DiagID);
  952. return;
  953. }
  954. }
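  // Illustrative compute entry point for the numthreads handling above
  // (type and semantic names are placeholders):
  //   [numthreads(8, 8, 1)]
  //   void main(uint3 dtid : SV_DispatchThreadID) { ... }
  // This fills CS.numThreads with {8, 8, 1}.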
  955. // Hull shader.
  956. bool isHS = false;
  957. if (const HLSLPatchConstantFuncAttr *Attr =
  958. FD->getAttr<HLSLPatchConstantFuncAttr>()) {
  959. if (isEntry && !SM->IsHS()) {
  960. unsigned DiagID = Diags.getCustomDiagID(
  961. DiagnosticsEngine::Error,
  962. "attribute patchconstantfunc only valid for HS.");
  963. Diags.Report(Attr->getLocation(), DiagID);
  964. return;
  965. }
  966. isHS = true;
  967. funcProps->shaderKind = DXIL::ShaderKind::Hull;
  968. StringRef funcName = Attr->getFunctionName();
  969. if (patchConstantFunctionMap.count(funcName) == 1) {
  970. Function *patchConstFunc = patchConstantFunctionMap[funcName];
  971. funcProps->ShaderProps.HS.patchConstantFunc = patchConstFunc;
  972. DXASSERT_NOMSG(m_pHLModule->HasHLFunctionProps(patchConstFunc));
  973. // Check no inout parameter for patch constant function.
  974. DxilFunctionAnnotation *patchConstFuncAnnotation =
  975. m_pHLModule->GetFunctionAnnotation(patchConstFunc);
  976. for (unsigned i = 0; i < patchConstFuncAnnotation->GetNumParameters();
  977. i++) {
  978. if (patchConstFuncAnnotation->GetParameterAnnotation(i)
  979. .GetParamInputQual() == DxilParamInputQual::Inout) {
  980. unsigned DiagID = Diags.getCustomDiagID(
  981. DiagnosticsEngine::Error,
  982. "Patch Constant function should not have inout param.");
  983. Diags.Report(Attr->getLocation(), DiagID);
  984. return;
  985. }
  986. }
  987. } else {
      // TODO: Bring this in line with fxc behavior. In fxc, patchconstantfunc
      // selection is based only on name (the last function with a matching
      // name), not on whether it has an SV_TessFactor output.
      //// Report error
      // DiagnosticsEngine &Diags = CGM.getDiags();
      // unsigned DiagID = Diags.getCustomDiagID(DiagnosticsEngine::Error,
      //     "Cannot find patchconstantfunc.");
      // Diags.Report(Attr->getLocation(), DiagID);
    }
  998. }
  999. if (const HLSLOutputControlPointsAttr *Attr =
  1000. FD->getAttr<HLSLOutputControlPointsAttr>()) {
  1001. if (isHS) {
  1002. funcProps->ShaderProps.HS.outputControlPoints = Attr->getCount();
  1003. } else if (isEntry && !SM->IsHS()) {
  1004. unsigned DiagID = Diags.getCustomDiagID(
  1005. DiagnosticsEngine::Error,
  1006. "attribute outputcontrolpoints only valid for HS.");
  1007. Diags.Report(Attr->getLocation(), DiagID);
  1008. return;
  1009. }
  1010. }
  1011. if (const HLSLPartitioningAttr *Attr = FD->getAttr<HLSLPartitioningAttr>()) {
  1012. if (isHS) {
  1013. DXIL::TessellatorPartitioning partition =
  1014. StringToPartitioning(Attr->getScheme());
  1015. funcProps->ShaderProps.HS.partition = partition;
  1016. } else if (isEntry && !SM->IsHS()) {
  1017. unsigned DiagID =
  1018. Diags.getCustomDiagID(DiagnosticsEngine::Warning,
  1019. "attribute partitioning only valid for HS.");
  1020. Diags.Report(Attr->getLocation(), DiagID);
  1021. }
  1022. }
  1023. if (const HLSLOutputTopologyAttr *Attr =
  1024. FD->getAttr<HLSLOutputTopologyAttr>()) {
  1025. if (isHS) {
  1026. DXIL::TessellatorOutputPrimitive primitive =
  1027. StringToTessOutputPrimitive(Attr->getTopology());
  1028. funcProps->ShaderProps.HS.outputPrimitive = primitive;
  1029. } else if (isEntry && !SM->IsHS()) {
  1030. unsigned DiagID =
  1031. Diags.getCustomDiagID(DiagnosticsEngine::Warning,
  1032. "attribute outputtopology only valid for HS.");
  1033. Diags.Report(Attr->getLocation(), DiagID);
  1034. }
  1035. }
  1036. if (isHS) {
  1037. funcProps->ShaderProps.HS.maxTessFactor = DXIL::kHSMaxTessFactorUpperBound;
  1038. }
  1039. if (const HLSLMaxTessFactorAttr *Attr =
  1040. FD->getAttr<HLSLMaxTessFactorAttr>()) {
  1041. if (isHS) {
  1042. // TODO: change getFactor to return float.
  1043. llvm::APInt intV(32, Attr->getFactor());
  1044. funcProps->ShaderProps.HS.maxTessFactor = intV.bitsToFloat();
  1045. } else if (isEntry && !SM->IsHS()) {
  1046. unsigned DiagID =
  1047. Diags.getCustomDiagID(DiagnosticsEngine::Error,
  1048. "attribute maxtessfactor only valid for HS.");
  1049. Diags.Report(Attr->getLocation(), DiagID);
  1050. return;
  1051. }
  1052. }
  1053. // Hull or domain shader.
  1054. bool isDS = false;
  1055. if (const HLSLDomainAttr *Attr = FD->getAttr<HLSLDomainAttr>()) {
  1056. if (isEntry && !SM->IsHS() && !SM->IsDS()) {
  1057. unsigned DiagID =
  1058. Diags.getCustomDiagID(DiagnosticsEngine::Error,
  1059. "attribute domain only valid for HS or DS.");
  1060. Diags.Report(Attr->getLocation(), DiagID);
  1061. return;
  1062. }
  1063. isDS = !isHS;
  1064. if (isDS)
  1065. funcProps->shaderKind = DXIL::ShaderKind::Domain;
  1066. DXIL::TessellatorDomain domain = StringToDomain(Attr->getDomainType());
  1067. if (isHS)
  1068. funcProps->ShaderProps.HS.domain = domain;
  1069. else
  1070. funcProps->ShaderProps.DS.domain = domain;
  1071. }
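  // A hull-shader entry point combining the attributes handled above might be
  // (hypothetical HLSL; struct and function names are illustrative):
  //   [domain("tri")]
  //   [partitioning("fractional_odd")]
  //   [outputtopology("triangle_cw")]
  //   [outputcontrolpoints(3)]
  //   [patchconstantfunc("PatchConstFunc")]
  //   [maxtessfactor(15.0)]
  //   HSOut main(InputPatch<VSOut, 3> patch, uint i : SV_OutputControlPointID);
  // The [domain] attribute is shared with domain shaders, which is why isDS is
  // only set when the function is not already a hull shader.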
  1072. // Vertex shader.
  1073. bool isVS = false;
  1074. if (const HLSLClipPlanesAttr *Attr = FD->getAttr<HLSLClipPlanesAttr>()) {
  1075. if (isEntry && !SM->IsVS()) {
  1076. unsigned DiagID =
  1077. Diags.getCustomDiagID(DiagnosticsEngine::Error,
  1078. "attribute clipplane only valid for VS.");
  1079. Diags.Report(Attr->getLocation(), DiagID);
  1080. return;
  1081. }
  1082. isVS = true;
    // The real work is done in EmitHLSLFunctionProlog, where debug info is
    // available. Only set the shader kind here.
  1085. funcProps->shaderKind = DXIL::ShaderKind::Vertex;
  1086. }
  1087. // Pixel shader.
  1088. bool isPS = false;
  1089. if (const HLSLEarlyDepthStencilAttr *Attr = FD->getAttr<HLSLEarlyDepthStencilAttr>()) {
  1090. if (isEntry && !SM->IsPS()) {
  1091. unsigned DiagID =
  1092. Diags.getCustomDiagID(DiagnosticsEngine::Error,
  1093. "attribute earlydepthstencil only valid for PS.");
  1094. Diags.Report(Attr->getLocation(), DiagID);
  1095. return;
  1096. }
  1097. isPS = true;
  1098. funcProps->ShaderProps.PS.EarlyDepthStencil = true;
  1099. funcProps->shaderKind = DXIL::ShaderKind::Pixel;
  1100. }
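  // Illustrative pixel-shader usage of the attribute handled above:
  //   [earlydepthstencil]
  //   float4 main(PSIn input) : SV_Target { ... }
  // This sets PS.EarlyDepthStencil and marks the function as a pixel shader.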
  1101. unsigned profileAttributes = 0;
  1102. if (isCS)
  1103. profileAttributes++;
  1104. if (isHS)
  1105. profileAttributes++;
  1106. if (isDS)
  1107. profileAttributes++;
  1108. if (isGS)
  1109. profileAttributes++;
  1110. if (isVS)
  1111. profileAttributes++;
  1112. if (isPS)
  1113. profileAttributes++;
  // TODO: check this in the front end and report an error.
  DXASSERT(profileAttributes < 2, "profile attributes are mutually exclusive");
  1116. if (isEntry) {
  1117. switch (funcProps->shaderKind) {
  1118. case ShaderModel::Kind::Compute:
  1119. case ShaderModel::Kind::Hull:
  1120. case ShaderModel::Kind::Domain:
  1121. case ShaderModel::Kind::Geometry:
  1122. case ShaderModel::Kind::Vertex:
  1123. case ShaderModel::Kind::Pixel:
      DXASSERT(funcProps->shaderKind == SM->GetKind(),
               "attribute profile does not match entry function profile");
  1126. break;
  1127. }
  1128. }
  1129. DxilFunctionAnnotation *FuncAnnotation = m_pHLModule->AddFunctionAnnotation(F);
  1130. // Ret Info
  1131. DxilParameterAnnotation &retTyAnnotation = FuncAnnotation->GetRetTypeAnnotation();
  1132. QualType retTy = FD->getReturnType();
  // Keep Undefined here; we cannot decide for structs.
  1134. retTyAnnotation.SetInterpolationMode(
  1135. GetInterpMode(FD, CompType::Kind::Invalid, /*bKeepUndefined*/ true)
  1136. .GetKind());
  1137. SourceLocation retTySemanticLoc = SetSemantic(FD, retTyAnnotation);
  1138. retTyAnnotation.SetParamInputQual(DxilParamInputQual::Out);
  1139. if (isEntry) {
  1140. CheckParameterAnnotation(retTySemanticLoc, retTyAnnotation, /*isPatchConstantFunction*/false);
  1141. }
  1142. bool bDefaultRowMajor = m_pHLModule->GetHLOptions().bDefaultRowMajor;
  1143. ConstructFieldAttributedAnnotation(retTyAnnotation, retTy, bDefaultRowMajor);
  1144. if (FD->hasAttr<HLSLPreciseAttr>())
  1145. retTyAnnotation.SetPrecise();
  1146. // Param Info
  1147. unsigned streamIndex = 0;
  1148. unsigned inputPatchCount = 0;
  1149. unsigned outputPatchCount = 0;
  1150. for (unsigned ArgNo = 0; ArgNo < F->arg_size(); ++ArgNo) {
  1151. unsigned ParmIdx = ArgNo;
  1152. DxilParameterAnnotation &paramAnnotation = FuncAnnotation->GetParameterAnnotation(ArgNo);
    if (isa<CXXMethodDecl>(FD)) {
      // Skip arg 0: the 'this' pointer.
      if (ArgNo == 0)
        continue;
      // Update the index for the remaining parameters.
      ParmIdx--;
    }
  1160. const ParmVarDecl *parmDecl = FD->getParamDecl(ParmIdx);
  1161. ConstructFieldAttributedAnnotation(paramAnnotation, parmDecl->getType(), bDefaultRowMajor);
  1162. if (parmDecl->hasAttr<HLSLPreciseAttr>())
  1163. paramAnnotation.SetPrecise();
    // Keep Undefined here; we cannot decide for structs.
  1165. InterpolationMode paramIM =
  1166. GetInterpMode(parmDecl, CompType::Kind::Invalid, KeepUndefinedTrue);
  1167. paramAnnotation.SetInterpolationMode(paramIM);
  1168. SourceLocation paramSemanticLoc = SetSemantic(parmDecl, paramAnnotation);
  1169. DxilParamInputQual dxilInputQ = DxilParamInputQual::In;
  1170. if (parmDecl->hasAttr<HLSLInOutAttr>())
  1171. dxilInputQ = DxilParamInputQual::Inout;
  1172. else if (parmDecl->hasAttr<HLSLOutAttr>())
  1173. dxilInputQ = DxilParamInputQual::Out;
  1174. DXIL::InputPrimitive inputPrimitive = DXIL::InputPrimitive::Undefined;
  1175. if (IsHLSLOutputPatchType(parmDecl->getType())) {
  1176. outputPatchCount++;
  1177. if (dxilInputQ != DxilParamInputQual::In) {
  1178. unsigned DiagID = Diags.getCustomDiagID(
  1179. DiagnosticsEngine::Error,
  1180. "OutputPatch should not be out/inout parameter");
  1181. Diags.Report(parmDecl->getLocation(), DiagID);
  1182. continue;
  1183. }
  1184. dxilInputQ = DxilParamInputQual::OutputPatch;
  1185. if (isDS)
  1186. funcProps->ShaderProps.DS.inputControlPoints =
  1187. GetHLSLOutputPatchCount(parmDecl->getType());
  1188. } else if (IsHLSLInputPatchType(parmDecl->getType())) {
  1189. inputPatchCount++;
  1190. if (dxilInputQ != DxilParamInputQual::In) {
  1191. unsigned DiagID = Diags.getCustomDiagID(
  1192. DiagnosticsEngine::Error,
  1193. "InputPatch should not be out/inout parameter");
  1194. Diags.Report(parmDecl->getLocation(), DiagID);
  1195. continue;
  1196. }
  1197. dxilInputQ = DxilParamInputQual::InputPatch;
  1198. if (isHS) {
  1199. funcProps->ShaderProps.HS.inputControlPoints =
  1200. GetHLSLInputPatchCount(parmDecl->getType());
  1201. } else if (isGS) {
  1202. inputPrimitive = (DXIL::InputPrimitive)(
  1203. (unsigned)DXIL::InputPrimitive::ControlPointPatch1 +
  1204. GetHLSLInputPatchCount(parmDecl->getType()) - 1);
  1205. }
  1206. } else if (IsHLSLStreamOutputType(parmDecl->getType())) {
      // TODO: validate this at ASTContext::getFunctionType in
      // AST/ASTContext.cpp.
      DXASSERT(dxilInputQ == DxilParamInputQual::Inout,
               "stream output parameter must be inout");
  1211. switch (streamIndex) {
  1212. case 0:
  1213. dxilInputQ = DxilParamInputQual::OutStream0;
  1214. break;
  1215. case 1:
  1216. dxilInputQ = DxilParamInputQual::OutStream1;
  1217. break;
  1218. case 2:
  1219. dxilInputQ = DxilParamInputQual::OutStream2;
  1220. break;
  1221. case 3:
  1222. default:
        // TODO: validate this at ASTContext::getFunctionType in
        // AST/ASTContext.cpp.
        DXASSERT(streamIndex == 3, "stream number out of bounds");
        dxilInputQ = DxilParamInputQual::OutStream3;
  1227. break;
  1228. }
  1229. DXIL::PrimitiveTopology &streamTopology =
  1230. funcProps->ShaderProps.GS.streamPrimitiveTopologies[streamIndex];
  1231. if (IsHLSLPointStreamType(parmDecl->getType()))
  1232. streamTopology = DXIL::PrimitiveTopology::PointList;
  1233. else if (IsHLSLLineStreamType(parmDecl->getType()))
  1234. streamTopology = DXIL::PrimitiveTopology::LineStrip;
  1235. else {
  1236. DXASSERT(IsHLSLTriangleStreamType(parmDecl->getType()),
  1237. "invalid StreamType");
  1238. streamTopology = DXIL::PrimitiveTopology::TriangleStrip;
  1239. }
  1240. if (streamIndex > 0) {
  1241. bool bAllPoint =
  1242. streamTopology == DXIL::PrimitiveTopology::PointList &&
  1243. funcProps->ShaderProps.GS.streamPrimitiveTopologies[0] ==
  1244. DXIL::PrimitiveTopology::PointList;
  1245. if (!bAllPoint) {
  1246. DiagnosticsEngine &Diags = CGM.getDiags();
  1247. unsigned DiagID = Diags.getCustomDiagID(
  1248. DiagnosticsEngine::Error, "when multiple GS output streams are "
  1249. "used they must be pointlists.");
  1250. Diags.Report(FD->getLocation(), DiagID);
  1251. }
  1252. }
  1253. streamIndex++;
  1254. }
  1255. unsigned GsInputArrayDim = 0;
  1256. if (parmDecl->hasAttr<HLSLTriangleAttr>()) {
  1257. inputPrimitive = DXIL::InputPrimitive::Triangle;
  1258. GsInputArrayDim = 3;
  1259. } else if (parmDecl->hasAttr<HLSLTriangleAdjAttr>()) {
  1260. inputPrimitive = DXIL::InputPrimitive::TriangleWithAdjacency;
  1261. GsInputArrayDim = 6;
  1262. } else if (parmDecl->hasAttr<HLSLPointAttr>()) {
  1263. inputPrimitive = DXIL::InputPrimitive::Point;
  1264. GsInputArrayDim = 1;
  1265. } else if (parmDecl->hasAttr<HLSLLineAdjAttr>()) {
  1266. inputPrimitive = DXIL::InputPrimitive::LineWithAdjacency;
  1267. GsInputArrayDim = 4;
  1268. } else if (parmDecl->hasAttr<HLSLLineAttr>()) {
  1269. inputPrimitive = DXIL::InputPrimitive::Line;
  1270. GsInputArrayDim = 2;
  1271. }
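    // The primitive specifier determines the required input array size, e.g.
    // (hypothetical HLSL):
    //   void main(triangle VSOut verts[3], inout PointStream<GSOut> s);  // 3
    //   void main(lineadj  VSOut verts[4], inout PointStream<GSOut> s);  // 4
    // A mismatch between the specifier and the array dimension is diagnosed
    // below.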
  1272. if (inputPrimitive != DXIL::InputPrimitive::Undefined) {
  1273. // Set to InputPrimitive for GS.
  1274. dxilInputQ = DxilParamInputQual::InputPrimitive;
  1275. if (funcProps->ShaderProps.GS.inputPrimitive ==
  1276. DXIL::InputPrimitive::Undefined) {
  1277. funcProps->ShaderProps.GS.inputPrimitive = inputPrimitive;
  1278. } else if (funcProps->ShaderProps.GS.inputPrimitive != inputPrimitive) {
  1279. DiagnosticsEngine &Diags = CGM.getDiags();
  1280. unsigned DiagID = Diags.getCustomDiagID(
  1281. DiagnosticsEngine::Error, "input parameter conflicts with geometry "
  1282. "specifier of previous input parameters");
  1283. Diags.Report(parmDecl->getLocation(), DiagID);
  1284. }
  1285. }
  1286. if (GsInputArrayDim != 0) {
  1287. QualType Ty = parmDecl->getType();
  1288. if (!Ty->isConstantArrayType()) {
  1289. DiagnosticsEngine &Diags = CGM.getDiags();
  1290. unsigned DiagID = Diags.getCustomDiagID(
  1291. DiagnosticsEngine::Error,
  1292. "input types for geometry shader must be constant size arrays");
  1293. Diags.Report(parmDecl->getLocation(), DiagID);
  1294. } else {
  1295. const ConstantArrayType *CAT = cast<ConstantArrayType>(Ty);
  1296. if (CAT->getSize().getLimitedValue() != GsInputArrayDim) {
          StringRef primitiveNames[] = {
              "invalid",     // 0
              "point",       // 1
              "line",        // 2
              "triangle",    // 3
              "lineadj",     // 4
              "invalid",     // 5
              "triangleadj", // 6
          };
          DXASSERT(GsInputArrayDim < llvm::array_lengthof(primitiveNames),
                   "Invalid array dim");
          DiagnosticsEngine &Diags = CGM.getDiags();
          unsigned DiagID = Diags.getCustomDiagID(
              DiagnosticsEngine::Error, "array dimension for %0 must be %1");
          Diags.Report(parmDecl->getLocation(), DiagID)
              << primitiveNames[GsInputArrayDim] << GsInputArrayDim;
  1313. }
  1314. }
  1315. }
  1316. paramAnnotation.SetParamInputQual(dxilInputQ);
  1317. if (isEntry) {
  1318. CheckParameterAnnotation(paramSemanticLoc, paramAnnotation, /*isPatchConstantFunction*/false);
  1319. }
  1320. }
  1321. if (inputPatchCount > 1) {
  1322. DiagnosticsEngine &Diags = CGM.getDiags();
  1323. unsigned DiagID = Diags.getCustomDiagID(
  1324. DiagnosticsEngine::Error, "may only have one InputPatch parameter");
  1325. Diags.Report(FD->getLocation(), DiagID);
  1326. }
  1327. if (outputPatchCount > 1) {
  1328. DiagnosticsEngine &Diags = CGM.getDiags();
  1329. unsigned DiagID = Diags.getCustomDiagID(
  1330. DiagnosticsEngine::Error, "may only have one OutputPatch parameter");
  1331. Diags.Report(FD->getLocation(), DiagID);
  1332. }
  1333. // Type annotation for parameters and return type.
  1334. DxilTypeSystem &dxilTypeSys = m_pHLModule->GetTypeSystem();
  1335. unsigned arrayEltSize = 0;
  1336. AddTypeAnnotation(FD->getReturnType(), dxilTypeSys, arrayEltSize);
  1337. // Type annotation for this pointer.
  1338. if (const CXXMethodDecl *MFD = dyn_cast<CXXMethodDecl>(FD)) {
  1339. const CXXRecordDecl *RD = MFD->getParent();
  1340. QualType Ty = CGM.getContext().getTypeDeclType(RD);
  1341. AddTypeAnnotation(Ty, dxilTypeSys, arrayEltSize);
  1342. }
  1343. for (const ValueDecl*param : FD->params()) {
  1344. QualType Ty = param->getType();
  1345. AddTypeAnnotation(Ty, dxilTypeSys, arrayEltSize);
  1346. }
  1347. if (isHS) {
    // Check that the patch constant function's control-point counts match.
  1349. Function *patchConstFunc = funcProps->ShaderProps.HS.patchConstantFunc;
  1350. if (m_pHLModule->HasHLFunctionProps(patchConstFunc)) {
  1351. HLFunctionProps &patchProps =
  1352. m_pHLModule->GetHLFunctionProps(patchConstFunc);
  1353. if (patchProps.ShaderProps.HS.outputControlPoints != 0 &&
  1354. patchProps.ShaderProps.HS.outputControlPoints !=
  1355. funcProps->ShaderProps.HS.outputControlPoints) {
  1356. unsigned DiagID = Diags.getCustomDiagID(
  1357. DiagnosticsEngine::Error,
  1358. "Patch constant function's output patch input "
  1359. "should have %0 elements, but has %1.");
  1360. Diags.Report(FD->getLocation(), DiagID)
  1361. << funcProps->ShaderProps.HS.outputControlPoints
  1362. << patchProps.ShaderProps.HS.outputControlPoints;
  1363. }
  1364. if (patchProps.ShaderProps.HS.inputControlPoints != 0 &&
  1365. patchProps.ShaderProps.HS.inputControlPoints !=
  1366. funcProps->ShaderProps.HS.inputControlPoints) {
  1367. unsigned DiagID =
  1368. Diags.getCustomDiagID(DiagnosticsEngine::Error,
  1369. "Patch constant function's input patch input "
  1370. "should have %0 elements, but has %1.");
  1371. Diags.Report(FD->getLocation(), DiagID)
  1372. << funcProps->ShaderProps.HS.inputControlPoints
  1373. << patchProps.ShaderProps.HS.inputControlPoints;
  1374. }
  1375. }
  1376. }
  // Only add function props when they exist.
  1378. if (profileAttributes || isPatchConstantFunction)
  1379. m_pHLModule->AddHLFunctionProps(F, funcProps);
  1380. }
  1381. void CGMSHLSLRuntime::EmitHLSLFunctionProlog(Function *F, const FunctionDecl *FD) {
  // Clip-plane support needs debug info, which is not available when the
  // function attributes are created.
  1383. if (const HLSLClipPlanesAttr *Attr = FD->getAttr<HLSLClipPlanesAttr>()) {
  1384. HLFunctionProps &funcProps = m_pHLModule->GetHLFunctionProps(F);
  1385. // Initialize to null.
  1386. memset(funcProps.ShaderProps.VS.clipPlanes, 0, sizeof(funcProps.ShaderProps.VS.clipPlanes));
  1387. // Create global for each clip plane, and use the clip plane val as init val.
  1388. auto AddClipPlane = [&](Expr *clipPlane, unsigned idx) {
  1389. if (DeclRefExpr *decl = dyn_cast<DeclRefExpr>(clipPlane)) {
  1390. const VarDecl *VD = cast<VarDecl>(decl->getDecl());
  1391. Constant *clipPlaneVal = CGM.GetAddrOfGlobalVar(VD);
  1392. funcProps.ShaderProps.VS.clipPlanes[idx] = clipPlaneVal;
  1393. if (m_bDebugInfo) {
  1394. CodeGenFunction CGF(CGM);
  1395. ApplyDebugLocation applyDebugLoc(CGF, clipPlane);
  1396. debugInfoMap[clipPlaneVal] = CGF.Builder.getCurrentDebugLocation();
  1397. }
  1398. } else {
  1399. // Must be a MemberExpr.
  1400. const MemberExpr *ME = cast<MemberExpr>(clipPlane);
  1401. CodeGenFunction CGF(CGM);
  1402. CodeGen::LValue LV = CGF.EmitMemberExpr(ME);
  1403. Value *addr = LV.getAddress();
  1404. funcProps.ShaderProps.VS.clipPlanes[idx] = cast<Constant>(addr);
  1405. if (m_bDebugInfo) {
  1406. CodeGenFunction CGF(CGM);
  1407. ApplyDebugLocation applyDebugLoc(CGF, clipPlane);
  1408. debugInfoMap[addr] = CGF.Builder.getCurrentDebugLocation();
  1409. }
  1410. }
  1411. };
  1412. if (Expr *clipPlane = Attr->getClipPlane1())
  1413. AddClipPlane(clipPlane, 0);
  1414. if (Expr *clipPlane = Attr->getClipPlane2())
  1415. AddClipPlane(clipPlane, 1);
  1416. if (Expr *clipPlane = Attr->getClipPlane3())
  1417. AddClipPlane(clipPlane, 2);
  1418. if (Expr *clipPlane = Attr->getClipPlane4())
  1419. AddClipPlane(clipPlane, 3);
  1420. if (Expr *clipPlane = Attr->getClipPlane5())
  1421. AddClipPlane(clipPlane, 4);
  1422. if (Expr *clipPlane = Attr->getClipPlane6())
  1423. AddClipPlane(clipPlane, 5);
  1424. clipPlaneFuncList.emplace_back(F);
  1425. }
  1426. }
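// Illustrative clip-plane usage handled by the prolog above (the global and
// struct names are placeholders):
//   float4 g_ClipPlane0;
//   [clipplanes(g_ClipPlane0)]
//   VSOut main(VSIn input) { ... }
// Each clip-plane expression must resolve to a global (DeclRefExpr) or a
// member of a global (MemberExpr), whose address is recorded in VS.clipPlanes.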
  1427. void CGMSHLSLRuntime::AddControlFlowHint(CodeGenFunction &CGF, const Stmt &S,
  1428. llvm::TerminatorInst *TI,
  1429. ArrayRef<const Attr *> Attrs) {
  1430. // Build hints.
  1431. bool bNoBranchFlatten = true;
  1432. bool bBranch = false;
  1433. bool bFlatten = false;
  1434. std::vector<DXIL::ControlFlowHint> hints;
  1435. for (const auto *Attr : Attrs) {
  1436. if (isa<HLSLBranchAttr>(Attr)) {
  1437. hints.emplace_back(DXIL::ControlFlowHint::Branch);
  1438. bNoBranchFlatten = false;
  1439. bBranch = true;
  1440. }
  1441. else if (isa<HLSLFlattenAttr>(Attr)) {
  1442. hints.emplace_back(DXIL::ControlFlowHint::Flatten);
  1443. bNoBranchFlatten = false;
  1444. bFlatten = true;
  1445. } else if (isa<HLSLForceCaseAttr>(Attr)) {
  1446. if (isa<SwitchStmt>(&S)) {
  1447. hints.emplace_back(DXIL::ControlFlowHint::ForceCase);
  1448. }
  1449. }
  1450. // Ignore fastopt, allow_uav_condition and call for now.
  1451. }
  1452. if (bNoBranchFlatten) {
    // Check the control-flow codegen options.
  1454. if (CGF.CGM.getCodeGenOpts().HLSLPreferControlFlow)
  1455. hints.emplace_back(DXIL::ControlFlowHint::Branch);
  1456. else if (CGF.CGM.getCodeGenOpts().HLSLAvoidControlFlow)
  1457. hints.emplace_back(DXIL::ControlFlowHint::Flatten);
  1458. }
  1459. if (bFlatten && bBranch) {
  1460. DiagnosticsEngine &Diags = CGM.getDiags();
  1461. unsigned DiagID = Diags.getCustomDiagID(
  1462. DiagnosticsEngine::Error,
  1463. "can't use branch and flatten attributes together");
  1464. Diags.Report(S.getLocStart(), DiagID);
  1465. }
  1466. if (hints.size()) {
  1467. // Add meta data to the instruction.
  1468. MDNode *hintsNode = DxilMDHelper::EmitControlFlowHints(Context, hints);
  1469. TI->setMetadata(DxilMDHelper::kDxilControlFlowHintMDName, hintsNode);
  1470. }
  1471. }
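// Illustrative control-flow hints consumed above (HLSL):
//   [branch]  if (cond) { ... }    // emits ControlFlowHint::Branch
//   [flatten] if (cond) { ... }    // emits ControlFlowHint::Flatten
//   [forcecase] switch (x) { ... } // emits ControlFlowHint::ForceCase
// Using [branch] and [flatten] on the same statement is reported as an error.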
  1472. void CGMSHLSLRuntime::FinishAutoVar(CodeGenFunction &CGF, const VarDecl &D, llvm::Value *V) {
  1473. if (D.hasAttr<HLSLPreciseAttr>()) {
  1474. AllocaInst *AI = cast<AllocaInst>(V);
  1475. HLModule::MarkPreciseAttributeWithMetadata(AI);
  1476. }
  1477. // Add type annotation for local variable.
  1478. DxilTypeSystem &typeSys = m_pHLModule->GetTypeSystem();
  1479. unsigned arrayEltSize = 0;
  1480. AddTypeAnnotation(D.getType(), typeSys, arrayEltSize);
  1481. }
  1482. hlsl::InterpolationMode CGMSHLSLRuntime::GetInterpMode(const Decl *decl,
  1483. CompType compType,
  1484. bool bKeepUndefined) {
  1485. InterpolationMode Interp(
  1486. decl->hasAttr<HLSLNoInterpolationAttr>(), decl->hasAttr<HLSLLinearAttr>(),
  1487. decl->hasAttr<HLSLNoPerspectiveAttr>(), decl->hasAttr<HLSLCentroidAttr>(),
  1488. decl->hasAttr<HLSLSampleAttr>());
  1489. DXASSERT(Interp.IsValid(), "otherwise front-end missing validation");
  1490. if (Interp.IsUndefined() && !bKeepUndefined) {
  1491. // Type-based default: linear for floats, constant for others.
  1492. if (compType.IsFloatTy())
  1493. Interp = InterpolationMode::Kind::Linear;
  1494. else
  1495. Interp = InterpolationMode::Kind::Constant;
  1496. }
  1497. return Interp;
  1498. }
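// Example of the type-based default above (illustrative): an undecorated
// "float4 color : COLOR" input gets linear interpolation, while an
// "int id : ID" input falls back to constant interpolation, since integers
// cannot be interpolated.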
  1499. hlsl::CompType CGMSHLSLRuntime::GetCompType(const BuiltinType *BT) {
  1500. hlsl::CompType ElementType = hlsl::CompType::getInvalid();
  1501. switch (BT->getKind()) {
  1502. case BuiltinType::Bool:
  1503. ElementType = hlsl::CompType::getI1();
  1504. break;
  1505. case BuiltinType::Double:
  1506. ElementType = hlsl::CompType::getF64();
  1507. break;
  1508. case BuiltinType::Float:
  1509. ElementType = hlsl::CompType::getF32();
  1510. break;
  1511. case BuiltinType::Min10Float:
  1512. case BuiltinType::Half:
  1513. ElementType = hlsl::CompType::getF16();
  1514. break;
  1515. case BuiltinType::Int:
  1516. ElementType = hlsl::CompType::getI32();
  1517. break;
  1518. case BuiltinType::LongLong:
  1519. ElementType = hlsl::CompType::getI64();
  1520. break;
  1521. case BuiltinType::Min12Int:
  1522. case BuiltinType::Short:
  1523. ElementType = hlsl::CompType::getI16();
  1524. break;
  1525. case BuiltinType::UInt:
  1526. ElementType = hlsl::CompType::getU32();
  1527. break;
  1528. case BuiltinType::ULongLong:
  1529. ElementType = hlsl::CompType::getU64();
  1530. break;
  1531. case BuiltinType::UShort:
  1532. ElementType = hlsl::CompType::getU16();
  1533. break;
  1534. default:
  1535. llvm_unreachable("unsupported type");
  1536. break;
  1537. }
  1538. return ElementType;
  1539. }
/// Add a resource to the program.
  1541. void CGMSHLSLRuntime::addResource(Decl *D) {
  1542. if (HLSLBufferDecl *BD = dyn_cast<HLSLBufferDecl>(D))
  1543. GetOrCreateCBuffer(BD);
  1544. else if (VarDecl *VD = dyn_cast<VarDecl>(D)) {
  1545. hlsl::DxilResourceBase::Class resClass = TypeToClass(VD->getType());
    // Skip resource decls that have an initializer.
    if (VD->hasInit() && resClass != DXIL::ResourceClass::Invalid)
      return;
    // Skip static globals.
    if (!VD->isExternallyVisible())
      return;
  1552. if (D->hasAttr<HLSLGroupSharedAttr>()) {
  1553. GlobalVariable *GV = cast<GlobalVariable>(CGM.GetAddrOfGlobalVar(VD));
  1554. m_pHLModule->AddGroupSharedVariable(GV);
  1555. return;
  1556. }
  1557. switch (resClass) {
  1558. case hlsl::DxilResourceBase::Class::Sampler:
  1559. AddSampler(VD);
  1560. break;
  1561. case hlsl::DxilResourceBase::Class::UAV:
  1562. case hlsl::DxilResourceBase::Class::SRV:
  1563. AddUAVSRV(VD, resClass);
  1564. break;
  1565. case hlsl::DxilResourceBase::Class::Invalid: {
  1566. // normal global constant, add to global CB
  1567. HLCBuffer &globalCB = GetGlobalCBuffer();
  1568. AddConstant(VD, globalCB);
  1569. break;
  1570. }
  1571. case DXIL::ResourceClass::CBuffer:
  1572. DXASSERT(0, "cbuffer should not be here");
  1573. break;
  1574. }
  1575. }
  1576. }
  1577. // TODO: collect such helper utility functions in one place.
  1578. static DxilResourceBase::Class KeywordToClass(const std::string &keyword) {
  1579. // TODO: refactor for faster search (switch by 1/2/3 first letters, then
  1580. // compare)
  1581. if (keyword == "SamplerState")
  1582. return DxilResourceBase::Class::Sampler;
  1583. if (keyword == "SamplerComparisonState")
  1584. return DxilResourceBase::Class::Sampler;
  1585. if (keyword == "ConstantBuffer")
  1586. return DxilResourceBase::Class::CBuffer;
  1587. if (keyword == "TextureBuffer")
  1588. return DxilResourceBase::Class::SRV;
  1589. bool isSRV = keyword == "Buffer";
  1590. isSRV |= keyword == "ByteAddressBuffer";
  1591. isSRV |= keyword == "StructuredBuffer";
  1592. isSRV |= keyword == "Texture1D";
  1593. isSRV |= keyword == "Texture1DArray";
  1594. isSRV |= keyword == "Texture2D";
  1595. isSRV |= keyword == "Texture2DArray";
  1596. isSRV |= keyword == "Texture3D";
  1597. isSRV |= keyword == "TextureCube";
  1598. isSRV |= keyword == "TextureCubeArray";
  1599. isSRV |= keyword == "Texture2DMS";
  1600. isSRV |= keyword == "Texture2DMSArray";
  1601. if (isSRV)
  1602. return DxilResourceBase::Class::SRV;
  1603. bool isUAV = keyword == "RWBuffer";
  1604. isUAV |= keyword == "RWByteAddressBuffer";
  1605. isUAV |= keyword == "RWStructuredBuffer";
  1606. isUAV |= keyword == "RWTexture1D";
  1607. isUAV |= keyword == "RWTexture1DArray";
  1608. isUAV |= keyword == "RWTexture2D";
  1609. isUAV |= keyword == "RWTexture2DArray";
  1610. isUAV |= keyword == "RWTexture3D";
  1611. isUAV |= keyword == "RWTextureCube";
  1612. isUAV |= keyword == "RWTextureCubeArray";
  1613. isUAV |= keyword == "RWTexture2DMS";
  1614. isUAV |= keyword == "RWTexture2DMSArray";
  1615. isUAV |= keyword == "AppendStructuredBuffer";
  1616. isUAV |= keyword == "ConsumeStructuredBuffer";
  1617. isUAV |= keyword == "RasterizerOrderedBuffer";
  1618. isUAV |= keyword == "RasterizerOrderedByteAddressBuffer";
  1619. isUAV |= keyword == "RasterizerOrderedStructuredBuffer";
  1620. isUAV |= keyword == "RasterizerOrderedTexture1D";
  1621. isUAV |= keyword == "RasterizerOrderedTexture1DArray";
  1622. isUAV |= keyword == "RasterizerOrderedTexture2D";
  1623. isUAV |= keyword == "RasterizerOrderedTexture2DArray";
  1624. isUAV |= keyword == "RasterizerOrderedTexture3D";
  1625. if (isUAV)
  1626. return DxilResourceBase::Class::UAV;
  1627. return DxilResourceBase::Class::Invalid;
  1628. }
  1629. static DxilSampler::SamplerKind KeywordToSamplerKind(const std::string &keyword) {
  1630. // TODO: refactor for faster search (switch by 1/2/3 first letters, then
  1631. // compare)
  1632. if (keyword == "SamplerState")
  1633. return DxilSampler::SamplerKind::Default;
  1634. if (keyword == "SamplerComparisonState")
  1635. return DxilSampler::SamplerKind::Comparison;
  1636. return DxilSampler::SamplerKind::Invalid;
  1637. }
  1638. // This should probably be refactored to ASTContextHLSL, and follow types
  1639. // rather than do string comparisons.
  1640. DXIL::ResourceClass
  1641. hlsl::GetResourceClassForType(const clang::ASTContext &context,
  1642. clang::QualType Ty) {
  1643. Ty = Ty.getCanonicalType();
  1644. if (const clang::ArrayType *arrayType = context.getAsArrayType(Ty)) {
  1645. return GetResourceClassForType(context, arrayType->getElementType());
  1646. } else if (const RecordType *RT = Ty->getAsStructureType()) {
  1647. return KeywordToClass(RT->getDecl()->getName());
  1648. } else if (const RecordType *RT = Ty->getAs<RecordType>()) {
  1649. if (const ClassTemplateSpecializationDecl *templateDecl =
  1650. dyn_cast<ClassTemplateSpecializationDecl>(RT->getDecl())) {
  1651. return KeywordToClass(templateDecl->getName());
  1652. }
  1653. }
  1654. return hlsl::DxilResourceBase::Class::Invalid;
  1655. }
  1656. hlsl::DxilResourceBase::Class CGMSHLSLRuntime::TypeToClass(clang::QualType Ty) {
  1657. return hlsl::GetResourceClassForType(CGM.getContext(), Ty);
  1658. }
  1659. uint32_t CGMSHLSLRuntime::AddSampler(VarDecl *samplerDecl) {
  1660. llvm::Constant *val = CGM.GetAddrOfGlobalVar(samplerDecl);
  1661. unique_ptr<DxilSampler> hlslRes(new DxilSampler);
  1662. hlslRes->SetLowerBound(UINT_MAX);
  1663. hlslRes->SetGlobalSymbol(cast<llvm::GlobalVariable>(val));
  1664. hlslRes->SetGlobalName(samplerDecl->getName());
  1665. QualType VarTy = samplerDecl->getType();
  1666. if (const clang::ArrayType *arrayType =
  1667. CGM.getContext().getAsArrayType(VarTy)) {
  1668. if (arrayType->isConstantArrayType()) {
  1669. uint32_t arraySize =
  1670. cast<ConstantArrayType>(arrayType)->getSize().getLimitedValue();
  1671. hlslRes->SetRangeSize(arraySize);
  1672. } else {
  1673. hlslRes->SetRangeSize(UINT_MAX);
  1674. }
  1675. // use elementTy
  1676. VarTy = arrayType->getElementType();
  1677. // Support more dim.
  1678. while (const clang::ArrayType *arrayType =
  1679. CGM.getContext().getAsArrayType(VarTy)) {
  1680. unsigned rangeSize = hlslRes->GetRangeSize();
  1681. if (arrayType->isConstantArrayType()) {
  1682. uint32_t arraySize =
  1683. cast<ConstantArrayType>(arrayType)->getSize().getLimitedValue();
  1684. if (rangeSize != UINT_MAX)
  1685. hlslRes->SetRangeSize(rangeSize * arraySize);
  1686. } else
  1687. hlslRes->SetRangeSize(UINT_MAX);
  1688. // use elementTy
  1689. VarTy = arrayType->getElementType();
  1690. }
  1691. } else
  1692. hlslRes->SetRangeSize(1);
  1693. const RecordType *RT = VarTy->getAs<RecordType>();
  1694. DxilSampler::SamplerKind kind = KeywordToSamplerKind(RT->getDecl()->getName());
  1695. hlslRes->SetSamplerKind(kind);
  1696. for (hlsl::UnusualAnnotation *it : samplerDecl->getUnusualAnnotations()) {
  1697. switch (it->getKind()) {
  1698. case hlsl::UnusualAnnotation::UA_RegisterAssignment: {
  1699. hlsl::RegisterAssignment *ra = cast<hlsl::RegisterAssignment>(it);
  1700. hlslRes->SetLowerBound(ra->RegisterNumber);
  1701. hlslRes->SetSpaceID(ra->RegisterSpace);
  1702. break;
  1703. }
  1704. default:
  1705. llvm_unreachable("only register for sampler");
  1706. break;
  1707. }
  1708. }
  1709. hlslRes->SetID(m_pHLModule->GetSamplers().size());
  1710. return m_pHLModule->AddSampler(std::move(hlslRes));
  1711. }
  1712. static void CollectScalarTypes(std::vector<llvm::Type *> &scalarTys, llvm::Type *Ty) {
  1713. if (llvm::StructType *ST = dyn_cast<llvm::StructType>(Ty)) {
  1714. for (llvm::Type *EltTy : ST->elements()) {
  1715. CollectScalarTypes(scalarTys, EltTy);
  1716. }
  1717. } else if (llvm::ArrayType *AT = dyn_cast<llvm::ArrayType>(Ty)) {
  1718. llvm::Type *EltTy = AT->getElementType();
  1719. for (unsigned i=0;i<AT->getNumElements();i++) {
  1720. CollectScalarTypes(scalarTys, EltTy);
  1721. }
  1722. } else if (llvm::VectorType *VT = dyn_cast<llvm::VectorType>(Ty)) {
  1723. llvm::Type *EltTy = VT->getElementType();
  1724. for (unsigned i=0;i<VT->getNumElements();i++) {
  1725. CollectScalarTypes(scalarTys, EltTy);
  1726. }
  1727. } else {
  1728. scalarTys.emplace_back(Ty);
  1729. }
  1730. }
  1731. static void CollectScalarTypes(std::vector<QualType> &ScalarTys, QualType Ty) {
  1732. if (Ty->isRecordType()) {
  1733. if (hlsl::IsHLSLMatType(Ty)) {
  1734. QualType EltTy = hlsl::GetHLSLMatElementType(Ty);
  1735. unsigned row = 0;
  1736. unsigned col = 0;
  1737. hlsl::GetRowsAndCols(Ty, row, col);
  1738. unsigned size = col*row;
  1739. for (unsigned i = 0; i < size; i++) {
  1740. CollectScalarTypes(ScalarTys, EltTy);
  1741. }
  1742. } else if (hlsl::IsHLSLVecType(Ty)) {
  1743. QualType EltTy = hlsl::GetHLSLVecElementType(Ty);
  1744. unsigned row = 0;
  1745. unsigned col = 0;
  1746. hlsl::GetRowsAndColsForAny(Ty, row, col);
  1747. unsigned size = col;
  1748. for (unsigned i = 0; i < size; i++) {
  1749. CollectScalarTypes(ScalarTys, EltTy);
  1750. }
  1751. } else {
  1752. const RecordType *RT = Ty->getAsStructureType();
  1753. // For CXXRecord.
  1754. if (!RT)
  1755. RT = Ty->getAs<RecordType>();
  1756. RecordDecl *RD = RT->getDecl();
  1757. for (FieldDecl *field : RD->fields())
  1758. CollectScalarTypes(ScalarTys, field->getType());
  1759. }
  1760. } else if (Ty->isArrayType()) {
  1761. const clang::ArrayType *AT = Ty->getAsArrayTypeUnsafe();
  1762. QualType EltTy = AT->getElementType();
  1763. // Set it to 5 for unsized array.
  1764. unsigned size = 5;
  1765. if (AT->isConstantArrayType()) {
  1766. size = cast<ConstantArrayType>(AT)->getSize().getLimitedValue();
  1767. }
  1768. for (unsigned i=0;i<size;i++) {
  1769. CollectScalarTypes(ScalarTys, EltTy);
  1770. }
  1771. } else {
  1772. ScalarTys.emplace_back(Ty);
  1773. }
  1774. }
  1775. uint32_t CGMSHLSLRuntime::AddUAVSRV(VarDecl *decl,
  1776. hlsl::DxilResourceBase::Class resClass) {
  1777. llvm::GlobalVariable *val =
  1778. cast<llvm::GlobalVariable>(CGM.GetAddrOfGlobalVar(decl));
  1779. QualType VarTy = decl->getType().getCanonicalType();
  1780. unique_ptr<HLResource> hlslRes(new HLResource);
  1781. hlslRes->SetLowerBound(UINT_MAX);
  1782. hlslRes->SetGlobalSymbol(val);
  1783. hlslRes->SetGlobalName(decl->getName());
  1784. if (const clang::ArrayType *arrayType =
  1785. CGM.getContext().getAsArrayType(VarTy)) {
  1786. if (arrayType->isConstantArrayType()) {
  1787. uint32_t arraySize =
  1788. cast<ConstantArrayType>(arrayType)->getSize().getLimitedValue();
  1789. hlslRes->SetRangeSize(arraySize);
  1790. } else
  1791. hlslRes->SetRangeSize(UINT_MAX);
  1792. // use elementTy
  1793. VarTy = arrayType->getElementType();
  1794. // Support more dim.
  1795. while (const clang::ArrayType *arrayType =
  1796. CGM.getContext().getAsArrayType(VarTy)) {
  1797. unsigned rangeSize = hlslRes->GetRangeSize();
  1798. if (arrayType->isConstantArrayType()) {
  1799. uint32_t arraySize =
  1800. cast<ConstantArrayType>(arrayType)->getSize().getLimitedValue();
  1801. if (rangeSize != UINT_MAX)
  1802. hlslRes->SetRangeSize(rangeSize * arraySize);
  1803. } else
  1804. hlslRes->SetRangeSize(UINT_MAX);
  1805. // use elementTy
  1806. VarTy = arrayType->getElementType();
  1807. }
  1808. } else
  1809. hlslRes->SetRangeSize(1);
  1810. for (hlsl::UnusualAnnotation *it : decl->getUnusualAnnotations()) {
  1811. switch (it->getKind()) {
  1812. case hlsl::UnusualAnnotation::UA_RegisterAssignment: {
  1813. hlsl::RegisterAssignment *ra = cast<hlsl::RegisterAssignment>(it);
  1814. hlslRes->SetLowerBound(ra->RegisterNumber);
  1815. hlslRes->SetSpaceID(ra->RegisterSpace);
  1816. break;
  1817. }
  1818. default:
  1819. llvm_unreachable("only register for uav/srv");
  1820. break;
  1821. }
  1822. }
  1823. const RecordType *RT = VarTy->getAs<RecordType>();
  1824. RecordDecl *RD = RT->getDecl();
  1825. hlsl::DxilResource::Kind kind = KeywordToKind(RT->getDecl()->getName());
  1826. hlslRes->SetKind(kind);
  1827. // Get the result type from handle field.
  1828. FieldDecl *FD = *(RD->field_begin());
  1829. DXASSERT(FD->getName() == "h", "must be handle field");
  1830. QualType resultTy = FD->getType();
  1831. // Type annotation for result type of resource.
  1832. DxilTypeSystem &dxilTypeSys = m_pHLModule->GetTypeSystem();
  1833. unsigned arrayEltSize = 0;
  1834. AddTypeAnnotation(decl->getType(), dxilTypeSys, arrayEltSize);
  1835. if (kind == hlsl::DxilResource::Kind::Texture2DMS ||
  1836. kind == hlsl::DxilResource::Kind::Texture2DMSArray) {
  1837. const ClassTemplateSpecializationDecl *templateDecl =
  1838. dyn_cast<ClassTemplateSpecializationDecl>(RT->getDecl());
  1839. const clang::TemplateArgument &sampleCountArg =
  1840. templateDecl->getTemplateArgs()[1];
  1841. uint32_t sampleCount = sampleCountArg.getAsIntegral().getLimitedValue();
  1842. hlslRes->SetSampleCount(sampleCount);
  1843. }
  1844. if (kind != hlsl::DxilResource::Kind::StructuredBuffer) {
  1845. QualType Ty = resultTy;
  1846. QualType EltTy = Ty;
  1847. if (hlsl::IsHLSLVecType(Ty)) {
  1848. EltTy = hlsl::GetHLSLVecElementType(Ty);
  1849. } else if (hlsl::IsHLSLMatType(Ty)) {
  1850. EltTy = hlsl::GetHLSLMatElementType(Ty);
  1851. } else if (resultTy->isAggregateType()) {
      // Struct or array in a non-structured-buffer resource.
  1853. std::vector<QualType> ScalarTys;
  1854. CollectScalarTypes(ScalarTys, resultTy);
  1855. unsigned size = ScalarTys.size();
  1856. if (size == 0) {
  1857. DiagnosticsEngine &Diags = CGM.getDiags();
  1858. unsigned DiagID = Diags.getCustomDiagID(
  1859. DiagnosticsEngine::Error, "object's templated type must have at least one element");
  1860. Diags.Report(decl->getLocation(), DiagID);
  1861. return 0;
  1862. }
  1863. if (size > 4) {
  1864. DiagnosticsEngine &Diags = CGM.getDiags();
  1865. unsigned DiagID = Diags.getCustomDiagID(
  1866. DiagnosticsEngine::Error, "elements of typed buffers and textures "
  1867. "must fit in four 32-bit quantities");
  1868. Diags.Report(decl->getLocation(), DiagID);
  1869. return 0;
  1870. }
  1871. EltTy = ScalarTys[0];
  1872. for (QualType ScalarTy : ScalarTys) {
  1873. if (ScalarTy != EltTy) {
  1874. DiagnosticsEngine &Diags = CGM.getDiags();
  1875. unsigned DiagID = Diags.getCustomDiagID(
  1876. DiagnosticsEngine::Error,
  1877. "all template type components must have the same type");
  1878. Diags.Report(decl->getLocation(), DiagID);
  1879. return 0;
  1880. }
  1881. }
  1882. }
  1883. EltTy = EltTy.getCanonicalType();
  1884. bool bSNorm = false;
  1885. bool bUNorm = false;
  1886. if (const AttributedType *AT = dyn_cast<AttributedType>(Ty)) {
  1887. switch (AT->getAttrKind()) {
  1888. case AttributedType::Kind::attr_hlsl_snorm:
  1889. bSNorm = true;
  1890. break;
  1891. case AttributedType::Kind::attr_hlsl_unorm:
  1892. bUNorm = true;
  1893. break;
  1894. default:
  1895. // Do nothing
  1896. break;
  1897. }
  1898. }
  1899. if (EltTy->isBuiltinType()) {
  1900. const BuiltinType *BTy = EltTy->getAs<BuiltinType>();
  1901. CompType::Kind kind = BuiltinTyToCompTy(BTy, bSNorm, bUNorm);
      // 64-bit types are implemented with u32.
  1903. if (kind == CompType::Kind::U64 ||
  1904. kind == CompType::Kind::I64 ||
  1905. kind == CompType::Kind::SNormF64 ||
  1906. kind == CompType::Kind::UNormF64 ||
  1907. kind == CompType::Kind::F64) {
  1908. kind = CompType::Kind::U32;
  1909. }
  1910. hlslRes->SetCompType(kind);
  1911. } else {
  1912. DXASSERT(!bSNorm && !bUNorm, "snorm/unorm on invalid type");
  1913. }
  1914. }
  1915. // TODO: set resource
  1916. // hlslRes.SetGloballyCoherent();
  1917. hlslRes->SetROV(RT->getDecl()->getName().startswith("RasterizerOrdered"));
  1918. if (kind == hlsl::DxilResource::Kind::TypedBuffer ||
  1919. kind == hlsl::DxilResource::Kind::StructuredBuffer) {
  1920. const ClassTemplateSpecializationDecl *templateDecl =
  1921. dyn_cast<ClassTemplateSpecializationDecl>(RT->getDecl());
  1922. const clang::TemplateArgument &retTyArg =
  1923. templateDecl->getTemplateArgs()[0];
  1924. llvm::Type *retTy = CGM.getTypes().ConvertType(retTyArg.getAsType());
  1925. uint32_t strideInBytes = legacyLayout.getTypeAllocSize(retTy);
  1926. hlslRes->SetElementStride(strideInBytes);
  1927. }
  1928. if (resClass == hlsl::DxilResourceBase::Class::SRV) {
  1929. hlslRes->SetRW(false);
  1930. hlslRes->SetID(m_pHLModule->GetSRVs().size());
  1931. return m_pHLModule->AddSRV(std::move(hlslRes));
  1932. } else {
  1933. hlslRes->SetRW(true);
  1934. hlslRes->SetID(m_pHLModule->GetUAVs().size());
  1935. return m_pHLModule->AddUAV(std::move(hlslRes));
  1936. }
  1937. }
  1938. static bool IsResourceInType(const clang::ASTContext &context,
  1939. clang::QualType Ty) {
  1940. Ty = Ty.getCanonicalType();
  1941. if (const clang::ArrayType *arrayType = context.getAsArrayType(Ty)) {
  1942. return IsResourceInType(context, arrayType->getElementType());
  1943. } else if (const RecordType *RT = Ty->getAsStructureType()) {
  1944. if (KeywordToClass(RT->getDecl()->getName()) != DxilResourceBase::Class::Invalid)
  1945. return true;
  1946. const CXXRecordDecl* typeRecordDecl = RT->getAsCXXRecordDecl();
  1947. if (typeRecordDecl && !typeRecordDecl->isImplicit()) {
  1948. for (auto field : typeRecordDecl->fields()) {
  1949. if (IsResourceInType(context, field->getType()))
  1950. return true;
  1951. }
  1952. }
  1953. } else if (const RecordType *RT = Ty->getAs<RecordType>()) {
  1954. if (const ClassTemplateSpecializationDecl *templateDecl =
  1955. dyn_cast<ClassTemplateSpecializationDecl>(RT->getDecl())) {
  1956. if (KeywordToClass(templateDecl->getName()) != DxilResourceBase::Class::Invalid)
  1957. return true;
  1958. }
  1959. }
  1960. return false; // no resources found
  1961. }
  1962. void CGMSHLSLRuntime::AddConstant(VarDecl *constDecl, HLCBuffer &CB) {
  1963. if (constDecl->getStorageClass() == SC_Static) {
  1964. // For static inside cbuffer, take as global static.
  1965. // Don't add to cbuffer.
  1966. CGM.EmitGlobal(constDecl);
  1967. return;
  1968. }
  // Search the declared type for resource objects and fail if any are found.
  1970. if (IsResourceInType(CGM.getContext(), constDecl->getType())) {
  1971. DiagnosticsEngine &Diags = CGM.getDiags();
  1972. unsigned DiagID = Diags.getCustomDiagID(
  1973. DiagnosticsEngine::Error,
  1974. "object types not supported in global aggregate instances, cbuffers, or tbuffers.");
  1975. Diags.Report(constDecl->getLocation(), DiagID);
  1976. return;
  1977. }
  1978. llvm::Constant *constVal = CGM.GetAddrOfGlobalVar(constDecl);
  1979. bool isGlobalCB = CB.GetID() == globalCBIndex;
  1980. uint32_t offset = 0;
  1981. bool userOffset = false;
  1982. for (hlsl::UnusualAnnotation *it : constDecl->getUnusualAnnotations()) {
  1983. switch (it->getKind()) {
  1984. case hlsl::UnusualAnnotation::UA_ConstantPacking: {
  1985. if (!isGlobalCB) {
  1986. // TODO: check cannot mix packoffset elements with nonpackoffset
  1987. // elements in a cbuffer.
  1988. hlsl::ConstantPacking *cp = cast<hlsl::ConstantPacking>(it);
  1989. offset = cp->Subcomponent << 2;
  1990. offset += cp->ComponentOffset;
        // Convert to bytes.
  1992. offset <<= 2;
  1993. userOffset = true;
  1994. } else {
  1995. DiagnosticsEngine &Diags = CGM.getDiags();
  1996. unsigned DiagID = Diags.getCustomDiagID(
  1997. DiagnosticsEngine::Error,
  1998. "packoffset is only allowed in a constant buffer.");
  1999. Diags.Report(it->Loc, DiagID);
  2000. }
  2001. break;
  2002. }
  2003. case hlsl::UnusualAnnotation::UA_RegisterAssignment: {
  2004. if (isGlobalCB) {
  2005. RegisterAssignment *ra = cast<RegisterAssignment>(it);
  2006. offset = ra->RegisterNumber << 2;
        // Convert to bytes.
  2008. offset <<= 2;
  2009. userOffset = true;
  2010. }
  2011. break;
  2012. }
  2013. case hlsl::UnusualAnnotation::UA_SemanticDecl:
  2014. // skip semantic on constant
  2015. break;
  2016. }
  2017. }
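  // Worked example (illustrative): "packoffset(c1.y)" has Subcomponent = 1 and
  // ComponentOffset = 1, so offset = ((1 << 2) + 1) << 2 = 20 bytes. A
  // "register(c2)" assignment on a global constant likewise yields
  // (2 << 2) << 2 = 32 bytes.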
  2018. std::unique_ptr<DxilResourceBase> pHlslConst = std::make_unique<DxilResourceBase>(DXIL::ResourceClass::Invalid);
  2019. pHlslConst->SetLowerBound(UINT_MAX);
  2020. pHlslConst->SetGlobalSymbol(cast<llvm::GlobalVariable>(constVal));
  2021. pHlslConst->SetGlobalName(constDecl->getName());
  2022. if (userOffset) {
  2023. pHlslConst->SetLowerBound(offset);
  2024. }
  2025. DxilTypeSystem &dxilTypeSys = m_pHLModule->GetTypeSystem();
  2026. // Just add type annotation here.
  2027. // Offset will be allocated later.
  2028. QualType Ty = constDecl->getType();
  2029. if (CB.GetRangeSize() != 1) {
  2030. while (Ty->isArrayType()) {
  2031. Ty = Ty->getAsArrayTypeUnsafe()->getElementType();
  2032. }
  2033. }
  2034. unsigned arrayEltSize = 0;
  2035. unsigned size = AddTypeAnnotation(Ty, dxilTypeSys, arrayEltSize);
  2036. pHlslConst->SetRangeSize(size);
  2037. CB.AddConst(pHlslConst);
  2038. // Save fieldAnnotation for the const var.
  2039. DxilFieldAnnotation fieldAnnotation;
  2040. if (userOffset)
  2041. fieldAnnotation.SetCBufferOffset(offset);
  2042. // Get the nested element type.
  2043. if (Ty->isArrayType()) {
  2044. while (const ConstantArrayType *arrayTy =
  2045. CGM.getContext().getAsConstantArrayType(Ty)) {
  2046. Ty = arrayTy->getElementType();
  2047. }
  2048. }
  2049. bool bDefaultRowMajor = m_pHLModule->GetHLOptions().bDefaultRowMajor;
  2050. ConstructFieldAttributedAnnotation(fieldAnnotation, Ty, bDefaultRowMajor);
  2051. m_ConstVarAnnotationMap[constVal] = fieldAnnotation;
  2052. }
  2053. uint32_t CGMSHLSLRuntime::AddCBuffer(HLSLBufferDecl *D) {
  2054. unique_ptr<HLCBuffer> CB = std::make_unique<HLCBuffer>();
  2055. // setup the CB
  2056. CB->SetGlobalSymbol(nullptr);
  2057. CB->SetGlobalName(D->getNameAsString());
  2058. CB->SetLowerBound(UINT_MAX);
  2059. if (!D->isCBuffer()) {
  2060. CB->SetKind(DXIL::ResourceKind::TBuffer);
  2061. }
  // The global variable will only be used once, by createHandle?
  // SetHandle(llvm::Value *pHandle);
  2064. for (hlsl::UnusualAnnotation *it : D->getUnusualAnnotations()) {
  2065. switch (it->getKind()) {
  2066. case hlsl::UnusualAnnotation::UA_RegisterAssignment: {
  2067. hlsl::RegisterAssignment *ra = cast<hlsl::RegisterAssignment>(it);
  2068. uint32_t regNum = ra->RegisterNumber;
  2069. uint32_t regSpace = ra->RegisterSpace;
  2070. CB->SetSpaceID(regSpace);
  2071. CB->SetLowerBound(regNum);
  2072. break;
  2073. }
  2074. case hlsl::UnusualAnnotation::UA_SemanticDecl:
  2075. // skip semantic on constant buffer
  2076. break;
  2077. case hlsl::UnusualAnnotation::UA_ConstantPacking:
  2078. llvm_unreachable("no packoffset on constant buffer");
  2079. break;
  2080. }
  2081. }
  2082. // Add constant
  2083. if (D->isConstantBufferView()) {
  2084. VarDecl *constDecl = cast<VarDecl>(*D->decls_begin());
  2085. CB->SetRangeSize(1);
  2086. QualType Ty = constDecl->getType();
  2087. if (Ty->isArrayType()) {
  2088. if (!Ty->isIncompleteArrayType()) {
  2089. unsigned arraySize = 1;
  2090. while (Ty->isArrayType()) {
  2091. Ty = Ty->getCanonicalTypeUnqualified();
  2092. const ConstantArrayType *AT = cast<ConstantArrayType>(Ty);
  2093. arraySize *= AT->getSize().getLimitedValue();
  2094. Ty = AT->getElementType();
  2095. }
  2096. CB->SetRangeSize(arraySize);
  2097. } else {
  2098. CB->SetRangeSize(UINT_MAX);
  2099. }
  2100. }
  2101. AddConstant(constDecl, *CB.get());
  2102. } else {
  2103. auto declsEnds = D->decls_end();
  2104. CB->SetRangeSize(1);
  2105. for (auto it = D->decls_begin(); it != declsEnds; it++) {
  2106. if (VarDecl *constDecl = dyn_cast<VarDecl>(*it))
  2107. AddConstant(constDecl, *CB.get());
  2108. else if (isa<EmptyDecl>(*it)) {
  2109. } else if (isa<CXXRecordDecl>(*it)) {
  2110. } else {
  2111. HLSLBufferDecl *inner = cast<HLSLBufferDecl>(*it);
  2112. GetOrCreateCBuffer(inner);
  2113. }
  2114. }
  2115. }
  2116. CB->SetID(m_pHLModule->GetCBuffers().size());
  2117. return m_pHLModule->AddCBuffer(std::move(CB));
  2118. }
  2119. HLCBuffer &CGMSHLSLRuntime::GetOrCreateCBuffer(HLSLBufferDecl *D) {
  2120. if (constantBufMap.count(D) != 0) {
  2121. uint32_t cbIndex = constantBufMap[D];
  2122. return *static_cast<HLCBuffer*>(&(m_pHLModule->GetCBuffer(cbIndex)));
  2123. }
  2124. uint32_t cbID = AddCBuffer(D);
  2125. constantBufMap[D] = cbID;
  2126. return *static_cast<HLCBuffer*>(&(m_pHLModule->GetCBuffer(cbID)));
  2127. }
  2128. bool CGMSHLSLRuntime::IsPatchConstantFunction(const Function *F) {
  2129. DXASSERT_NOMSG(F != nullptr);
  2130. for (auto && p : patchConstantFunctionMap) {
  2131. if (p.second == F) return true;
  2132. }
  2133. return false;
  2134. }
  2135. void CGMSHLSLRuntime::SetEntryFunction() {
  2136. if (EntryFunc == nullptr) {
  2137. DiagnosticsEngine &Diags = CGM.getDiags();
  2138. unsigned DiagID = Diags.getCustomDiagID(DiagnosticsEngine::Error,
  2139. "cannot find entry function %0");
  2140. Diags.Report(DiagID) << CGM.getCodeGenOpts().HLSLEntryFunction;
  2141. return;
  2142. }
  2143. m_pHLModule->SetEntryFunction(EntryFunc);
  2144. }
// Here the size is the CB size, so there is no need to check the type.
static unsigned AlignCBufferOffset(unsigned offset, unsigned size,
                                   llvm::Type *Ty) {
  // The offset is already 4-byte aligned.
  2148. bool b8BytesAlign = Ty->isDoubleTy();
  2149. if (llvm::IntegerType *IT = dyn_cast<llvm::IntegerType>(Ty)) {
  2150. b8BytesAlign = IT->getBitWidth() > 32;
  2151. }
  // Align to 4 x 4 bytes.
  if (unsigned remainder = (offset & 0xf)) {
    unsigned aligned = offset - remainder + 16;
    // If the value cannot fit in the remainder of the current 16-byte row,
    // align up.
    bool bNeedAlign = (remainder + size) > 16;
    // Arrays always start aligned.
    bNeedAlign |= Ty->isArrayTy();
  2159. if (bNeedAlign)
  2160. return AlignTo8Bytes(aligned, b8BytesAlign);
  2161. else
  2162. return AlignTo8Bytes(offset, b8BytesAlign);
  2163. } else
  2164. return offset;
  2165. }
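// Worked example (illustrative, assuming AlignTo8Bytes rounds up to an 8-byte
// boundary only when requested): a 12-byte constant (e.g. a float3) at offset
// 4 fits in the current 16-byte row (4 + 12 <= 16) and stays at 4, but at
// offset 8 it would straddle the row (8 + 12 > 16) and is pushed to 16.
// Arrays always start at a fresh 16-byte row when the current row is
// partially used.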
  2166. static unsigned AllocateDxilConstantBuffer(HLCBuffer &CB) {
  2167. unsigned offset = 0;
  2168. // Scan user allocated constants first.
  2169. // Update offset.
  2170. for (const std::unique_ptr<DxilResourceBase> &C : CB.GetConstants()) {
  2171. if (C->GetLowerBound() == UINT_MAX)
  2172. continue;
  2173. unsigned size = C->GetRangeSize();
  2174. unsigned nextOffset = size + C->GetLowerBound();
  2175. if (offset < nextOffset)
  2176. offset = nextOffset;
  2177. }
  2178. // Alloc after user allocated constants.
  2179. for (const std::unique_ptr<DxilResourceBase> &C : CB.GetConstants()) {
  2180. if (C->GetLowerBound() != UINT_MAX)
  2181. continue;
  2182. unsigned size = C->GetRangeSize();
  2183. llvm::Type *Ty = C->GetGlobalSymbol()->getType()->getPointerElementType();
  2184. // Align offset.
  2185. offset = AlignCBufferOffset(offset, size, Ty);
  2186. if (C->GetLowerBound() == UINT_MAX) {
  2187. C->SetLowerBound(offset);
  2188. }
  2189. offset += size;
  2190. }
  2191. return offset;
  2192. }
  2193. static void AllocateDxilConstantBuffers(HLModule *pHLModule) {
  2194. for (unsigned i = 0; i < pHLModule->GetCBuffers().size(); i++) {
  2195. HLCBuffer &CB = *static_cast<HLCBuffer*>(&(pHLModule->GetCBuffer(i)));
  2196. unsigned size = AllocateDxilConstantBuffer(CB);
  2197. CB.SetSize(size);
  2198. }
  2199. }
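// Rewrites uses of V so that, inside function F, they refer to NewV instead.
// Instruction users are patched in place; constant-expression users (GEP and
// bitcast operators) are recreated as instructions with Builder and handled
// recursively, and a global-variable initializer is replaced with a store.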
  2200. static void ReplaceUseInFunction(Value *V, Value *NewV, Function *F,
  2201. IRBuilder<> &Builder) {
  2202. for (auto U = V->user_begin(); U != V->user_end(); ) {
  2203. User *user = *(U++);
  2204. if (Instruction *I = dyn_cast<Instruction>(user)) {
  2205. if (I->getParent()->getParent() == F) {
2206. // Replace the use with NewV when the instruction lives in F.
  2207. for (unsigned i = 0; i < I->getNumOperands(); i++) {
  2208. if (I->getOperand(i) == V)
  2209. I->setOperand(i, NewV);
  2210. }
  2211. }
  2212. } else {
2213. // For constant operators, create a local instruction clone that uses NewV.
2214. // Only GEP, bitcast, and global-variable initializers are handled here.
  2215. if (GEPOperator *GEPOp = dyn_cast<GEPOperator>(user)) {
  2216. std::vector<Value *> idxList(GEPOp->idx_begin(), GEPOp->idx_end());
  2217. Value *NewGEP = Builder.CreateInBoundsGEP(NewV, idxList);
  2218. ReplaceUseInFunction(GEPOp, NewGEP, F, Builder);
  2219. } else if (GlobalVariable *GV = dyn_cast<GlobalVariable>(user)) {
  2220. // Change the init val into NewV with Store.
  2221. GV->setInitializer(nullptr);
  2222. Builder.CreateStore(NewV, GV);
  2223. } else {
  2224. // Must be bitcast here.
  2225. BitCastOperator *BC = cast<BitCastOperator>(user);
  2226. Value *NewBC = Builder.CreateBitCast(NewV, BC->getType());
  2227. ReplaceUseInFunction(BC, NewBC, F, Builder);
  2228. }
  2229. }
  2230. }
  2231. }
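// Collects every function that uses the given constant, directly or through
// constant GEP/bitcast operators or a global initializer, so the cbuffer
// subscript below is only materialized in functions that actually need it.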
  2232. void MarkUsedFunctionForConst(Value *V, std::unordered_set<Function*> &usedFunc) {
  2233. for (auto U = V->user_begin(); U != V->user_end();) {
  2234. User *user = *(U++);
  2235. if (Instruction *I = dyn_cast<Instruction>(user)) {
  2236. Function *F = I->getParent()->getParent();
  2237. usedFunc.insert(F);
  2238. } else {
2239. // For constant operators, recurse into their users.
2240. // Only GEP, bitcast, and global-variable initializers are expected here.
  2241. if (GEPOperator *GEPOp = dyn_cast<GEPOperator>(user)) {
  2242. MarkUsedFunctionForConst(GEPOp, usedFunc);
  2243. } else if (GlobalVariable *GV = dyn_cast<GlobalVariable>(user)) {
  2244. MarkUsedFunctionForConst(GV, usedFunc);
  2245. } else {
  2246. // Must be bitcast here.
  2247. BitCastOperator *BC = cast<BitCastOperator>(user);
  2248. MarkUsedFunctionForConst(BC, usedFunc);
  2249. }
  2250. }
  2251. }
  2252. }
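// Builds the global that backs this cbuffer and reroutes all constant uses
// through an HLSubscript call. Conceptually (illustrative IR only; the real
// subscript function name and types come from GetOrCreateHLFunction below):
//   %buf = call %CB* @HLSubscript(i32 CBufferSubscript, %CB* @CBName, i32 0)
//   %p   = getelementptr inbounds %CB, %CB* %buf, i32 0, i32 <fieldIdx>
// and each use of the constant's global is replaced with %p inside every
// function that uses it. For an array of ConstantBuffer, the array index from
// the original GEP is forwarded as the subscript index instead of 0.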
  2253. static bool CreateCBufferVariable(HLCBuffer &CB,
  2254. llvm::Module &M) {
  2255. bool bUsed = false;
  2256. // Build Struct for CBuffer.
  2257. SmallVector<llvm::Type*, 4> Elements;
  2258. for (const std::unique_ptr<DxilResourceBase> &C : CB.GetConstants()) {
  2259. Value *GV = C->GetGlobalSymbol();
  2260. if (GV->hasNUsesOrMore(1))
  2261. bUsed = true;
  2262. // Global variable must be pointer type.
  2263. llvm::Type *Ty = GV->getType()->getPointerElementType();
  2264. Elements.emplace_back(Ty);
  2265. }
  2266. // Don't create CBuffer variable for unused cbuffer.
  2267. if (!bUsed)
  2268. return false;
  2269. bool isCBArray = CB.GetRangeSize() != 1;
  2270. llvm::GlobalVariable *cbGV = nullptr;
  2271. llvm::Type *cbTy = nullptr;
  2272. unsigned cbIndexDepth = 0;
  2273. if (!isCBArray) {
  2274. llvm::StructType *CBStructTy =
  2275. llvm::StructType::create(Elements, CB.GetGlobalName());
  2276. cbGV = new llvm::GlobalVariable(M, CBStructTy, /*IsConstant*/ true,
  2277. llvm::GlobalValue::ExternalLinkage,
  2278. /*InitVal*/ nullptr, CB.GetGlobalName());
  2279. cbTy = cbGV->getType();
  2280. } else {
2281. // For an array of ConstantBuffer, create an array of struct instead of a
2282. // struct of arrays.
  2283. DXASSERT(CB.GetConstants().size() == 1,
  2284. "ConstantBuffer should have 1 constant");
  2285. Value *GV = CB.GetConstants()[0]->GetGlobalSymbol();
  2286. llvm::Type *CBEltTy =
  2287. GV->getType()->getPointerElementType()->getArrayElementType();
  2288. cbIndexDepth = 1;
  2289. while (CBEltTy->isArrayTy()) {
  2290. CBEltTy = CBEltTy->getArrayElementType();
  2291. cbIndexDepth++;
  2292. }
  2293. // Add one level struct type to match normal case.
  2294. llvm::StructType *CBStructTy =
  2295. llvm::StructType::create({CBEltTy}, CB.GetGlobalName());
  2296. llvm::ArrayType *CBArrayTy =
  2297. llvm::ArrayType::get(CBStructTy, CB.GetRangeSize());
  2298. cbGV = new llvm::GlobalVariable(M, CBArrayTy, /*IsConstant*/ true,
  2299. llvm::GlobalValue::ExternalLinkage,
  2300. /*InitVal*/ nullptr, CB.GetGlobalName());
  2301. cbTy = llvm::PointerType::get(CBStructTy,
  2302. cbGV->getType()->getPointerAddressSpace());
  2303. }
  2304. CB.SetGlobalSymbol(cbGV);
  2305. llvm::Type *opcodeTy = llvm::Type::getInt32Ty(M.getContext());
  2306. llvm::Type *idxTy = opcodeTy;
  2307. llvm::FunctionType *SubscriptFuncTy =
  2308. llvm::FunctionType::get(cbTy, { opcodeTy, cbGV->getType(), idxTy}, false);
  2309. Function *subscriptFunc =
  2310. GetOrCreateHLFunction(M, SubscriptFuncTy, HLOpcodeGroup::HLSubscript,
  2311. (unsigned)HLSubscriptOpcode::CBufferSubscript);
  2312. Constant *opArg = ConstantInt::get(opcodeTy, (unsigned)HLSubscriptOpcode::CBufferSubscript);
  2313. Constant *zeroIdx = ConstantInt::get(opcodeTy, 0);
  2314. Value *args[] = { opArg, nullptr, zeroIdx };
  2315. llvm::LLVMContext &Context = M.getContext();
  2316. llvm::Type *i32Ty = llvm::Type::getInt32Ty(Context);
  2317. Value *zero = ConstantInt::get(i32Ty, (uint64_t)0);
  2318. std::vector<Value *> indexArray(CB.GetConstants().size());
  2319. std::vector<std::unordered_set<Function*>> constUsedFuncList(CB.GetConstants().size());
  2320. for (const std::unique_ptr<DxilResourceBase> &C : CB.GetConstants()) {
  2321. Value *idx = ConstantInt::get(i32Ty, C->GetID());
  2322. indexArray[C->GetID()] = idx;
  2323. Value *GV = C->GetGlobalSymbol();
  2324. MarkUsedFunctionForConst(GV, constUsedFuncList[C->GetID()]);
  2325. }
  2326. for (Function &F : M.functions()) {
  2327. if (!F.isDeclaration()) {
  2328. IRBuilder<> Builder(F.getEntryBlock().getFirstInsertionPt());
  2329. args[HLOperandIndex::kSubscriptObjectOpIdx] = cbGV;
2330. // Create the HL subscript call so that every use of the cbuffer goes through it.
2331. Instruction *cbSubscript = cast<Instruction>(Builder.CreateCall(subscriptFunc, {args} ));
2332. // Replace each constant variable with a GEP off the subscript result.
  2333. for (const std::unique_ptr<DxilResourceBase> &C : CB.GetConstants()) {
  2334. Value *GV = C->GetGlobalSymbol();
  2335. if (constUsedFuncList[C->GetID()].count(&F) == 0)
  2336. continue;
  2337. Value *idx = indexArray[C->GetID()];
  2338. if (!isCBArray) {
  2339. Instruction *GEP = cast<Instruction>(
  2340. Builder.CreateInBoundsGEP(cbSubscript, {zero, idx}));
  2341. // TODO: make sure the debug info is synced to GEP.
  2342. // GEP->setDebugLoc(GV);
  2343. ReplaceUseInFunction(GV, GEP, &F, Builder);
  2344. // Delete if no use in F.
  2345. if (GEP->user_empty())
  2346. GEP->eraseFromParent();
  2347. } else {
  2348. for (auto U = GV->user_begin(); U != GV->user_end();) {
  2349. User *user = *(U++);
  2350. if (user->user_empty())
  2351. continue;
  2352. Instruction *I = dyn_cast<Instruction>(user);
  2353. if (I && I->getParent()->getParent() != &F)
  2354. continue;
  2355. IRBuilder<> *instBuilder = &Builder;
  2356. unique_ptr<IRBuilder<> > B;
  2357. if (I) {
  2358. B = make_unique<IRBuilder<> >(I);
  2359. instBuilder = B.get();
  2360. }
  2361. GEPOperator *GEPOp = cast<GEPOperator>(user);
  2362. std::vector<Value *> idxList;
  2363. DXASSERT(GEPOp->getNumIndices() >= 1 + cbIndexDepth,
  2364. "must indexing ConstantBuffer array");
  2365. idxList.reserve(GEPOp->getNumIndices() - (cbIndexDepth - 1));
  2366. gep_type_iterator GI = gep_type_begin(*GEPOp), E = gep_type_end(*GEPOp);
  2367. idxList.push_back(GI.getOperand());
2368. // Use 0 for the added struct level; the original array index goes to the subscript call.
  2369. idxList.push_back(zero);
  2370. GI++;
  2371. Value *arrayIdx = GI.getOperand();
  2372. GI++;
  2373. for (unsigned curIndex = 1; GI != E && curIndex < cbIndexDepth; ++GI, ++curIndex) {
  2374. arrayIdx = instBuilder->CreateMul(arrayIdx, Builder.getInt32(GI->getArrayNumElements()));
  2375. arrayIdx = instBuilder->CreateAdd(arrayIdx, GI.getOperand());
  2376. }
  2377. for (; GI != E; ++GI) {
  2378. idxList.push_back(GI.getOperand());
  2379. }
  2380. args[HLOperandIndex::kSubscriptIndexOpIdx] = arrayIdx;
  2381. Instruction *cbSubscript =
  2382. cast<Instruction>(instBuilder->CreateCall(subscriptFunc, {args}));
  2383. Instruction *NewGEP = cast<Instruction>(
  2384. instBuilder->CreateInBoundsGEP(cbSubscript, idxList));
  2385. ReplaceUseInFunction(GEPOp, NewGEP, &F, *instBuilder);
  2386. }
  2387. }
  2388. }
  2389. // Delete if no use in F.
  2390. if (cbSubscript->user_empty())
  2391. cbSubscript->eraseFromParent();
  2392. }
  2393. }
  2394. return true;
  2395. }
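// After layout, record the cbuffer's struct annotation: the total size plus,
// for each field, its name and byte offset (LowerBound) copied from the
// per-constant annotations gathered while processing the declarations.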
  2396. static void ConstructCBufferAnnotation(
  2397. HLCBuffer &CB, DxilTypeSystem &dxilTypeSys,
  2398. std::unordered_map<Constant *, DxilFieldAnnotation> &AnnotationMap) {
  2399. Value *GV = CB.GetGlobalSymbol();
  2400. llvm::StructType *CBStructTy =
  2401. dyn_cast<llvm::StructType>(GV->getType()->getPointerElementType());
  2402. if (!CBStructTy) {
  2403. // For Array of ConstantBuffer.
  2404. llvm::ArrayType *CBArrayTy =
  2405. cast<llvm::ArrayType>(GV->getType()->getPointerElementType());
  2406. CBStructTy = cast<llvm::StructType>(CBArrayTy->getArrayElementType());
  2407. }
  2408. DxilStructAnnotation *CBAnnotation =
  2409. dxilTypeSys.AddStructAnnotation(CBStructTy);
  2410. CBAnnotation->SetCBufferSize(CB.GetSize());
  2411. // Set fieldAnnotation for each constant var.
  2412. for (const std::unique_ptr<DxilResourceBase> &C : CB.GetConstants()) {
  2413. Constant *GV = C->GetGlobalSymbol();
  2414. DxilFieldAnnotation &fieldAnnotation =
  2415. CBAnnotation->GetFieldAnnotation(C->GetID());
  2416. fieldAnnotation = AnnotationMap[GV];
  2417. // This is after CBuffer allocation.
  2418. fieldAnnotation.SetCBufferOffset(C->GetLowerBound());
  2419. fieldAnnotation.SetFieldName(C->GetGlobalName());
  2420. }
  2421. }
  2422. static void ConstructCBuffer(
  2423. HLModule *pHLModule,
  2424. llvm::Type *CBufferType,
  2425. std::unordered_map<Constant *, DxilFieldAnnotation> &AnnotationMap) {
  2426. DxilTypeSystem &dxilTypeSys = pHLModule->GetTypeSystem();
  2427. for (unsigned i = 0; i < pHLModule->GetCBuffers().size(); i++) {
  2428. HLCBuffer &CB = *static_cast<HLCBuffer*>(&(pHLModule->GetCBuffer(i)));
  2429. if (CB.GetConstants().size() == 0) {
2431. // Create a placeholder variable for an empty cbuffer.
  2431. llvm::GlobalVariable *pGV = new llvm::GlobalVariable(
  2432. *pHLModule->GetModule(), CBufferType, true,
  2433. llvm::GlobalValue::ExternalLinkage, nullptr, CB.GetGlobalName());
  2434. CB.SetGlobalSymbol(pGV);
  2435. } else {
  2436. bool bCreated = CreateCBufferVariable(CB, *pHLModule->GetModule());
  2437. if (bCreated)
  2438. ConstructCBufferAnnotation(CB, dxilTypeSys, AnnotationMap);
  2439. else {
2440. // Create a placeholder variable for an unused cbuffer.
  2441. llvm::GlobalVariable *pGV = new llvm::GlobalVariable(
  2442. *pHLModule->GetModule(), CBufferType, true,
  2443. llvm::GlobalValue::ExternalLinkage, nullptr, CB.GetGlobalName());
  2444. CB.SetGlobalSymbol(pGV);
  2445. }
  2446. }
2447. // Clear the constants, which are no longer needed.
  2448. CB.GetConstants().clear();
  2449. }
  2450. }
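// A subscript on a bool vector yields an element pointer; rewrite each use so
// it addresses the i1 vector directly. Roughly (illustrative IR only):
//   load:  %v = load i1, i1* %elt          ; then zext to the user's type
//   store: %b = icmp ne <valTy> %val, 0    ; then store i1 %b, i1* %elt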
  2451. static void ReplaceBoolVectorSubscript(CallInst *CI) {
  2452. Value *Ptr = CI->getArgOperand(0);
  2453. Value *Idx = CI->getArgOperand(1);
  2454. Value *IdxList[] = {ConstantInt::get(Idx->getType(), 0), Idx};
  2455. for (auto It = CI->user_begin(), E = CI->user_end(); It != E;) {
  2456. Instruction *user = cast<Instruction>(*(It++));
  2457. IRBuilder<> Builder(user);
  2458. Value *GEP = Builder.CreateInBoundsGEP(Ptr, IdxList);
  2459. if (LoadInst *LI = dyn_cast<LoadInst>(user)) {
  2460. Value *NewLd = Builder.CreateLoad(GEP);
  2461. Value *cast = Builder.CreateZExt(NewLd, LI->getType());
  2462. LI->replaceAllUsesWith(cast);
  2463. LI->eraseFromParent();
  2464. } else {
  2465. // Must be a store inst here.
  2466. StoreInst *SI = cast<StoreInst>(user);
  2467. Value *V = SI->getValueOperand();
  2468. Value *cast =
  2469. Builder.CreateICmpNE(V, llvm::ConstantInt::get(V->getType(), 0));
  2470. Builder.CreateStore(cast, GEP);
  2471. SI->eraseFromParent();
  2472. }
  2473. }
  2474. CI->eraseFromParent();
  2475. }
  2476. static void ReplaceBoolVectorSubscript(Function *F) {
  2477. for (auto It = F->user_begin(), E = F->user_end(); It != E; ) {
  2478. User *user = *(It++);
  2479. CallInst *CI = cast<CallInst>(user);
  2480. ReplaceBoolVectorSubscript(CI);
  2481. }
  2482. }
  2483. // Add function body for intrinsic if possible.
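// For Append/Consume the synthesized body is roughly:
//   counter = IncrementCounter(buf)     (DecrementCounter for Consume)
//   buf[counter] = val                  (Append)
//   return buf[counter]                 (Consume)
// and for sincos it is simply *sinPtr = sin(val); *cosPtr = cos(val);
// expressed with the corresponding HL intrinsic calls below.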
  2484. static Function *CreateOpFunction(llvm::Module &M, Function *F,
  2485. llvm::FunctionType *funcTy,
  2486. HLOpcodeGroup group, unsigned opcode) {
  2487. Function *opFunc = nullptr;
  2488. llvm::Type *opcodeTy = llvm::Type::getInt32Ty(M.getContext());
  2489. if (group == HLOpcodeGroup::HLIntrinsic) {
  2490. IntrinsicOp intriOp = static_cast<IntrinsicOp>(opcode);
  2491. switch (intriOp) {
  2492. case IntrinsicOp::MOP_Append:
  2493. case IntrinsicOp::MOP_Consume: {
  2494. bool bAppend = intriOp == IntrinsicOp::MOP_Append;
  2495. llvm::Type *handleTy = funcTy->getParamType(HLOperandIndex::kHandleOpIdx);
  2496. // Don't generate body for OutputStream::Append.
  2497. if (bAppend && HLModule::IsStreamOutputPtrType(handleTy)) {
  2498. opFunc = GetOrCreateHLFunction(M, funcTy, group, opcode);
  2499. break;
  2500. }
  2501. opFunc = GetOrCreateHLFunctionWithBody(M, funcTy, group, opcode,
  2502. bAppend ? "append" : "consume");
  2503. llvm::Type *counterTy = llvm::Type::getInt32Ty(M.getContext());
  2504. llvm::FunctionType *IncCounterFuncTy =
  2505. llvm::FunctionType::get(counterTy, {opcodeTy, handleTy}, false);
  2506. unsigned counterOpcode = bAppend ? (unsigned)IntrinsicOp::MOP_IncrementCounter:
  2507. (unsigned)IntrinsicOp::MOP_DecrementCounter;
  2508. Function *incCounterFunc =
  2509. GetOrCreateHLFunction(M, IncCounterFuncTy, group,
  2510. counterOpcode);
  2511. llvm::Type *idxTy = counterTy;
  2512. llvm::Type *valTy = bAppend ?
  2513. funcTy->getParamType(HLOperandIndex::kAppendValOpIndex):funcTy->getReturnType();
  2514. llvm::Type *subscriptTy = valTy;
  2515. if (!valTy->isPointerTy()) {
  2516. // Return type for subscript should be pointer type.
  2517. subscriptTy = llvm::PointerType::get(valTy, 0);
  2518. }
  2519. llvm::FunctionType *SubscriptFuncTy =
  2520. llvm::FunctionType::get(subscriptTy, {opcodeTy, handleTy, idxTy}, false);
  2521. Function *subscriptFunc =
  2522. GetOrCreateHLFunction(M, SubscriptFuncTy, HLOpcodeGroup::HLSubscript,
  2523. (unsigned)HLSubscriptOpcode::DefaultSubscript);
  2524. BasicBlock *BB = BasicBlock::Create(opFunc->getContext(), "Entry", opFunc);
  2525. IRBuilder<> Builder(BB);
  2526. auto argIter = opFunc->args().begin();
  2527. // Skip the opcode arg.
  2528. argIter++;
  2529. Argument *thisArg = argIter++;
  2530. // int counter = IncrementCounter/DecrementCounter(Buf);
  2531. Value *incCounterOpArg =
  2532. ConstantInt::get(idxTy, counterOpcode);
  2533. Value *counter =
  2534. Builder.CreateCall(incCounterFunc, {incCounterOpArg, thisArg});
  2535. // Buf[counter];
  2536. Value *subscriptOpArg = ConstantInt::get(
  2537. idxTy, (unsigned)HLSubscriptOpcode::DefaultSubscript);
  2538. Value *subscript =
  2539. Builder.CreateCall(subscriptFunc, {subscriptOpArg, thisArg, counter});
  2540. if (bAppend) {
  2541. Argument *valArg = argIter;
  2542. // Buf[counter] = val;
  2543. if (valTy->isPointerTy()) {
  2544. Value *valArgCast = Builder.CreateBitCast(valArg, llvm::Type::getInt8PtrTy(F->getContext()));
  2545. Value *subscriptCast = Builder.CreateBitCast(subscript, llvm::Type::getInt8PtrTy(F->getContext()));
  2546. // TODO: use real type size and alignment.
  2547. Value *tySize = ConstantInt::get(idxTy, 8);
  2548. unsigned Align = 8;
  2549. Builder.CreateMemCpy(subscriptCast, valArgCast, tySize, Align);
  2550. } else
  2551. Builder.CreateStore(valArg, subscript);
  2552. Builder.CreateRetVoid();
  2553. } else {
  2554. // return Buf[counter];
  2555. if (valTy->isPointerTy())
  2556. Builder.CreateRet(subscript);
  2557. else {
  2558. Value *retVal = Builder.CreateLoad(subscript);
  2559. Builder.CreateRet(retVal);
  2560. }
  2561. }
  2562. } break;
  2563. case IntrinsicOp::IOP_sincos: {
  2564. opFunc = GetOrCreateHLFunctionWithBody(M, funcTy, group, opcode, "sincos");
  2565. llvm::Type *valTy = funcTy->getParamType(HLOperandIndex::kTrinaryOpSrc0Idx);
  2566. llvm::FunctionType *sinFuncTy =
  2567. llvm::FunctionType::get(valTy, {opcodeTy, valTy}, false);
  2568. unsigned sinOp = static_cast<unsigned>(IntrinsicOp::IOP_sin);
  2569. unsigned cosOp = static_cast<unsigned>(IntrinsicOp::IOP_cos);
  2570. Function *sinFunc = GetOrCreateHLFunction(M, sinFuncTy, group, sinOp);
  2571. Function *cosFunc = GetOrCreateHLFunction(M, sinFuncTy, group, cosOp);
  2572. BasicBlock *BB = BasicBlock::Create(opFunc->getContext(), "Entry", opFunc);
  2573. IRBuilder<> Builder(BB);
  2574. auto argIter = opFunc->args().begin();
  2575. // Skip the opcode arg.
  2576. argIter++;
  2577. Argument *valArg = argIter++;
  2578. Argument *sinPtrArg = argIter++;
  2579. Argument *cosPtrArg = argIter++;
  2580. Value *sinOpArg =
  2581. ConstantInt::get(opcodeTy, sinOp);
  2582. Value *sinVal = Builder.CreateCall(sinFunc, {sinOpArg, valArg});
  2583. Builder.CreateStore(sinVal, sinPtrArg);
  2584. Value *cosOpArg =
  2585. ConstantInt::get(opcodeTy, cosOp);
  2586. Value *cosVal = Builder.CreateCall(cosFunc, {cosOpArg, valArg});
  2587. Builder.CreateStore(cosVal, cosPtrArg);
  2588. // Ret.
  2589. Builder.CreateRetVoid();
  2590. } break;
  2591. default:
  2592. opFunc = GetOrCreateHLFunction(M, funcTy, group, opcode);
  2593. break;
  2594. }
  2595. }
  2596. else if (group == HLOpcodeGroup::HLExtIntrinsic) {
  2597. llvm::StringRef fnName = F->getName();
  2598. llvm::StringRef groupName = GetHLOpcodeGroupNameByAttr(F);
  2599. opFunc = GetOrCreateHLFunction(M, funcTy, group, &groupName, &fnName, opcode);
  2600. }
  2601. else {
  2602. opFunc = GetOrCreateHLFunction(M, funcTy, group, opcode);
  2603. }
  2604. // Add attribute
  2605. if (F->hasFnAttribute(Attribute::ReadNone))
  2606. opFunc->addFnAttr(Attribute::ReadNone);
  2607. if (F->hasFnAttribute(Attribute::ReadOnly))
  2608. opFunc->addFnAttr(Attribute::ReadOnly);
  2609. return opFunc;
  2610. }
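// Rewrites one HLSL intrinsic function F: a new function is created whose
// first parameter is the i32 HL opcode, object pointer parameters (except
// stream outputs) are passed as the object value itself rather than by
// pointer, and every call site is rebuilt against the new signature. Double
// subscripts (e.g. Tex.mips[mipLevel][pos]) and bool vector subscripts get
// special handling below.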
  2611. static void AddOpcodeParamForIntrinsic(HLModule &HLM, Function *F,
  2612. unsigned opcode) {
  2613. llvm::Module &M = *HLM.GetModule();
  2614. llvm::FunctionType *oldFuncTy = F->getFunctionType();
  2615. SmallVector<llvm::Type *, 4> paramTyList;
  2616. // Add the opcode param
  2617. llvm::Type *opcodeTy = llvm::Type::getInt32Ty(M.getContext());
  2618. paramTyList.emplace_back(opcodeTy);
  2619. paramTyList.append(oldFuncTy->param_begin(), oldFuncTy->param_end());
  2620. for (unsigned i = 1; i < paramTyList.size(); i++) {
  2621. llvm::Type *Ty = paramTyList[i];
  2622. if (Ty->isPointerTy()) {
  2623. Ty = Ty->getPointerElementType();
  2624. if (HLModule::IsHLSLObjectType(Ty) &&
2625. // StreamOutput doesn't need a handle.
2626. !HLModule::IsStreamOutputType(Ty)) {
2627. // Use the object type directly, not by pointer.
2628. // This makes sure temporary object variables are only used by load/store.
  2629. paramTyList[i] = Ty;
  2630. }
  2631. }
  2632. }
  2633. HLOpcodeGroup group = hlsl::GetHLOpcodeGroup(F);
  2634. if (group == HLOpcodeGroup::HLSubscript &&
  2635. opcode == static_cast<unsigned>(HLSubscriptOpcode::VectorSubscript)) {
  2636. llvm::FunctionType *FT = F->getFunctionType();
  2637. llvm::Type *VecArgTy = FT->getParamType(0);
  2638. llvm::VectorType *VType =
  2639. cast<llvm::VectorType>(VecArgTy->getPointerElementType());
  2640. llvm::Type *Ty = VType->getElementType();
  2641. DXASSERT(Ty->isIntegerTy(), "Only bool could use VectorSubscript");
  2642. llvm::IntegerType *ITy = cast<IntegerType>(Ty);
  2643. DXASSERT_LOCALVAR(ITy, ITy->getBitWidth() == 1, "Only bool could use VectorSubscript");
  2644. // The return type is i8*.
  2645. // Replace all uses with i1*.
  2646. ReplaceBoolVectorSubscript(F);
  2647. return;
  2648. }
  2649. bool isDoubleSubscriptFunc = group == HLOpcodeGroup::HLSubscript &&
  2650. opcode == static_cast<unsigned>(HLSubscriptOpcode::DoubleSubscript);
  2651. llvm::Type *RetTy = oldFuncTy->getReturnType();
  2652. if (isDoubleSubscriptFunc) {
  2653. CallInst *doubleSub = cast<CallInst>(*F->user_begin());
  2654. // Change currentIdx type into coord type.
  2655. auto U = doubleSub->user_begin();
  2656. Value *user = *U;
  2657. CallInst *secSub = cast<CallInst>(user);
  2658. unsigned coordIdx = HLOperandIndex::kSubscriptIndexOpIdx;
2659. // The opcode operand has not been added yet, so the index needs -1.
  2660. if (GetHLOpcodeGroupByName(secSub->getCalledFunction()) == HLOpcodeGroup::NotHL)
  2661. coordIdx -= 1;
  2662. Value *coord = secSub->getArgOperand(coordIdx);
  2663. llvm::Type *coordTy = coord->getType();
  2664. paramTyList[HLOperandIndex::kSubscriptIndexOpIdx] = coordTy;
  2665. // Add the sampleIdx or mipLevel parameter to the end.
  2666. paramTyList.emplace_back(opcodeTy);
  2667. // Change return type to be resource ret type.
2668. // The opcode operand has not been added yet, so the index needs -1.
  2669. Value *objPtr = doubleSub->getArgOperand(HLOperandIndex::kSubscriptObjectOpIdx-1);
  2670. // Must be a GEP
  2671. GEPOperator *objGEP = cast<GEPOperator>(objPtr);
  2672. gep_type_iterator GEPIt = gep_type_begin(objGEP), E = gep_type_end(objGEP);
  2673. llvm::Type *resTy = nullptr;
  2674. while (GEPIt != E) {
  2675. if (HLModule::IsHLSLObjectType(*GEPIt)) {
  2676. resTy = *GEPIt;
  2677. break;
  2678. }
  2679. GEPIt++;
  2680. }
  2681. DXASSERT(resTy, "must find the resource type");
  2682. // Change object type to resource type.
  2683. paramTyList[HLOperandIndex::kSubscriptObjectOpIdx] = resTy;
2684. // Change RetTy into a pointer to the resource return type.
  2685. RetTy = cast<StructType>(resTy)->getElementType(0)->getPointerTo();
  2686. llvm::Type *sliceTy = objGEP->getType()->getPointerElementType();
  2687. DXIL::ResourceClass RC = HLM.GetResourceClass(sliceTy);
  2688. DXIL::ResourceKind RK = HLM.GetResourceKind(sliceTy);
  2689. HLM.AddResourceTypeAnnotation(resTy, RC, RK);
  2690. }
  2691. llvm::FunctionType *funcTy =
  2692. llvm::FunctionType::get(RetTy, paramTyList, false);
  2693. Function *opFunc = CreateOpFunction(M, F, funcTy, group, opcode);
  2694. StringRef lower = hlsl::GetHLLowerStrategy(F);
  2695. if (!lower.empty())
  2696. hlsl::SetHLLowerStrategy(opFunc, lower);
  2697. for (auto user = F->user_begin(); user != F->user_end();) {
  2698. // User must be a call.
  2699. CallInst *oldCI = cast<CallInst>(*(user++));
  2700. SmallVector<Value *, 4> opcodeParamList;
  2701. Value *opcodeConst = Constant::getIntegerValue(opcodeTy, APInt(32, opcode));
  2702. opcodeParamList.emplace_back(opcodeConst);
  2703. opcodeParamList.append(oldCI->arg_operands().begin(),
  2704. oldCI->arg_operands().end());
  2705. IRBuilder<> Builder(oldCI);
  2706. if (isDoubleSubscriptFunc) {
  2707. // Change obj to the resource pointer.
  2708. Value *objVal = opcodeParamList[HLOperandIndex::kSubscriptObjectOpIdx];
  2709. GEPOperator *objGEP = cast<GEPOperator>(objVal);
  2710. SmallVector<Value *, 8> IndexList;
  2711. IndexList.append(objGEP->idx_begin(), objGEP->idx_end());
  2712. Value *lastIndex = IndexList.back();
  2713. ConstantInt *constIndex = cast<ConstantInt>(lastIndex);
2714. DXASSERT_LOCALVAR(constIndex, constIndex->getLimitedValue() == 1, "last index must be 1");
  2715. // Remove the last index.
  2716. IndexList.pop_back();
  2717. objVal = objGEP->getPointerOperand();
  2718. if (IndexList.size() > 1)
  2719. objVal = Builder.CreateInBoundsGEP(objVal, IndexList);
  2720. // Change obj to the resource pointer.
  2721. opcodeParamList[HLOperandIndex::kSubscriptObjectOpIdx] = objVal;
  2722. // Set idx and mipIdx.
  2723. Value *mipIdx = opcodeParamList[HLOperandIndex::kSubscriptIndexOpIdx];
  2724. auto U = oldCI->user_begin();
  2725. Value *user = *U;
  2726. CallInst *secSub = cast<CallInst>(user);
  2727. unsigned idxOpIndex = HLOperandIndex::kSubscriptIndexOpIdx;
  2728. if (GetHLOpcodeGroupByName(secSub->getCalledFunction()) == HLOpcodeGroup::NotHL)
  2729. idxOpIndex--;
  2730. Value *idx = secSub->getArgOperand(idxOpIndex);
2731. DXASSERT(secSub->hasOneUse(), "subscript should only have one use");
  2732. // Add the sampleIdx or mipLevel parameter to the end.
  2733. opcodeParamList[HLOperandIndex::kSubscriptIndexOpIdx] = idx;
  2734. opcodeParamList.emplace_back(mipIdx);
  2735. // Insert new call before secSub to make sure idx is ready to use.
  2736. Builder.SetInsertPoint(secSub);
  2737. }
  2738. for (unsigned i = 1; i < opcodeParamList.size(); i++) {
  2739. Value *arg = opcodeParamList[i];
  2740. llvm::Type *Ty = arg->getType();
  2741. if (Ty->isPointerTy()) {
  2742. Ty = Ty->getPointerElementType();
  2743. if (HLModule::IsHLSLObjectType(Ty) &&
2744. // StreamOutput doesn't need a handle.
2745. !HLModule::IsStreamOutputType(Ty)) {
2746. // Use the object type directly, not by pointer.
2747. // This makes sure temporary object variables are only used by load/store.
  2748. if (GEPOperator *argGEP = dyn_cast<GEPOperator>(arg)) {
  2749. std::vector<Value*> idxList(argGEP->idx_begin(), argGEP->idx_end());
  2750. // Create instruction to avoid GEPOperator.
  2751. GetElementPtrInst *GEP = GetElementPtrInst::CreateInBounds(argGEP->getPointerOperand(),
  2752. idxList);
  2753. Builder.Insert(GEP);
  2754. arg = GEP;
  2755. }
  2756. opcodeParamList[i] = Builder.CreateLoad(arg);
  2757. }
  2758. }
  2759. }
  2760. Value *CI = Builder.CreateCall(opFunc, opcodeParamList);
  2761. if (!isDoubleSubscriptFunc) {
2762. // Replace uses with the new call and delete the old call.
  2763. oldCI->replaceAllUsesWith(CI);
  2764. oldCI->eraseFromParent();
  2765. } else {
2766. // For double subscript.
2767. // Replace the single user's uses with the new CI.
  2768. auto U = oldCI->user_begin();
  2769. Value *user = *U;
  2770. CallInst *secSub = cast<CallInst>(user);
  2771. secSub->replaceAllUsesWith(CI);
  2772. secSub->eraseFromParent();
  2773. oldCI->eraseFromParent();
  2774. }
  2775. }
  2776. // delete the function
  2777. F->eraseFromParent();
  2778. }
  2779. static void AddOpcodeParamForIntrinsics(HLModule &HLM
  2780. , std::unordered_map<Function *, unsigned> &intrinsicMap) {
  2781. for (auto mapIter = intrinsicMap.begin(); mapIter != intrinsicMap.end();
  2782. mapIter++) {
  2783. Function *F = mapIter->first;
  2784. if (F->user_empty()) {
  2785. // delete the function
  2786. F->eraseFromParent();
  2787. continue;
  2788. }
  2789. unsigned opcode = mapIter->second;
  2790. AddOpcodeParamForIntrinsic(HLM, F, opcode);
  2791. }
  2792. }
  2793. static void SimplifyScalarToVec1Splat(BitCastInst *BCI, std::vector<Instruction *> &deadInsts) {
  2794. Value *Ptr = BCI->getOperand(0);
2795. // For a case like SsaoBuffer[DTid.xy].xxx;
2796. // it is translated into
  2797. //%8 = bitcast float* %7 to <1 x float>*
  2798. //%9 = load <1 x float>, <1 x float>* %8
  2799. //%10 = shufflevector <1 x float> %9, <1 x float> undef, <3 x i32>
  2800. //zeroinitializer
  2801. // To remove the bitcast,
  2802. // We transform it into
  2803. // %8 = load float, float* %7
  2804. // %9 = insertelement <1 x float> undef, float %8, i64 0
  2805. // %10 = shufflevector <1 x float> %9, <1 x float> undef, <3 x i32>
  2806. // zeroinitializer
  2807. IRBuilder<> Builder(BCI);
  2808. Value *SVal = Builder.CreateLoad(Ptr);
  2809. Value *VVal = UndefValue::get(BCI->getType()->getPointerElementType());
  2810. VVal = Builder.CreateInsertElement(VVal, SVal, (uint64_t)0);
  2811. for (Value::user_iterator Iter = BCI->user_begin(), IterE = BCI->user_end();
  2812. Iter != IterE;) {
  2813. Instruction *I = cast<Instruction>(*(Iter++));
  2814. if (LoadInst *ldInst = dyn_cast<LoadInst>(I)) {
  2815. ldInst->replaceAllUsesWith(VVal);
  2816. deadInsts.emplace_back(ldInst);
  2817. } else if (GetElementPtrInst *GEP = dyn_cast<GetElementPtrInst>(I)) {
  2818. GEP->replaceAllUsesWith(Ptr);
  2819. deadInsts.emplace_back(GEP);
  2820. } else {
  2821. // Must be StoreInst here.
  2822. StoreInst *stInst = cast<StoreInst>(I);
  2823. Value *Val = stInst->getValueOperand();
  2824. IRBuilder<> Builder(stInst);
  2825. Val = Builder.CreateExtractElement(Val, (uint64_t)0);
  2826. Builder.CreateStore(Val, Ptr);
  2827. deadInsts.emplace_back(stInst);
  2828. }
  2829. }
  2830. deadInsts.emplace_back(BCI);
  2831. }
  2832. static void SimplifyVectorTrunc(BitCastInst *BCI, std::vector<Instruction *> &deadInsts) {
  2833. // Transform
  2834. //%a.addr = alloca <2 x float>, align 4
  2835. //%1 = bitcast <2 x float>* %a.addr to <1 x float>*
  2836. //%2 = getelementptr inbounds <1 x float>, <1 x float>* %1, i32 0, i32 0
  2837. // into
  2838. //%a.addr = alloca <2 x float>, align 4
  2839. //%2 = getelementptr inbounds <2 x float>, <2 x float>* %2, i32 0, i32 0
  2840. Value *bigVec = BCI->getOperand(0);
  2841. llvm::Type *idxTy = llvm::Type::getInt32Ty(BCI->getContext());
  2842. Constant *zeroIdx = ConstantInt::get(idxTy, 0);
  2843. unsigned vecSize = bigVec->getType()->getPointerElementType()->getVectorNumElements();
  2844. for (auto It = BCI->user_begin(), E = BCI->user_end(); It != E;) {
  2845. Instruction *I = cast<Instruction>(*(It++));
  2846. if (GetElementPtrInst *GEP = dyn_cast<GetElementPtrInst>(I)) {
  2847. DXASSERT_NOMSG(
  2848. !isa<llvm::VectorType>(GEP->getType()->getPointerElementType()));
  2849. IRBuilder<> Builder(GEP);
  2850. std::vector<Value *> idxList(GEP->idx_begin(), GEP->idx_end());
  2851. Value *NewGEP = Builder.CreateInBoundsGEP(bigVec, idxList);
  2852. GEP->replaceAllUsesWith(NewGEP);
  2853. deadInsts.emplace_back(GEP);
  2854. } else if (LoadInst *LI = dyn_cast<LoadInst>(I)) {
  2855. IRBuilder<> Builder(LI);
  2856. Value *NewLI = Builder.CreateLoad(bigVec);
  2857. NewLI = Builder.CreateShuffleVector(NewLI, NewLI, {0});
  2858. LI->replaceAllUsesWith(NewLI);
  2859. deadInsts.emplace_back(LI);
  2860. } else if (StoreInst *SI = dyn_cast<StoreInst>(I)) {
  2861. Value *V = SI->getValueOperand();
2862. IRBuilder<> Builder(SI);
  2863. for (unsigned i = 0; i < vecSize; i++) {
  2864. Value *Elt = Builder.CreateExtractElement(V, i);
  2865. Value *EltGEP = Builder.CreateInBoundsGEP(
  2866. bigVec, {zeroIdx, ConstantInt::get(idxTy, i)});
  2867. Builder.CreateStore(Elt, EltGEP);
  2868. }
  2869. deadInsts.emplace_back(SI);
  2870. } else {
2871. DXASSERT(0, "not supported yet");
  2872. }
  2873. }
  2874. deadInsts.emplace_back(BCI);
  2875. }
  2876. static void SimplifyArrayToVector(Value *Cast, Value *Ptr, llvm::Type *i32Ty,
  2877. std::vector<Instruction *> &deadInsts) {
  2878. // Transform
  2879. // %4 = bitcast [4 x i32]* %Val2 to <4 x i32>*
  2880. // store <4 x i32> %5, <4 x i32>* %4, !tbaa !0
  2881. // Into
  2882. //%6 = extractelement <4 x i32> %5, i64 0
  2883. //%7 = getelementptr inbounds [4 x i32], [4 x i32]* %Val2, i32 0, i32 0
  2884. // store i32 %6, i32* %7
  2885. //%8 = extractelement <4 x i32> %5, i64 1
  2886. //%9 = getelementptr inbounds [4 x i32], [4 x i32]* %Val2, i32 0, i32 1
  2887. // store i32 %8, i32* %9
  2888. //%10 = extractelement <4 x i32> %5, i64 2
  2889. //%11 = getelementptr inbounds [4 x i32], [4 x i32]* %Val2, i32 0, i32 2
  2890. // store i32 %10, i32* %11
  2891. //%12 = extractelement <4 x i32> %5, i64 3
  2892. //%13 = getelementptr inbounds [4 x i32], [4 x i32]* %Val2, i32 0, i32 3
  2893. // store i32 %12, i32* %13
  2894. Value *zeroIdx = ConstantInt::get(i32Ty, 0);
  2895. for (User *U : Cast->users()) {
  2896. if (LoadInst *LI = dyn_cast<LoadInst>(U)) {
  2897. IRBuilder<> Builder(LI);
  2898. unsigned vecSize = LI->getType()->getVectorNumElements();
  2899. Value *NewLd = UndefValue::get(LI->getType());
  2900. for (unsigned i = 0; i < vecSize; i++) {
  2901. Value *GEP = Builder.CreateInBoundsGEP(
  2902. Ptr, {zeroIdx, ConstantInt::get(i32Ty, i)});
  2903. Value *Elt = Builder.CreateLoad(GEP);
  2904. NewLd = Builder.CreateInsertElement(NewLd, Elt, i);
  2905. }
  2906. LI->replaceAllUsesWith(NewLd);
  2907. deadInsts.emplace_back(LI);
  2908. } else if (StoreInst *SI = dyn_cast<StoreInst>(U)) {
  2909. Value *V = SI->getValueOperand();
  2910. IRBuilder<> Builder(SI);
  2911. unsigned vecSize = V->getType()->getVectorNumElements();
  2912. for (unsigned i = 0; i < vecSize; i++) {
  2913. Value *Elt = Builder.CreateExtractElement(V, i);
  2914. Value *GEP = Builder.CreateInBoundsGEP(
  2915. Ptr, {zeroIdx, ConstantInt::get(i32Ty, i)});
  2916. Builder.CreateStore(Elt, GEP);
  2917. }
  2918. deadInsts.emplace_back(SI);
  2919. } else {
2920. DXASSERT(0, "not supported yet");
  2921. }
  2922. }
  2923. }
  2924. static void SimplifyArrayToVector(BitCastInst *BCI, std::vector<Instruction *> &deadInsts) {
  2925. Value *Ptr = BCI->getOperand(0);
  2926. llvm::Type *i32Ty = llvm::Type::getInt32Ty(BCI->getContext());
  2927. SimplifyArrayToVector(BCI, Ptr, i32Ty, deadInsts);
  2928. deadInsts.emplace_back(BCI);
  2929. }
  2930. static void SimplifyBoolCast(BitCastInst *BCI, llvm::Type *i1Ty, std::vector<Instruction *> &deadInsts) {
  2931. // Transform
  2932. //%22 = bitcast i1* %21 to i32*
  2933. //%23 = load i32, i32* %22, !tbaa !3, !range !7
  2934. //%tobool5 = icmp ne i32 %23, 0
  2935. // To
  2936. //%tobool5 = load i1, i1* %21, !tbaa !3, !range !7
  2937. Value *i1Ptr = BCI->getOperand(0);
  2938. for (User *U : BCI->users()) {
  2939. if (LoadInst *LI = dyn_cast<LoadInst>(U)) {
  2940. if (!LI->hasOneUse()) {
  2941. continue;
  2942. }
  2943. if (ICmpInst *II = dyn_cast<ICmpInst>(*LI->user_begin())) {
  2944. if (ConstantInt *CI = dyn_cast<ConstantInt>(II->getOperand(1))) {
  2945. if (CI->getLimitedValue() == 0 &&
  2946. II->getPredicate() == CmpInst::ICMP_NE) {
  2947. IRBuilder<> Builder(LI);
  2948. Value *i1Val = Builder.CreateLoad(i1Ptr);
  2949. II->replaceAllUsesWith(i1Val);
  2950. deadInsts.emplace_back(LI);
  2951. deadInsts.emplace_back(II);
  2952. }
  2953. }
  2954. }
  2955. }
  2956. }
  2957. deadInsts.emplace_back(BCI);
  2958. }
  2959. typedef float(__cdecl *FloatUnaryEvalFuncType)(float);
  2960. typedef double(__cdecl *DoubleUnaryEvalFuncType)(double);
  2961. typedef float(__cdecl *FloatBinaryEvalFuncType)(float, float);
  2962. typedef double(__cdecl *DoubleBinaryEvalFuncType)(double, double);
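// Constant-folding helpers: when an HL intrinsic call has only literal
// arguments, evaluate it on the host with the matching libm function and
// replace the call with the resulting ConstantFP. For example, a call such as
// tan(1.0) with a constant operand simply becomes the evaluated constant.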
  2963. static Value * EvalUnaryIntrinsic(CallInst *CI,
  2964. FloatUnaryEvalFuncType floatEvalFunc,
  2965. DoubleUnaryEvalFuncType doubleEvalFunc) {
  2966. Value *V = CI->getArgOperand(0);
  2967. ConstantFP *fpV = cast<ConstantFP>(V);
  2968. llvm::Type *Ty = CI->getType();
  2969. Value *Result = nullptr;
  2970. if (Ty->isDoubleTy()) {
  2971. double dV = fpV->getValueAPF().convertToDouble();
  2972. Value *dResult = ConstantFP::get(V->getType(), doubleEvalFunc(dV));
  2973. CI->replaceAllUsesWith(dResult);
  2974. Result = dResult;
  2975. } else {
  2976. DXASSERT_NOMSG(Ty->isFloatTy());
  2977. float fV = fpV->getValueAPF().convertToFloat();
  2978. Value *dResult = ConstantFP::get(V->getType(), floatEvalFunc(fV));
  2979. CI->replaceAllUsesWith(dResult);
  2980. Result = dResult;
  2981. }
  2982. CI->eraseFromParent();
  2983. return Result;
  2984. }
  2985. static Value * EvalBinaryIntrinsic(CallInst *CI,
  2986. FloatBinaryEvalFuncType floatEvalFunc,
  2987. DoubleBinaryEvalFuncType doubleEvalFunc) {
  2988. Value *V0 = CI->getArgOperand(0);
  2989. ConstantFP *fpV0 = cast<ConstantFP>(V0);
  2990. Value *V1 = CI->getArgOperand(1);
  2991. ConstantFP *fpV1 = cast<ConstantFP>(V1);
  2992. llvm::Type *Ty = CI->getType();
  2993. Value *Result = nullptr;
  2994. if (Ty->isDoubleTy()) {
  2995. double dV0 = fpV0->getValueAPF().convertToDouble();
  2996. double dV1 = fpV1->getValueAPF().convertToDouble();
  2997. Value *dResult = ConstantFP::get(V0->getType(), doubleEvalFunc(dV0, dV1));
  2998. CI->replaceAllUsesWith(dResult);
  2999. Result = dResult;
  3000. } else {
  3001. DXASSERT_NOMSG(Ty->isFloatTy());
  3002. float fV0 = fpV0->getValueAPF().convertToFloat();
  3003. float fV1 = fpV1->getValueAPF().convertToFloat();
  3004. Value *dResult = ConstantFP::get(V0->getType(), floatEvalFunc(fV0, fV1));
  3005. CI->replaceAllUsesWith(dResult);
  3006. Result = dResult;
  3007. }
  3008. CI->eraseFromParent();
  3009. return Result;
  3010. }
  3011. static Value * TryEvalIntrinsic(CallInst *CI, IntrinsicOp intriOp) {
  3012. switch (intriOp) {
  3013. case IntrinsicOp::IOP_tan: {
  3014. return EvalUnaryIntrinsic(CI, tanf, tan);
  3015. } break;
  3016. case IntrinsicOp::IOP_tanh: {
  3017. return EvalUnaryIntrinsic(CI, tanhf, tanh);
  3018. } break;
  3019. case IntrinsicOp::IOP_sin: {
  3020. return EvalUnaryIntrinsic(CI, sinf, sin);
  3021. } break;
  3022. case IntrinsicOp::IOP_sinh: {
  3023. return EvalUnaryIntrinsic(CI, sinhf, sinh);
  3024. } break;
  3025. case IntrinsicOp::IOP_cos: {
  3026. return EvalUnaryIntrinsic(CI, cosf, cos);
  3027. } break;
  3028. case IntrinsicOp::IOP_cosh: {
  3029. return EvalUnaryIntrinsic(CI, coshf, cosh);
  3030. } break;
  3031. case IntrinsicOp::IOP_asin: {
  3032. return EvalUnaryIntrinsic(CI, asinf, asin);
  3033. } break;
  3034. case IntrinsicOp::IOP_acos: {
  3035. return EvalUnaryIntrinsic(CI, acosf, acos);
  3036. } break;
  3037. case IntrinsicOp::IOP_atan: {
  3038. return EvalUnaryIntrinsic(CI, atanf, atan);
  3039. } break;
  3040. case IntrinsicOp::IOP_atan2: {
  3041. Value *V0 = CI->getArgOperand(0);
  3042. ConstantFP *fpV0 = cast<ConstantFP>(V0);
  3043. Value *V1 = CI->getArgOperand(1);
  3044. ConstantFP *fpV1 = cast<ConstantFP>(V1);
  3045. llvm::Type *Ty = CI->getType();
  3046. Value *Result = nullptr;
  3047. if (Ty->isDoubleTy()) {
  3048. double dV0 = fpV0->getValueAPF().convertToDouble();
  3049. double dV1 = fpV1->getValueAPF().convertToDouble();
  3050. Value *atanV = ConstantFP::get(CI->getType(), atan(dV0 / dV1));
  3051. CI->replaceAllUsesWith(atanV);
  3052. Result = atanV;
  3053. } else {
  3054. DXASSERT_NOMSG(Ty->isFloatTy());
  3055. float fV0 = fpV0->getValueAPF().convertToFloat();
  3056. float fV1 = fpV1->getValueAPF().convertToFloat();
  3057. Value *atanV = ConstantFP::get(CI->getType(), atanf(fV0 / fV1));
  3058. CI->replaceAllUsesWith(atanV);
  3059. Result = atanV;
  3060. }
  3061. CI->eraseFromParent();
  3062. return Result;
  3063. } break;
  3064. case IntrinsicOp::IOP_sqrt: {
  3065. return EvalUnaryIntrinsic(CI, sqrtf, sqrt);
  3066. } break;
  3067. case IntrinsicOp::IOP_rsqrt: {
  3068. auto rsqrtF = [](float v) -> float { return 1.0 / sqrtf(v); };
  3069. auto rsqrtD = [](double v) -> double { return 1.0 / sqrt(v); };
  3070. return EvalUnaryIntrinsic(CI, rsqrtF, rsqrtD);
  3071. } break;
  3072. case IntrinsicOp::IOP_exp: {
  3073. return EvalUnaryIntrinsic(CI, expf, exp);
  3074. } break;
  3075. case IntrinsicOp::IOP_exp2: {
  3076. return EvalUnaryIntrinsic(CI, exp2f, exp2);
  3077. } break;
  3078. case IntrinsicOp::IOP_log: {
  3079. return EvalUnaryIntrinsic(CI, logf, log);
  3080. } break;
  3081. case IntrinsicOp::IOP_log10: {
  3082. return EvalUnaryIntrinsic(CI, log10f, log10);
  3083. } break;
  3084. case IntrinsicOp::IOP_log2: {
  3085. return EvalUnaryIntrinsic(CI, log2f, log2);
  3086. } break;
  3087. case IntrinsicOp::IOP_pow: {
  3088. return EvalBinaryIntrinsic(CI, powf, pow);
  3089. } break;
  3090. case IntrinsicOp::IOP_max: {
  3091. auto maxF = [](float a, float b) -> float { return a > b ? a:b; };
  3092. auto maxD = [](double a, double b) -> double { return a > b ? a:b; };
  3093. return EvalBinaryIntrinsic(CI, maxF, maxD);
  3094. } break;
  3095. case IntrinsicOp::IOP_min: {
  3096. auto minF = [](float a, float b) -> float { return a < b ? a:b; };
  3097. auto minD = [](double a, double b) -> double { return a < b ? a:b; };
  3098. return EvalBinaryIntrinsic(CI, minF, minD);
  3099. } break;
  3100. case IntrinsicOp::IOP_rcp: {
  3101. auto rcpF = [](float v) -> float { return 1.0 / v; };
  3102. auto rcpD = [](double v) -> double { return 1.0 / v; };
  3103. return EvalUnaryIntrinsic(CI, rcpF, rcpD);
  3104. } break;
  3105. case IntrinsicOp::IOP_ceil: {
  3106. return EvalUnaryIntrinsic(CI, ceilf, ceil);
  3107. } break;
  3108. case IntrinsicOp::IOP_floor: {
  3109. return EvalUnaryIntrinsic(CI, floorf, floor);
  3110. } break;
  3111. case IntrinsicOp::IOP_round: {
  3112. return EvalUnaryIntrinsic(CI, roundf, round);
  3113. } break;
  3114. case IntrinsicOp::IOP_trunc: {
  3115. return EvalUnaryIntrinsic(CI, truncf, trunc);
  3116. } break;
  3117. case IntrinsicOp::IOP_frac: {
3118. auto fracF = [](float v) -> float {
3119. // HLSL frac(x) is x - floor(x).
3120. return v - floorf(v);
3121. };
3122. auto fracD = [](double v) -> double {
3123. // frexp would return the mantissa, not the fractional part.
3124. return v - floor(v);
3125. };
3126. return EvalUnaryIntrinsic(CI, fracF, fracD);
  3127. } break;
  3128. case IntrinsicOp::IOP_isnan: {
  3129. Value *V = CI->getArgOperand(0);
  3130. ConstantFP *fV = cast<ConstantFP>(V);
  3131. bool isNan = fV->getValueAPF().isNaN();
  3132. Constant *cNan = ConstantInt::get(CI->getType(), isNan ? 1 : 0);
  3133. CI->replaceAllUsesWith(cNan);
  3134. CI->eraseFromParent();
  3135. return cNan;
  3136. } break;
  3137. case IntrinsicOp::IOP_firstbithigh: {
  3138. Value *V = CI->getArgOperand(0);
  3139. ConstantInt *iV = cast<ConstantInt>(V);
  3140. APInt v = iV->getValue();
  3141. Value *firstbit = nullptr;
  3142. if (v == 0) {
  3143. firstbit = ConstantInt::get(CI->getType(), -1);
  3144. } else {
  3145. bool mask = true;
  3146. if (v.isNegative())
  3147. mask = false;
  3148. unsigned bitWidth = v.getBitWidth();
  3149. for (int i = bitWidth - 2; i >= 0; i--) {
  3150. if (v[i] == mask) {
  3151. firstbit = ConstantInt::get(CI->getType(), bitWidth-1-i);
  3152. break;
  3153. }
  3154. }
  3155. }
  3156. CI->replaceAllUsesWith(firstbit);
  3157. CI->eraseFromParent();
  3158. return firstbit;
  3159. } break;
  3160. case IntrinsicOp::IOP_ufirstbithigh: {
  3161. Value *V = CI->getArgOperand(0);
  3162. ConstantInt *iV = cast<ConstantInt>(V);
  3163. APInt v = iV->getValue();
  3164. Value *firstbit = nullptr;
  3165. if (v == 0) {
  3166. firstbit = ConstantInt::get(CI->getType(), -1);
  3167. } else {
  3168. unsigned bitWidth = v.getBitWidth();
  3169. for (int i = bitWidth - 1; i >= 0; i--) {
  3170. if (v[i]) {
  3171. firstbit = ConstantInt::get(CI->getType(), bitWidth-1-i);
  3172. break;
  3173. }
  3174. }
  3175. }
  3176. CI->replaceAllUsesWith(firstbit);
  3177. CI->eraseFromParent();
  3178. return firstbit;
  3179. } break;
  3180. default:
  3181. return nullptr;
  3182. }
  3183. }
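// Per-instruction cleanups: fold away pointer bitcasts that only change the
// "shape" of the pointee (scalar <-> <1 x T>, vector truncation, [N x T] <->
// <N x T>, i1 <-> i32 bools), assert that matrix loads/stores went through the
// HL matrix intrinsics, and mask shift amounts into [0, bitWidth-1]
// (e.g. a shift by 33 on an i32 value becomes a shift by 1).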
  3184. static void SimpleTransformForHLDXIR(Instruction *I,
  3185. std::vector<Instruction *> &deadInsts) {
  3186. unsigned opcode = I->getOpcode();
  3187. switch (opcode) {
  3188. case Instruction::BitCast: {
  3189. BitCastInst *BCI = cast<BitCastInst>(I);
  3190. llvm::Type *ToTy = BCI->getType();
  3191. llvm::Type *FromTy = BCI->getOperand(0)->getType();
  3192. if (ToTy->isPointerTy() && FromTy->isPointerTy()) {
  3193. ToTy = ToTy->getPointerElementType();
  3194. FromTy = FromTy->getPointerElementType();
  3195. llvm::Type *i1Ty = llvm::Type::getInt1Ty(ToTy->getContext());
  3196. if (ToTy->isVectorTy()) {
  3197. unsigned vecSize = ToTy->getVectorNumElements();
  3198. if (vecSize == 1 &&
  3199. ToTy->getVectorElementType() == FromTy) {
  3200. SimplifyScalarToVec1Splat(BCI, deadInsts);
  3201. } else if (FromTy->isVectorTy() && vecSize == 1) {
  3202. if (FromTy->getScalarType() == ToTy->getScalarType()) {
  3203. SimplifyVectorTrunc(BCI, deadInsts);
  3204. }
  3205. } else if (FromTy->isArrayTy()) {
  3206. llvm::Type *FromEltTy = FromTy->getArrayElementType();
  3207. llvm::Type *ToEltTy = ToTy->getVectorElementType();
  3208. if (FromTy->getArrayNumElements() == vecSize &&
  3209. FromEltTy == ToEltTy) {
  3210. SimplifyArrayToVector(BCI, deadInsts);
  3211. }
  3212. }
  3213. }
  3214. else if (FromTy == i1Ty) {
  3215. SimplifyBoolCast(BCI, i1Ty, deadInsts);
  3216. }
  3217. // TODO: support array to array cast.
  3218. }
  3219. } break;
  3220. case Instruction::Load: {
  3221. LoadInst *ldInst = cast<LoadInst>(I);
  3222. DXASSERT_LOCALVAR(ldInst, !HLMatrixLower::IsMatrixType(ldInst->getType()),
  3223. "matrix load should use HL LdStMatrix");
  3224. } break;
  3225. case Instruction::Store: {
  3226. StoreInst *stInst = cast<StoreInst>(I);
  3227. Value *V = stInst->getValueOperand();
  3228. DXASSERT_LOCALVAR(V, !HLMatrixLower::IsMatrixType(V->getType()),
  3229. "matrix store should use HL LdStMatrix");
  3230. } break;
  3231. case Instruction::LShr:
  3232. case Instruction::AShr:
  3233. case Instruction::Shl: {
  3234. llvm::BinaryOperator *BO = cast<llvm::BinaryOperator>(I);
  3235. Value *op2 = BO->getOperand(1);
  3236. IntegerType *Ty = cast<IntegerType>(BO->getType()->getScalarType());
  3237. unsigned bitWidth = Ty->getBitWidth();
3238. // Mask op2 into the range [0, bitWidth-1].
  3239. if (ConstantInt *cOp2 = dyn_cast<ConstantInt>(op2)) {
  3240. unsigned iOp2 = cOp2->getLimitedValue();
  3241. unsigned clampedOp2 = iOp2 & (bitWidth - 1);
  3242. if (iOp2 != clampedOp2) {
  3243. BO->setOperand(1, ConstantInt::get(op2->getType(), clampedOp2));
  3244. }
  3245. } else {
  3246. Value *mask = ConstantInt::get(op2->getType(), bitWidth - 1);
  3247. IRBuilder<> Builder(I);
  3248. op2 = Builder.CreateAnd(op2, mask);
  3249. BO->setOperand(1, op2);
  3250. }
  3251. } break;
  3252. }
  3253. }
3254. // Do simple transforms to make later lowering passes easier.
  3255. static void SimpleTransformForHLDXIR(llvm::Module *pM) {
  3256. std::vector<Instruction *> deadInsts;
  3257. for (Function &F : pM->functions()) {
  3258. for (BasicBlock &BB : F.getBasicBlockList()) {
  3259. for (BasicBlock::iterator Iter = BB.begin(); Iter != BB.end(); ) {
  3260. Instruction *I = (Iter++);
  3261. SimpleTransformForHLDXIR(I, deadInsts);
  3262. }
  3263. }
  3264. }
  3265. llvm::Type *i32Ty = llvm::Type::getInt32Ty(pM->getContext());
  3266. for (GlobalVariable &GV : pM->globals()) {
  3267. if (HLModule::IsStaticGlobal(&GV)) {
  3268. for (User *U : GV.users()) {
  3269. if (BitCastOperator *BCO = dyn_cast<BitCastOperator>(U)) {
  3270. llvm::Type *ToTy = BCO->getType();
  3271. llvm::Type *FromTy = BCO->getOperand(0)->getType();
  3272. if (ToTy->isPointerTy() && FromTy->isPointerTy()) {
  3273. ToTy = ToTy->getPointerElementType();
  3274. FromTy = FromTy->getPointerElementType();
  3275. if (ToTy->isVectorTy()) {
  3276. unsigned vecSize = ToTy->getVectorNumElements();
  3277. if (FromTy->isArrayTy()) {
  3278. llvm::Type *FromEltTy = FromTy->getArrayElementType();
  3279. llvm::Type *ToEltTy = ToTy->getVectorElementType();
  3280. if (FromTy->getArrayNumElements() == vecSize &&
  3281. FromEltTy == ToEltTy) {
  3282. SimplifyArrayToVector(BCO, &GV, i32Ty, deadInsts);
  3283. }
  3284. }
  3285. }
  3286. // TODO: support array to array cast.
  3287. }
  3288. }
  3289. }
  3290. }
  3291. }
  3292. for (Instruction * I : deadInsts)
  3293. I->dropAllReferences();
  3294. for (Instruction * I : deadInsts)
  3295. I->eraseFromParent();
  3296. }
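// Finalize the HL module after all declarations and bodies have been emitted:
// pick the entry function, drop unreachable functions, clone clip-plane
// values into SV_ClipPlane globals, lay out and materialize cbuffers, inline
// global constructor calls into the entry block, add explicit opcode
// parameters to HL intrinsics, fix up linkage, and run the simple IR
// transforms above.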
  3297. void CGMSHLSLRuntime::FinishCodeGen() {
  3298. SetEntryFunction();
  3299. // If at this point we haven't determined the entry function it's an error.
  3300. if (m_pHLModule->GetEntryFunction() == nullptr) {
  3301. assert(CGM.getDiags().hasErrorOccurred() &&
  3302. "else SetEntryFunction should have reported this condition");
  3303. return;
  3304. }
  3305. // Remove all useless functions.
  3306. if (!CGM.getCodeGenOpts().HLSLHighLevel) {
  3307. Function *patchConstantFunc = nullptr;
  3308. if (m_pHLModule->GetShaderModel()->IsHS()) {
  3309. patchConstantFunc = m_pHLModule->GetHLFunctionProps(EntryFunc)
  3310. .ShaderProps.HS.patchConstantFunc;
  3311. }
  3312. std::unordered_set<Function *> DeadFuncSet;
  3313. for (auto FIt = TheModule.functions().begin(),
  3314. FE = TheModule.functions().end();
  3315. FIt != FE;) {
  3316. Function *F = FIt++;
  3317. if (F != EntryFunc && F != patchConstantFunc && !F->isDeclaration()) {
  3318. if (F->user_empty())
  3319. F->eraseFromParent();
  3320. else
  3321. DeadFuncSet.insert(F);
  3322. }
  3323. }
  3324. while (!DeadFuncSet.empty()) {
  3325. bool noUpdate = true;
  3326. for (auto FIt = DeadFuncSet.begin(), FE = DeadFuncSet.end(); FIt != FE;) {
  3327. Function *F = *(FIt++);
  3328. if (F->user_empty()) {
  3329. DeadFuncSet.erase(F);
  3330. F->eraseFromParent();
  3331. noUpdate = false;
  3332. }
  3333. }
3334. // Avoid an infinite loop when the remaining functions reference each other.
  3335. if (noUpdate)
  3336. break;
  3337. }
  3338. // Remove unused external function.
  3339. for (auto FIt = TheModule.functions().begin(),
  3340. FE = TheModule.functions().end();
  3341. FIt != FE;) {
  3342. Function *F = FIt++;
  3343. if (F->isDeclaration() && F->user_empty()) {
  3344. if (m_IntrinsicMap.count(F))
  3345. m_IntrinsicMap.erase(F);
  3346. F->eraseFromParent();
  3347. }
  3348. }
  3349. }
3350. // Create copies of the clip-plane values into dedicated globals.
  3351. for (Function *F : clipPlaneFuncList) {
  3352. HLFunctionProps &props = m_pHLModule->GetHLFunctionProps(F);
  3353. IRBuilder<> Builder(F->getEntryBlock().getFirstInsertionPt());
  3354. for (unsigned i = 0; i < DXIL::kNumClipPlanes; i++) {
  3355. Value *clipPlane = props.ShaderProps.VS.clipPlanes[i];
  3356. if (!clipPlane)
  3357. continue;
  3358. if (m_bDebugInfo) {
  3359. Builder.SetCurrentDebugLocation(debugInfoMap[clipPlane]);
  3360. }
  3361. llvm::Type *Ty = clipPlane->getType()->getPointerElementType();
  3362. // Constant *zeroInit = ConstantFP::get(Ty, 0);
  3363. GlobalVariable *GV = new llvm::GlobalVariable(
  3364. TheModule, Ty, /*IsConstant*/ false, // constant false to store.
  3365. llvm::GlobalValue::ExternalLinkage,
  3366. /*InitVal*/ nullptr, Twine("SV_ClipPlane") + Twine(i));
  3367. Value *initVal = Builder.CreateLoad(clipPlane);
  3368. Builder.CreateStore(initVal, GV);
  3369. props.ShaderProps.VS.clipPlanes[i] = GV;
  3370. }
  3371. }
  3372. // Allocate constant buffers.
  3373. AllocateDxilConstantBuffers(m_pHLModule);
3374. // TODO: create a temp variable for constants that have store uses.
  3375. // Create Global variable and type annotation for each CBuffer.
  3376. ConstructCBuffer(m_pHLModule, CBufferType, m_ConstVarAnnotationMap);
3377. // Add calls to global constructors at the start of the entry function.
  3378. auto AddGlobalCall = [&](StringRef globalName, Instruction *InsertPt) {
  3379. GlobalVariable *GV = TheModule.getGlobalVariable(globalName);
  3380. if (GV) {
  3381. if (ConstantArray *CA = dyn_cast<ConstantArray>(GV->getInitializer())) {
  3382. IRBuilder<> Builder(InsertPt);
  3383. for (User::op_iterator i = CA->op_begin(), e = CA->op_end(); i != e;
  3384. ++i) {
  3385. if (isa<ConstantAggregateZero>(*i))
  3386. continue;
  3387. ConstantStruct *CS = cast<ConstantStruct>(*i);
  3388. if (isa<ConstantPointerNull>(CS->getOperand(1)))
  3389. continue;
  3390. // Must have a function or null ptr.
  3391. if (!isa<Function>(CS->getOperand(1)))
  3392. continue;
  3393. Function *Ctor = cast<Function>(CS->getOperand(1));
  3394. assert(Ctor->getReturnType()->isVoidTy() && Ctor->arg_size() == 0 &&
  3395. "function type must be void (void)");
  3396. Builder.CreateCall(Ctor);
  3397. }
  3398. // remove the GV
  3399. GV->eraseFromParent();
  3400. }
  3401. }
  3402. };
3403. // TODO: is the same needed for "llvm.global_dtors"?
  3404. AddGlobalCall("llvm.global_ctors",
  3405. EntryFunc->getEntryBlock().getFirstInsertionPt());
3406. // Translate opcodes into an explicit parameter for intrinsic functions.
  3407. AddOpcodeParamForIntrinsics(*m_pHLModule, m_IntrinsicMap);
  3408. // Pin entry point and constant buffers, mark everything else internal.
  3409. for (Function &f : m_pHLModule->GetModule()->functions()) {
  3410. if (&f == m_pHLModule->GetEntryFunction() || IsPatchConstantFunction(&f) ||
  3411. f.isDeclaration()) {
  3412. f.setLinkage(GlobalValue::LinkageTypes::ExternalLinkage);
  3413. } else {
  3414. f.setLinkage(GlobalValue::LinkageTypes::InternalLinkage);
  3415. }
  3416. // Always inline.
  3417. f.addFnAttr(llvm::Attribute::AlwaysInline);
  3418. }
3419. // Do simple transforms to make later lowering passes easier.
  3420. SimpleTransformForHLDXIR(m_pHLModule->GetModule());
  3421. // Add semantic defines for extensions if any are available.
  3422. if (CGM.getCodeGenOpts().HLSLExtensionsCodegen) {
  3423. HLSLExtensionsCodegenHelper::SemanticDefineErrorList errors =
  3424. CGM.getCodeGenOpts().HLSLExtensionsCodegen->WriteSemanticDefines(m_pHLModule->GetModule());
  3425. DiagnosticsEngine &Diags = CGM.getDiags();
  3426. for (const HLSLExtensionsCodegenHelper::SemanticDefineError& error : errors) {
  3427. DiagnosticsEngine::Level level = DiagnosticsEngine::Error;
  3428. if (error.IsWarning())
  3429. level = DiagnosticsEngine::Warning;
  3430. unsigned DiagID = Diags.getCustomDiagID(level, "%0");
  3431. Diags.Report(SourceLocation::getFromRawEncoding(error.Location()), DiagID) << error.Message();
  3432. }
  3433. }
  3434. }
  3435. RValue CGMSHLSLRuntime::EmitHLSLBuiltinCallExpr(CodeGenFunction &CGF,
  3436. const FunctionDecl *FD,
  3437. const CallExpr *E,
  3438. ReturnValueSlot ReturnValue) {
  3439. StringRef name = FD->getName();
  3440. const Decl *TargetDecl = E->getCalleeDecl();
  3441. llvm::Value *Callee = CGF.EmitScalarExpr(E->getCallee());
  3442. RValue RV = CGF.EmitCall(E->getCallee()->getType(), Callee, E, ReturnValue,
  3443. TargetDecl);
  3444. if (RV.isScalar() && RV.getScalarVal() != nullptr) {
  3445. if (CallInst *CI = dyn_cast<CallInst>(RV.getScalarVal())) {
  3446. Function *F = CI->getCalledFunction();
  3447. HLOpcodeGroup group = hlsl::GetHLOpcodeGroup(F);
  3448. if (group == HLOpcodeGroup::HLIntrinsic) {
  3449. bool allOperandImm = true;
  3450. for (auto &operand : CI->arg_operands()) {
  3451. bool isImm = isa<ConstantInt>(operand) || isa<ConstantFP>(operand);
  3452. if (!isImm) {
  3453. allOperandImm = false;
  3454. break;
  3455. }
  3456. }
  3457. if (allOperandImm) {
  3458. unsigned intrinsicOpcode;
  3459. StringRef intrinsicGroup;
  3460. hlsl::GetIntrinsicOp(FD, intrinsicOpcode, intrinsicGroup);
  3461. IntrinsicOp opcode = static_cast<IntrinsicOp>(intrinsicOpcode);
  3462. if (Value *Result = TryEvalIntrinsic(CI, opcode)) {
  3463. RV = RValue::get(Result);
  3464. }
  3465. }
  3466. }
  3467. }
  3468. }
  3469. return RV;
  3470. }
  3471. static HLOpcodeGroup GetHLOpcodeGroup(const clang::Stmt::StmtClass stmtClass) {
  3472. switch (stmtClass) {
  3473. case Stmt::CStyleCastExprClass:
  3474. case Stmt::ImplicitCastExprClass:
  3475. case Stmt::CXXFunctionalCastExprClass:
  3476. return HLOpcodeGroup::HLCast;
  3477. case Stmt::InitListExprClass:
  3478. return HLOpcodeGroup::HLInit;
  3479. case Stmt::BinaryOperatorClass:
  3480. case Stmt::CompoundAssignOperatorClass:
  3481. return HLOpcodeGroup::HLBinOp;
  3482. case Stmt::UnaryOperatorClass:
  3483. return HLOpcodeGroup::HLUnOp;
  3484. case Stmt::ExtMatrixElementExprClass:
  3485. return HLOpcodeGroup::HLSubscript;
  3486. case Stmt::CallExprClass:
  3487. return HLOpcodeGroup::HLIntrinsic;
  3488. case Stmt::ConditionalOperatorClass:
  3489. return HLOpcodeGroup::HLSelect;
  3490. default:
3491. llvm_unreachable("unsupported operation");
  3492. }
  3493. }
  3494. // NOTE: This table must match BinaryOperator::Opcode
  3495. static const HLBinaryOpcode BinaryOperatorKindMap[] = {
  3496. HLBinaryOpcode::Invalid, // PtrMemD
  3497. HLBinaryOpcode::Invalid, // PtrMemI
  3498. HLBinaryOpcode::Mul, HLBinaryOpcode::Div, HLBinaryOpcode::Rem,
  3499. HLBinaryOpcode::Add, HLBinaryOpcode::Sub, HLBinaryOpcode::Shl,
  3500. HLBinaryOpcode::Shr, HLBinaryOpcode::LT, HLBinaryOpcode::GT,
  3501. HLBinaryOpcode::LE, HLBinaryOpcode::GE, HLBinaryOpcode::EQ,
  3502. HLBinaryOpcode::NE, HLBinaryOpcode::And, HLBinaryOpcode::Xor,
  3503. HLBinaryOpcode::Or, HLBinaryOpcode::LAnd, HLBinaryOpcode::LOr,
  3504. HLBinaryOpcode::Invalid, // Assign,
// The assignment part is handled by the matrix store.
  3506. HLBinaryOpcode::Mul, // MulAssign
  3507. HLBinaryOpcode::Div, // DivAssign
  3508. HLBinaryOpcode::Rem, // RemAssign
  3509. HLBinaryOpcode::Add, // AddAssign
  3510. HLBinaryOpcode::Sub, // SubAssign
  3511. HLBinaryOpcode::Shl, // ShlAssign
  3512. HLBinaryOpcode::Shr, // ShrAssign
  3513. HLBinaryOpcode::And, // AndAssign
  3514. HLBinaryOpcode::Xor, // XorAssign
  3515. HLBinaryOpcode::Or, // OrAssign
  3516. HLBinaryOpcode::Invalid, // Comma
  3517. };
  3518. // NOTE: This table must match UnaryOperator::Opcode
  3519. static const HLUnaryOpcode UnaryOperatorKindMap[] = {
  3520. HLUnaryOpcode::PostInc, HLUnaryOpcode::PostDec,
  3521. HLUnaryOpcode::PreInc, HLUnaryOpcode::PreDec,
  3522. HLUnaryOpcode::Invalid, // AddrOf,
  3523. HLUnaryOpcode::Invalid, // Deref,
  3524. HLUnaryOpcode::Plus, HLUnaryOpcode::Minus,
  3525. HLUnaryOpcode::Not, HLUnaryOpcode::LNot,
  3526. HLUnaryOpcode::Invalid, // Real,
  3527. HLUnaryOpcode::Invalid, // Imag,
  3528. HLUnaryOpcode::Invalid, // Extension
  3529. };
  3530. static bool IsRowMajorMatrix(QualType Ty, bool bDefaultRowMajor) {
  3531. if (const AttributedType *AT = Ty->getAs<AttributedType>()) {
  3532. if (AT->getAttrKind() == AttributedType::attr_hlsl_row_major)
  3533. return true;
  3534. else if (AT->getAttrKind() == AttributedType::attr_hlsl_column_major)
  3535. return false;
  3536. else
  3537. return bDefaultRowMajor;
  3538. } else {
  3539. return bDefaultRowMajor;
  3540. }
  3541. }
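// Example for IsRowMajorMatrix above: a declaration such as
//   row_major float4x4 WorldMat;
// yields true regardless of the default, while a plain float4x4 falls back to
// bDefaultRowMajor from the HL options.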
  3542. static bool IsUnsigned(QualType Ty) {
  3543. Ty = Ty.getCanonicalType().getNonReferenceType();
  3544. if (hlsl::IsHLSLVecMatType(Ty))
  3545. Ty = CGHLSLRuntime::GetHLSLVecMatElementType(Ty);
  3546. if (Ty->isExtVectorType())
  3547. Ty = Ty->getAs<clang::ExtVectorType>()->getElementType();
  3548. return Ty->isUnsignedIntegerType();
  3549. }
  3550. static unsigned GetHLOpcode(const Expr *E) {
  3551. switch (E->getStmtClass()) {
  3552. case Stmt::CompoundAssignOperatorClass:
  3553. case Stmt::BinaryOperatorClass: {
  3554. const clang::BinaryOperator *binOp = cast<clang::BinaryOperator>(E);
  3555. HLBinaryOpcode binOpcode = BinaryOperatorKindMap[binOp->getOpcode()];
  3556. if (HasUnsignedOpcode(binOpcode)) {
  3557. if (IsUnsigned(binOp->getLHS()->getType())) {
  3558. binOpcode = GetUnsignedOpcode(binOpcode);
  3559. }
  3560. }
  3561. return static_cast<unsigned>(binOpcode);
  3562. }
  3563. case Stmt::UnaryOperatorClass: {
  3564. const UnaryOperator *unOp = cast<clang::UnaryOperator>(E);
  3565. HLUnaryOpcode unOpcode = UnaryOperatorKindMap[unOp->getOpcode()];
  3566. return static_cast<unsigned>(unOpcode);
  3567. }
  3568. case Stmt::ImplicitCastExprClass:
  3569. case Stmt::CStyleCastExprClass: {
  3570. const CastExpr *CE = cast<CastExpr>(E);
  3571. bool toUnsigned = IsUnsigned(E->getType());
  3572. bool fromUnsigned = IsUnsigned(CE->getSubExpr()->getType());
  3573. if (toUnsigned && fromUnsigned)
  3574. return static_cast<unsigned>(HLCastOpcode::UnsignedUnsignedCast);
  3575. else if (toUnsigned)
  3576. return static_cast<unsigned>(HLCastOpcode::ToUnsignedCast);
  3577. else if (fromUnsigned)
  3578. return static_cast<unsigned>(HLCastOpcode::FromUnsignedCast);
  3579. else
  3580. return static_cast<unsigned>(HLCastOpcode::DefaultCast);
  3581. }
  3582. default:
  3583. return 0;
  3584. }
  3585. }
  3586. static Value *
  3587. EmitHLSLMatrixOperationCallImp(CGBuilderTy &Builder, HLOpcodeGroup group,
  3588. unsigned opcode, llvm::Type *RetType,
  3589. ArrayRef<Value *> paramList, llvm::Module &M) {
  3590. SmallVector<llvm::Type *, 4> paramTyList;
  3591. // Add the opcode param
  3592. llvm::Type *opcodeTy = llvm::Type::getInt32Ty(M.getContext());
  3593. paramTyList.emplace_back(opcodeTy);
  3594. for (Value *param : paramList) {
  3595. paramTyList.emplace_back(param->getType());
  3596. }
  3597. llvm::FunctionType *funcTy =
  3598. llvm::FunctionType::get(RetType, paramTyList, false);
  3599. Function *opFunc = GetOrCreateHLFunction(M, funcTy, group, opcode);
  3600. SmallVector<Value *, 4> opcodeParamList;
  3601. Value *opcodeConst = Constant::getIntegerValue(opcodeTy, APInt(32, opcode));
  3602. opcodeParamList.emplace_back(opcodeConst);
  3603. opcodeParamList.append(paramList.begin(), paramList.end());
  3604. return Builder.CreateCall(opFunc, opcodeParamList);
  3605. }
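// EmitHLSLMatrixOperationCallImp above always prepends the i32 opcode as the
// first argument, so every HL operation call has the shape
//   %r = call <ret> @<hl function>(i32 <opcode>, <args>...)
// where the callee comes from GetOrCreateHLFunction for the given group/opcode.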
  3606. static Value *EmitHLSLArrayInit(CGBuilderTy &Builder, HLOpcodeGroup group,
  3607. unsigned opcode, llvm::Type *RetType,
  3608. ArrayRef<Value *> paramList, llvm::Module &M) {
// A non-void return type means this is a matrix init rather than an array init.
  3610. if (!RetType->isVoidTy())
  3611. return EmitHLSLMatrixOperationCallImp(Builder, group, opcode, RetType,
  3612. paramList, M);
  3613. Value *arrayPtr = paramList[0];
  3614. llvm::ArrayType *AT =
  3615. cast<llvm::ArrayType>(arrayPtr->getType()->getPointerElementType());
// Exclude the arrayPtr from the element count.
unsigned paramSize = paramList.size() - 1;
// Only handle the simple case here.
  3619. if (paramSize == AT->getArrayNumElements()) {
  3620. bool typeMatch = true;
  3621. llvm::Type *EltTy = AT->getArrayElementType();
  3622. if (EltTy->isAggregateType()) {
// Aggregate types are passed as pointers in the init list.
  3624. EltTy = llvm::PointerType::get(EltTy, 0);
  3625. }
  3626. for (unsigned i = 1; i < paramList.size(); i++) {
  3627. if (paramList[i]->getType() != EltTy) {
  3628. typeMatch = false;
  3629. break;
  3630. }
  3631. }
  3632. // Both size and type match.
  3633. if (typeMatch) {
  3634. bool isPtr = EltTy->isPointerTy();
  3635. llvm::Type *i32Ty = llvm::Type::getInt32Ty(EltTy->getContext());
  3636. Constant *zero = ConstantInt::get(i32Ty, 0);
  3637. for (unsigned i = 1; i < paramList.size(); i++) {
  3638. Constant *idx = ConstantInt::get(i32Ty, i - 1);
  3639. Value *GEP = Builder.CreateInBoundsGEP(arrayPtr, {zero, idx});
  3640. Value *Elt = paramList[i];
  3641. if (isPtr) {
  3642. Elt = Builder.CreateLoad(Elt);
  3643. }
  3644. Builder.CreateStore(Elt, GEP);
  3645. }
  3646. // The return value will not be used.
  3647. return nullptr;
  3648. }
  3649. }
// Other cases will be lowered in a later pass.
  3651. return EmitHLSLMatrixOperationCallImp(Builder, group, opcode, RetType,
  3652. paramList, M);
  3653. }
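// Example for EmitHLSLArrayInit above: for a simple init such as
//   int a[3] = { 1, 2, 3 };
// the element count and types match, so the loop emits one GEP + store per
// element and returns nullptr; any other shape is kept as an HLInit call and
// lowered in a later pass.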
  3654. void CGMSHLSLRuntime::FlattenValToInitList(CodeGenFunction &CGF, SmallVector<Value *, 4> &elts,
  3655. SmallVector<QualType, 4> &eltTys,
  3656. QualType Ty, Value *val) {
  3657. CGBuilderTy &Builder = CGF.Builder;
  3658. llvm::Type *valTy = val->getType();
  3659. if (valTy->isPointerTy()) {
  3660. llvm::Type *valEltTy = valTy->getPointerElementType();
  3661. if (valEltTy->isVectorTy() ||
  3662. valEltTy->isSingleValueType()) {
  3663. Value *ldVal = Builder.CreateLoad(val);
  3664. FlattenValToInitList(CGF, elts, eltTys, Ty, ldVal);
  3665. } else if (HLMatrixLower::IsMatrixType(valEltTy)) {
  3666. Value *ldVal = EmitHLSLMatrixLoad(Builder, val, Ty);
  3667. FlattenValToInitList(CGF, elts, eltTys, Ty, ldVal);
  3668. } else {
  3669. llvm::Type *i32Ty = llvm::Type::getInt32Ty(valTy->getContext());
  3670. Value *zero = ConstantInt::get(i32Ty, 0);
  3671. if (llvm::ArrayType *AT = dyn_cast<llvm::ArrayType>(valEltTy)) {
  3672. QualType EltTy = Ty->getAsArrayTypeUnsafe()->getElementType();
  3673. for (unsigned i = 0; i < AT->getArrayNumElements(); i++) {
  3674. Value *gepIdx = ConstantInt::get(i32Ty, i);
  3675. Value *EltPtr = Builder.CreateInBoundsGEP(val, {zero, gepIdx});
FlattenValToInitList(CGF, elts, eltTys, EltTy, EltPtr);
  3677. }
  3678. } else {
  3679. // Struct.
  3680. StructType *ST = cast<StructType>(valEltTy);
  3681. if (HLModule::IsHLSLObjectType(ST)) {
// Store the object directly, like a basic type.
  3683. elts.emplace_back(Builder.CreateLoad(val));
  3684. eltTys.emplace_back(Ty);
  3685. } else {
  3686. RecordDecl *RD = Ty->getAsStructureType()->getDecl();
  3687. const CGRecordLayout& RL = CGF.getTypes().getCGRecordLayout(RD);
// Handle base classes.
  3689. if (const CXXRecordDecl *CXXRD = dyn_cast<CXXRecordDecl>(RD)) {
  3690. if (CXXRD->getNumBases()) {
  3691. for (const auto &I : CXXRD->bases()) {
  3692. const CXXRecordDecl *BaseDecl = cast<CXXRecordDecl>(
  3693. I.getType()->castAs<RecordType>()->getDecl());
  3694. if (BaseDecl->field_empty())
  3695. continue;
  3696. QualType parentTy = QualType(BaseDecl->getTypeForDecl(), 0);
  3697. unsigned i = RL.getNonVirtualBaseLLVMFieldNo(BaseDecl);
  3698. Value *gepIdx = ConstantInt::get(i32Ty, i);
  3699. Value *EltPtr = Builder.CreateInBoundsGEP(val, {zero, gepIdx});
  3700. FlattenValToInitList(CGF, elts, eltTys, parentTy, EltPtr);
  3701. }
  3702. }
  3703. }
  3704. for (auto fieldIter = RD->field_begin(), fieldEnd = RD->field_end();
  3705. fieldIter != fieldEnd; ++fieldIter) {
  3706. unsigned i = RL.getLLVMFieldNo(*fieldIter);
  3707. Value *gepIdx = ConstantInt::get(i32Ty, i);
  3708. Value *EltPtr = Builder.CreateInBoundsGEP(val, {zero, gepIdx});
  3709. FlattenValToInitList(CGF, elts, eltTys, fieldIter->getType(), EltPtr);
  3710. }
  3711. }
  3712. }
  3713. }
  3714. } else {
  3715. if (HLMatrixLower::IsMatrixType(valTy)) {
  3716. unsigned col, row;
  3717. llvm::Type *EltTy = HLMatrixLower::GetMatrixInfo(valTy, col, row);
  3718. unsigned matSize = col * row;
  3719. bool isRowMajor = IsRowMajorMatrix(Ty, m_pHLModule->GetHLOptions().bDefaultRowMajor);
  3720. HLCastOpcode opcode = isRowMajor ? HLCastOpcode::RowMatrixToVecCast
  3721. : HLCastOpcode::ColMatrixToVecCast;
  3722. // Cast to vector.
  3723. val = EmitHLSLMatrixOperationCallImp(
  3724. Builder, HLOpcodeGroup::HLCast,
  3725. static_cast<unsigned>(opcode),
  3726. llvm::VectorType::get(EltTy, matSize), {val}, TheModule);
  3727. valTy = val->getType();
  3728. }
  3729. if (valTy->isVectorTy()) {
  3730. QualType EltTy = GetHLSLVecMatElementType(Ty);
  3731. unsigned vecSize = valTy->getVectorNumElements();
  3732. for (unsigned i = 0; i < vecSize; i++) {
  3733. Value *Elt = Builder.CreateExtractElement(val, i);
  3734. elts.emplace_back(Elt);
  3735. eltTys.emplace_back(EltTy);
  3736. }
  3737. } else {
  3738. DXASSERT(valTy->isSingleValueType(), "must be single value type here");
  3739. elts.emplace_back(val);
  3740. eltTys.emplace_back(Ty);
  3741. }
  3742. }
  3743. }
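// Flattening example for FlattenValToInitList above: a float3x3 value is first
// cast to a 9-element vector (RowMatrixToVecCast or ColMatrixToVecCast) and then
// split into 9 scalars; structs are flattened bases first, then field by field,
// while HLSL object types are kept whole.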
// Cast elements in the init list when they do not match the target type.
// idx is the current element index in the init list; Ty is the target type.
  3746. static void AddMissingCastOpsInInitList(SmallVector<Value *, 4> &elts, SmallVector<QualType, 4> eltTys, unsigned &idx, QualType Ty, CodeGenFunction &CGF) {
  3747. if (Ty->isArrayType()) {
  3748. const clang::ArrayType *AT = Ty->getAsArrayTypeUnsafe();
  3749. // Must be ConstantArrayType here.
  3750. unsigned arraySize = cast<ConstantArrayType>(AT)->getSize().getLimitedValue();
  3751. QualType EltTy = AT->getElementType();
  3752. for (unsigned i = 0; i < arraySize; i++)
  3753. AddMissingCastOpsInInitList(elts, eltTys, idx, EltTy, CGF);
  3754. } else if (IsHLSLVecType(Ty)) {
  3755. QualType EltTy = GetHLSLVecElementType(Ty);
  3756. unsigned vecSize = GetHLSLVecSize(Ty);
for (unsigned i = 0; i < vecSize; i++)
  3758. AddMissingCastOpsInInitList(elts, eltTys, idx, EltTy, CGF);
  3759. } else if (IsHLSLMatType(Ty)) {
  3760. QualType EltTy = GetHLSLMatElementType(Ty);
  3761. unsigned row, col;
  3762. GetHLSLMatRowColCount(Ty, row, col);
  3763. unsigned matSize = row*col;
  3764. for (unsigned i = 0; i < matSize; i++)
  3765. AddMissingCastOpsInInitList(elts, eltTys, idx, EltTy, CGF);
  3766. } else if (Ty->isRecordType()) {
  3767. if (HLModule::IsHLSLObjectType(CGF.ConvertType(Ty))) {
  3768. // Skip hlsl object.
  3769. idx++;
  3770. } else {
  3771. const RecordType *RT = Ty->getAsStructureType();
  3772. // For CXXRecord.
  3773. if (!RT)
  3774. RT = Ty->getAs<RecordType>();
  3775. RecordDecl *RD = RT->getDecl();
  3776. for (FieldDecl *field : RD->fields())
  3777. AddMissingCastOpsInInitList(elts, eltTys, idx, field->getType(), CGF);
  3778. }
  3779. }
  3780. else {
  3781. // Basic type.
  3782. Value *val = elts[idx];
  3783. llvm::Type *srcTy = val->getType();
  3784. llvm::Type *dstTy = CGF.ConvertType(Ty);
  3785. if (srcTy != dstTy) {
  3786. Instruction::CastOps castOp =
  3787. static_cast<Instruction::CastOps>(HLModule::FindCastOp(
  3788. IsUnsigned(eltTys[idx]), IsUnsigned(Ty), srcTy, dstTy));
  3789. elts[idx] = CGF.Builder.CreateCast(castOp, val, dstTy);
  3790. }
  3791. idx++;
  3792. }
  3793. }
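// Example for AddMissingCastOpsInInitList above: in an init list like
//   float2 v = { 1, 2.5 };
// an element whose emitted type differs from the target element type is
// rewritten with the cast chosen by HLModule::FindCastOp before the vector is
// built.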
  3794. static void StoreInitListToDestPtr(Value *DestPtr, SmallVector<Value *, 4> &elts, unsigned &idx, CGBuilderTy &Builder, llvm::Module &M) {
  3795. llvm::Type *Ty = DestPtr->getType()->getPointerElementType();
  3796. llvm::Type *i32Ty = llvm::Type::getInt32Ty(Ty->getContext());
  3797. if (Ty->isVectorTy()) {
  3798. Value *Result = UndefValue::get(Ty);
  3799. for (unsigned i = 0; i < Ty->getVectorNumElements(); i++)
  3800. Result = Builder.CreateInsertElement(Result, elts[idx+i], i);
  3801. Builder.CreateStore(Result, DestPtr);
  3802. idx += Ty->getVectorNumElements();
  3803. } else if (HLMatrixLower::IsMatrixType(Ty)) {
  3804. unsigned row, col;
  3805. HLMatrixLower::GetMatrixInfo(Ty, col, row);
  3806. std::vector<Value*> matInitList(col*row);
  3807. for (unsigned i = 0; i < col; i++) {
  3808. for (unsigned r = 0; r < row; r++) {
  3809. unsigned matIdx = i * row + r;
  3810. matInitList[matIdx] = elts[idx+matIdx];
  3811. }
  3812. }
  3813. idx += row*col;
  3814. Value *matVal = EmitHLSLMatrixOperationCallImp(Builder, HLOpcodeGroup::HLInit,
  3815. /*opcode*/0, Ty, matInitList, M);
  3816. EmitHLSLMatrixOperationCallImp(Builder, HLOpcodeGroup::HLMatLoadStore,
  3817. static_cast<unsigned>(HLMatLoadStoreOpcode::ColMatStore), Ty,
  3818. {DestPtr, matVal}, M);
  3819. } else if (Ty->isStructTy()) {
  3820. if (HLModule::IsHLSLObjectType(Ty)) {
  3821. Builder.CreateStore(elts[idx], DestPtr);
  3822. idx++;
  3823. } else {
  3824. Constant *zero = ConstantInt::get(i32Ty, 0);
  3825. for (unsigned i = 0; i < Ty->getStructNumElements(); i++) {
  3826. Constant *gepIdx = ConstantInt::get(i32Ty, i);
  3827. Value *GEP = Builder.CreateInBoundsGEP(DestPtr, {zero, gepIdx});
  3828. StoreInitListToDestPtr(GEP, elts, idx, Builder, M);
  3829. }
  3830. }
  3831. } else if (Ty->isArrayTy()) {
  3832. Constant *zero = ConstantInt::get(i32Ty, 0);
  3833. for (unsigned i = 0; i < Ty->getArrayNumElements(); i++) {
  3834. Constant *gepIdx = ConstantInt::get(i32Ty, i);
  3835. Value *GEP = Builder.CreateInBoundsGEP(DestPtr, {zero, gepIdx});
  3836. StoreInitListToDestPtr(GEP, elts, idx, Builder, M);
  3837. }
  3838. } else {
  3839. DXASSERT(Ty->isSingleValueType(), "invalid type");
  3840. llvm::Type *i1Ty = Builder.getInt1Ty();
  3841. Value *V = elts[idx];
  3842. if (V->getType() == i1Ty && DestPtr->getType()->getPointerElementType() != i1Ty) {
  3843. V = Builder.CreateZExt(V, DestPtr->getType()->getPointerElementType());
  3844. }
  3845. Builder.CreateStore(V, DestPtr);
  3846. idx++;
  3847. }
  3848. }
  3849. void CGMSHLSLRuntime::ScanInitList(CodeGenFunction &CGF, InitListExpr *E,
  3850. SmallVector<Value *, 4> &EltValList,
  3851. SmallVector<QualType, 4> &EltTyList) {
  3852. unsigned NumInitElements = E->getNumInits();
  3853. for (unsigned i = 0; i != NumInitElements; ++i) {
  3854. Expr *init = E->getInit(i);
  3855. QualType iType = init->getType();
  3856. if (InitListExpr *initList = dyn_cast<InitListExpr>(init)) {
  3857. ScanInitList(CGF, initList, EltValList, EltTyList);
  3858. } else if (CodeGenFunction::hasScalarEvaluationKind(iType)) {
  3859. llvm::Value *initVal = CGF.EmitScalarExpr(init);
  3860. FlattenValToInitList(CGF, EltValList, EltTyList, iType, initVal);
  3861. } else {
  3862. AggValueSlot Slot =
  3863. CGF.CreateAggTemp(init->getType(), "Agg.InitList.tmp");
  3864. CGF.EmitAggExpr(init, Slot);
  3865. llvm::Value *aggPtr = Slot.getAddr();
  3866. FlattenValToInitList(CGF, EltValList, EltTyList, iType, aggPtr);
  3867. }
  3868. }
  3869. }
  3870. unsigned CGMSHLSLRuntime::ScanInitList(InitListExpr *E) {
  3871. unsigned NumInitElements = E->getNumInits();
  3872. unsigned size = 0;
  3873. for (unsigned i = 0; i != NumInitElements; ++i) {
  3874. Expr *init = E->getInit(i);
  3875. QualType iType = init->getType();
  3876. if (InitListExpr *initList = dyn_cast<InitListExpr>(init)) {
  3877. size += ScanInitList(initList);
  3878. } else if (CodeGenFunction::hasScalarEvaluationKind(iType)) {
  3879. size += GetElementCount(iType);
  3880. } else {
DXASSERT(0, "not supported yet");
  3882. }
  3883. }
  3884. return size;
  3885. }
  3886. QualType CGMSHLSLRuntime::UpdateHLSLIncompleteArrayType(VarDecl &D) {
  3887. if (!D.hasInit())
  3888. return D.getType();
  3889. InitListExpr *E = dyn_cast<InitListExpr>(D.getInit());
  3890. if (!E)
  3891. return D.getType();
  3892. unsigned arrayEltCount = ScanInitList(E);
  3893. QualType ResultTy = E->getType();
  3894. QualType EltTy = QualType(ResultTy->getArrayElementTypeNoTypeQual(), 0);
  3895. unsigned eltCount = GetElementCount(EltTy);
  3896. llvm::APInt ArySize(32, arrayEltCount / eltCount);
  3897. QualType ArrayTy = CGM.getContext().getConstantArrayType(
  3898. EltTy, ArySize, clang::ArrayType::Normal, 0);
  3899. D.setType(ArrayTy);
  3900. E->setType(ArrayTy);
  3901. return ArrayTy;
  3902. }
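// Example for UpdateHLSLIncompleteArrayType above: for
//   float2 a[] = { float2(0, 0), float2(1, 1) };
// ScanInitList counts 4 scalars and float2 contributes 2 per element, so the
// incomplete array type is completed to float2 a[2] on both the VarDecl and the
// init expression.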
  3903. Value *CGMSHLSLRuntime::EmitHLSLInitListExpr(CodeGenFunction &CGF, InitListExpr *E,
// DestPtr is used when emitting an aggregate init; in the normal case it will be null.
  3905. Value *DestPtr) {
  3906. SmallVector<Value *, 4> EltValList;
  3907. SmallVector<QualType, 4> EltTyList;
  3908. ScanInitList(CGF, E, EltValList, EltTyList);
  3909. QualType ResultTy = E->getType();
  3910. unsigned idx = 0;
// Create casts if needed.
  3912. AddMissingCastOpsInInitList(EltValList, EltTyList, idx, ResultTy, CGF);
  3913. DXASSERT(idx == EltValList.size(), "size must match");
  3914. llvm::Type *RetTy = CGF.ConvertType(ResultTy);
  3915. if (DestPtr) {
  3916. SmallVector<Value *, 4> ParamList;
  3917. DXASSERT(RetTy->isAggregateType(), "");
  3918. ParamList.emplace_back(DestPtr);
  3919. ParamList.append(EltValList.begin(), EltValList.end());
  3920. idx = 0;
  3921. StoreInitListToDestPtr(DestPtr, EltValList, idx, CGF.Builder, TheModule);
  3922. return nullptr;
  3923. }
  3924. if (IsHLSLVecType(ResultTy)) {
  3925. Value *Result = UndefValue::get(RetTy);
  3926. for (unsigned i = 0; i < RetTy->getVectorNumElements(); i++)
  3927. Result = CGF.Builder.CreateInsertElement(Result, EltValList[i], i);
  3928. return Result;
  3929. } else {
  3930. // Must be matrix here.
  3931. DXASSERT(IsHLSLMatType(ResultTy), "must be matrix type here.");
  3932. return EmitHLSLMatrixOperationCallImp(CGF.Builder, HLOpcodeGroup::HLInit,
  3933. /*opcode*/ 0, RetTy, EltValList,
  3934. TheModule);
  3935. }
  3936. }
  3937. Value *CGMSHLSLRuntime::EmitHLSLMatrixOperationCall(
  3938. CodeGenFunction &CGF, const clang::Expr *E, llvm::Type *RetType,
  3939. ArrayRef<Value *> paramList) {
  3940. HLOpcodeGroup group = GetHLOpcodeGroup(E->getStmtClass());
  3941. unsigned opcode = GetHLOpcode(E);
  3942. if (group == HLOpcodeGroup::HLInit)
  3943. return EmitHLSLArrayInit(CGF.Builder, group, opcode, RetType, paramList,
  3944. TheModule);
  3945. else
  3946. return EmitHLSLMatrixOperationCallImp(CGF.Builder, group, opcode, RetType,
  3947. paramList, TheModule);
  3948. }
  3949. void CGMSHLSLRuntime::EmitHLSLDiscard(CodeGenFunction &CGF) {
  3950. EmitHLSLMatrixOperationCallImp(
  3951. CGF.Builder, HLOpcodeGroup::HLIntrinsic,
  3952. static_cast<unsigned>(IntrinsicOp::IOP_clip),
  3953. llvm::Type::getVoidTy(CGF.getLLVMContext()),
  3954. {ConstantFP::get(llvm::Type::getFloatTy(CGF.getLLVMContext()), -1.0f)},
  3955. TheModule);
  3956. }
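// EmitHLSLDiscard above represents discard as clip(-1): an IOP_clip intrinsic
// call with a constant -1.0f argument, which later lowering is expected to turn
// into the actual discard operation.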
  3957. Value *CGMSHLSLRuntime::EmitHLSLLiteralCast(CodeGenFunction &CGF, Value *Src,
  3958. QualType SrcType,
  3959. QualType DstType) {
  3960. auto &Builder = CGF.Builder;
  3961. llvm::Type *DstTy = CGF.ConvertType(DstType);
  3962. bool bDstSigned = DstType->isSignedIntegerType();
  3963. if (ConstantInt *CI = dyn_cast<ConstantInt>(Src)) {
  3964. APInt v = CI->getValue();
  3965. if (llvm::IntegerType *IT = dyn_cast<llvm::IntegerType>(DstTy)) {
  3966. v = v.trunc(IT->getBitWidth());
  3967. switch (IT->getBitWidth()) {
  3968. case 32:
  3969. return Builder.getInt32(v.getLimitedValue());
  3970. case 64:
  3971. return Builder.getInt64(v.getLimitedValue());
  3972. case 16:
  3973. return Builder.getInt16(v.getLimitedValue());
  3974. case 8:
  3975. return Builder.getInt8(v.getLimitedValue());
  3976. default:
  3977. return nullptr;
  3978. }
  3979. } else {
  3980. DXASSERT_NOMSG(DstTy->isFloatingPointTy());
  3981. int64_t val = v.getLimitedValue();
  3982. if (v.isNegative())
  3983. val = 0-v.abs().getLimitedValue();
  3984. if (DstTy->isDoubleTy())
  3985. return ConstantFP::get(DstTy, (double)val);
  3986. else if (DstTy->isFloatTy())
  3987. return ConstantFP::get(DstTy, (float)val);
  3988. else {
  3989. if (bDstSigned)
  3990. return Builder.CreateSIToFP(Src, DstTy);
  3991. else
  3992. return Builder.CreateUIToFP(Src, DstTy);
  3993. }
  3994. }
  3995. } else if (ConstantFP *CF = dyn_cast<ConstantFP>(Src)) {
  3996. APFloat v = CF->getValueAPF();
  3997. double dv = v.convertToDouble();
  3998. if (llvm::IntegerType *IT = dyn_cast<llvm::IntegerType>(DstTy)) {
  3999. switch (IT->getBitWidth()) {
  4000. case 32:
  4001. return Builder.getInt32(dv);
  4002. case 64:
  4003. return Builder.getInt64(dv);
  4004. case 16:
  4005. return Builder.getInt16(dv);
  4006. case 8:
  4007. return Builder.getInt8(dv);
  4008. default:
  4009. return nullptr;
  4010. }
  4011. } else {
  4012. if (DstTy->isFloatTy()) {
  4013. float fv = dv;
  4014. return ConstantFP::get(DstTy->getContext(), APFloat(fv));
  4015. } else {
  4016. return Builder.CreateFPTrunc(Src, DstTy);
  4017. }
  4018. }
  4019. } else if (UndefValue *UV = dyn_cast<UndefValue>(Src)) {
  4020. return UndefValue::get(DstTy);
  4021. } else {
  4022. Instruction *I = cast<Instruction>(Src);
  4023. if (SelectInst *SI = dyn_cast<SelectInst>(I)) {
  4024. Value *T = SI->getTrueValue();
  4025. Value *F = SI->getFalseValue();
  4026. Value *Cond = SI->getCondition();
  4027. if (isa<llvm::ConstantInt>(T) && isa<llvm::ConstantInt>(F)) {
  4028. llvm::APInt lhs = cast<llvm::ConstantInt>(T)->getValue();
  4029. llvm::APInt rhs = cast<llvm::ConstantInt>(F)->getValue();
  4030. if (DstTy == Builder.getInt32Ty()) {
  4031. T = Builder.getInt32(lhs.getLimitedValue());
  4032. F = Builder.getInt32(rhs.getLimitedValue());
  4033. Value *Sel = Builder.CreateSelect(Cond, T, F, "cond");
  4034. return Sel;
  4035. } else if (DstTy->isFloatingPointTy()) {
  4036. T = ConstantFP::get(DstTy, lhs.getLimitedValue());
  4037. F = ConstantFP::get(DstTy, rhs.getLimitedValue());
  4038. Value *Sel = Builder.CreateSelect(Cond, T, F, "cond");
  4039. return Sel;
  4040. }
  4041. } else if (isa<llvm::ConstantFP>(T) && isa<llvm::ConstantFP>(F)) {
  4042. llvm::APFloat lhs = cast<llvm::ConstantFP>(T)->getValueAPF();
  4043. llvm::APFloat rhs = cast<llvm::ConstantFP>(F)->getValueAPF();
  4044. double ld = lhs.convertToDouble();
  4045. double rd = rhs.convertToDouble();
  4046. if (DstTy->isFloatTy()) {
  4047. float lf = ld;
  4048. float rf = rd;
  4049. T = ConstantFP::get(DstTy->getContext(), APFloat(lf));
  4050. F = ConstantFP::get(DstTy->getContext(), APFloat(rf));
  4051. Value *Sel = Builder.CreateSelect(Cond, T, F, "cond");
  4052. return Sel;
  4053. } else if (DstTy == Builder.getInt32Ty()) {
  4054. T = Builder.getInt32(ld);
  4055. F = Builder.getInt32(rd);
  4056. Value *Sel = Builder.CreateSelect(Cond, T, F, "cond");
  4057. return Sel;
  4058. } else if (DstTy == Builder.getInt64Ty()) {
  4059. T = Builder.getInt64(ld);
  4060. F = Builder.getInt64(rd);
  4061. Value *Sel = Builder.CreateSelect(Cond, T, F, "cond");
  4062. return Sel;
  4063. }
  4064. }
  4065. }
// TODO: support other opcodes if needed.
  4067. return nullptr;
  4068. }
  4069. }
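// Examples for EmitHLSLLiteralCast above: a constant integer cast to float
// becomes a ConstantFP directly; a select between two constants is rebuilt as a
// select between constants of the destination type; anything else returns
// nullptr.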
  4070. Value *CGMSHLSLRuntime::EmitHLSLMatrixSubscript(CodeGenFunction &CGF,
  4071. llvm::Type *RetType,
  4072. llvm::Value *Ptr,
  4073. llvm::Value *Idx,
  4074. clang::QualType Ty) {
  4075. unsigned opcode =
  4076. IsRowMajorMatrix(Ty, m_pHLModule->GetHLOptions().bDefaultRowMajor)
  4077. ? static_cast<unsigned>(HLSubscriptOpcode::RowMatSubscript)
  4078. : static_cast<unsigned>(HLSubscriptOpcode::ColMatSubscript);
  4079. Value *matBase = Ptr;
  4080. if (matBase->getType()->isPointerTy()) {
  4081. RetType =
  4082. llvm::PointerType::get(RetType->getPointerElementType(),
  4083. matBase->getType()->getPointerAddressSpace());
  4084. }
  4085. return EmitHLSLMatrixOperationCallImp(CGF.Builder, HLOpcodeGroup::HLSubscript,
  4086. opcode, RetType, {Ptr, Idx}, TheModule);
  4087. }
  4088. Value *CGMSHLSLRuntime::EmitHLSLMatrixElement(CodeGenFunction &CGF,
  4089. llvm::Type *RetType,
  4090. ArrayRef<Value *> paramList,
  4091. QualType Ty) {
  4092. unsigned opcode =
  4093. IsRowMajorMatrix(Ty, m_pHLModule->GetHLOptions().bDefaultRowMajor)
  4094. ? static_cast<unsigned>(HLSubscriptOpcode::RowMatElement)
  4095. : static_cast<unsigned>(HLSubscriptOpcode::ColMatElement);
  4096. Value *matBase = paramList[0];
  4097. if (matBase->getType()->isPointerTy()) {
  4098. RetType =
  4099. llvm::PointerType::get(RetType->getPointerElementType(),
  4100. matBase->getType()->getPointerAddressSpace());
  4101. }
  4102. return EmitHLSLMatrixOperationCallImp(CGF.Builder, HLOpcodeGroup::HLSubscript,
  4103. opcode, RetType, paramList, TheModule);
  4104. }
  4105. Value *CGMSHLSLRuntime::EmitHLSLMatrixLoad(CGBuilderTy &Builder, Value *Ptr,
  4106. QualType Ty) {
  4107. unsigned opcode =
  4108. IsRowMajorMatrix(Ty, m_pHLModule->GetHLOptions().bDefaultRowMajor)
  4109. ? static_cast<unsigned>(HLMatLoadStoreOpcode::RowMatLoad)
  4110. : static_cast<unsigned>(HLMatLoadStoreOpcode::ColMatLoad);
  4111. return EmitHLSLMatrixOperationCallImp(
  4112. Builder, HLOpcodeGroup::HLMatLoadStore, opcode,
  4113. Ptr->getType()->getPointerElementType(), {Ptr}, TheModule);
  4114. }
  4115. void CGMSHLSLRuntime::EmitHLSLMatrixStore(CGBuilderTy &Builder, Value *Val,
  4116. Value *DestPtr, QualType Ty) {
  4117. unsigned opcode =
  4118. IsRowMajorMatrix(Ty, m_pHLModule->GetHLOptions().bDefaultRowMajor)
  4119. ? static_cast<unsigned>(HLMatLoadStoreOpcode::RowMatStore)
  4120. : static_cast<unsigned>(HLMatLoadStoreOpcode::ColMatStore);
  4121. EmitHLSLMatrixOperationCallImp(Builder, HLOpcodeGroup::HLMatLoadStore, opcode,
  4122. Val->getType(), {DestPtr, Val}, TheModule);
  4123. }
  4124. Value *CGMSHLSLRuntime::EmitHLSLMatrixLoad(CodeGenFunction &CGF, Value *Ptr,
  4125. QualType Ty) {
  4126. return EmitHLSLMatrixLoad(CGF.Builder, Ptr, Ty);
  4127. }
  4128. void CGMSHLSLRuntime::EmitHLSLMatrixStore(CodeGenFunction &CGF, Value *Val,
  4129. Value *DestPtr, QualType Ty) {
  4130. EmitHLSLMatrixStore(CGF.Builder, Val, DestPtr, Ty);
  4131. }
  4132. // Copy data from srcPtr to destPtr.
  4133. static void SimplePtrCopy(Value *DestPtr, Value *SrcPtr,
  4134. ArrayRef<Value *> idxList, CGBuilderTy &Builder) {
  4135. if (idxList.size() > 1) {
  4136. DestPtr = Builder.CreateInBoundsGEP(DestPtr, idxList);
  4137. SrcPtr = Builder.CreateInBoundsGEP(SrcPtr, idxList);
  4138. }
  4139. llvm::LoadInst *ld = Builder.CreateLoad(SrcPtr);
  4140. Builder.CreateStore(ld, DestPtr);
  4141. }
// Get the element value from SrcVal with extractvalue.
  4143. static Value *GetEltVal(Value *SrcVal, ArrayRef<Value*> idxList,
  4144. CGBuilderTy &Builder) {
  4145. Value *Val = SrcVal;
// Skip the index for the leading pointer type.
  4147. for (unsigned i = 1; i < idxList.size(); i++) {
  4148. ConstantInt *idx = cast<ConstantInt>(idxList[i]);
  4149. llvm::Type *Ty = Val->getType();
  4150. if (Ty->isAggregateType()) {
  4151. Val = Builder.CreateExtractValue(Val, idx->getLimitedValue());
  4152. }
  4153. }
  4154. return Val;
  4155. }
  4156. // Copy srcVal to destPtr.
  4157. static void SimpleValCopy(Value *DestPtr, Value *SrcVal,
  4158. ArrayRef<Value*> idxList,
  4159. CGBuilderTy &Builder) {
  4160. Value *DestGEP = Builder.CreateInBoundsGEP(DestPtr, idxList);
  4161. Value *Val = GetEltVal(SrcVal, idxList, Builder);
  4162. Builder.CreateStore(Val, DestGEP);
  4163. }
  4164. static void SimpleCopy(Value *Dest, Value *Src,
  4165. ArrayRef<Value *> idxList,
  4166. CGBuilderTy &Builder) {
  4167. if (Src->getType()->isPointerTy())
  4168. SimplePtrCopy(Dest, Src, idxList, Builder);
  4169. else
  4170. SimpleValCopy(Dest, Src, idxList, Builder);
  4171. }
  4172. void CGMSHLSLRuntime::FlattenAggregatePtrToGepList(
  4173. CodeGenFunction &CGF, Value *Ptr, SmallVector<Value *, 4> &idxList,
  4174. clang::QualType Type, llvm::Type *Ty, SmallVector<Value *, 4> &GepList,
  4175. SmallVector<QualType, 4> &EltTyList) {
  4176. if (llvm::PointerType *PT = dyn_cast<llvm::PointerType>(Ty)) {
  4177. Constant *idx = Constant::getIntegerValue(
  4178. IntegerType::get(Ty->getContext(), 32), APInt(32, 0));
  4179. idxList.emplace_back(idx);
  4180. FlattenAggregatePtrToGepList(CGF, Ptr, idxList, Type, PT->getElementType(),
  4181. GepList, EltTyList);
  4182. idxList.pop_back();
  4183. } else if (HLMatrixLower::IsMatrixType(Ty)) {
  4184. // Use matLd/St for matrix.
  4185. unsigned col, row;
  4186. llvm::Type *EltTy = HLMatrixLower::GetMatrixInfo(Ty, col, row);
  4187. llvm::PointerType *EltPtrTy =
  4188. llvm::PointerType::get(EltTy, Ptr->getType()->getPointerAddressSpace());
  4189. QualType EltQualTy = hlsl::GetHLSLMatElementType(Type);
  4190. Value *matPtr = CGF.Builder.CreateInBoundsGEP(Ptr, idxList);
  4191. // Flatten matrix to elements.
  4192. for (unsigned r = 0; r < row; r++) {
  4193. for (unsigned c = 0; c < col; c++) {
  4194. ConstantInt *cRow = CGF.Builder.getInt32(r);
  4195. ConstantInt *cCol = CGF.Builder.getInt32(c);
  4196. Constant *CV = llvm::ConstantVector::get({cRow, cCol});
  4197. GepList.push_back(
  4198. EmitHLSLMatrixElement(CGF, EltPtrTy, {matPtr, CV}, Type));
  4199. EltTyList.push_back(EltQualTy);
  4200. }
  4201. }
  4202. } else if (StructType *ST = dyn_cast<StructType>(Ty)) {
  4203. if (HLModule::IsHLSLObjectType(ST)) {
// Avoid splitting HLSL objects.
  4205. Value *GEP = CGF.Builder.CreateInBoundsGEP(Ptr, idxList);
  4206. GepList.push_back(GEP);
  4207. EltTyList.push_back(Type);
  4208. return;
  4209. }
  4210. const clang::RecordType *RT = Type->getAsStructureType();
  4211. RecordDecl *RD = RT->getDecl();
  4212. auto fieldIter = RD->field_begin();
  4213. const CGRecordLayout &RL = CGF.getTypes().getCGRecordLayout(RD);
  4214. if (const CXXRecordDecl *CXXRD = dyn_cast<CXXRecordDecl>(RD)) {
  4215. if (CXXRD->getNumBases()) {
  4216. // Add base as field.
  4217. for (const auto &I : CXXRD->bases()) {
  4218. const CXXRecordDecl *BaseDecl =
  4219. cast<CXXRecordDecl>(I.getType()->castAs<RecordType>()->getDecl());
  4220. // Skip empty struct.
  4221. if (BaseDecl->field_empty())
  4222. continue;
  4223. QualType parentTy = QualType(BaseDecl->getTypeForDecl(), 0);
  4224. llvm::Type *parentType = CGF.ConvertType(parentTy);
  4225. unsigned i = RL.getNonVirtualBaseLLVMFieldNo(BaseDecl);
  4226. Constant *idx = llvm::Constant::getIntegerValue(
  4227. IntegerType::get(Ty->getContext(), 32), APInt(32, i));
  4228. idxList.emplace_back(idx);
  4229. FlattenAggregatePtrToGepList(CGF, Ptr, idxList, parentTy, parentType,
  4230. GepList, EltTyList);
  4231. idxList.pop_back();
  4232. }
  4233. }
  4234. }
  4235. for (auto fieldIter = RD->field_begin(), fieldEnd = RD->field_end();
  4236. fieldIter != fieldEnd; ++fieldIter) {
  4237. unsigned i = RL.getLLVMFieldNo(*fieldIter);
  4238. llvm::Type *ET = ST->getElementType(i);
  4239. Constant *idx = llvm::Constant::getIntegerValue(
  4240. IntegerType::get(Ty->getContext(), 32), APInt(32, i));
  4241. idxList.emplace_back(idx);
  4242. FlattenAggregatePtrToGepList(CGF, Ptr, idxList, fieldIter->getType(), ET,
  4243. GepList, EltTyList);
  4244. idxList.pop_back();
  4245. }
  4246. } else if (llvm::ArrayType *AT = dyn_cast<llvm::ArrayType>(Ty)) {
  4247. llvm::Type *ET = AT->getElementType();
  4248. QualType EltType = CGF.getContext().getBaseElementType(Type);
  4249. for (uint32_t i = 0; i < AT->getNumElements(); i++) {
  4250. Constant *idx = Constant::getIntegerValue(
  4251. IntegerType::get(Ty->getContext(), 32), APInt(32, i));
  4252. idxList.emplace_back(idx);
  4253. FlattenAggregatePtrToGepList(CGF, Ptr, idxList, EltType, ET, GepList,
  4254. EltTyList);
  4255. idxList.pop_back();
  4256. }
  4257. } else if (llvm::VectorType *VT = dyn_cast<llvm::VectorType>(Ty)) {
  4258. // Flatten vector too.
  4259. QualType EltTy = hlsl::GetHLSLVecElementType(Type);
  4260. for (uint32_t i = 0; i < VT->getNumElements(); i++) {
  4261. Constant *idx = CGF.Builder.getInt32(i);
  4262. idxList.emplace_back(idx);
  4263. Value *GEP = CGF.Builder.CreateInBoundsGEP(Ptr, idxList);
  4264. GepList.push_back(GEP);
  4265. EltTyList.push_back(EltTy);
  4266. idxList.pop_back();
  4267. }
  4268. } else {
  4269. Value *GEP = CGF.Builder.CreateInBoundsGEP(Ptr, idxList);
  4270. GepList.push_back(GEP);
  4271. EltTyList.push_back(Type);
  4272. }
  4273. }
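// Example for FlattenAggregatePtrToGepList above: for a struct such as
//   struct T { float2 a; float b; };
// the lists end up with one GEP and matching QualType per scalar (a.x, a.y, b);
// matrix fields are expanded through EmitHLSLMatrixElement rather than plain
// GEPs, and HLSL objects stay as a single pointer.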
  4274. void CGMSHLSLRuntime::LoadFlattenedGepList(CodeGenFunction &CGF,
  4275. ArrayRef<Value *> GepList,
  4276. ArrayRef<QualType> EltTyList,
  4277. SmallVector<Value *, 4> &EltList) {
  4278. unsigned eltSize = GepList.size();
  4279. for (unsigned i = 0; i < eltSize; i++) {
  4280. Value *Ptr = GepList[i];
  4281. QualType Type = EltTyList[i];
// Everything is an element type.
  4283. EltList.push_back(CGF.Builder.CreateLoad(Ptr));
  4284. }
  4285. }
  4286. void CGMSHLSLRuntime::StoreFlattenedGepList(CodeGenFunction &CGF, ArrayRef<Value *> GepList,
  4287. ArrayRef<QualType> GepTyList, ArrayRef<Value *> EltValList, ArrayRef<QualType> SrcTyList) {
  4288. unsigned eltSize = GepList.size();
  4289. for (unsigned i = 0; i < eltSize; i++) {
  4290. Value *Ptr = GepList[i];
  4291. QualType DestType = GepTyList[i];
  4292. Value *Val = EltValList[i];
  4293. QualType SrcType = SrcTyList[i];
  4294. llvm::Type *Ty = Ptr->getType()->getPointerElementType();
// Everything is an element type.
  4296. if (Ty != Val->getType()) {
  4297. Instruction::CastOps castOp =
  4298. static_cast<Instruction::CastOps>(HLModule::FindCastOp(
  4299. IsUnsigned(SrcType), IsUnsigned(DestType), Val->getType(), Ty));
  4300. Val = CGF.Builder.CreateCast(castOp, Val, Ty);
  4301. }
  4302. CGF.Builder.CreateStore(Val, Ptr);
  4303. }
  4304. }
// Copy element data from SrcPtr to DestPtr by generating the following IR:
//   element = Ld SrcGEP
//   St element, DestGEP
// idxList stores the indices used to build the GetElementPtr for the current element.
// Type is the QualType of the current element.
// Ty is the llvm::Type of the current element.
  4311. void CGMSHLSLRuntime::EmitHLSLAggregateCopy(
  4312. CodeGenFunction &CGF, llvm::Value *SrcPtr, llvm::Value *DestPtr,
  4313. SmallVector<Value *, 4> &idxList, clang::QualType Type, llvm::Type *Ty) {
  4314. if (llvm::PointerType *PT = dyn_cast<llvm::PointerType>(Ty)) {
  4315. Constant *idx = Constant::getIntegerValue(
  4316. IntegerType::get(Ty->getContext(), 32), APInt(32, 0));
  4317. idxList.emplace_back(idx);
  4318. EmitHLSLAggregateCopy(CGF, SrcPtr, DestPtr, idxList, Type,
  4319. PT->getElementType());
  4320. idxList.pop_back();
  4321. } else if (HLMatrixLower::IsMatrixType(Ty)) {
  4322. // Use matLd/St for matrix.
  4323. Value *srcGEP = CGF.Builder.CreateInBoundsGEP(SrcPtr, idxList);
  4324. Value *dstGEP = CGF.Builder.CreateInBoundsGEP(DestPtr, idxList);
  4325. Value *ldMat = EmitHLSLMatrixLoad(CGF, srcGEP, Type);
  4326. EmitHLSLMatrixStore(CGF, ldMat, dstGEP, Type);
  4327. } else if (StructType *ST = dyn_cast<StructType>(Ty)) {
  4328. if (HLModule::IsHLSLObjectType(ST)) {
// Avoid splitting HLSL objects.
  4330. SimpleCopy(DestPtr, SrcPtr, idxList, CGF.Builder);
  4331. return;
  4332. }
  4333. const clang::RecordType *RT = Type->getAsStructureType();
  4334. RecordDecl *RD = RT->getDecl();
  4335. auto fieldIter = RD->field_begin();
  4336. const CGRecordLayout &RL = CGF.getTypes().getCGRecordLayout(RD);
// Handle base classes.
  4338. if (const CXXRecordDecl *CXXRD = dyn_cast<CXXRecordDecl>(RD)) {
  4339. if (CXXRD->getNumBases()) {
  4340. for (const auto &I : CXXRD->bases()) {
  4341. const CXXRecordDecl *BaseDecl =
  4342. cast<CXXRecordDecl>(I.getType()->castAs<RecordType>()->getDecl());
  4343. if (BaseDecl->field_empty())
  4344. continue;
  4345. QualType parentTy = QualType(BaseDecl->getTypeForDecl(), 0);
  4346. unsigned i = RL.getNonVirtualBaseLLVMFieldNo(BaseDecl);
  4347. llvm::Type *ET = ST->getElementType(i);
  4348. Constant *idx = llvm::Constant::getIntegerValue(
  4349. IntegerType::get(Ty->getContext(), 32), APInt(32, i));
  4350. idxList.emplace_back(idx);
  4351. EmitHLSLAggregateCopy(CGF, SrcPtr, DestPtr, idxList,
  4352. parentTy, ET);
  4353. idxList.pop_back();
  4354. }
  4355. }
  4356. }
  4357. for (auto fieldIter = RD->field_begin(), fieldEnd = RD->field_end();
  4358. fieldIter != fieldEnd; ++fieldIter) {
  4359. unsigned i = RL.getLLVMFieldNo(*fieldIter);
  4360. llvm::Type *ET = ST->getElementType(i);
  4361. Constant *idx = llvm::Constant::getIntegerValue(
  4362. IntegerType::get(Ty->getContext(), 32), APInt(32, i));
  4363. idxList.emplace_back(idx);
  4364. EmitHLSLAggregateCopy(CGF, SrcPtr, DestPtr, idxList, fieldIter->getType(),
  4365. ET);
  4366. idxList.pop_back();
  4367. }
  4368. } else if (llvm::ArrayType *AT = dyn_cast<llvm::ArrayType>(Ty)) {
  4369. llvm::Type *ET = AT->getElementType();
  4370. QualType EltType = CGF.getContext().getBaseElementType(Type);
  4371. for (uint32_t i = 0; i < AT->getNumElements(); i++) {
  4372. Constant *idx = Constant::getIntegerValue(
  4373. IntegerType::get(Ty->getContext(), 32), APInt(32, i));
  4374. idxList.emplace_back(idx);
  4375. EmitHLSLAggregateCopy(CGF, SrcPtr, DestPtr, idxList, EltType, ET);
  4376. idxList.pop_back();
  4377. }
  4378. } else {
  4379. SimpleCopy(DestPtr, SrcPtr, idxList, CGF.Builder);
  4380. }
  4381. }
  4382. void CGMSHLSLRuntime::EmitHLSLAggregateCopy(CodeGenFunction &CGF, llvm::Value *SrcPtr,
  4383. llvm::Value *DestPtr,
  4384. clang::QualType Ty) {
  4385. SmallVector<Value *, 4> idxList;
  4386. EmitHLSLAggregateCopy(CGF, SrcPtr, DestPtr, idxList, Ty, SrcPtr->getType());
  4387. }
  4388. void CGMSHLSLRuntime::EmitHLSLFlatConversionAggregateCopy(CodeGenFunction &CGF, llvm::Value *SrcPtr,
  4389. clang::QualType SrcTy,
  4390. llvm::Value *DestPtr,
  4391. clang::QualType DestTy) {
// It is possible to implement EmitHLSLAggregateCopy and EmitHLSLAggregateStore the same way,
// but splitting the value into scalars would generate many instructions when the source type
// is the same as the destination type.
  4394. SmallVector<Value *, 4> idxList;
  4395. SmallVector<Value *, 4> SrcGEPList;
  4396. SmallVector<QualType, 4> SrcEltTyList;
  4397. FlattenAggregatePtrToGepList(CGF, SrcPtr, idxList, SrcTy, SrcPtr->getType(), SrcGEPList,
  4398. SrcEltTyList);
  4399. SmallVector<Value *, 4> LdEltList;
  4400. LoadFlattenedGepList(CGF, SrcGEPList, SrcEltTyList, LdEltList);
  4401. idxList.clear();
  4402. SmallVector<Value *, 4> DestGEPList;
  4403. SmallVector<QualType, 4> DestEltTyList;
  4404. FlattenAggregatePtrToGepList(CGF, DestPtr, idxList, DestTy, DestPtr->getType(), DestGEPList, DestEltTyList);
  4405. StoreFlattenedGepList(CGF, DestGEPList, DestEltTyList, LdEltList, SrcEltTyList);
  4406. }
// Store element data from SrcVal to DestPtr by generating the following IR:
//   element = ExtractVal SrcVal
//   St element, DestGEP
// idxList stores the indices used to build the GetElementPtr for the current element.
// Type is the QualType of the current element.
// Ty is the llvm::Type of the current element.
  4413. void CGMSHLSLRuntime::EmitHLSLAggregateStore(
  4414. CodeGenFunction &CGF, llvm::Value *SrcVal, llvm::Value *DestPtr,
  4415. SmallVector<Value *, 4> &idxList, clang::QualType Type, llvm::Type *Ty) {
  4416. if (llvm::PointerType *PT = dyn_cast<llvm::PointerType>(Ty)) {
  4417. Constant *idx = Constant::getIntegerValue(
  4418. IntegerType::get(Ty->getContext(), 32), APInt(32, 0));
  4419. idxList.emplace_back(idx);
  4420. EmitHLSLAggregateStore(CGF, SrcVal, DestPtr, idxList, Type, PT->getElementType());
  4421. idxList.pop_back();
  4422. }
  4423. else if (HLMatrixLower::IsMatrixType(Ty)) {
  4424. // Use matLd/St for matrix.
  4425. Value *dstGEP = CGF.Builder.CreateInBoundsGEP(DestPtr, idxList);
  4426. Value *ldMat = GetEltVal(SrcVal, idxList, CGF.Builder);
  4427. EmitHLSLMatrixStore(CGF, ldMat, dstGEP, Type);
  4428. }
  4429. else if (StructType *ST = dyn_cast<StructType>(Ty)) {
  4430. if (HLModule::IsHLSLObjectType(ST)) {
// Avoid splitting HLSL objects.
  4432. SimpleCopy(DestPtr, SrcVal, idxList, CGF.Builder);
  4433. return;
  4434. }
  4435. const clang::RecordType *RT = Type->getAsStructureType();
  4436. RecordDecl *RD = RT->getDecl();
  4437. auto fieldIter = RD->field_begin();
  4438. const CGRecordLayout& RL = CGF.getTypes().getCGRecordLayout(RD);
// Handle base classes.
  4440. if (const CXXRecordDecl *CXXRD = dyn_cast<CXXRecordDecl>(RD)) {
  4441. if (CXXRD->getNumBases()) {
  4442. for (const auto &I : CXXRD->bases()) {
  4443. const CXXRecordDecl *BaseDecl = cast<CXXRecordDecl>(
  4444. I.getType()->castAs<RecordType>()->getDecl());
  4445. if (BaseDecl->field_empty())
  4446. continue;
  4447. QualType parentTy = QualType(BaseDecl->getTypeForDecl(), 0);
  4448. unsigned i = RL.getNonVirtualBaseLLVMFieldNo(BaseDecl);
  4449. llvm::Type *ET = ST->getElementType(i);
  4450. Constant *idx = llvm::Constant::getIntegerValue(
  4451. IntegerType::get(Ty->getContext(), 32), APInt(32, i));
  4452. idxList.emplace_back(idx);
  4453. EmitHLSLAggregateStore(CGF, SrcVal, DestPtr, idxList,
  4454. parentTy, ET);
  4455. idxList.pop_back();
  4456. }
  4457. }
  4458. }
  4459. for (auto fieldIter = RD->field_begin(), fieldEnd = RD->field_end();
  4460. fieldIter != fieldEnd; ++fieldIter) {
  4461. unsigned i = RL.getLLVMFieldNo(*fieldIter);
  4462. llvm::Type *ET = ST->getElementType(i);
  4463. Constant *idx = llvm::Constant::getIntegerValue(
  4464. IntegerType::get(Ty->getContext(), 32), APInt(32, i));
  4465. idxList.emplace_back(idx);
  4466. EmitHLSLAggregateStore(CGF, SrcVal, DestPtr, idxList, fieldIter->getType(), ET);
  4467. idxList.pop_back();
  4468. }
  4469. }
  4470. else if (llvm::ArrayType *AT = dyn_cast<llvm::ArrayType>(Ty)) {
  4471. llvm::Type *ET = AT->getElementType();
  4472. QualType EltType = CGF.getContext().getBaseElementType(Type);
  4473. for (uint32_t i = 0; i < AT->getNumElements(); i++) {
  4474. Constant *idx = Constant::getIntegerValue(
  4475. IntegerType::get(Ty->getContext(), 32), APInt(32, i));
  4476. idxList.emplace_back(idx);
  4477. EmitHLSLAggregateStore(CGF, SrcVal, DestPtr, idxList, EltType, ET);
  4478. idxList.pop_back();
  4479. }
  4480. }
  4481. else {
  4482. SimpleValCopy(DestPtr, SrcVal, idxList, CGF.Builder);
  4483. }
  4484. }
  4485. void CGMSHLSLRuntime::EmitHLSLAggregateStore(CodeGenFunction &CGF, llvm::Value *SrcVal,
  4486. llvm::Value *DestPtr,
  4487. clang::QualType Ty) {
  4488. SmallVector<Value *, 4> idxList;
  4489. // Add first 0 for DestPtr.
  4490. Constant *idx = Constant::getIntegerValue(
  4491. IntegerType::get(SrcVal->getContext(), 32), APInt(32, 0));
  4492. idxList.emplace_back(idx);
  4493. EmitHLSLAggregateStore(CGF, SrcVal, DestPtr, idxList, Ty, SrcVal->getType());
  4494. }
  4495. static void SimpleFlatValCopy(Value *DestPtr, Value *SrcVal, QualType Ty,
  4496. QualType SrcTy, ArrayRef<Value *> idxList,
  4497. CGBuilderTy &Builder) {
  4498. Value *DestGEP = Builder.CreateInBoundsGEP(DestPtr, idxList);
  4499. llvm::Type *ToTy = DestGEP->getType()->getPointerElementType();
  4500. llvm::Type *EltToTy = ToTy;
  4501. if (llvm::VectorType *VT = dyn_cast<llvm::VectorType>(ToTy)) {
  4502. EltToTy = VT->getElementType();
  4503. }
  4504. if (EltToTy != SrcVal->getType()) {
  4505. Instruction::CastOps castOp =
  4506. static_cast<Instruction::CastOps>(HLModule::FindCastOp(
  4507. IsUnsigned(SrcTy), IsUnsigned(Ty), SrcVal->getType(), ToTy));
  4508. SrcVal = Builder.CreateCast(castOp, SrcVal, EltToTy);
  4509. }
  4510. if (llvm::VectorType *VT = dyn_cast<llvm::VectorType>(ToTy)) {
  4511. llvm::VectorType *VT1 = llvm::VectorType::get(EltToTy, 1);
  4512. Value *V1 =
  4513. Builder.CreateInsertElement(UndefValue::get(VT1), SrcVal, (uint64_t)0);
  4514. std::vector<int> shufIdx(VT->getNumElements(), 0);
  4515. Value *Vec = Builder.CreateShuffleVector(V1, V1, shufIdx);
  4516. Builder.CreateStore(Vec, DestGEP);
  4517. } else
  4518. Builder.CreateStore(SrcVal, DestGEP);
  4519. }
  4520. void CGMSHLSLRuntime::EmitHLSLFlatConversionToAggregate(
  4521. CodeGenFunction &CGF, Value *SrcVal, llvm::Value *DestPtr,
  4522. SmallVector<Value *, 4> &idxList, QualType Type, QualType SrcType,
  4523. llvm::Type *Ty) {
  4524. if (llvm::PointerType *PT = dyn_cast<llvm::PointerType>(Ty)) {
  4525. Constant *idx = Constant::getIntegerValue(
  4526. IntegerType::get(Ty->getContext(), 32), APInt(32, 0));
  4527. idxList.emplace_back(idx);
  4528. EmitHLSLFlatConversionToAggregate(CGF, SrcVal, DestPtr, idxList, Type,
  4529. SrcType, PT->getElementType());
  4530. idxList.pop_back();
  4531. } else if (HLMatrixLower::IsMatrixType(Ty)) {
  4532. // Use matLd/St for matrix.
  4533. Value *dstGEP = CGF.Builder.CreateInBoundsGEP(DestPtr, idxList);
  4534. unsigned row, col;
  4535. llvm::Type *EltTy = HLMatrixLower::GetMatrixInfo(Ty, col, row);
  4536. llvm::VectorType *VT1 = llvm::VectorType::get(EltTy, 1);
  4537. if (EltTy != SrcVal->getType()) {
  4538. Instruction::CastOps castOp =
  4539. static_cast<Instruction::CastOps>(HLModule::FindCastOp(
  4540. IsUnsigned(SrcType), IsUnsigned(Type), SrcVal->getType(), EltTy));
  4541. SrcVal = CGF.Builder.CreateCast(castOp, SrcVal, EltTy);
  4542. }
  4543. Value *V1 = CGF.Builder.CreateInsertElement(UndefValue::get(VT1), SrcVal,
  4544. (uint64_t)0);
  4545. std::vector<int> shufIdx(col * row, 0);
  4546. Value *VecMat = CGF.Builder.CreateShuffleVector(V1, V1, shufIdx);
  4547. Value *MatInit = EmitHLSLMatrixOperationCallImp(
  4548. CGF.Builder, HLOpcodeGroup::HLInit, 0, Ty, {VecMat}, TheModule);
  4549. EmitHLSLMatrixStore(CGF, MatInit, dstGEP, Type);
  4550. } else if (StructType *ST = dyn_cast<StructType>(Ty)) {
  4551. DXASSERT(!HLModule::IsHLSLObjectType(ST), "cannot cast to hlsl object, Sema should reject");
  4552. const clang::RecordType *RT = Type->getAsStructureType();
  4553. RecordDecl *RD = RT->getDecl();
  4554. auto fieldIter = RD->field_begin();
  4555. const CGRecordLayout &RL = CGF.getTypes().getCGRecordLayout(RD);
// Handle base classes.
  4557. if (const CXXRecordDecl *CXXRD = dyn_cast<CXXRecordDecl>(RD)) {
  4558. if (CXXRD->getNumBases()) {
  4559. for (const auto &I : CXXRD->bases()) {
  4560. const CXXRecordDecl *BaseDecl =
  4561. cast<CXXRecordDecl>(I.getType()->castAs<RecordType>()->getDecl());
  4562. if (BaseDecl->field_empty())
  4563. continue;
  4564. QualType parentTy = QualType(BaseDecl->getTypeForDecl(), 0);
  4565. unsigned i = RL.getNonVirtualBaseLLVMFieldNo(BaseDecl);
  4566. llvm::Type *ET = ST->getElementType(i);
  4567. Constant *idx = llvm::Constant::getIntegerValue(
  4568. IntegerType::get(Ty->getContext(), 32), APInt(32, i));
  4569. idxList.emplace_back(idx);
  4570. EmitHLSLFlatConversionToAggregate(CGF, SrcVal, DestPtr, idxList,
  4571. parentTy, SrcType, ET);
  4572. idxList.pop_back();
  4573. }
  4574. }
  4575. }
  4576. for (auto fieldIter = RD->field_begin(), fieldEnd = RD->field_end();
  4577. fieldIter != fieldEnd; ++fieldIter) {
  4578. unsigned i = RL.getLLVMFieldNo(*fieldIter);
  4579. llvm::Type *ET = ST->getElementType(i);
  4580. Constant *idx = llvm::Constant::getIntegerValue(
  4581. IntegerType::get(Ty->getContext(), 32), APInt(32, i));
  4582. idxList.emplace_back(idx);
  4583. EmitHLSLFlatConversionToAggregate(CGF, SrcVal, DestPtr, idxList,
  4584. fieldIter->getType(), SrcType, ET);
  4585. idxList.pop_back();
  4586. }
  4587. } else if (llvm::ArrayType *AT = dyn_cast<llvm::ArrayType>(Ty)) {
  4588. llvm::Type *ET = AT->getElementType();
  4589. QualType EltType = CGF.getContext().getBaseElementType(Type);
  4590. for (uint32_t i = 0; i < AT->getNumElements(); i++) {
  4591. Constant *idx = Constant::getIntegerValue(
  4592. IntegerType::get(Ty->getContext(), 32), APInt(32, i));
  4593. idxList.emplace_back(idx);
  4594. EmitHLSLFlatConversionToAggregate(CGF, SrcVal, DestPtr, idxList, EltType,
  4595. SrcType, ET);
  4596. idxList.pop_back();
  4597. }
  4598. } else {
  4599. SimpleFlatValCopy(DestPtr, SrcVal, Type, SrcType, idxList, CGF.Builder);
  4600. }
  4601. }
  4602. void CGMSHLSLRuntime::EmitHLSLFlatConversionToAggregate(CodeGenFunction &CGF,
  4603. Value *Val,
  4604. Value *DestPtr,
  4605. QualType Ty,
  4606. QualType SrcTy) {
  4607. if (SrcTy->isBuiltinType()) {
  4608. SmallVector<Value *, 4> idxList;
  4609. // Add first 0 for DestPtr.
  4610. Constant *idx = Constant::getIntegerValue(
  4611. IntegerType::get(Val->getContext(), 32), APInt(32, 0));
  4612. idxList.emplace_back(idx);
  4613. EmitHLSLFlatConversionToAggregate(
  4614. CGF, Val, DestPtr, idxList, Ty, SrcTy,
  4615. DestPtr->getType()->getPointerElementType());
  4616. }
  4617. else {
  4618. SmallVector<Value *, 4> idxList;
  4619. SmallVector<Value *, 4> DestGEPList;
  4620. SmallVector<QualType, 4> DestEltTyList;
  4621. FlattenAggregatePtrToGepList(CGF, DestPtr, idxList, Ty, DestPtr->getType(), DestGEPList, DestEltTyList);
  4622. SmallVector<Value *, 4> EltList;
  4623. SmallVector<QualType, 4> EltTyList;
  4624. FlattenValToInitList(CGF, EltList, EltTyList, SrcTy, Val);
  4625. StoreFlattenedGepList(CGF, DestGEPList, DestEltTyList, EltList, EltTyList);
  4626. }
  4627. }
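// Example for EmitHLSLFlatConversionToAggregate above: a single scalar source is
// splatted over every flattened destination element (with per-element casts as
// needed), while an aggregate source is flattened to an element list first and
// then stored through StoreFlattenedGepList.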
  4628. void CGMSHLSLRuntime::EmitHLSLRootSignature(CodeGenFunction &CGF,
  4629. HLSLRootSignatureAttr *RSA,
  4630. Function *Fn) {
  4631. StringRef StrRef = RSA->getSignatureName();
  4632. DiagnosticsEngine &Diags = CGF.getContext().getDiagnostics();
  4633. SourceLocation SLoc = RSA->getLocation();
  4634. std::string OSStr;
  4635. raw_string_ostream OS(OSStr);
  4636. hlsl::DxilVersionedRootSignatureDesc *D = nullptr;
  4637. DXASSERT(CGF.getLangOpts().RootSigMajor == 1,
  4638. "else EmitHLSLRootSignature needs to be updated");
  4639. hlsl::DxilRootSignatureVersion Ver;
  4640. if (CGF.getLangOpts().RootSigMinor == 0) {
  4641. Ver = hlsl::DxilRootSignatureVersion::Version_1_0;
  4642. }
  4643. else {
  4644. DXASSERT(CGF.getLangOpts().RootSigMinor == 1,
  4645. "else EmitHLSLRootSignature needs to be updated");
  4646. Ver = hlsl::DxilRootSignatureVersion::Version_1_1;
  4647. }
  4648. if (ParseHLSLRootSignature(StrRef.data(), StrRef.size(), Ver, &D, SLoc,
  4649. Diags)) {
  4650. CComPtr<IDxcBlob> pSignature;
  4651. CComPtr<IDxcBlobEncoding> pErrors;
  4652. hlsl::SerializeRootSignature(D, &pSignature, &pErrors, false);
  4653. if (pSignature == nullptr) {
  4654. DXASSERT(pErrors != nullptr, "else serialize failed with no msg");
  4655. ReportHLSLRootSigError(Diags, SLoc,
  4656. (char *)pErrors->GetBufferPointer(), pErrors->GetBufferSize());
  4657. hlsl::DeleteRootSignature(D);
  4658. }
  4659. else {
  4660. llvm::Module *pModule = Fn->getParent();
  4661. pModule->GetHLModule().GetRootSignature().Assign(D, pSignature);
  4662. }
  4663. }
  4664. }
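// Example input for EmitHLSLRootSignature above: the string of an attribute such
// as
//   [RootSignature("RootFlags(0), CBV(b0)")]
// is parsed for the requested 1.0/1.1 version, serialized, and attached to the
// HLModule; errors are reported through the diagnostics engine at the attribute
// location.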
  4665. void CGMSHLSLRuntime::EmitHLSLOutParamConversionInit(
  4666. CodeGenFunction &CGF, const FunctionDecl *FD, const CallExpr *E,
  4667. llvm::SmallVector<LValue, 8> &castArgList,
  4668. llvm::SmallVector<const Stmt *, 8> &argList,
  4669. const std::function<void(const VarDecl *, llvm::Value *)> &TmpArgMap) {
  4670. // Special case: skip first argument of CXXOperatorCall (it is "this").
  4671. unsigned ArgsToSkip = isa<CXXOperatorCallExpr>(E) ? 1 : 0;
  4672. for (uint32_t i = 0; i < FD->getNumParams(); i++) {
  4673. const ParmVarDecl *Param = FD->getParamDecl(i);
  4674. const Expr *Arg = E->getArg(i+ArgsToSkip);
  4675. QualType ParamTy = Param->getType().getNonReferenceType();
  4676. if (!Param->isModifierOut())
  4677. continue;
  4678. // get original arg
  4679. LValue argLV = CGF.EmitLValue(Arg);
  4680. // create temp Var
  4681. VarDecl *tmpArg =
  4682. VarDecl::Create(CGF.getContext(), const_cast<FunctionDecl *>(FD),
  4683. SourceLocation(), SourceLocation(),
  4684. /*IdentifierInfo*/ nullptr, ParamTy,
  4685. CGF.getContext().getTrivialTypeSourceInfo(ParamTy),
  4686. StorageClass::SC_Auto);
// An aggregate type will be an indirect param converted to a pointer type,
// so don't update it to a ReferenceType; use an RValue for it.
  4689. bool isAggregateType = (ParamTy->isArrayType() || ParamTy->isRecordType()) &&
  4690. !hlsl::IsHLSLVecMatType(ParamTy);
  4691. const DeclRefExpr *tmpRef = DeclRefExpr::Create(
  4692. CGF.getContext(), NestedNameSpecifierLoc(), SourceLocation(), tmpArg,
  4693. /*enclosing*/ false, tmpArg->getLocation(), ParamTy,
  4694. isAggregateType ? VK_RValue : VK_LValue);
  4695. // update the arg
  4696. argList[i] = tmpRef;
  4697. // create alloc for the tmp arg
  4698. Value *tmpArgAddr = nullptr;
  4699. BasicBlock *InsertBlock = CGF.Builder.GetInsertBlock();
  4700. Function *F = InsertBlock->getParent();
  4701. BasicBlock *EntryBlock = &F->getEntryBlock();
  4702. if (ParamTy->isBooleanType()) {
  4703. // Create i32 for bool.
  4704. ParamTy = CGM.getContext().IntTy;
  4705. }
// Make sure the alloca is in the entry block so inlining does not create a stacksave.
  4707. IRBuilder<> Builder(EntryBlock->getFirstInsertionPt());
  4708. tmpArgAddr = Builder.CreateAlloca(CGF.ConvertType(ParamTy));
  4709. // add it to local decl map
  4710. TmpArgMap(tmpArg, tmpArgAddr);
  4711. LValue tmpLV = LValue::MakeAddr(tmpArgAddr, ParamTy, argLV.getAlignment(),
  4712. CGF.getContext());
  4713. // save for cast after call
  4714. castArgList.emplace_back(tmpLV);
  4715. castArgList.emplace_back(argLV);
  4716. bool isObject = HLModule::IsHLSLObjectType(
  4717. tmpArgAddr->getType()->getPointerElementType());
  4718. // cast before the call
  4719. if (Param->isModifierIn() &&
  4720. // Don't copy object
  4721. !isObject) {
  4722. Value *outVal = nullptr;
  4723. bool isAggrageteTy = ParamTy->isAggregateType();
  4724. isAggrageteTy &= !IsHLSLVecMatType(ParamTy);
  4725. if (!isAggrageteTy) {
  4726. if (!IsHLSLMatType(ParamTy)) {
  4727. RValue outRVal = CGF.EmitLoadOfLValue(argLV, SourceLocation());
  4728. outVal = outRVal.getScalarVal();
  4729. } else {
  4730. Value *argAddr = argLV.getAddress();
  4731. outVal = EmitHLSLMatrixLoad(CGF, argAddr, ParamTy);
  4732. }
  4733. llvm::Type *ToTy = tmpArgAddr->getType()->getPointerElementType();
  4734. Instruction::CastOps castOp =
  4735. static_cast<Instruction::CastOps>(HLModule::FindCastOp(
  4736. IsUnsigned(argLV.getType()), IsUnsigned(tmpLV.getType()),
  4737. outVal->getType(), ToTy));
  4738. Value *castVal = CGF.Builder.CreateCast(castOp, outVal, ToTy);
  4739. if (!HLMatrixLower::IsMatrixType(ToTy))
  4740. CGF.Builder.CreateStore(castVal, tmpArgAddr);
  4741. else
  4742. EmitHLSLMatrixStore(CGF, castVal, tmpArgAddr, ParamTy);
  4743. } else {
  4744. EmitHLSLAggregateCopy(CGF, argLV.getAddress(), tmpLV.getAddress(),
  4745. ParamTy);
  4746. }
  4747. }
  4748. }
  4749. }
  4750. void CGMSHLSLRuntime::EmitHLSLOutParamConversionCopyBack(
  4751. CodeGenFunction &CGF, llvm::SmallVector<LValue, 8> &castArgList) {
  4752. for (uint32_t i = 0; i < castArgList.size(); i += 2) {
  4753. // cast after the call
  4754. LValue tmpLV = castArgList[i];
  4755. LValue argLV = castArgList[i + 1];
  4756. QualType argTy = argLV.getType().getNonReferenceType();
  4757. Value *tmpArgAddr = tmpLV.getAddress();
  4758. Value *outVal = nullptr;
  4759. bool isAggrageteTy = argTy->isAggregateType();
  4760. isAggrageteTy &= !IsHLSLVecMatType(argTy);
  4761. bool isObject = HLModule::IsHLSLObjectType(
  4762. tmpArgAddr->getType()->getPointerElementType());
  4763. if (!isObject) {
  4764. if (!isAggrageteTy) {
  4765. if (!IsHLSLMatType(argTy))
  4766. outVal = CGF.Builder.CreateLoad(tmpArgAddr);
  4767. else
  4768. outVal = EmitHLSLMatrixLoad(CGF, tmpArgAddr, argTy);
  4769. llvm::Type *ToTy = CGF.ConvertType(argTy);
  4770. llvm::Type *FromTy = outVal->getType();
  4771. Value *castVal = outVal;
  4772. if (ToTy == FromTy) {
// No cast needed.
  4774. } else if (ToTy->getScalarType() == FromTy->getScalarType()) {
  4775. if (ToTy->getScalarType() == ToTy) {
  4776. DXASSERT(FromTy->isVectorTy() &&
  4777. FromTy->getVectorNumElements() == 1,
  4778. "must be vector of 1 element");
  4779. castVal = CGF.Builder.CreateExtractElement(outVal, (uint64_t)0);
  4780. } else {
  4781. DXASSERT(!FromTy->isVectorTy(), "must be scalar type");
  4782. DXASSERT(ToTy->isVectorTy() && ToTy->getVectorNumElements() == 1,
  4783. "must be vector of 1 element");
  4784. castVal = UndefValue::get(ToTy);
  4785. castVal =
  4786. CGF.Builder.CreateInsertElement(castVal, outVal, (uint64_t)0);
  4787. }
  4788. } else {
  4789. Instruction::CastOps castOp =
  4790. static_cast<Instruction::CastOps>(HLModule::FindCastOp(
  4791. IsUnsigned(tmpLV.getType()), IsUnsigned(argLV.getType()),
  4792. outVal->getType(), ToTy));
  4793. castVal = CGF.Builder.CreateCast(castOp, outVal, ToTy);
  4794. }
  4795. if (!HLMatrixLower::IsMatrixType(ToTy))
  4796. CGF.EmitStoreThroughLValue(RValue::get(castVal), argLV);
  4797. else {
  4798. Value *destPtr = argLV.getAddress();
  4799. EmitHLSLMatrixStore(CGF, castVal, destPtr, argTy);
  4800. }
  4801. } else {
  4802. EmitHLSLAggregateCopy(CGF, tmpLV.getAddress(), argLV.getAddress(),
  4803. argTy);
  4804. }
  4805. } else
  4806. tmpArgAddr->replaceAllUsesWith(argLV.getAddress());
  4807. }
  4808. }
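// Out-parameter example for EmitHLSLOutParamConversionInit/CopyBack above: for
//   void f(inout float x);
// called with an int lvalue, Init allocates a float temp in the entry block and
// loads and casts the argument into it before the call; CopyBack casts the temp
// back and stores it through the original lvalue after the call. HLSL object
// arguments skip the copies and alias the original address instead.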
  4809. CGHLSLRuntime *CodeGen::CreateMSHLSLRuntime(CodeGenModule &CGM) {
  4810. return new CGMSHLSLRuntime(CGM);
  4811. }