//===----- CGHLSLMS.cpp - Interface to HLSL Runtime ----------------===//
///////////////////////////////////////////////////////////////////////////////
//                                                                           //
// CGHLSLMS.cpp                                                              //
// Copyright (C) Microsoft Corporation. All rights reserved.                 //
// This file is distributed under the University of Illinois Open Source     //
// License. See LICENSE.TXT for details.                                     //
//                                                                           //
// This provides a class for HLSL code generation.                           //
//                                                                           //
///////////////////////////////////////////////////////////////////////////////

#include "CGHLSLRuntime.h"
#include "CodeGenFunction.h"
#include "CodeGenModule.h"
#include "CGRecordLayout.h"
#include "dxc/HlslIntrinsicOp.h"
#include "dxc/HLSL/HLMatrixLowerHelper.h"
#include "dxc/HLSL/HLModule.h"
#include "dxc/HLSL/DxilUtil.h"
#include "dxc/HLSL/HLOperations.h"
#include "dxc/HLSL/DxilOperations.h"
#include "dxc/HLSL/DxilTypeSystem.h"
#include "clang/AST/DeclTemplate.h"
#include "clang/AST/HlslTypes.h"
#include "clang/Frontend/CodeGenOptions.h"
#include "clang/Lex/HLSLMacroExpander.h"
#include "llvm/ADT/STLExtras.h"
#include "llvm/ADT/StringSwitch.h"
#include "llvm/ADT/SmallPtrSet.h"
#include "llvm/IR/Constants.h"
#include "llvm/IR/IRBuilder.h"
#include "llvm/IR/GetElementPtrTypeIterator.h"
#include "llvm/Transforms/Utils/Cloning.h"
#include "llvm/IR/InstIterator.h"
#include <memory>
#include <unordered_map>
#include <unordered_set>
#include <set>

#include "dxc/HLSL/DxilRootSignature.h"
#include "dxc/HLSL/DxilCBuffer.h"
#include "clang/Parse/ParseHLSL.h"   // root sig would be in Parser if part of lang
#include "dxc/Support/WinIncludes.h" // stream support
#include "dxc/dxcapi.h"              // stream support
#include "dxc/HLSL/HLSLExtensionsCodegenHelper.h"
#include "dxc/HLSL/DxilGenerationPass.h" // support pause/resume passes

using namespace clang;
using namespace CodeGen;
using namespace hlsl;
using namespace llvm;
using std::unique_ptr;

static const bool KeepUndefinedTrue = true; // Keep interpolation mode undefined if not set explicitly.

// Define constant variables exposed in DxilConstants.h
namespace hlsl {
namespace DXIL {
// TODO: revisit data layout descriptions for the following:
// - x64 pointers?
// - Keep ELF mangling (m:e)?

// For the legacy data layout, every scalar smaller than 32 bits aligns to 32 bits.
const char* kLegacyLayoutString = "e-m:e-p:32:32-i1:32-i8:32-i16:32-i32:32-i64:64-f16:32-f32:32-f64:64-n8:16:32:64";
// New data layout with native low-precision types.
const char* kNewLayoutString = "e-m:e-p:32:32-i1:32-i8:8-i16:16-i32:32-i64:64-f16:16-f32:32-f64:64-n8:16:32:64";
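// Note: the two strings differ only in the storage width of small scalars
// (i8/i16/f16). The legacy layout widens them to 32 bits (e.g. i16:32), while
// the new layout keeps their native widths (e.g. i16:16).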

// Function Attributes
// TODO: consider generating attributes from hctdb
const char* kFP32DenormKindString = "fp32-denorm-mode";
const char* kFP32DenormValueAnyString = "any";
const char* kFP32DenormValuePreserveString = "preserve";
const char* kFP32DenormValueFtzString = "ftz";
} // DXIL
} // hlsl

namespace {

/// Use this class to represent HLSL cbuffer in high-level DXIL.
class HLCBuffer : public DxilCBuffer {
public:
  HLCBuffer() = default;
  virtual ~HLCBuffer() = default;

  void AddConst(std::unique_ptr<DxilResourceBase> &pItem);
  std::vector<std::unique_ptr<DxilResourceBase>> &GetConstants();

private:
  std::vector<std::unique_ptr<DxilResourceBase>> constants; // constants inside const buffer
};

//------------------------------------------------------------------------------
//
// HLCBuffer methods.
//
void HLCBuffer::AddConst(std::unique_ptr<DxilResourceBase> &pItem) {
  pItem->SetID(constants.size());
  constants.push_back(std::move(pItem));
}

std::vector<std::unique_ptr<DxilResourceBase>> &HLCBuffer::GetConstants() {
  return constants;
}

class CGMSHLSLRuntime : public CGHLSLRuntime {
private:
  /// Convenience reference to LLVM Context
  llvm::LLVMContext &Context;
  /// Convenience reference to the current module
  llvm::Module &TheModule;

  HLModule *m_pHLModule;
  llvm::Type *CBufferType;
  uint32_t globalCBIndex;
  // TODO: confirm how minprec works here.
  llvm::DataLayout dataLayout;
  // Decl map to constant id for the program.
  llvm::DenseMap<HLSLBufferDecl *, uint32_t> constantBufMap;
  // Map from resource type to resource metadata value.
  std::unordered_map<llvm::Type *, MDNode *> resMetadataMap;

  bool m_bDebugInfo;
  bool m_bIsLib;

  HLCBuffer &GetGlobalCBuffer() {
    return *static_cast<HLCBuffer *>(&(m_pHLModule->GetCBuffer(globalCBIndex)));
  }
  void AddConstant(VarDecl *constDecl, HLCBuffer &CB);
  uint32_t AddSampler(VarDecl *samplerDecl);
  uint32_t AddUAVSRV(VarDecl *decl, hlsl::DxilResourceBase::Class resClass);
  bool SetUAVSRV(SourceLocation loc, hlsl::DxilResourceBase::Class resClass,
                 DxilResource *hlslRes, const RecordDecl *RD);
  uint32_t AddCBuffer(HLSLBufferDecl *D);
  hlsl::DxilResourceBase::Class TypeToClass(clang::QualType Ty);

  // Save the entry function so we don't need to look it up by its original name.
  struct EntryFunctionInfo {
    clang::SourceLocation SL = clang::SourceLocation();
    llvm::Function *Func = nullptr;
  };

  EntryFunctionInfo Entry;

  // Map to save patch constant functions
  struct PatchConstantInfo {
    clang::SourceLocation SL = clang::SourceLocation();
    llvm::Function *Func = nullptr;
    std::uint32_t NumOverloads = 0;
  };

  StringMap<PatchConstantInfo> patchConstantFunctionMap;
  std::unordered_map<Function *, std::unique_ptr<DxilFunctionProps>>
      patchConstantFunctionPropsMap;
  bool IsPatchConstantFunction(const Function *F);

  std::unordered_map<Function *, const clang::HLSLPatchConstantFuncAttr *>
      HSEntryPatchConstantFuncAttr;

  // Map to save entry functions.
  StringMap<EntryFunctionInfo> entryFunctionMap;

  // Map to save static global init exp.
  std::unordered_map<Expr *, GlobalVariable *> staticConstGlobalInitMap;
  std::unordered_map<GlobalVariable *, std::vector<Constant *>>
      staticConstGlobalInitListMap;
  std::unordered_map<GlobalVariable *, Function *> staticConstGlobalCtorMap;

  // List for functions with clip plane.
  std::vector<Function *> clipPlaneFuncList;
  std::unordered_map<Value *, DebugLoc> debugInfoMap;

  DxilRootSignatureVersion rootSigVer;

  Value *EmitHLSLMatrixLoad(CGBuilderTy &Builder, Value *Ptr, QualType Ty);
  void EmitHLSLMatrixStore(CGBuilderTy &Builder, Value *Val, Value *DestPtr,
                           QualType Ty);
  // Flatten the val into scalar val and push into elts and eltTys.
  void FlattenValToInitList(CodeGenFunction &CGF, SmallVector<Value *, 4> &elts,
                            SmallVector<QualType, 4> &eltTys, QualType Ty,
                            Value *val);
  // Push every value on InitListExpr into EltValList and EltTyList.
  void ScanInitList(CodeGenFunction &CGF, InitListExpr *E,
                    SmallVector<Value *, 4> &EltValList,
                    SmallVector<QualType, 4> &EltTyList);
  void FlattenAggregatePtrToGepList(CodeGenFunction &CGF, Value *Ptr,
                                    SmallVector<Value *, 4> &idxList,
                                    clang::QualType Type, llvm::Type *Ty,
                                    SmallVector<Value *, 4> &GepList,
                                    SmallVector<QualType, 4> &EltTyList);
  void LoadFlattenedGepList(CodeGenFunction &CGF, ArrayRef<Value *> GepList,
                            ArrayRef<QualType> EltTyList,
                            SmallVector<Value *, 4> &EltList);
  void StoreFlattenedGepList(CodeGenFunction &CGF, ArrayRef<Value *> GepList,
                             ArrayRef<QualType> GepTyList,
                             ArrayRef<Value *> EltValList,
                             ArrayRef<QualType> SrcTyList);

  void EmitHLSLAggregateCopy(CodeGenFunction &CGF, llvm::Value *SrcPtr,
                             llvm::Value *DestPtr,
                             SmallVector<Value *, 4> &idxList,
                             clang::QualType SrcType, clang::QualType DestType,
                             llvm::Type *Ty);
  void EmitHLSLFlatConversionToAggregate(CodeGenFunction &CGF, Value *SrcVal,
                                         llvm::Value *DestPtr,
                                         SmallVector<Value *, 4> &idxList,
                                         QualType Type, QualType SrcType,
                                         llvm::Type *Ty);

  void EmitHLSLRootSignature(CodeGenFunction &CGF, HLSLRootSignatureAttr *RSA,
                             llvm::Function *Fn) override;

  void CheckParameterAnnotation(SourceLocation SLoc,
                                const DxilParameterAnnotation &paramInfo,
                                bool isPatchConstantFunction);
  void CheckParameterAnnotation(SourceLocation SLoc,
                                DxilParamInputQual paramQual,
                                llvm::StringRef semFullName,
                                bool isPatchConstantFunction);

  void SetEntryFunction();
  SourceLocation SetSemantic(const NamedDecl *decl,
                             DxilParameterAnnotation &paramInfo);

  hlsl::InterpolationMode GetInterpMode(const Decl *decl, CompType compType,
                                        bool bKeepUndefined);
  hlsl::CompType GetCompType(const BuiltinType *BT);

  // save intrinsic opcode
  std::vector<std::pair<Function *, unsigned>> m_IntrinsicMap;
  void AddHLSLIntrinsicOpcodeToFunction(Function *, unsigned opcode);

  // Type annotation related.
  unsigned ConstructStructAnnotation(DxilStructAnnotation *annotation,
                                     const RecordDecl *RD,
                                     DxilTypeSystem &dxilTypeSys);
  unsigned AddTypeAnnotation(QualType Ty, DxilTypeSystem &dxilTypeSys,
                             unsigned &arrayEltSize);
  MDNode *GetOrAddResTypeMD(QualType resTy);
  void ConstructFieldAttributedAnnotation(DxilFieldAnnotation &fieldAnnotation,
                                          QualType fieldTy,
                                          bool bDefaultRowMajor);

  std::unordered_map<Constant *, DxilFieldAnnotation> m_ConstVarAnnotationMap;

public:
  CGMSHLSLRuntime(CodeGenModule &CGM);

  bool IsHlslObjectType(llvm::Type *Ty) override;
  /// Add resource to the program
  void addResource(Decl *D) override;

  void SetPatchConstantFunction(const EntryFunctionInfo &EntryFunc);
  void SetPatchConstantFunctionWithAttr(
      const EntryFunctionInfo &EntryFunc,
      const clang::HLSLPatchConstantFuncAttr *PatchConstantFuncAttr);

  void FinishCodeGen() override;

  bool IsTrivalInitListExpr(CodeGenFunction &CGF, InitListExpr *E) override;
  Value *EmitHLSLInitListExpr(CodeGenFunction &CGF, InitListExpr *E,
                              Value *DestPtr) override;
  Constant *EmitHLSLConstInitListExpr(CodeGenModule &CGM,
                                      InitListExpr *E) override;

  RValue EmitHLSLBuiltinCallExpr(CodeGenFunction &CGF, const FunctionDecl *FD,
                                 const CallExpr *E,
                                 ReturnValueSlot ReturnValue) override;
  void EmitHLSLOutParamConversionInit(
      CodeGenFunction &CGF, const FunctionDecl *FD, const CallExpr *E,
      llvm::SmallVector<LValue, 8> &castArgList,
      llvm::SmallVector<const Stmt *, 8> &argList,
      const std::function<void(const VarDecl *, llvm::Value *)> &TmpArgMap)
      override;
  void EmitHLSLOutParamConversionCopyBack(
      CodeGenFunction &CGF, llvm::SmallVector<LValue, 8> &castArgList) override;

  Value *EmitHLSLMatrixOperationCall(CodeGenFunction &CGF, const clang::Expr *E,
                                     llvm::Type *RetType,
                                     ArrayRef<Value *> paramList) override;
  void EmitHLSLDiscard(CodeGenFunction &CGF) override;

  Value *EmitHLSLMatrixSubscript(CodeGenFunction &CGF, llvm::Type *RetType,
                                 Value *Ptr, Value *Idx, QualType Ty) override;
  Value *EmitHLSLMatrixElement(CodeGenFunction &CGF, llvm::Type *RetType,
                               ArrayRef<Value *> paramList,
                               QualType Ty) override;
  Value *EmitHLSLMatrixLoad(CodeGenFunction &CGF, Value *Ptr,
                            QualType Ty) override;
  void EmitHLSLMatrixStore(CodeGenFunction &CGF, Value *Val, Value *DestPtr,
                           QualType Ty) override;

  void EmitHLSLAggregateCopy(CodeGenFunction &CGF, llvm::Value *SrcPtr,
                             llvm::Value *DestPtr,
                             clang::QualType Ty) override;
  void EmitHLSLAggregateStore(CodeGenFunction &CGF, llvm::Value *Val,
                              llvm::Value *DestPtr,
                              clang::QualType Ty) override;

  void EmitHLSLFlatConversionToAggregate(CodeGenFunction &CGF, Value *Val,
                                         Value *DestPtr, QualType Ty,
                                         QualType SrcTy) override;
  Value *EmitHLSLLiteralCast(CodeGenFunction &CGF, Value *Src,
                             QualType SrcType, QualType DstType) override;
  void EmitHLSLFlatConversionAggregateCopy(CodeGenFunction &CGF,
                                           llvm::Value *SrcPtr,
                                           clang::QualType SrcTy,
                                           llvm::Value *DestPtr,
                                           clang::QualType DestTy) override;

  void AddHLSLFunctionInfo(llvm::Function *, const FunctionDecl *FD) override;
  void EmitHLSLFunctionProlog(llvm::Function *,
                              const FunctionDecl *FD) override;

  void AddControlFlowHint(CodeGenFunction &CGF, const Stmt &S,
                          llvm::TerminatorInst *TI,
                          ArrayRef<const Attr *> Attrs) override;
  void FinishAutoVar(CodeGenFunction &CGF, const VarDecl &D,
                     llvm::Value *V) override;

  /// Get or add constant to the program
  HLCBuffer &GetOrCreateCBuffer(HLSLBufferDecl *D);
};
}
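
// Parse a root signature string into a versioned descriptor, serialize it to
// a blob, and either report the serialization errors or hand the descriptor
// and blob to the caller's RootSignatureHandle.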
void clang::CompileRootSignature(
    StringRef rootSigStr, DiagnosticsEngine &Diags, SourceLocation SLoc,
    hlsl::DxilRootSignatureVersion rootSigVer,
    hlsl::RootSignatureHandle *pRootSigHandle) {
  std::string OSStr;
  llvm::raw_string_ostream OS(OSStr);
  hlsl::DxilVersionedRootSignatureDesc *D = nullptr;

  if (ParseHLSLRootSignature(rootSigStr.data(), rootSigStr.size(), rootSigVer,
                             &D, SLoc, Diags)) {
    CComPtr<IDxcBlob> pSignature;
    CComPtr<IDxcBlobEncoding> pErrors;
    hlsl::SerializeRootSignature(D, &pSignature, &pErrors, false);
    if (pSignature == nullptr) {
      assert(pErrors != nullptr && "else serialize failed with no msg");
      ReportHLSLRootSigError(Diags, SLoc, (char *)pErrors->GetBufferPointer(),
                             pErrors->GetBufferSize());
      hlsl::DeleteRootSignature(D);
    } else {
      pRootSigHandle->Assign(D, pSignature);
    }
  }
}

//------------------------------------------------------------------------------
//
// CGMSHLSLRuntime methods.
//
CGMSHLSLRuntime::CGMSHLSLRuntime(CodeGenModule &CGM)
    : CGHLSLRuntime(CGM), Context(CGM.getLLVMContext()),
      TheModule(CGM.getModule()),
      CBufferType(
          llvm::StructType::create(TheModule.getContext(), "ConstantBuffer")),
      dataLayout(CGM.getLangOpts().UseMinPrecision
                     ? hlsl::DXIL::kLegacyLayoutString
                     : hlsl::DXIL::kNewLayoutString),
      Entry() {
  const hlsl::ShaderModel *SM =
      hlsl::ShaderModel::GetByName(CGM.getCodeGenOpts().HLSLProfile.c_str());
  // Only accept valid 6.x shader models.
  if (!SM->IsValid() || SM->GetMajor() != 6) {
    DiagnosticsEngine &Diags = CGM.getDiags();
    unsigned DiagID =
        Diags.getCustomDiagID(DiagnosticsEngine::Error, "invalid profile %0");
    Diags.Report(DiagID) << CGM.getCodeGenOpts().HLSLProfile;
    return;
  }
  m_bIsLib = SM->IsLib();

  // TODO: add AllResourceBound.
  if (CGM.getCodeGenOpts().HLSLAvoidControlFlow &&
      !CGM.getCodeGenOpts().HLSLAllResourcesBound) {
    if (SM->IsSM51Plus()) {
      DiagnosticsEngine &Diags = CGM.getDiags();
      unsigned DiagID =
          Diags.getCustomDiagID(DiagnosticsEngine::Error,
                                "Gfa option cannot be used in SM_5_1+ unless "
                                "all_resources_bound flag is specified");
      Diags.Report(DiagID);
    }
  }

  // Create HLModule.
  const bool skipInit = true;
  m_pHLModule = &TheModule.GetOrCreateHLModule(skipInit);

  // Set Option.
  HLOptions opts;
  opts.bIEEEStrict = CGM.getCodeGenOpts().UnsafeFPMath;
  opts.bDefaultRowMajor = CGM.getCodeGenOpts().HLSLDefaultRowMajor;
  opts.bDisableOptimizations = CGM.getCodeGenOpts().DisableLLVMOpts;
  opts.bLegacyCBufferLoad = !CGM.getCodeGenOpts().HLSLNotUseLegacyCBufLoad;
  opts.bAllResourcesBound = CGM.getCodeGenOpts().HLSLAllResourcesBound;
  opts.PackingStrategy = CGM.getCodeGenOpts().HLSLSignaturePackingStrategy;
  opts.bUseMinPrecision = CGM.getLangOpts().UseMinPrecision;
  m_pHLModule->SetHLOptions(opts);
  m_pHLModule->SetValidatorVersion(CGM.getCodeGenOpts().HLSLValidatorMajorVer,
                                   CGM.getCodeGenOpts().HLSLValidatorMinorVer);

  m_bDebugInfo =
      CGM.getCodeGenOpts().getDebugInfo() == CodeGenOptions::FullDebugInfo;

  // set profile
  m_pHLModule->SetShaderModel(SM);
  // set entry name
  m_pHLModule->SetEntryFunctionName(CGM.getCodeGenOpts().HLSLEntryFunction);

  // set root signature version.
  if (CGM.getLangOpts().RootSigMinor == 0) {
    rootSigVer = hlsl::DxilRootSignatureVersion::Version_1_0;
  } else {
    DXASSERT(CGM.getLangOpts().RootSigMinor == 1,
             "else CGMSHLSLRuntime Constructor needs to be updated");
    rootSigVer = hlsl::DxilRootSignatureVersion::Version_1_1;
  }
  DXASSERT(CGM.getLangOpts().RootSigMajor == 1,
           "else CGMSHLSLRuntime Constructor needs to be updated");

  // add globalCB
  unique_ptr<HLCBuffer> CB = llvm::make_unique<HLCBuffer>();
  std::string globalCBName = "$Globals";
  CB->SetGlobalSymbol(nullptr);
  CB->SetGlobalName(globalCBName);
  globalCBIndex = m_pHLModule->GetCBuffers().size();
  CB->SetID(globalCBIndex);
  CB->SetRangeSize(1);
  CB->SetLowerBound(UINT_MAX);
  DXVERIFY_NOMSG(globalCBIndex == m_pHLModule->AddCBuffer(std::move(CB)));

  // set Float Denorm Mode
  m_pHLModule->SetFloat32DenormMode(CGM.getCodeGenOpts().HLSLFloat32DenormMode);
}

bool CGMSHLSLRuntime::IsHlslObjectType(llvm::Type *Ty) {
  return HLModule::IsHLSLObjectType(Ty);
}

void CGMSHLSLRuntime::AddHLSLIntrinsicOpcodeToFunction(Function *F,
                                                       unsigned opcode) {
  m_IntrinsicMap.emplace_back(F, opcode);
}
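
// Validate a parameter's semantic against the current shader model and stage.
// An inout parameter is checked twice, once as an input and once as an output.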
void CGMSHLSLRuntime::CheckParameterAnnotation(
    SourceLocation SLoc, const DxilParameterAnnotation &paramInfo,
    bool isPatchConstantFunction) {
  if (!paramInfo.HasSemanticString()) {
    return;
  }
  llvm::StringRef semFullName = paramInfo.GetSemanticStringRef();
  DxilParamInputQual paramQual = paramInfo.GetParamInputQual();
  if (paramQual == DxilParamInputQual::Inout) {
    CheckParameterAnnotation(SLoc, DxilParamInputQual::In, semFullName,
                             isPatchConstantFunction);
    CheckParameterAnnotation(SLoc, DxilParamInputQual::Out, semFullName,
                             isPatchConstantFunction);
    return;
  }
  CheckParameterAnnotation(SLoc, paramQual, semFullName,
                           isPatchConstantFunction);
}

void CGMSHLSLRuntime::CheckParameterAnnotation(
    SourceLocation SLoc, DxilParamInputQual paramQual,
    llvm::StringRef semFullName, bool isPatchConstantFunction) {
  const ShaderModel *SM = m_pHLModule->GetShaderModel();
  DXIL::SigPointKind sigPoint =
      SigPointFromInputQual(paramQual, SM->GetKind(), isPatchConstantFunction);
  llvm::StringRef semName;
  unsigned semIndex;
  Semantic::DecomposeNameAndIndex(semFullName, &semName, &semIndex);
  const Semantic *pSemantic =
      Semantic::GetByName(semName, sigPoint, SM->GetMajor(), SM->GetMinor());
  if (pSemantic->IsInvalid()) {
    DiagnosticsEngine &Diags = CGM.getDiags();
    const ShaderModel *shader = m_pHLModule->GetShaderModel();
    unsigned DiagID = Diags.getCustomDiagID(
        DiagnosticsEngine::Error, "invalid semantic '%0' for %1 %2.%3");
    Diags.Report(SLoc, DiagID) << semName << shader->GetKindName()
                               << shader->GetMajor() << shader->GetMinor();
  }
}

SourceLocation
CGMSHLSLRuntime::SetSemantic(const NamedDecl *decl,
                             DxilParameterAnnotation &paramInfo) {
  for (const hlsl::UnusualAnnotation *it : decl->getUnusualAnnotations()) {
    if (it->getKind() == hlsl::UnusualAnnotation::UA_SemanticDecl) {
      const hlsl::SemanticDecl *sd = cast<hlsl::SemanticDecl>(it);
      paramInfo.SetSemanticString(sd->SemanticName);
      return it->Loc;
    }
  }
  return SourceLocation();
}

static DXIL::TessellatorDomain StringToDomain(StringRef domain) {
  if (domain == "isoline")
    return DXIL::TessellatorDomain::IsoLine;
  if (domain == "tri")
    return DXIL::TessellatorDomain::Tri;
  if (domain == "quad")
    return DXIL::TessellatorDomain::Quad;
  return DXIL::TessellatorDomain::Undefined;
}

static DXIL::TessellatorPartitioning StringToPartitioning(StringRef partition) {
  if (partition == "integer")
    return DXIL::TessellatorPartitioning::Integer;
  if (partition == "pow2")
    return DXIL::TessellatorPartitioning::Pow2;
  if (partition == "fractional_even")
    return DXIL::TessellatorPartitioning::FractionalEven;
  if (partition == "fractional_odd")
    return DXIL::TessellatorPartitioning::FractionalOdd;
  return DXIL::TessellatorPartitioning::Undefined;
}

static DXIL::TessellatorOutputPrimitive
StringToTessOutputPrimitive(StringRef primitive) {
  if (primitive == "point")
    return DXIL::TessellatorOutputPrimitive::Point;
  if (primitive == "line")
    return DXIL::TessellatorOutputPrimitive::Line;
  if (primitive == "triangle_cw")
    return DXIL::TessellatorOutputPrimitive::TriangleCW;
  if (primitive == "triangle_ccw")
    return DXIL::TessellatorOutputPrimitive::TriangleCCW;
  return DXIL::TessellatorOutputPrimitive::Undefined;
}

static unsigned RoundToAlign(unsigned num, unsigned mod) {
  // Round num up to the next multiple of mod.
  if (mod != 0)
    return mod * ((num + mod - 1) / mod);
  return num;
}

// Align cbuffer offset in legacy mode (16 bytes per row).
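// For example: a 12-byte float3 at offset 8 would cross the 16-byte row, so it
// is pushed to a fresh row at offset 16, while a 4-byte float at offset 8 fits
// in the current row and stays at offset 8 after natural (scalar) alignment.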
static unsigned AlignBufferOffsetInLegacy(unsigned offset, unsigned size,
                                          unsigned scalarSizeInBytes,
                                          bool bNeedNewRow) {
  if (unsigned remainder = (offset & 0xf)) {
    // Start from new row
    if (remainder + size > 16 || bNeedNewRow) {
      return offset + 16 - remainder;
    }
    // If not, naturally align data
    return RoundToAlign(offset, scalarSizeInBytes);
  }
  return offset;
}
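
// Compute the legacy-aligned cbuffer offset for a field of type Ty: arrays and
// matrices that span more than one register row are forced onto a fresh
// 16-byte row (unless already row-aligned); everything else is aligned to its
// scalar size (2, 4, or 8 bytes).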
static unsigned AlignBaseOffset(unsigned baseOffset, unsigned size, QualType Ty,
                                bool bDefaultRowMajor) {
  bool needNewAlign = Ty->isArrayType();

  if (IsHLSLMatType(Ty)) {
    bool bColMajor = !bDefaultRowMajor;
    if (const AttributedType *AT = dyn_cast<AttributedType>(Ty)) {
      switch (AT->getAttrKind()) {
      case AttributedType::Kind::attr_hlsl_column_major:
        bColMajor = true;
        break;
      case AttributedType::Kind::attr_hlsl_row_major:
        bColMajor = false;
        break;
      default:
        // Do nothing
        break;
      }
    }
    unsigned row, col;
    hlsl::GetHLSLMatRowColCount(Ty, row, col);
    needNewAlign |= bColMajor && col > 1;
    needNewAlign |= !bColMajor && row > 1;
  }

  unsigned scalarSizeInBytes = 4;
  const clang::BuiltinType *BT = Ty->getAs<clang::BuiltinType>();
  if (hlsl::IsHLSLVecMatType(Ty)) {
    BT = CGHLSLRuntime::GetHLSLVecMatElementType(Ty)->getAs<clang::BuiltinType>();
  }
  if (BT) {
    if (BT->getKind() == clang::BuiltinType::Kind::Double ||
        BT->getKind() == clang::BuiltinType::Kind::LongLong)
      scalarSizeInBytes = 8;
    else if (BT->getKind() == clang::BuiltinType::Kind::Half ||
             BT->getKind() == clang::BuiltinType::Kind::Short ||
             BT->getKind() == clang::BuiltinType::Kind::UShort)
      scalarSizeInBytes = 2;
  }

  return AlignBufferOffsetInLegacy(baseOffset, size, scalarSizeInBytes,
                                   needNewAlign);
}

static unsigned AlignBaseOffset(QualType Ty, unsigned baseOffset,
                                bool bDefaultRowMajor,
                                CodeGen::CodeGenModule &CGM,
                                llvm::DataLayout &layout) {
  QualType paramTy = Ty.getCanonicalType();
  if (const ReferenceType *RefType = dyn_cast<ReferenceType>(paramTy))
    paramTy = RefType->getPointeeType();

  // Get size.
  llvm::Type *Type = CGM.getTypes().ConvertType(paramTy);
  unsigned size = layout.getTypeAllocSize(Type);
  return AlignBaseOffset(baseOffset, size, paramTy, bDefaultRowMajor);
}
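
// Bytes a matrix occupies in a cbuffer: every register row except the last
// takes a full 16-byte row (32 bytes when a row of 64-bit elements exceeds 16
// bytes); the last row only counts its used elements. For example, a row-major
// float3x3 takes 16 * 2 + 3 * 4 = 44 bytes, and a row-major double4x4 takes
// 32 * 3 + 4 * 8 = 128 bytes.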
static unsigned GetMatrixSizeInCB(QualType Ty, bool defaultRowMajor,
                                  bool b64Bit) {
  bool bColMajor = !defaultRowMajor;
  if (const AttributedType *AT = dyn_cast<AttributedType>(Ty)) {
    switch (AT->getAttrKind()) {
    case AttributedType::Kind::attr_hlsl_column_major:
      bColMajor = true;
      break;
    case AttributedType::Kind::attr_hlsl_row_major:
      bColMajor = false;
      break;
    default:
      // Do nothing
      break;
    }
  }

  unsigned row, col;
  hlsl::GetHLSLMatRowColCount(Ty, row, col);
  unsigned EltSize = b64Bit ? 8 : 4;
  // Align to 4 * 4 bytes (one 16-byte row).
  unsigned alignment = 4 * 4;

  if (bColMajor) {
    unsigned rowSize = EltSize * row;
    // 3x64bit or 4x64bit align to 32 bytes.
    if (rowSize > alignment)
      alignment <<= 1;
    return alignment * (col - 1) + row * EltSize;
  } else {
    unsigned rowSize = EltSize * col;
    // 3x64bit or 4x64bit align to 32 bytes.
    if (rowSize > alignment)
      alignment <<= 1;
    return alignment * (row - 1) + col * EltSize;
  }
}

static CompType::Kind BuiltinTyToCompTy(const BuiltinType *BTy, bool bSNorm,
                                        bool bUNorm) {
  CompType::Kind kind = CompType::Kind::Invalid;

  switch (BTy->getKind()) {
  case BuiltinType::UInt:
    kind = CompType::Kind::U32;
    break;
  case BuiltinType::Min16UInt: // HLSL Change
  case BuiltinType::UShort:
    kind = CompType::Kind::U16;
    break;
  case BuiltinType::ULongLong:
    kind = CompType::Kind::U64;
    break;
  case BuiltinType::Int:
    kind = CompType::Kind::I32;
    break;
  // HLSL Changes begin
  case BuiltinType::Min12Int:
  case BuiltinType::Min16Int:
  // HLSL Changes end
  case BuiltinType::Short:
    kind = CompType::Kind::I16;
    break;
  case BuiltinType::LongLong:
    kind = CompType::Kind::I64;
    break;
  // HLSL Changes begin
  case BuiltinType::Min10Float:
  case BuiltinType::Min16Float:
  // HLSL Changes end
  case BuiltinType::Half:
    if (bSNorm)
      kind = CompType::Kind::SNormF16;
    else if (bUNorm)
      kind = CompType::Kind::UNormF16;
    else
      kind = CompType::Kind::F16;
    break;
  case BuiltinType::HalfFloat: // HLSL Change
  case BuiltinType::Float:
    if (bSNorm)
      kind = CompType::Kind::SNormF32;
    else if (bUNorm)
      kind = CompType::Kind::UNormF32;
    else
      kind = CompType::Kind::F32;
    break;
  case BuiltinType::Double:
    if (bSNorm)
      kind = CompType::Kind::SNormF64;
    else if (bUNorm)
      kind = CompType::Kind::UNormF64;
    else
      kind = CompType::Kind::F64;
    break;
  case BuiltinType::Bool:
    kind = CompType::Kind::I1;
    break;
  default:
    // Other types not used by HLSL.
    break;
  }
  return kind;
}

static DxilSampler::SamplerKind KeywordToSamplerKind(llvm::StringRef keyword) {
  // TODO: refactor for faster search (switch by 1/2/3 first letters, then
  // compare)
  return llvm::StringSwitch<DxilSampler::SamplerKind>(keyword)
      .Case("SamplerState", DxilSampler::SamplerKind::Default)
      .Case("SamplerComparisonState", DxilSampler::SamplerKind::Comparison)
      .Default(DxilSampler::SamplerKind::Invalid);
}
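
// Build resource-type metadata for a UAV, SRV, or sampler type and cache it
// per LLVM type; non-resource records (e.g. the GS output stream) return null.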
MDNode *CGMSHLSLRuntime::GetOrAddResTypeMD(QualType resTy) {
  const RecordType *RT = resTy->getAs<RecordType>();
  if (!RT)
    return nullptr;
  RecordDecl *RD = RT->getDecl();
  SourceLocation loc = RD->getLocation();

  hlsl::DxilResourceBase::Class resClass = TypeToClass(resTy);
  llvm::Type *Ty = CGM.getTypes().ConvertType(resTy);
  auto it = resMetadataMap.find(Ty);
  if (it != resMetadataMap.end())
    return it->second;

  // Save resource type metadata.
  switch (resClass) {
  case DXIL::ResourceClass::UAV: {
    DxilResource UAV;
    // TODO: save globalcoherent to variable in EmitHLSLBuiltinCallExpr.
    SetUAVSRV(loc, resClass, &UAV, RD);
    // Set global symbol to save type.
    UAV.SetGlobalSymbol(UndefValue::get(Ty));
    MDNode *MD = m_pHLModule->DxilUAVToMDNode(UAV);
    resMetadataMap[Ty] = MD;
    return MD;
  } break;
  case DXIL::ResourceClass::SRV: {
    DxilResource SRV;
    SetUAVSRV(loc, resClass, &SRV, RD);
    // Set global symbol to save type.
    SRV.SetGlobalSymbol(UndefValue::get(Ty));
    MDNode *MD = m_pHLModule->DxilSRVToMDNode(SRV);
    resMetadataMap[Ty] = MD;
    return MD;
  } break;
  case DXIL::ResourceClass::Sampler: {
    DxilSampler S;
    DxilSampler::SamplerKind kind = KeywordToSamplerKind(RD->getName());
    S.SetSamplerKind(kind);
    // Set global symbol to save type.
    S.SetGlobalSymbol(UndefValue::get(Ty));
    MDNode *MD = m_pHLModule->DxilSamplerToMDNode(S);
    resMetadataMap[Ty] = MD;
    return MD;
  }
  default:
    // Skip OutputStream for GS.
    return nullptr;
  }
}
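
// Fill a field annotation from the field's type: record matrix orientation and
// dimensions, attach resource metadata for resource-typed fields, and set the
// component type, honoring snorm/unorm attributes on the element type.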
  684. void CGMSHLSLRuntime::ConstructFieldAttributedAnnotation(
  685. DxilFieldAnnotation &fieldAnnotation, QualType fieldTy,
  686. bool bDefaultRowMajor) {
  687. QualType Ty = fieldTy;
  688. if (Ty->isReferenceType())
  689. Ty = Ty.getNonReferenceType();
  690. // Get element type.
  691. if (Ty->isArrayType()) {
  692. while (isa<clang::ArrayType>(Ty)) {
  693. const clang::ArrayType *ATy = dyn_cast<clang::ArrayType>(Ty);
  694. Ty = ATy->getElementType();
  695. }
  696. }
  697. QualType EltTy = Ty;
  698. if (hlsl::IsHLSLMatType(Ty)) {
  699. DxilMatrixAnnotation Matrix;
  700. Matrix.Orientation = bDefaultRowMajor ? MatrixOrientation::RowMajor
  701. : MatrixOrientation::ColumnMajor;
  702. if (const AttributedType *AT = dyn_cast<AttributedType>(Ty)) {
  703. switch (AT->getAttrKind()) {
  704. case AttributedType::Kind::attr_hlsl_column_major:
  705. Matrix.Orientation = MatrixOrientation::ColumnMajor;
  706. break;
  707. case AttributedType::Kind::attr_hlsl_row_major:
  708. Matrix.Orientation = MatrixOrientation::RowMajor;
  709. break;
  710. default:
  711. // Do nothing
  712. break;
  713. }
  714. }
  715. hlsl::GetHLSLMatRowColCount(Ty, Matrix.Rows, Matrix.Cols);
  716. fieldAnnotation.SetMatrixAnnotation(Matrix);
  717. EltTy = hlsl::GetHLSLMatElementType(Ty);
  718. }
  719. if (hlsl::IsHLSLVecType(Ty))
  720. EltTy = hlsl::GetHLSLVecElementType(Ty);
  721. if (IsHLSLResourceType(Ty)) {
  722. MDNode *MD = GetOrAddResTypeMD(Ty);
  723. fieldAnnotation.SetResourceAttribute(MD);
  724. }
  725. bool bSNorm = false;
  726. bool bUNorm = false;
  727. if (const AttributedType *AT = dyn_cast<AttributedType>(Ty)) {
  728. switch (AT->getAttrKind()) {
  729. case AttributedType::Kind::attr_hlsl_snorm:
  730. bSNorm = true;
  731. break;
  732. case AttributedType::Kind::attr_hlsl_unorm:
  733. bUNorm = true;
  734. break;
  735. default:
  736. // Do nothing
  737. break;
  738. }
  739. }
  740. if (EltTy->isBuiltinType()) {
  741. const BuiltinType *BTy = EltTy->getAs<BuiltinType>();
  742. CompType::Kind kind = BuiltinTyToCompTy(BTy, bSNorm, bUNorm);
  743. fieldAnnotation.SetCompType(kind);
  744. } else if (EltTy->isEnumeralType()) {
  745. const EnumType *ETy = EltTy->getAs<EnumType>();
  746. QualType type = ETy->getDecl()->getIntegerType();
  747. if (const BuiltinType *BTy =
  748. dyn_cast<BuiltinType>(type->getCanonicalTypeInternal()))
  749. fieldAnnotation.SetCompType(BuiltinTyToCompTy(BTy, bSNorm, bUNorm));
  750. } else {
  751. DXASSERT(!bSNorm && !bUNorm,
  752. "snorm/unorm on invalid type, validate at handleHLSLTypeAttr");
  753. }
  754. }
  755. static void ConstructFieldInterpolation(DxilFieldAnnotation &fieldAnnotation,
  756. FieldDecl *fieldDecl) {
  757. // Keep undefined for interpMode here.
  758. InterpolationMode InterpMode = {fieldDecl->hasAttr<HLSLNoInterpolationAttr>(),
  759. fieldDecl->hasAttr<HLSLLinearAttr>(),
  760. fieldDecl->hasAttr<HLSLNoPerspectiveAttr>(),
  761. fieldDecl->hasAttr<HLSLCentroidAttr>(),
  762. fieldDecl->hasAttr<HLSLSampleAttr>()};
  763. if (InterpMode.GetKind() != InterpolationMode::Kind::Undefined)
  764. fieldAnnotation.SetInterpolationMode(InterpMode);
  765. }
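// ConstructStructAnnotation walks base classes and then fields in declaration
// order, aligning each member with AlignBaseOffset and accumulating the total
// cbuffer size. As a sketch of the standard HLSL constant-buffer packing these
// helpers implement, for
//   struct Foo { float2 a; float3 b; float c; };
// 'a' lands at byte offset 0, 'b' is bumped to offset 16 because it would
// otherwise straddle a 16-byte register boundary, and 'c' packs into offset 28,
// for a cbuffer size of 32 bytes. packoffset annotations override the computed
// offset via the UA_ConstantPacking case below.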
  766. unsigned CGMSHLSLRuntime::ConstructStructAnnotation(DxilStructAnnotation *annotation,
  767. const RecordDecl *RD,
  768. DxilTypeSystem &dxilTypeSys) {
  769. unsigned fieldIdx = 0;
  770. unsigned offset = 0;
  771. bool bDefaultRowMajor = m_pHLModule->GetHLOptions().bDefaultRowMajor;
  772. if (const CXXRecordDecl *CXXRD = dyn_cast<CXXRecordDecl>(RD)) {
  773. if (CXXRD->getNumBases()) {
  774. // Add base as field.
  775. for (const auto &I : CXXRD->bases()) {
  776. const CXXRecordDecl *BaseDecl =
  777. cast<CXXRecordDecl>(I.getType()->castAs<RecordType>()->getDecl());
  778. std::string fieldSemName = "";
  779. QualType parentTy = QualType(BaseDecl->getTypeForDecl(), 0);
  780. // Align offset.
  781. offset = AlignBaseOffset(parentTy, offset, bDefaultRowMajor, CGM,
  782. dataLayout);
  783. unsigned CBufferOffset = offset;
  784. unsigned arrayEltSize = 0;
// Process the base type so its size is available.
  786. unsigned size =
  787. AddTypeAnnotation(parentTy, dxilTypeSys, arrayEltSize);
  788. // Update offset.
  789. offset += size;
  790. if (size > 0) {
  791. DxilFieldAnnotation &fieldAnnotation =
  792. annotation->GetFieldAnnotation(fieldIdx++);
  793. fieldAnnotation.SetCBufferOffset(CBufferOffset);
  794. fieldAnnotation.SetFieldName(BaseDecl->getNameAsString());
  795. }
  796. }
  797. }
  798. }
  799. for (auto fieldDecl : RD->fields()) {
  800. std::string fieldSemName = "";
  801. QualType fieldTy = fieldDecl->getType();
  802. // Align offset.
  803. offset = AlignBaseOffset(fieldTy, offset, bDefaultRowMajor, CGM, dataLayout);
  804. unsigned CBufferOffset = offset;
  805. // Try to get info from fieldDecl.
  806. for (const hlsl::UnusualAnnotation *it :
  807. fieldDecl->getUnusualAnnotations()) {
  808. switch (it->getKind()) {
  809. case hlsl::UnusualAnnotation::UA_SemanticDecl: {
  810. const hlsl::SemanticDecl *sd = cast<hlsl::SemanticDecl>(it);
  811. fieldSemName = sd->SemanticName;
  812. } break;
  813. case hlsl::UnusualAnnotation::UA_ConstantPacking: {
  814. const hlsl::ConstantPacking *cp = cast<hlsl::ConstantPacking>(it);
  815. CBufferOffset = cp->Subcomponent << 2;
  816. CBufferOffset += cp->ComponentOffset;
  817. // Change to byte.
  818. CBufferOffset <<= 2;
  819. } break;
  820. case hlsl::UnusualAnnotation::UA_RegisterAssignment: {
// Register assignments only apply to global constants.
  822. DiagnosticsEngine &Diags = CGM.getDiags();
  823. unsigned DiagID = Diags.getCustomDiagID(
  824. DiagnosticsEngine::Error,
  825. "location semantics cannot be specified on members.");
  826. Diags.Report(it->Loc, DiagID);
  827. return 0;
  828. } break;
  829. default:
  830. llvm_unreachable("only semantic for input/output");
  831. break;
  832. }
  833. }
  834. unsigned arrayEltSize = 0;
// Process the field type so its size is available.
  836. unsigned size = AddTypeAnnotation(fieldDecl->getType(), dxilTypeSys, arrayEltSize);
  837. // Update offset.
  838. offset += size;
  839. DxilFieldAnnotation &fieldAnnotation = annotation->GetFieldAnnotation(fieldIdx++);
  840. ConstructFieldAttributedAnnotation(fieldAnnotation, fieldTy, bDefaultRowMajor);
  841. ConstructFieldInterpolation(fieldAnnotation, fieldDecl);
  842. if (fieldDecl->hasAttr<HLSLPreciseAttr>())
  843. fieldAnnotation.SetPrecise();
  844. fieldAnnotation.SetCBufferOffset(CBufferOffset);
  845. fieldAnnotation.SetFieldName(fieldDecl->getName());
  846. if (!fieldSemName.empty())
  847. fieldAnnotation.SetSemanticString(fieldSemName);
  848. }
  849. annotation->SetCBufferSize(offset);
  850. if (offset == 0) {
  851. annotation->MarkEmptyStruct();
  852. }
  853. return offset;
  854. }
  855. static bool IsElementInputOutputType(QualType Ty) {
  856. return Ty->isBuiltinType() || hlsl::IsHLSLVecMatType(Ty) || Ty->isEnumeralType();
  857. }
  858. // Return the size for constant buffer of each decl.
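// AddTypeAnnotation recurses through references, patch/stream wrappers,
// records, resources and arrays, creating a DxilStructAnnotation the first
// time a struct is seen. For arrays it applies the usual 16-bytes-per-element
// cbuffer rule; as a worked example, a 'float arr[3]' member takes
// 16 * 2 + 4 = 36 bytes, since each element starts on a 16-byte boundary but
// the last element is not padded out.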
  859. unsigned CGMSHLSLRuntime::AddTypeAnnotation(QualType Ty,
  860. DxilTypeSystem &dxilTypeSys,
  861. unsigned &arrayEltSize) {
  862. QualType paramTy = Ty.getCanonicalType();
  863. if (const ReferenceType *RefType = dyn_cast<ReferenceType>(paramTy))
  864. paramTy = RefType->getPointeeType();
  865. // Get size.
  866. llvm::Type *Type = CGM.getTypes().ConvertType(paramTy);
  867. unsigned size = dataLayout.getTypeAllocSize(Type);
  868. if (IsHLSLMatType(Ty)) {
  869. unsigned col, row;
  870. llvm::Type *EltTy = HLMatrixLower::GetMatrixInfo(Type, col, row);
  871. bool b64Bit = dataLayout.getTypeAllocSize(EltTy) == 8;
  872. size = GetMatrixSizeInCB(Ty, m_pHLModule->GetHLOptions().bDefaultRowMajor,
  873. b64Bit);
  874. }
  875. // Skip element types.
  876. if (IsElementInputOutputType(paramTy))
  877. return size;
  878. else if (IsHLSLStreamOutputType(Ty)) {
  879. return AddTypeAnnotation(GetHLSLOutputPatchElementType(Ty), dxilTypeSys,
  880. arrayEltSize);
  881. } else if (IsHLSLInputPatchType(Ty))
  882. return AddTypeAnnotation(GetHLSLInputPatchElementType(Ty), dxilTypeSys,
  883. arrayEltSize);
  884. else if (IsHLSLOutputPatchType(Ty))
  885. return AddTypeAnnotation(GetHLSLOutputPatchElementType(Ty), dxilTypeSys,
  886. arrayEltSize);
  887. else if (const RecordType *RT = paramTy->getAsStructureType()) {
  888. RecordDecl *RD = RT->getDecl();
  889. llvm::StructType *ST = CGM.getTypes().ConvertRecordDeclType(RD);
  890. // Skip if already created.
  891. if (DxilStructAnnotation *annotation = dxilTypeSys.GetStructAnnotation(ST)) {
  892. unsigned structSize = annotation->GetCBufferSize();
  893. return structSize;
  894. }
  895. DxilStructAnnotation *annotation = dxilTypeSys.AddStructAnnotation(ST);
  896. return ConstructStructAnnotation(annotation, RD, dxilTypeSys);
  897. } else if (const RecordType *RT = dyn_cast<RecordType>(paramTy)) {
// For the 'this' pointer type.
  899. RecordDecl *RD = RT->getDecl();
  900. llvm::StructType *ST = CGM.getTypes().ConvertRecordDeclType(RD);
  901. // Skip if already created.
  902. if (DxilStructAnnotation *annotation = dxilTypeSys.GetStructAnnotation(ST)) {
  903. unsigned structSize = annotation->GetCBufferSize();
  904. return structSize;
  905. }
  906. DxilStructAnnotation *annotation = dxilTypeSys.AddStructAnnotation(ST);
  907. return ConstructStructAnnotation(annotation, RD, dxilTypeSys);
  908. } else if (IsHLSLResourceType(Ty)) {
  909. // Save result type info.
  910. AddTypeAnnotation(GetHLSLResourceResultType(Ty), dxilTypeSys, arrayEltSize);
// Resources don't count toward the cbuffer size.
  912. return 0;
  913. } else {
  914. unsigned arraySize = 0;
  915. QualType arrayElementTy = Ty;
  916. if (Ty->isConstantArrayType()) {
  917. const ConstantArrayType *arrayTy =
  918. CGM.getContext().getAsConstantArrayType(Ty);
DXASSERT(arrayTy != nullptr, "Must be an array type here");
  920. arraySize = arrayTy->getSize().getLimitedValue();
  921. arrayElementTy = arrayTy->getElementType();
  922. }
  923. else if (Ty->isIncompleteArrayType()) {
  924. const IncompleteArrayType *arrayTy = CGM.getContext().getAsIncompleteArrayType(Ty);
  925. arrayElementTy = arrayTy->getElementType();
  926. } else {
DXASSERT(0, "Must be an array type here");
  928. }
  929. unsigned elementSize = AddTypeAnnotation(arrayElementTy, dxilTypeSys, arrayEltSize);
  930. // Only set arrayEltSize once.
  931. if (arrayEltSize == 0)
  932. arrayEltSize = elementSize;
// Align each element to 16 bytes (4 x 32-bit).
unsigned alignedSize = (elementSize + 15) & 0xfffffff0;
  935. return alignedSize * (arraySize - 1) + elementSize;
  936. }
  937. }
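// KeywordToKind maps an HLSL object keyword to its DXIL resource kind, e.g.
// "RWTexture2DArray" -> Texture2DArray, "AppendStructuredBuffer" ->
// StructuredBuffer; unrecognized names fall through to Invalid.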
  938. static DxilResource::Kind KeywordToKind(StringRef keyword) {
  939. // TODO: refactor for faster search (switch by 1/2/3 first letters, then
  940. // compare)
  941. if (keyword == "Texture1D" || keyword == "RWTexture1D" || keyword == "RasterizerOrderedTexture1D")
  942. return DxilResource::Kind::Texture1D;
  943. if (keyword == "Texture2D" || keyword == "RWTexture2D" || keyword == "RasterizerOrderedTexture2D")
  944. return DxilResource::Kind::Texture2D;
  945. if (keyword == "Texture2DMS" || keyword == "RWTexture2DMS")
  946. return DxilResource::Kind::Texture2DMS;
  947. if (keyword == "Texture3D" || keyword == "RWTexture3D" || keyword == "RasterizerOrderedTexture3D")
  948. return DxilResource::Kind::Texture3D;
  949. if (keyword == "TextureCube" || keyword == "RWTextureCube")
  950. return DxilResource::Kind::TextureCube;
  951. if (keyword == "Texture1DArray" || keyword == "RWTexture1DArray" || keyword == "RasterizerOrderedTexture1DArray")
  952. return DxilResource::Kind::Texture1DArray;
  953. if (keyword == "Texture2DArray" || keyword == "RWTexture2DArray" || keyword == "RasterizerOrderedTexture2DArray")
  954. return DxilResource::Kind::Texture2DArray;
  955. if (keyword == "Texture2DMSArray" || keyword == "RWTexture2DMSArray")
  956. return DxilResource::Kind::Texture2DMSArray;
  957. if (keyword == "TextureCubeArray" || keyword == "RWTextureCubeArray")
  958. return DxilResource::Kind::TextureCubeArray;
  959. if (keyword == "ByteAddressBuffer" || keyword == "RWByteAddressBuffer" || keyword == "RasterizerOrderedByteAddressBuffer")
  960. return DxilResource::Kind::RawBuffer;
  961. if (keyword == "StructuredBuffer" || keyword == "RWStructuredBuffer" || keyword == "RasterizerOrderedStructuredBuffer")
  962. return DxilResource::Kind::StructuredBuffer;
  963. if (keyword == "AppendStructuredBuffer" || keyword == "ConsumeStructuredBuffer")
  964. return DxilResource::Kind::StructuredBuffer;
  965. // TODO: this is not efficient.
  966. bool isBuffer = keyword == "Buffer";
  967. isBuffer |= keyword == "RWBuffer";
  968. isBuffer |= keyword == "RasterizerOrderedBuffer";
  969. if (isBuffer)
  970. return DxilResource::Kind::TypedBuffer;
  971. return DxilResource::Kind::Invalid;
  972. }
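// AddHLSLFunctionInfo attaches the HLSL-specific metadata for a function:
// intrinsic opcodes and lowering strategy, float32 denorm mode, the shader
// kind implied by attributes such as [numthreads(8,8,1)] or [maxvertexcount(3)],
// per-parameter annotations (semantics, interpolation, in/out qualifiers,
// patch/stream/primitive qualifiers), and the entry-point bookkeeping consumed
// later by the HLModule.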
  973. void CGMSHLSLRuntime::AddHLSLFunctionInfo(Function *F, const FunctionDecl *FD) {
  974. // Add hlsl intrinsic attr
  975. unsigned intrinsicOpcode;
  976. StringRef intrinsicGroup;
  977. if (hlsl::GetIntrinsicOp(FD, intrinsicOpcode, intrinsicGroup)) {
  978. AddHLSLIntrinsicOpcodeToFunction(F, intrinsicOpcode);
  979. F->addFnAttr(hlsl::HLPrefix, intrinsicGroup);
  980. // Save resource type annotation.
  981. if (const CXXMethodDecl *MD = dyn_cast<CXXMethodDecl>(FD)) {
  982. const CXXRecordDecl *RD = MD->getParent();
  983. // For nested case like sample_slice_type.
  984. if (const CXXRecordDecl *PRD =
  985. dyn_cast<CXXRecordDecl>(RD->getDeclContext())) {
  986. RD = PRD;
  987. }
  988. QualType recordTy = MD->getASTContext().getRecordType(RD);
  989. hlsl::DxilResourceBase::Class resClass = TypeToClass(recordTy);
  990. llvm::Type *Ty = CGM.getTypes().ConvertType(recordTy);
  991. llvm::FunctionType *FT = F->getFunctionType();
  992. // Save resource type metadata.
  993. switch (resClass) {
  994. case DXIL::ResourceClass::UAV: {
  995. MDNode *MD = GetOrAddResTypeMD(recordTy);
  996. DXASSERT(MD, "else invalid resource type");
  997. resMetadataMap[Ty] = MD;
  998. } break;
  999. case DXIL::ResourceClass::SRV: {
  1000. MDNode *Meta = GetOrAddResTypeMD(recordTy);
  1001. DXASSERT(Meta, "else invalid resource type");
  1002. resMetadataMap[Ty] = Meta;
  1003. if (FT->getNumParams() > 1) {
  1004. QualType paramTy = MD->getParamDecl(0)->getType();
  1005. // Add sampler type.
  1006. if (TypeToClass(paramTy) == DXIL::ResourceClass::Sampler) {
  1007. llvm::Type *Ty = FT->getParamType(1)->getPointerElementType();
  1008. MDNode *MD = GetOrAddResTypeMD(paramTy);
  1009. DXASSERT(MD, "else invalid resource type");
  1010. resMetadataMap[Ty] = MD;
  1011. }
  1012. }
  1013. } break;
  1014. default:
  1015. // Skip OutputStream for GS.
  1016. break;
  1017. }
  1018. }
  1019. StringRef lower;
  1020. if (hlsl::GetIntrinsicLowering(FD, lower))
  1021. hlsl::SetHLLowerStrategy(F, lower);
  1022. // Don't need to add FunctionQual for intrinsic function.
  1023. return;
  1024. }
  1025. if (m_pHLModule->GetFloat32DenormMode() == DXIL::Float32DenormMode::FTZ) {
  1026. F->addFnAttr(DXIL::kFP32DenormKindString, DXIL::kFP32DenormValueFtzString);
  1027. }
  1028. else if (m_pHLModule->GetFloat32DenormMode() == DXIL::Float32DenormMode::Preserve) {
  1029. F->addFnAttr(DXIL::kFP32DenormKindString, DXIL::kFP32DenormValuePreserveString);
  1030. }
  1031. else if (m_pHLModule->GetFloat32DenormMode() == DXIL::Float32DenormMode::Any) {
  1032. F->addFnAttr(DXIL::kFP32DenormKindString, DXIL::kFP32DenormValueAnyString);
  1033. }
  1034. // Set entry function
  1035. const std::string &entryName = m_pHLModule->GetEntryFunctionName();
  1036. bool isEntry = FD->getNameAsString() == entryName;
  1037. if (isEntry) {
  1038. Entry.Func = F;
  1039. Entry.SL = FD->getLocation();
  1040. }
  1041. DiagnosticsEngine &Diags = CGM.getDiags();
  1042. std::unique_ptr<DxilFunctionProps> funcProps =
  1043. llvm::make_unique<DxilFunctionProps>();
  1044. funcProps->shaderKind = DXIL::ShaderKind::Invalid;
  1045. bool isCS = false;
  1046. bool isGS = false;
  1047. bool isHS = false;
  1048. bool isDS = false;
  1049. bool isVS = false;
  1050. bool isPS = false;
  1051. if (const HLSLShaderAttr *Attr = FD->getAttr<HLSLShaderAttr>()) {
// The stage was already validated in HandleDeclAttributeForHLSL;
// here we only need to check the first letter.
  1054. switch (Attr->getStage()[0]) {
  1055. case 'c':
  1056. isCS = true;
  1057. funcProps->shaderKind = DXIL::ShaderKind::Compute;
  1058. break;
  1059. case 'v':
  1060. isVS = true;
  1061. funcProps->shaderKind = DXIL::ShaderKind::Vertex;
  1062. break;
  1063. case 'h':
  1064. isHS = true;
  1065. funcProps->shaderKind = DXIL::ShaderKind::Hull;
  1066. break;
  1067. case 'd':
  1068. isDS = true;
  1069. funcProps->shaderKind = DXIL::ShaderKind::Domain;
  1070. break;
  1071. case 'g':
  1072. isGS = true;
  1073. funcProps->shaderKind = DXIL::ShaderKind::Geometry;
  1074. break;
  1075. case 'p':
  1076. isPS = true;
  1077. funcProps->shaderKind = DXIL::ShaderKind::Pixel;
  1078. break;
  1079. default: {
  1080. unsigned DiagID = Diags.getCustomDiagID(
  1081. DiagnosticsEngine::Error, "Invalid profile for shader attribute");
  1082. Diags.Report(Attr->getLocation(), DiagID);
  1083. } break;
  1084. }
  1085. }
  1086. // Save patch constant function to patchConstantFunctionMap.
  1087. bool isPatchConstantFunction = false;
  1088. if (!isEntry && CGM.getContext().IsPatchConstantFunctionDecl(FD)) {
  1089. isPatchConstantFunction = true;
  1090. auto &PCI = patchConstantFunctionMap[FD->getName()];
  1091. PCI.SL = FD->getLocation();
  1092. PCI.Func = F;
  1093. ++PCI.NumOverloads;
  1094. for (ParmVarDecl *parmDecl : FD->parameters()) {
  1095. QualType Ty = parmDecl->getType();
  1096. if (IsHLSLOutputPatchType(Ty)) {
  1097. funcProps->ShaderProps.HS.outputControlPoints =
  1098. GetHLSLOutputPatchCount(parmDecl->getType());
  1099. } else if (IsHLSLInputPatchType(Ty)) {
  1100. funcProps->ShaderProps.HS.inputControlPoints =
  1101. GetHLSLInputPatchCount(parmDecl->getType());
  1102. }
  1103. }
  1104. funcProps->shaderKind = DXIL::ShaderKind::Hull;
  1105. }
  1106. const ShaderModel *SM = m_pHLModule->GetShaderModel();
  1107. if (isEntry) {
  1108. funcProps->shaderKind = SM->GetKind();
  1109. }
  1110. // Geometry shader.
  1111. if (const HLSLMaxVertexCountAttr *Attr =
  1112. FD->getAttr<HLSLMaxVertexCountAttr>()) {
  1113. isGS = true;
  1114. funcProps->shaderKind = DXIL::ShaderKind::Geometry;
  1115. funcProps->ShaderProps.GS.maxVertexCount = Attr->getCount();
  1116. funcProps->ShaderProps.GS.inputPrimitive = DXIL::InputPrimitive::Undefined;
  1117. if (isEntry && !SM->IsGS()) {
  1118. unsigned DiagID =
  1119. Diags.getCustomDiagID(DiagnosticsEngine::Error,
  1120. "attribute maxvertexcount only valid for GS.");
  1121. Diags.Report(Attr->getLocation(), DiagID);
  1122. return;
  1123. }
  1124. }
  1125. if (const HLSLInstanceAttr *Attr = FD->getAttr<HLSLInstanceAttr>()) {
  1126. unsigned instanceCount = Attr->getCount();
  1127. funcProps->ShaderProps.GS.instanceCount = instanceCount;
  1128. if (isEntry && !SM->IsGS()) {
unsigned DiagID =
Diags.getCustomDiagID(DiagnosticsEngine::Error,
"attribute instance only valid for GS.");
  1132. Diags.Report(Attr->getLocation(), DiagID);
  1133. return;
  1134. }
  1135. } else {
  1136. // Set default instance count.
  1137. if (isGS)
  1138. funcProps->ShaderProps.GS.instanceCount = 1;
  1139. }
// Compute shader.
  1141. if (const HLSLNumThreadsAttr *Attr = FD->getAttr<HLSLNumThreadsAttr>()) {
  1142. isCS = true;
  1143. funcProps->shaderKind = DXIL::ShaderKind::Compute;
  1144. funcProps->ShaderProps.CS.numThreads[0] = Attr->getX();
  1145. funcProps->ShaderProps.CS.numThreads[1] = Attr->getY();
  1146. funcProps->ShaderProps.CS.numThreads[2] = Attr->getZ();
  1147. if (isEntry && !SM->IsCS()) {
  1148. unsigned DiagID = Diags.getCustomDiagID(
  1149. DiagnosticsEngine::Error, "attribute numthreads only valid for CS.");
  1150. Diags.Report(Attr->getLocation(), DiagID);
  1151. return;
  1152. }
  1153. }
  1154. // Hull shader.
  1155. if (const HLSLPatchConstantFuncAttr *Attr =
  1156. FD->getAttr<HLSLPatchConstantFuncAttr>()) {
  1157. if (isEntry && !SM->IsHS()) {
  1158. unsigned DiagID = Diags.getCustomDiagID(
  1159. DiagnosticsEngine::Error,
  1160. "attribute patchconstantfunc only valid for HS.");
  1161. Diags.Report(Attr->getLocation(), DiagID);
  1162. return;
  1163. }
  1164. isHS = true;
  1165. funcProps->shaderKind = DXIL::ShaderKind::Hull;
  1166. HSEntryPatchConstantFuncAttr[F] = Attr;
  1167. } else {
  1168. // TODO: This is a duplicate check. We also have this check in
  1169. // hlsl::DiagnoseTranslationUnit(clang::Sema*).
  1170. if (isEntry && SM->IsHS()) {
  1171. unsigned DiagID = Diags.getCustomDiagID(
  1172. DiagnosticsEngine::Error,
  1173. "HS entry point must have the patchconstantfunc attribute");
  1174. Diags.Report(FD->getLocation(), DiagID);
  1175. return;
  1176. }
  1177. }
  1178. if (const HLSLOutputControlPointsAttr *Attr =
  1179. FD->getAttr<HLSLOutputControlPointsAttr>()) {
  1180. if (isHS) {
  1181. funcProps->ShaderProps.HS.outputControlPoints = Attr->getCount();
  1182. } else if (isEntry && !SM->IsHS()) {
  1183. unsigned DiagID = Diags.getCustomDiagID(
  1184. DiagnosticsEngine::Error,
  1185. "attribute outputcontrolpoints only valid for HS.");
  1186. Diags.Report(Attr->getLocation(), DiagID);
  1187. return;
  1188. }
  1189. }
  1190. if (const HLSLPartitioningAttr *Attr = FD->getAttr<HLSLPartitioningAttr>()) {
  1191. if (isHS) {
  1192. DXIL::TessellatorPartitioning partition =
  1193. StringToPartitioning(Attr->getScheme());
  1194. funcProps->ShaderProps.HS.partition = partition;
  1195. } else if (isEntry && !SM->IsHS()) {
  1196. unsigned DiagID =
  1197. Diags.getCustomDiagID(DiagnosticsEngine::Warning,
  1198. "attribute partitioning only valid for HS.");
  1199. Diags.Report(Attr->getLocation(), DiagID);
  1200. }
  1201. }
  1202. if (const HLSLOutputTopologyAttr *Attr =
  1203. FD->getAttr<HLSLOutputTopologyAttr>()) {
  1204. if (isHS) {
  1205. DXIL::TessellatorOutputPrimitive primitive =
  1206. StringToTessOutputPrimitive(Attr->getTopology());
  1207. funcProps->ShaderProps.HS.outputPrimitive = primitive;
  1208. } else if (isEntry && !SM->IsHS()) {
  1209. unsigned DiagID =
  1210. Diags.getCustomDiagID(DiagnosticsEngine::Warning,
  1211. "attribute outputtopology only valid for HS.");
  1212. Diags.Report(Attr->getLocation(), DiagID);
  1213. }
  1214. }
  1215. if (isHS) {
  1216. funcProps->ShaderProps.HS.maxTessFactor = DXIL::kHSMaxTessFactorUpperBound;
  1217. funcProps->ShaderProps.HS.inputControlPoints = DXIL::kHSDefaultInputControlPointCount;
  1218. }
  1219. if (const HLSLMaxTessFactorAttr *Attr =
  1220. FD->getAttr<HLSLMaxTessFactorAttr>()) {
  1221. if (isHS) {
  1222. // TODO: change getFactor to return float.
  1223. llvm::APInt intV(32, Attr->getFactor());
  1224. funcProps->ShaderProps.HS.maxTessFactor = intV.bitsToFloat();
  1225. } else if (isEntry && !SM->IsHS()) {
  1226. unsigned DiagID =
  1227. Diags.getCustomDiagID(DiagnosticsEngine::Error,
  1228. "attribute maxtessfactor only valid for HS.");
  1229. Diags.Report(Attr->getLocation(), DiagID);
  1230. return;
  1231. }
  1232. }
  1233. // Hull or domain shader.
  1234. if (const HLSLDomainAttr *Attr = FD->getAttr<HLSLDomainAttr>()) {
  1235. if (isEntry && !SM->IsHS() && !SM->IsDS()) {
  1236. unsigned DiagID =
  1237. Diags.getCustomDiagID(DiagnosticsEngine::Error,
  1238. "attribute domain only valid for HS or DS.");
  1239. Diags.Report(Attr->getLocation(), DiagID);
  1240. return;
  1241. }
  1242. isDS = !isHS;
  1243. if (isDS)
  1244. funcProps->shaderKind = DXIL::ShaderKind::Domain;
  1245. DXIL::TessellatorDomain domain = StringToDomain(Attr->getDomainType());
  1246. if (isHS)
  1247. funcProps->ShaderProps.HS.domain = domain;
  1248. else
  1249. funcProps->ShaderProps.DS.domain = domain;
  1250. }
  1251. // Vertex shader.
  1252. if (const HLSLClipPlanesAttr *Attr = FD->getAttr<HLSLClipPlanesAttr>()) {
  1253. if (isEntry && !SM->IsVS()) {
  1254. unsigned DiagID = Diags.getCustomDiagID(
  1255. DiagnosticsEngine::Error, "attribute clipplane only valid for VS.");
  1256. Diags.Report(Attr->getLocation(), DiagID);
  1257. return;
  1258. }
  1259. isVS = true;
  1260. // The real job is done at EmitHLSLFunctionProlog where debug info is
  1261. // available. Only set shader kind here.
  1262. funcProps->shaderKind = DXIL::ShaderKind::Vertex;
  1263. }
  1264. // Pixel shader.
  1265. if (const HLSLEarlyDepthStencilAttr *Attr =
  1266. FD->getAttr<HLSLEarlyDepthStencilAttr>()) {
  1267. if (isEntry && !SM->IsPS()) {
  1268. unsigned DiagID = Diags.getCustomDiagID(
  1269. DiagnosticsEngine::Error,
  1270. "attribute earlydepthstencil only valid for PS.");
  1271. Diags.Report(Attr->getLocation(), DiagID);
  1272. return;
  1273. }
  1274. isPS = true;
  1275. funcProps->ShaderProps.PS.EarlyDepthStencil = true;
  1276. funcProps->shaderKind = DXIL::ShaderKind::Pixel;
  1277. }
  1278. const unsigned profileAttributes = isCS + isHS + isDS + isGS + isVS + isPS;
  1279. // TODO: check this in front-end and report error.
DXASSERT(profileAttributes < 2, "profile attributes are mutually exclusive");
  1281. if (isEntry) {
  1282. switch (funcProps->shaderKind) {
  1283. case ShaderModel::Kind::Compute:
  1284. case ShaderModel::Kind::Hull:
  1285. case ShaderModel::Kind::Domain:
  1286. case ShaderModel::Kind::Geometry:
  1287. case ShaderModel::Kind::Vertex:
  1288. case ShaderModel::Kind::Pixel:
  1289. DXASSERT(funcProps->shaderKind == SM->GetKind(),
  1290. "attribute profile not match entry function profile");
  1291. break;
  1292. case ShaderModel::Kind::Library:
  1293. case ShaderModel::Kind::Invalid:
  1294. // Non-shader stage shadermodels don't have entry points.
  1295. break;
  1296. }
  1297. }
  1298. DxilFunctionAnnotation *FuncAnnotation =
  1299. m_pHLModule->AddFunctionAnnotation(F);
  1300. bool bDefaultRowMajor = m_pHLModule->GetHLOptions().bDefaultRowMajor;
  1301. // Param Info
  1302. unsigned streamIndex = 0;
  1303. unsigned inputPatchCount = 0;
  1304. unsigned outputPatchCount = 0;
  1305. unsigned ArgNo = 0;
  1306. unsigned ParmIdx = 0;
  1307. if (const CXXMethodDecl *MethodDecl = dyn_cast<CXXMethodDecl>(FD)) {
  1308. QualType ThisTy = MethodDecl->getThisType(FD->getASTContext());
  1309. DxilParameterAnnotation &paramAnnotation =
  1310. FuncAnnotation->GetParameterAnnotation(ArgNo++);
// Construct the annotation for the 'this' pointer.
  1312. ConstructFieldAttributedAnnotation(paramAnnotation, ThisTy,
  1313. bDefaultRowMajor);
  1314. }
  1315. // Ret Info
  1316. QualType retTy = FD->getReturnType();
  1317. DxilParameterAnnotation *pRetTyAnnotation = nullptr;
  1318. if (F->getReturnType()->isVoidTy() && !retTy->isVoidType()) {
  1319. // SRet.
  1320. pRetTyAnnotation = &FuncAnnotation->GetParameterAnnotation(ArgNo++);
  1321. } else {
  1322. pRetTyAnnotation = &FuncAnnotation->GetRetTypeAnnotation();
  1323. }
  1324. DxilParameterAnnotation &retTyAnnotation = *pRetTyAnnotation;
// Keep Undefined here; we cannot decide yet for struct types.
  1326. retTyAnnotation.SetInterpolationMode(
  1327. GetInterpMode(FD, CompType::Kind::Invalid, /*bKeepUndefined*/ true)
  1328. .GetKind());
  1329. SourceLocation retTySemanticLoc = SetSemantic(FD, retTyAnnotation);
  1330. retTyAnnotation.SetParamInputQual(DxilParamInputQual::Out);
  1331. if (isEntry) {
  1332. CheckParameterAnnotation(retTySemanticLoc, retTyAnnotation,
  1333. /*isPatchConstantFunction*/ false);
  1334. }
  1335. ConstructFieldAttributedAnnotation(retTyAnnotation, retTy, bDefaultRowMajor);
  1336. if (FD->hasAttr<HLSLPreciseAttr>())
  1337. retTyAnnotation.SetPrecise();
  1338. for (; ArgNo < F->arg_size(); ++ArgNo, ++ParmIdx) {
  1339. DxilParameterAnnotation &paramAnnotation =
  1340. FuncAnnotation->GetParameterAnnotation(ArgNo);
  1341. const ParmVarDecl *parmDecl = FD->getParamDecl(ParmIdx);
  1342. ConstructFieldAttributedAnnotation(paramAnnotation, parmDecl->getType(),
  1343. bDefaultRowMajor);
  1344. if (parmDecl->hasAttr<HLSLPreciseAttr>())
  1345. paramAnnotation.SetPrecise();
// Keep Undefined here; we cannot decide yet for struct types.
  1347. InterpolationMode paramIM =
  1348. GetInterpMode(parmDecl, CompType::Kind::Invalid, KeepUndefinedTrue);
  1349. paramAnnotation.SetInterpolationMode(paramIM);
  1350. SourceLocation paramSemanticLoc = SetSemantic(parmDecl, paramAnnotation);
  1351. DxilParamInputQual dxilInputQ = DxilParamInputQual::In;
  1352. if (parmDecl->hasAttr<HLSLInOutAttr>())
  1353. dxilInputQ = DxilParamInputQual::Inout;
  1354. else if (parmDecl->hasAttr<HLSLOutAttr>())
  1355. dxilInputQ = DxilParamInputQual::Out;
  1356. if (parmDecl->hasAttr<HLSLOutAttr>() && parmDecl->hasAttr<HLSLInAttr>())
  1357. dxilInputQ = DxilParamInputQual::Inout;
  1358. DXIL::InputPrimitive inputPrimitive = DXIL::InputPrimitive::Undefined;
  1359. if (IsHLSLOutputPatchType(parmDecl->getType())) {
  1360. outputPatchCount++;
  1361. if (dxilInputQ != DxilParamInputQual::In) {
  1362. unsigned DiagID = Diags.getCustomDiagID(
  1363. DiagnosticsEngine::Error,
  1364. "OutputPatch should not be out/inout parameter");
  1365. Diags.Report(parmDecl->getLocation(), DiagID);
  1366. continue;
  1367. }
  1368. dxilInputQ = DxilParamInputQual::OutputPatch;
  1369. if (isDS)
  1370. funcProps->ShaderProps.DS.inputControlPoints =
  1371. GetHLSLOutputPatchCount(parmDecl->getType());
  1372. } else if (IsHLSLInputPatchType(parmDecl->getType())) {
  1373. inputPatchCount++;
  1374. if (dxilInputQ != DxilParamInputQual::In) {
  1375. unsigned DiagID = Diags.getCustomDiagID(
  1376. DiagnosticsEngine::Error,
  1377. "InputPatch should not be out/inout parameter");
  1378. Diags.Report(parmDecl->getLocation(), DiagID);
  1379. continue;
  1380. }
  1381. dxilInputQ = DxilParamInputQual::InputPatch;
  1382. if (isHS) {
  1383. funcProps->ShaderProps.HS.inputControlPoints =
  1384. GetHLSLInputPatchCount(parmDecl->getType());
  1385. } else if (isGS) {
  1386. inputPrimitive = (DXIL::InputPrimitive)(
  1387. (unsigned)DXIL::InputPrimitive::ControlPointPatch1 +
  1388. GetHLSLInputPatchCount(parmDecl->getType()) - 1);
  1389. }
  1390. } else if (IsHLSLStreamOutputType(parmDecl->getType())) {
// TODO: validate this at ASTContext::getFunctionType in
// AST/ASTContext.cpp.
  1393. DXASSERT(dxilInputQ == DxilParamInputQual::Inout,
  1394. "stream output parameter must be inout");
  1395. switch (streamIndex) {
  1396. case 0:
  1397. dxilInputQ = DxilParamInputQual::OutStream0;
  1398. break;
  1399. case 1:
  1400. dxilInputQ = DxilParamInputQual::OutStream1;
  1401. break;
  1402. case 2:
  1403. dxilInputQ = DxilParamInputQual::OutStream2;
  1404. break;
  1405. case 3:
  1406. default:
// TODO: validate this at ASTContext::getFunctionType in
// AST/ASTContext.cpp.
  1409. DXASSERT(streamIndex == 3, "stream number out of bound");
  1410. dxilInputQ = DxilParamInputQual::OutStream3;
  1411. break;
  1412. }
  1413. DXIL::PrimitiveTopology &streamTopology =
  1414. funcProps->ShaderProps.GS.streamPrimitiveTopologies[streamIndex];
  1415. if (IsHLSLPointStreamType(parmDecl->getType()))
  1416. streamTopology = DXIL::PrimitiveTopology::PointList;
  1417. else if (IsHLSLLineStreamType(parmDecl->getType()))
  1418. streamTopology = DXIL::PrimitiveTopology::LineStrip;
  1419. else {
  1420. DXASSERT(IsHLSLTriangleStreamType(parmDecl->getType()),
  1421. "invalid StreamType");
  1422. streamTopology = DXIL::PrimitiveTopology::TriangleStrip;
  1423. }
  1424. if (streamIndex > 0) {
  1425. bool bAllPoint =
  1426. streamTopology == DXIL::PrimitiveTopology::PointList &&
  1427. funcProps->ShaderProps.GS.streamPrimitiveTopologies[0] ==
  1428. DXIL::PrimitiveTopology::PointList;
  1429. if (!bAllPoint) {
  1430. DiagnosticsEngine &Diags = CGM.getDiags();
  1431. unsigned DiagID = Diags.getCustomDiagID(
  1432. DiagnosticsEngine::Error, "when multiple GS output streams are "
  1433. "used they must be pointlists.");
  1434. Diags.Report(FD->getLocation(), DiagID);
  1435. }
  1436. }
  1437. streamIndex++;
  1438. }
  1439. unsigned GsInputArrayDim = 0;
  1440. if (parmDecl->hasAttr<HLSLTriangleAttr>()) {
  1441. inputPrimitive = DXIL::InputPrimitive::Triangle;
  1442. GsInputArrayDim = 3;
  1443. } else if (parmDecl->hasAttr<HLSLTriangleAdjAttr>()) {
  1444. inputPrimitive = DXIL::InputPrimitive::TriangleWithAdjacency;
  1445. GsInputArrayDim = 6;
  1446. } else if (parmDecl->hasAttr<HLSLPointAttr>()) {
  1447. inputPrimitive = DXIL::InputPrimitive::Point;
  1448. GsInputArrayDim = 1;
  1449. } else if (parmDecl->hasAttr<HLSLLineAdjAttr>()) {
  1450. inputPrimitive = DXIL::InputPrimitive::LineWithAdjacency;
  1451. GsInputArrayDim = 4;
  1452. } else if (parmDecl->hasAttr<HLSLLineAttr>()) {
  1453. inputPrimitive = DXIL::InputPrimitive::Line;
  1454. GsInputArrayDim = 2;
  1455. }
  1456. if (inputPrimitive != DXIL::InputPrimitive::Undefined) {
  1457. // Set to InputPrimitive for GS.
  1458. dxilInputQ = DxilParamInputQual::InputPrimitive;
  1459. if (funcProps->ShaderProps.GS.inputPrimitive ==
  1460. DXIL::InputPrimitive::Undefined) {
  1461. funcProps->ShaderProps.GS.inputPrimitive = inputPrimitive;
  1462. } else if (funcProps->ShaderProps.GS.inputPrimitive != inputPrimitive) {
  1463. DiagnosticsEngine &Diags = CGM.getDiags();
  1464. unsigned DiagID = Diags.getCustomDiagID(
  1465. DiagnosticsEngine::Error, "input parameter conflicts with geometry "
  1466. "specifier of previous input parameters");
  1467. Diags.Report(parmDecl->getLocation(), DiagID);
  1468. }
  1469. }
  1470. if (GsInputArrayDim != 0) {
  1471. QualType Ty = parmDecl->getType();
  1472. if (!Ty->isConstantArrayType()) {
  1473. DiagnosticsEngine &Diags = CGM.getDiags();
  1474. unsigned DiagID = Diags.getCustomDiagID(
  1475. DiagnosticsEngine::Error,
  1476. "input types for geometry shader must be constant size arrays");
  1477. Diags.Report(parmDecl->getLocation(), DiagID);
  1478. } else {
  1479. const ConstantArrayType *CAT = cast<ConstantArrayType>(Ty);
  1480. if (CAT->getSize().getLimitedValue() != GsInputArrayDim) {
StringRef primitiveNames[] = {
  1482. "invalid", // 0
  1483. "point", // 1
  1484. "line", // 2
  1485. "triangle", // 3
  1486. "lineadj", // 4
  1487. "invalid", // 5
  1488. "triangleadj", // 6
  1489. };
DXASSERT(GsInputArrayDim < llvm::array_lengthof(primitiveNames),
  1491. "Invalid array dim");
  1492. DiagnosticsEngine &Diags = CGM.getDiags();
  1493. unsigned DiagID = Diags.getCustomDiagID(
  1494. DiagnosticsEngine::Error, "array dimension for %0 must be %1");
  1495. Diags.Report(parmDecl->getLocation(), DiagID)
<< primitiveNames[GsInputArrayDim] << GsInputArrayDim;
  1497. }
  1498. }
  1499. }
  1500. paramAnnotation.SetParamInputQual(dxilInputQ);
  1501. if (isEntry) {
  1502. CheckParameterAnnotation(paramSemanticLoc, paramAnnotation,
  1503. /*isPatchConstantFunction*/ false);
  1504. }
  1505. }
  1506. if (inputPatchCount > 1) {
  1507. DiagnosticsEngine &Diags = CGM.getDiags();
  1508. unsigned DiagID = Diags.getCustomDiagID(
  1509. DiagnosticsEngine::Error, "may only have one InputPatch parameter");
  1510. Diags.Report(FD->getLocation(), DiagID);
  1511. }
  1512. if (outputPatchCount > 1) {
  1513. DiagnosticsEngine &Diags = CGM.getDiags();
  1514. unsigned DiagID = Diags.getCustomDiagID(
  1515. DiagnosticsEngine::Error, "may only have one OutputPatch parameter");
  1516. Diags.Report(FD->getLocation(), DiagID);
  1517. }
  1518. // Type annotation for parameters and return type.
  1519. DxilTypeSystem &dxilTypeSys = m_pHLModule->GetTypeSystem();
  1520. unsigned arrayEltSize = 0;
  1521. AddTypeAnnotation(FD->getReturnType(), dxilTypeSys, arrayEltSize);
  1522. // Type annotation for this pointer.
  1523. if (const CXXMethodDecl *MFD = dyn_cast<CXXMethodDecl>(FD)) {
  1524. const CXXRecordDecl *RD = MFD->getParent();
  1525. QualType Ty = CGM.getContext().getTypeDeclType(RD);
  1526. AddTypeAnnotation(Ty, dxilTypeSys, arrayEltSize);
  1527. }
  1528. for (const ValueDecl *param : FD->params()) {
  1529. QualType Ty = param->getType();
  1530. AddTypeAnnotation(Ty, dxilTypeSys, arrayEltSize);
  1531. }
// Only add function props when the function has a profile attribute or is the entry.
  1533. if (profileAttributes || isEntry)
  1534. m_pHLModule->AddDxilFunctionProps(F, funcProps);
  1535. if (isPatchConstantFunction)
  1536. patchConstantFunctionPropsMap[F] = std::move(funcProps);
  1537. // Save F to entry map.
  1538. if (profileAttributes) {
  1539. if (entryFunctionMap.count(FD->getName())) {
  1540. DiagnosticsEngine &Diags = CGM.getDiags();
  1541. unsigned DiagID = Diags.getCustomDiagID(
  1542. DiagnosticsEngine::Error,
  1543. "redefinition of %0");
  1544. Diags.Report(FD->getLocStart(), DiagID) << FD->getName();
  1545. }
  1546. auto &Entry = entryFunctionMap[FD->getNameAsString()];
  1547. Entry.SL = FD->getLocation();
Entry.Func = F;
  1549. }
  1550. // Add target-dependent experimental function attributes
  1551. for (const auto &Attr : FD->specific_attrs<HLSLExperimentalAttr>()) {
  1552. F->addFnAttr(Twine("exp-", Attr->getName()).str(), Attr->getValue());
  1553. }
  1554. }
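// EmitHLSLFunctionProlog handles work that needs debug info, which only becomes
// available once the function body is emitted. Currently that is the clipplanes
// attribute: for a vertex shader declared, for illustration, as
//   [clipplanes(p0, p1)] float4 main(...) : SV_Position
// each clip-plane expression is resolved to the global (or member) backing it
// and stored in ShaderProps.VS.clipPlanes.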
  1555. void CGMSHLSLRuntime::EmitHLSLFunctionProlog(Function *F, const FunctionDecl *FD) {
// Supporting clip planes requires debug info, which is not yet available when the function attributes are created.
  1557. if (const HLSLClipPlanesAttr *Attr = FD->getAttr<HLSLClipPlanesAttr>()) {
  1558. DxilFunctionProps &funcProps = m_pHLModule->GetDxilFunctionProps(F);
  1559. // Initialize to null.
  1560. memset(funcProps.ShaderProps.VS.clipPlanes, 0, sizeof(funcProps.ShaderProps.VS.clipPlanes));
  1561. // Create global for each clip plane, and use the clip plane val as init val.
  1562. auto AddClipPlane = [&](Expr *clipPlane, unsigned idx) {
  1563. if (DeclRefExpr *decl = dyn_cast<DeclRefExpr>(clipPlane)) {
  1564. const VarDecl *VD = cast<VarDecl>(decl->getDecl());
  1565. Constant *clipPlaneVal = CGM.GetAddrOfGlobalVar(VD);
  1566. funcProps.ShaderProps.VS.clipPlanes[idx] = clipPlaneVal;
  1567. if (m_bDebugInfo) {
  1568. CodeGenFunction CGF(CGM);
  1569. ApplyDebugLocation applyDebugLoc(CGF, clipPlane);
  1570. debugInfoMap[clipPlaneVal] = CGF.Builder.getCurrentDebugLocation();
  1571. }
  1572. } else {
  1573. // Must be a MemberExpr.
  1574. const MemberExpr *ME = cast<MemberExpr>(clipPlane);
  1575. CodeGenFunction CGF(CGM);
  1576. CodeGen::LValue LV = CGF.EmitMemberExpr(ME);
  1577. Value *addr = LV.getAddress();
  1578. funcProps.ShaderProps.VS.clipPlanes[idx] = cast<Constant>(addr);
  1579. if (m_bDebugInfo) {
  1580. CodeGenFunction CGF(CGM);
  1581. ApplyDebugLocation applyDebugLoc(CGF, clipPlane);
  1582. debugInfoMap[addr] = CGF.Builder.getCurrentDebugLocation();
  1583. }
  1584. }
  1585. };
  1586. if (Expr *clipPlane = Attr->getClipPlane1())
  1587. AddClipPlane(clipPlane, 0);
  1588. if (Expr *clipPlane = Attr->getClipPlane2())
  1589. AddClipPlane(clipPlane, 1);
  1590. if (Expr *clipPlane = Attr->getClipPlane3())
  1591. AddClipPlane(clipPlane, 2);
  1592. if (Expr *clipPlane = Attr->getClipPlane4())
  1593. AddClipPlane(clipPlane, 3);
  1594. if (Expr *clipPlane = Attr->getClipPlane5())
  1595. AddClipPlane(clipPlane, 4);
  1596. if (Expr *clipPlane = Attr->getClipPlane6())
  1597. AddClipPlane(clipPlane, 5);
  1598. clipPlaneFuncList.emplace_back(F);
  1599. }
  1600. }
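// AddControlFlowHint translates [branch]/[flatten]/[forcecase] statement
// attributes (or, when none are present, the HLSLPreferControlFlow /
// HLSLAvoidControlFlow codegen options) into control-flow hint metadata
// (DxilMDHelper::kDxilControlFlowHintMDName) on the branch or switch
// terminator.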
  1601. void CGMSHLSLRuntime::AddControlFlowHint(CodeGenFunction &CGF, const Stmt &S,
  1602. llvm::TerminatorInst *TI,
  1603. ArrayRef<const Attr *> Attrs) {
  1604. // Build hints.
  1605. bool bNoBranchFlatten = true;
  1606. bool bBranch = false;
  1607. bool bFlatten = false;
  1608. std::vector<DXIL::ControlFlowHint> hints;
  1609. for (const auto *Attr : Attrs) {
  1610. if (isa<HLSLBranchAttr>(Attr)) {
  1611. hints.emplace_back(DXIL::ControlFlowHint::Branch);
  1612. bNoBranchFlatten = false;
  1613. bBranch = true;
  1614. }
  1615. else if (isa<HLSLFlattenAttr>(Attr)) {
  1616. hints.emplace_back(DXIL::ControlFlowHint::Flatten);
  1617. bNoBranchFlatten = false;
  1618. bFlatten = true;
  1619. } else if (isa<HLSLForceCaseAttr>(Attr)) {
  1620. if (isa<SwitchStmt>(&S)) {
  1621. hints.emplace_back(DXIL::ControlFlowHint::ForceCase);
  1622. }
  1623. }
  1624. // Ignore fastopt, allow_uav_condition and call for now.
  1625. }
  1626. if (bNoBranchFlatten) {
// Fall back to the control-flow codegen options.
  1628. if (CGF.CGM.getCodeGenOpts().HLSLPreferControlFlow)
  1629. hints.emplace_back(DXIL::ControlFlowHint::Branch);
  1630. else if (CGF.CGM.getCodeGenOpts().HLSLAvoidControlFlow)
  1631. hints.emplace_back(DXIL::ControlFlowHint::Flatten);
  1632. }
  1633. if (bFlatten && bBranch) {
  1634. DiagnosticsEngine &Diags = CGM.getDiags();
  1635. unsigned DiagID = Diags.getCustomDiagID(
  1636. DiagnosticsEngine::Error,
  1637. "can't use branch and flatten attributes together");
  1638. Diags.Report(S.getLocStart(), DiagID);
  1639. }
  1640. if (hints.size()) {
// Add metadata to the instruction.
  1642. MDNode *hintsNode = DxilMDHelper::EmitControlFlowHints(Context, hints);
  1643. TI->setMetadata(DxilMDHelper::kDxilControlFlowHintMDName, hintsNode);
  1644. }
  1645. }
  1646. void CGMSHLSLRuntime::FinishAutoVar(CodeGenFunction &CGF, const VarDecl &D, llvm::Value *V) {
  1647. if (D.hasAttr<HLSLPreciseAttr>()) {
  1648. AllocaInst *AI = cast<AllocaInst>(V);
  1649. HLModule::MarkPreciseAttributeWithMetadata(AI);
  1650. }
  1651. // Add type annotation for local variable.
  1652. DxilTypeSystem &typeSys = m_pHLModule->GetTypeSystem();
  1653. unsigned arrayEltSize = 0;
  1654. AddTypeAnnotation(D.getType(), typeSys, arrayEltSize);
  1655. }
  1656. hlsl::InterpolationMode CGMSHLSLRuntime::GetInterpMode(const Decl *decl,
  1657. CompType compType,
  1658. bool bKeepUndefined) {
  1659. InterpolationMode Interp(
  1660. decl->hasAttr<HLSLNoInterpolationAttr>(), decl->hasAttr<HLSLLinearAttr>(),
  1661. decl->hasAttr<HLSLNoPerspectiveAttr>(), decl->hasAttr<HLSLCentroidAttr>(),
  1662. decl->hasAttr<HLSLSampleAttr>());
  1663. DXASSERT(Interp.IsValid(), "otherwise front-end missing validation");
  1664. if (Interp.IsUndefined() && !bKeepUndefined) {
  1665. // Type-based default: linear for floats, constant for others.
  1666. if (compType.IsFloatTy())
  1667. Interp = InterpolationMode::Kind::Linear;
  1668. else
  1669. Interp = InterpolationMode::Kind::Constant;
  1670. }
  1671. return Interp;
  1672. }
  1673. hlsl::CompType CGMSHLSLRuntime::GetCompType(const BuiltinType *BT) {
  1674. hlsl::CompType ElementType = hlsl::CompType::getInvalid();
  1675. switch (BT->getKind()) {
  1676. case BuiltinType::Bool:
  1677. ElementType = hlsl::CompType::getI1();
  1678. break;
  1679. case BuiltinType::Double:
  1680. ElementType = hlsl::CompType::getF64();
  1681. break;
  1682. case BuiltinType::HalfFloat: // HLSL Change
  1683. case BuiltinType::Float:
  1684. ElementType = hlsl::CompType::getF32();
  1685. break;
  1686. // HLSL Changes begin
  1687. case BuiltinType::Min10Float:
  1688. case BuiltinType::Min16Float:
  1689. // HLSL Changes end
  1690. case BuiltinType::Half:
  1691. ElementType = hlsl::CompType::getF16();
  1692. break;
  1693. case BuiltinType::Int:
  1694. ElementType = hlsl::CompType::getI32();
  1695. break;
  1696. case BuiltinType::LongLong:
  1697. ElementType = hlsl::CompType::getI64();
  1698. break;
  1699. // HLSL Changes begin
  1700. case BuiltinType::Min12Int:
  1701. case BuiltinType::Min16Int:
  1702. // HLSL Changes end
  1703. case BuiltinType::Short:
  1704. ElementType = hlsl::CompType::getI16();
  1705. break;
  1706. case BuiltinType::UInt:
  1707. ElementType = hlsl::CompType::getU32();
  1708. break;
  1709. case BuiltinType::ULongLong:
  1710. ElementType = hlsl::CompType::getU64();
  1711. break;
  1712. case BuiltinType::Min16UInt: // HLSL Change
  1713. case BuiltinType::UShort:
  1714. ElementType = hlsl::CompType::getU16();
  1715. break;
  1716. default:
  1717. llvm_unreachable("unsupported type");
  1718. break;
  1719. }
  1720. return ElementType;
  1721. }
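// addResource dispatches a top-level declaration to the appropriate binding
// table: cbuffer/tbuffer decls become HLCBuffers, sampler/SRV/UAV globals
// become DxilSampler/HLResource entries, groupshared variables are registered
// with the module, and any remaining global constant is appended to the
// implicit global constant buffer (conventionally "$Globals").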
/// Add a resource to the program.
  1723. void CGMSHLSLRuntime::addResource(Decl *D) {
  1724. if (HLSLBufferDecl *BD = dyn_cast<HLSLBufferDecl>(D))
  1725. GetOrCreateCBuffer(BD);
  1726. else if (VarDecl *VD = dyn_cast<VarDecl>(D)) {
  1727. hlsl::DxilResourceBase::Class resClass = TypeToClass(VD->getType());
// Skip decls whose initializer is itself a resource.
  1729. if (VD->hasInit() && resClass != DXIL::ResourceClass::Invalid)
  1730. return;
// Skip static (internal-linkage) globals.
  1732. if (!VD->hasExternalFormalLinkage()) {
  1733. if (VD->hasInit() && VD->getType().isConstQualified()) {
  1734. Expr* InitExp = VD->getInit();
  1735. GlobalVariable *GV = cast<GlobalVariable>(CGM.GetAddrOfGlobalVar(VD));
  1736. // Only save const static global of struct type.
  1737. if (GV->getType()->getElementType()->isStructTy()) {
  1738. staticConstGlobalInitMap[InitExp] = GV;
  1739. }
  1740. }
  1741. return;
  1742. }
  1743. if (D->hasAttr<HLSLGroupSharedAttr>()) {
  1744. GlobalVariable *GV = cast<GlobalVariable>(CGM.GetAddrOfGlobalVar(VD));
  1745. m_pHLModule->AddGroupSharedVariable(GV);
  1746. return;
  1747. }
  1748. switch (resClass) {
  1749. case hlsl::DxilResourceBase::Class::Sampler:
  1750. AddSampler(VD);
  1751. break;
  1752. case hlsl::DxilResourceBase::Class::UAV:
  1753. case hlsl::DxilResourceBase::Class::SRV:
  1754. AddUAVSRV(VD, resClass);
  1755. break;
  1756. case hlsl::DxilResourceBase::Class::Invalid: {
// Normal global constant; add it to the global constant buffer.
  1758. HLCBuffer &globalCB = GetGlobalCBuffer();
  1759. AddConstant(VD, globalCB);
  1760. break;
  1761. }
  1762. case DXIL::ResourceClass::CBuffer:
  1763. DXASSERT(0, "cbuffer should not be here");
  1764. break;
  1765. }
  1766. }
  1767. }
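// KeywordToClass is the coarse companion of KeywordToKind: it buckets an HLSL
// object keyword into Sampler, CBuffer, SRV or UAV (for example, "Texture2D"
// -> SRV, while "RWTexture2D" and the RasterizerOrdered* variants -> UAV),
// returning Invalid for anything else.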
  1768. // TODO: collect such helper utility functions in one place.
  1769. static DxilResourceBase::Class KeywordToClass(const std::string &keyword) {
  1770. // TODO: refactor for faster search (switch by 1/2/3 first letters, then
  1771. // compare)
  1772. if (keyword == "SamplerState")
  1773. return DxilResourceBase::Class::Sampler;
  1774. if (keyword == "SamplerComparisonState")
  1775. return DxilResourceBase::Class::Sampler;
  1776. if (keyword == "ConstantBuffer")
  1777. return DxilResourceBase::Class::CBuffer;
  1778. if (keyword == "TextureBuffer")
  1779. return DxilResourceBase::Class::SRV;
  1780. bool isSRV = keyword == "Buffer";
  1781. isSRV |= keyword == "ByteAddressBuffer";
  1782. isSRV |= keyword == "StructuredBuffer";
  1783. isSRV |= keyword == "Texture1D";
  1784. isSRV |= keyword == "Texture1DArray";
  1785. isSRV |= keyword == "Texture2D";
  1786. isSRV |= keyword == "Texture2DArray";
  1787. isSRV |= keyword == "Texture3D";
  1788. isSRV |= keyword == "TextureCube";
  1789. isSRV |= keyword == "TextureCubeArray";
  1790. isSRV |= keyword == "Texture2DMS";
  1791. isSRV |= keyword == "Texture2DMSArray";
  1792. if (isSRV)
  1793. return DxilResourceBase::Class::SRV;
  1794. bool isUAV = keyword == "RWBuffer";
  1795. isUAV |= keyword == "RWByteAddressBuffer";
  1796. isUAV |= keyword == "RWStructuredBuffer";
  1797. isUAV |= keyword == "RWTexture1D";
  1798. isUAV |= keyword == "RWTexture1DArray";
  1799. isUAV |= keyword == "RWTexture2D";
  1800. isUAV |= keyword == "RWTexture2DArray";
  1801. isUAV |= keyword == "RWTexture3D";
  1802. isUAV |= keyword == "RWTextureCube";
  1803. isUAV |= keyword == "RWTextureCubeArray";
  1804. isUAV |= keyword == "RWTexture2DMS";
  1805. isUAV |= keyword == "RWTexture2DMSArray";
  1806. isUAV |= keyword == "AppendStructuredBuffer";
  1807. isUAV |= keyword == "ConsumeStructuredBuffer";
  1808. isUAV |= keyword == "RasterizerOrderedBuffer";
  1809. isUAV |= keyword == "RasterizerOrderedByteAddressBuffer";
  1810. isUAV |= keyword == "RasterizerOrderedStructuredBuffer";
  1811. isUAV |= keyword == "RasterizerOrderedTexture1D";
  1812. isUAV |= keyword == "RasterizerOrderedTexture1DArray";
  1813. isUAV |= keyword == "RasterizerOrderedTexture2D";
  1814. isUAV |= keyword == "RasterizerOrderedTexture2DArray";
  1815. isUAV |= keyword == "RasterizerOrderedTexture3D";
  1816. if (isUAV)
  1817. return DxilResourceBase::Class::UAV;
  1818. return DxilResourceBase::Class::Invalid;
  1819. }
  1820. // This should probably be refactored to ASTContextHLSL, and follow types
  1821. // rather than do string comparisons.
  1822. DXIL::ResourceClass
  1823. hlsl::GetResourceClassForType(const clang::ASTContext &context,
  1824. clang::QualType Ty) {
  1825. Ty = Ty.getCanonicalType();
  1826. if (const clang::ArrayType *arrayType = context.getAsArrayType(Ty)) {
  1827. return GetResourceClassForType(context, arrayType->getElementType());
  1828. } else if (const RecordType *RT = Ty->getAsStructureType()) {
  1829. return KeywordToClass(RT->getDecl()->getName());
  1830. } else if (const RecordType *RT = Ty->getAs<RecordType>()) {
  1831. if (const ClassTemplateSpecializationDecl *templateDecl =
  1832. dyn_cast<ClassTemplateSpecializationDecl>(RT->getDecl())) {
  1833. return KeywordToClass(templateDecl->getName());
  1834. }
  1835. }
  1836. return hlsl::DxilResourceBase::Class::Invalid;
  1837. }
  1838. hlsl::DxilResourceBase::Class CGMSHLSLRuntime::TypeToClass(clang::QualType Ty) {
  1839. return hlsl::GetResourceClassForType(CGM.getContext(), Ty);
  1840. }
  1841. uint32_t CGMSHLSLRuntime::AddSampler(VarDecl *samplerDecl) {
  1842. llvm::Constant *val = CGM.GetAddrOfGlobalVar(samplerDecl);
  1843. unique_ptr<DxilSampler> hlslRes(new DxilSampler);
  1844. hlslRes->SetLowerBound(UINT_MAX);
  1845. hlslRes->SetGlobalSymbol(cast<llvm::GlobalVariable>(val));
  1846. hlslRes->SetGlobalName(samplerDecl->getName());
  1847. QualType VarTy = samplerDecl->getType();
  1848. if (const clang::ArrayType *arrayType =
  1849. CGM.getContext().getAsArrayType(VarTy)) {
  1850. if (arrayType->isConstantArrayType()) {
  1851. uint32_t arraySize =
  1852. cast<ConstantArrayType>(arrayType)->getSize().getLimitedValue();
  1853. hlslRes->SetRangeSize(arraySize);
  1854. } else {
  1855. hlslRes->SetRangeSize(UINT_MAX);
  1856. }
  1857. // use elementTy
  1858. VarTy = arrayType->getElementType();
  1859. // Support more dim.
  1860. while (const clang::ArrayType *arrayType =
  1861. CGM.getContext().getAsArrayType(VarTy)) {
  1862. unsigned rangeSize = hlslRes->GetRangeSize();
  1863. if (arrayType->isConstantArrayType()) {
  1864. uint32_t arraySize =
  1865. cast<ConstantArrayType>(arrayType)->getSize().getLimitedValue();
  1866. if (rangeSize != UINT_MAX)
  1867. hlslRes->SetRangeSize(rangeSize * arraySize);
  1868. } else
  1869. hlslRes->SetRangeSize(UINT_MAX);
  1870. // use elementTy
  1871. VarTy = arrayType->getElementType();
  1872. }
  1873. } else
  1874. hlslRes->SetRangeSize(1);
  1875. const RecordType *RT = VarTy->getAs<RecordType>();
  1876. DxilSampler::SamplerKind kind = KeywordToSamplerKind(RT->getDecl()->getName());
  1877. hlslRes->SetSamplerKind(kind);
  1878. for (hlsl::UnusualAnnotation *it : samplerDecl->getUnusualAnnotations()) {
  1879. switch (it->getKind()) {
  1880. case hlsl::UnusualAnnotation::UA_RegisterAssignment: {
  1881. hlsl::RegisterAssignment *ra = cast<hlsl::RegisterAssignment>(it);
  1882. hlslRes->SetLowerBound(ra->RegisterNumber);
  1883. hlslRes->SetSpaceID(ra->RegisterSpace);
  1884. break;
  1885. }
  1886. case hlsl::UnusualAnnotation::UA_SemanticDecl:
  1887. // Ignore Semantics
  1888. break;
  1889. case hlsl::UnusualAnnotation::UA_ConstantPacking:
  1890. // Should be handled by front-end
  1891. llvm_unreachable("packoffset on sampler");
  1892. break;
  1893. default:
  1894. llvm_unreachable("unknown UnusualAnnotation on sampler");
  1895. break;
  1896. }
  1897. }
  1898. hlslRes->SetID(m_pHLModule->GetSamplers().size());
  1899. return m_pHLModule->AddSampler(std::move(hlslRes));
  1900. }
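// CollectScalarTypes flattens a type into its scalar leaves so SetUAVSRV can
// validate typed-buffer element types: a matrix contributes rows*cols entries,
// a vector one entry per component, records recurse into their fields, and
// arrays repeat their element type (unsized arrays are counted as 5 elements,
// which always exceeds the 4-component limit checked below).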
  1901. static void CollectScalarTypes(std::vector<QualType> &ScalarTys, QualType Ty) {
  1902. if (Ty->isRecordType()) {
  1903. if (hlsl::IsHLSLMatType(Ty)) {
  1904. QualType EltTy = hlsl::GetHLSLMatElementType(Ty);
  1905. unsigned row = 0;
  1906. unsigned col = 0;
  1907. hlsl::GetRowsAndCols(Ty, row, col);
  1908. unsigned size = col*row;
  1909. for (unsigned i = 0; i < size; i++) {
  1910. CollectScalarTypes(ScalarTys, EltTy);
  1911. }
  1912. } else if (hlsl::IsHLSLVecType(Ty)) {
  1913. QualType EltTy = hlsl::GetHLSLVecElementType(Ty);
  1914. unsigned row = 0;
  1915. unsigned col = 0;
  1916. hlsl::GetRowsAndColsForAny(Ty, row, col);
  1917. unsigned size = col;
  1918. for (unsigned i = 0; i < size; i++) {
  1919. CollectScalarTypes(ScalarTys, EltTy);
  1920. }
  1921. } else {
  1922. const RecordType *RT = Ty->getAsStructureType();
  1923. // For CXXRecord.
  1924. if (!RT)
  1925. RT = Ty->getAs<RecordType>();
  1926. RecordDecl *RD = RT->getDecl();
  1927. for (FieldDecl *field : RD->fields())
  1928. CollectScalarTypes(ScalarTys, field->getType());
  1929. }
  1930. } else if (Ty->isArrayType()) {
  1931. const clang::ArrayType *AT = Ty->getAsArrayTypeUnsafe();
  1932. QualType EltTy = AT->getElementType();
// Use an arbitrary element count (5) for unsized arrays.
  1934. unsigned size = 5;
  1935. if (AT->isConstantArrayType()) {
  1936. size = cast<ConstantArrayType>(AT)->getSize().getLimitedValue();
  1937. }
  1938. for (unsigned i=0;i<size;i++) {
  1939. CollectScalarTypes(ScalarTys, EltTy);
  1940. }
  1941. } else {
  1942. ScalarTys.emplace_back(Ty);
  1943. }
  1944. }
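// SetUAVSRV fills in the resource kind, sample count, component type, element
// stride and ROV flag for an SRV/UAV. For illustration, a declaration such as
//   RWBuffer<float2> buf;
// yields kind TypedBuffer, component type F32 and RW == true, while
//   StructuredBuffer<MyStruct> sb;   // MyStruct is a placeholder user struct
// records kind StructuredBuffer with an element stride equal to the struct's
// allocation size.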
  1945. bool CGMSHLSLRuntime::SetUAVSRV(SourceLocation loc,
  1946. hlsl::DxilResourceBase::Class resClass,
  1947. DxilResource *hlslRes, const RecordDecl *RD) {
  1948. hlsl::DxilResource::Kind kind = KeywordToKind(RD->getName());
  1949. hlslRes->SetKind(kind);
  1950. // Get the result type from handle field.
  1951. FieldDecl *FD = *(RD->field_begin());
  1952. DXASSERT(FD->getName() == "h", "must be handle field");
  1953. QualType resultTy = FD->getType();
  1954. // Type annotation for result type of resource.
  1955. DxilTypeSystem &dxilTypeSys = m_pHLModule->GetTypeSystem();
  1956. unsigned arrayEltSize = 0;
  1957. AddTypeAnnotation(QualType(RD->getTypeForDecl(),0), dxilTypeSys, arrayEltSize);
  1958. if (kind == hlsl::DxilResource::Kind::Texture2DMS ||
  1959. kind == hlsl::DxilResource::Kind::Texture2DMSArray) {
  1960. const ClassTemplateSpecializationDecl *templateDecl =
  1961. dyn_cast<ClassTemplateSpecializationDecl>(RD);
  1962. const clang::TemplateArgument &sampleCountArg =
  1963. templateDecl->getTemplateArgs()[1];
  1964. uint32_t sampleCount = sampleCountArg.getAsIntegral().getLimitedValue();
  1965. hlslRes->SetSampleCount(sampleCount);
  1966. }
  1967. if (kind != hlsl::DxilResource::Kind::StructuredBuffer) {
  1968. QualType Ty = resultTy;
  1969. QualType EltTy = Ty;
  1970. if (hlsl::IsHLSLVecType(Ty)) {
  1971. EltTy = hlsl::GetHLSLVecElementType(Ty);
  1972. } else if (hlsl::IsHLSLMatType(Ty)) {
  1973. EltTy = hlsl::GetHLSLMatElementType(Ty);
  1974. } else if (resultTy->isAggregateType()) {
// Struct or array element in a non-structured resource.
  1976. std::vector<QualType> ScalarTys;
  1977. CollectScalarTypes(ScalarTys, resultTy);
  1978. unsigned size = ScalarTys.size();
  1979. if (size == 0) {
  1980. DiagnosticsEngine &Diags = CGM.getDiags();
  1981. unsigned DiagID = Diags.getCustomDiagID(
  1982. DiagnosticsEngine::Error,
  1983. "object's templated type must have at least one element");
  1984. Diags.Report(loc, DiagID);
  1985. return false;
  1986. }
  1987. if (size > 4) {
  1988. DiagnosticsEngine &Diags = CGM.getDiags();
  1989. unsigned DiagID = Diags.getCustomDiagID(
  1990. DiagnosticsEngine::Error, "elements of typed buffers and textures "
  1991. "must fit in four 32-bit quantities");
  1992. Diags.Report(loc, DiagID);
  1993. return false;
  1994. }
  1995. EltTy = ScalarTys[0];
  1996. for (QualType ScalarTy : ScalarTys) {
  1997. if (ScalarTy != EltTy) {
  1998. DiagnosticsEngine &Diags = CGM.getDiags();
  1999. unsigned DiagID = Diags.getCustomDiagID(
  2000. DiagnosticsEngine::Error,
  2001. "all template type components must have the same type");
  2002. Diags.Report(loc, DiagID);
  2003. return false;
  2004. }
  2005. }
  2006. }
  2007. EltTy = EltTy.getCanonicalType();
  2008. bool bSNorm = false;
  2009. bool bUNorm = false;
  2010. if (const AttributedType *AT = dyn_cast<AttributedType>(Ty)) {
  2011. switch (AT->getAttrKind()) {
  2012. case AttributedType::Kind::attr_hlsl_snorm:
  2013. bSNorm = true;
  2014. break;
  2015. case AttributedType::Kind::attr_hlsl_unorm:
  2016. bUNorm = true;
  2017. break;
  2018. default:
  2019. // Do nothing
  2020. break;
  2021. }
  2022. }
  2023. if (EltTy->isBuiltinType()) {
  2024. const BuiltinType *BTy = EltTy->getAs<BuiltinType>();
  2025. CompType::Kind kind = BuiltinTyToCompTy(BTy, bSNorm, bUNorm);
// 64-bit types are implemented with u32.
  2027. if (kind == CompType::Kind::U64 || kind == CompType::Kind::I64 ||
  2028. kind == CompType::Kind::SNormF64 ||
  2029. kind == CompType::Kind::UNormF64 || kind == CompType::Kind::F64) {
  2030. kind = CompType::Kind::U32;
  2031. }
  2032. hlslRes->SetCompType(kind);
  2033. } else {
  2034. DXASSERT(!bSNorm && !bUNorm, "snorm/unorm on invalid type");
  2035. }
  2036. }
  2037. hlslRes->SetROV(RD->getName().startswith("RasterizerOrdered"));
  2038. if (kind == hlsl::DxilResource::Kind::TypedBuffer ||
  2039. kind == hlsl::DxilResource::Kind::StructuredBuffer) {
  2040. const ClassTemplateSpecializationDecl *templateDecl =
  2041. dyn_cast<ClassTemplateSpecializationDecl>(RD);
  2042. const clang::TemplateArgument &retTyArg =
  2043. templateDecl->getTemplateArgs()[0];
  2044. llvm::Type *retTy = CGM.getTypes().ConvertType(retTyArg.getAsType());
  2045. uint32_t strideInBytes = dataLayout.getTypeAllocSize(retTy);
  2046. hlslRes->SetElementStride(strideInBytes);
  2047. }
  2048. if (resClass == hlsl::DxilResourceBase::Class::SRV) {
  2049. if (hlslRes->IsGloballyCoherent()) {
  2050. DiagnosticsEngine &Diags = CGM.getDiags();
  2051. unsigned DiagID = Diags.getCustomDiagID(
  2052. DiagnosticsEngine::Error, "globallycoherent can only be used with "
  2053. "Unordered Access View buffers.");
  2054. Diags.Report(loc, DiagID);
  2055. return false;
  2056. }
  2057. hlslRes->SetRW(false);
  2058. hlslRes->SetID(m_pHLModule->GetSRVs().size());
  2059. } else {
  2060. hlslRes->SetRW(true);
  2061. hlslRes->SetID(m_pHLModule->GetUAVs().size());
  2062. }
  2063. return true;
  2064. }
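// Creates an HLResource for a UAV/SRV global variable declaration: records the
// global symbol and name, the range size for (possibly multi-dimensional)
// resource arrays, any register assignment, and the globallycoherent
// attribute, then registers it with the HLModule as an SRV or UAV.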
  2065. uint32_t CGMSHLSLRuntime::AddUAVSRV(VarDecl *decl,
  2066. hlsl::DxilResourceBase::Class resClass) {
  2067. llvm::GlobalVariable *val =
  2068. cast<llvm::GlobalVariable>(CGM.GetAddrOfGlobalVar(decl));
  2069. QualType VarTy = decl->getType().getCanonicalType();
  2070. unique_ptr<HLResource> hlslRes(new HLResource);
  2071. hlslRes->SetLowerBound(UINT_MAX);
  2072. hlslRes->SetGlobalSymbol(val);
  2073. hlslRes->SetGlobalName(decl->getName());
  2074. if (const clang::ArrayType *arrayType =
  2075. CGM.getContext().getAsArrayType(VarTy)) {
  2076. if (arrayType->isConstantArrayType()) {
  2077. uint32_t arraySize =
  2078. cast<ConstantArrayType>(arrayType)->getSize().getLimitedValue();
  2079. hlslRes->SetRangeSize(arraySize);
  2080. } else
  2081. hlslRes->SetRangeSize(UINT_MAX);
// Use the element type.
VarTy = arrayType->getElementType();
// Support additional array dimensions.
  2085. while (const clang::ArrayType *arrayType =
  2086. CGM.getContext().getAsArrayType(VarTy)) {
  2087. unsigned rangeSize = hlslRes->GetRangeSize();
  2088. if (arrayType->isConstantArrayType()) {
  2089. uint32_t arraySize =
  2090. cast<ConstantArrayType>(arrayType)->getSize().getLimitedValue();
  2091. if (rangeSize != UINT_MAX)
  2092. hlslRes->SetRangeSize(rangeSize * arraySize);
  2093. } else
  2094. hlslRes->SetRangeSize(UINT_MAX);
  2095. // use elementTy
  2096. VarTy = arrayType->getElementType();
  2097. }
  2098. } else
  2099. hlslRes->SetRangeSize(1);
  2100. for (hlsl::UnusualAnnotation *it : decl->getUnusualAnnotations()) {
  2101. switch (it->getKind()) {
  2102. case hlsl::UnusualAnnotation::UA_RegisterAssignment: {
  2103. hlsl::RegisterAssignment *ra = cast<hlsl::RegisterAssignment>(it);
  2104. hlslRes->SetLowerBound(ra->RegisterNumber);
  2105. hlslRes->SetSpaceID(ra->RegisterSpace);
  2106. break;
  2107. }
  2108. case hlsl::UnusualAnnotation::UA_SemanticDecl:
  2109. // Ignore Semantics
  2110. break;
  2111. case hlsl::UnusualAnnotation::UA_ConstantPacking:
  2112. // Should be handled by front-end
  2113. llvm_unreachable("packoffset on uav/srv");
  2114. break;
  2115. default:
  2116. llvm_unreachable("unknown UnusualAnnotation on uav/srv");
  2117. break;
  2118. }
  2119. }
  2120. const RecordType *RT = VarTy->getAs<RecordType>();
  2121. RecordDecl *RD = RT->getDecl();
  2122. if (decl->hasAttr<HLSLGloballyCoherentAttr>()) {
  2123. hlslRes->SetGloballyCoherent(true);
  2124. }
  2125. if (!SetUAVSRV(decl->getLocation(), resClass, hlslRes.get(), RD))
  2126. return 0;
  2127. if (resClass == hlsl::DxilResourceBase::Class::SRV) {
  2128. return m_pHLModule->AddSRV(std::move(hlslRes));
  2129. } else {
  2130. return m_pHLModule->AddUAV(std::move(hlslRes));
  2131. }
  2132. }
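// Returns true if the given type is an HLSL resource object or transitively
// contains one as a field or array element.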
  2133. static bool IsResourceInType(const clang::ASTContext &context,
  2134. clang::QualType Ty) {
  2135. Ty = Ty.getCanonicalType();
  2136. if (const clang::ArrayType *arrayType = context.getAsArrayType(Ty)) {
  2137. return IsResourceInType(context, arrayType->getElementType());
  2138. } else if (const RecordType *RT = Ty->getAsStructureType()) {
  2139. if (KeywordToClass(RT->getDecl()->getName()) != DxilResourceBase::Class::Invalid)
  2140. return true;
  2141. const CXXRecordDecl* typeRecordDecl = RT->getAsCXXRecordDecl();
  2142. if (typeRecordDecl && !typeRecordDecl->isImplicit()) {
  2143. for (auto field : typeRecordDecl->fields()) {
  2144. if (IsResourceInType(context, field->getType()))
  2145. return true;
  2146. }
  2147. }
  2148. } else if (const RecordType *RT = Ty->getAs<RecordType>()) {
  2149. if (const ClassTemplateSpecializationDecl *templateDecl =
  2150. dyn_cast<ClassTemplateSpecializationDecl>(RT->getDecl())) {
  2151. if (KeywordToClass(templateDecl->getName()) != DxilResourceBase::Class::Invalid)
  2152. return true;
  2153. }
  2154. }
  2155. return false; // no resources found
  2156. }
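// Adds a constant declaration to the given cbuffer. Statics inside a cbuffer
// are emitted as ordinary globals instead; packoffset/register annotations
// become user-specified offsets, and a constant entry plus field annotation
// are recorded for later layout.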
  2157. void CGMSHLSLRuntime::AddConstant(VarDecl *constDecl, HLCBuffer &CB) {
  2158. if (constDecl->getStorageClass() == SC_Static) {
  2159. // For static inside cbuffer, take as global static.
  2160. // Don't add to cbuffer.
  2161. CGM.EmitGlobal(constDecl);
  2162. // Add type annotation for static global types.
  2163. // May need it when cast from cbuf.
  2164. DxilTypeSystem &dxilTypeSys = m_pHLModule->GetTypeSystem();
  2165. unsigned arraySize = 0;
  2166. AddTypeAnnotation(constDecl->getType(), dxilTypeSys, arraySize);
  2167. return;
  2168. }
  2169. // Search defined structure for resource objects and fail
  2170. if (CB.GetRangeSize() > 1 &&
  2171. IsResourceInType(CGM.getContext(), constDecl->getType())) {
  2172. DiagnosticsEngine &Diags = CGM.getDiags();
  2173. unsigned DiagID = Diags.getCustomDiagID(
  2174. DiagnosticsEngine::Error,
  2175. "object types not supported in cbuffer/tbuffer view arrays.");
  2176. Diags.Report(constDecl->getLocation(), DiagID);
  2177. return;
  2178. }
  2179. llvm::Constant *constVal = CGM.GetAddrOfGlobalVar(constDecl);
  2180. bool isGlobalCB = CB.GetID() == globalCBIndex;
  2181. uint32_t offset = 0;
  2182. bool userOffset = false;
  2183. for (hlsl::UnusualAnnotation *it : constDecl->getUnusualAnnotations()) {
  2184. switch (it->getKind()) {
  2185. case hlsl::UnusualAnnotation::UA_ConstantPacking: {
  2186. if (!isGlobalCB) {
// TODO: check that packoffset elements cannot be mixed with
// non-packoffset elements in a cbuffer.
  2189. hlsl::ConstantPacking *cp = cast<hlsl::ConstantPacking>(it);
  2190. offset = cp->Subcomponent << 2;
  2191. offset += cp->ComponentOffset;
  2192. // Change to byte.
  2193. offset <<= 2;
  2194. userOffset = true;
  2195. } else {
  2196. DiagnosticsEngine &Diags = CGM.getDiags();
  2197. unsigned DiagID = Diags.getCustomDiagID(
  2198. DiagnosticsEngine::Error,
  2199. "packoffset is only allowed in a constant buffer.");
  2200. Diags.Report(it->Loc, DiagID);
  2201. }
  2202. break;
  2203. }
  2204. case hlsl::UnusualAnnotation::UA_RegisterAssignment: {
  2205. if (isGlobalCB) {
  2206. RegisterAssignment *ra = cast<RegisterAssignment>(it);
  2207. offset = ra->RegisterNumber << 2;
  2208. // Change to byte.
  2209. offset <<= 2;
  2210. userOffset = true;
  2211. }
  2212. break;
  2213. }
  2214. case hlsl::UnusualAnnotation::UA_SemanticDecl:
  2215. // skip semantic on constant
  2216. break;
  2217. }
  2218. }
  2219. std::unique_ptr<DxilResourceBase> pHlslConst = llvm::make_unique<DxilResourceBase>(DXIL::ResourceClass::Invalid);
  2220. pHlslConst->SetLowerBound(UINT_MAX);
  2221. pHlslConst->SetGlobalSymbol(cast<llvm::GlobalVariable>(constVal));
  2222. pHlslConst->SetGlobalName(constDecl->getName());
  2223. if (userOffset) {
  2224. pHlslConst->SetLowerBound(offset);
  2225. }
  2226. DxilTypeSystem &dxilTypeSys = m_pHLModule->GetTypeSystem();
  2227. // Just add type annotation here.
  2228. // Offset will be allocated later.
  2229. QualType Ty = constDecl->getType();
  2230. if (CB.GetRangeSize() != 1) {
  2231. while (Ty->isArrayType()) {
  2232. Ty = Ty->getAsArrayTypeUnsafe()->getElementType();
  2233. }
  2234. }
  2235. unsigned arrayEltSize = 0;
  2236. unsigned size = AddTypeAnnotation(Ty, dxilTypeSys, arrayEltSize);
  2237. pHlslConst->SetRangeSize(size);
  2238. CB.AddConst(pHlslConst);
  2239. // Save fieldAnnotation for the const var.
  2240. DxilFieldAnnotation fieldAnnotation;
  2241. if (userOffset)
  2242. fieldAnnotation.SetCBufferOffset(offset);
  2243. // Get the nested element type.
  2244. if (Ty->isArrayType()) {
  2245. while (const ConstantArrayType *arrayTy =
  2246. CGM.getContext().getAsConstantArrayType(Ty)) {
  2247. Ty = arrayTy->getElementType();
  2248. }
  2249. }
  2250. bool bDefaultRowMajor = m_pHLModule->GetHLOptions().bDefaultRowMajor;
  2251. ConstructFieldAttributedAnnotation(fieldAnnotation, Ty, bDefaultRowMajor);
  2252. m_ConstVarAnnotationMap[constVal] = fieldAnnotation;
  2253. }
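// Builds an HLCBuffer from an HLSLBufferDecl: applies register annotations,
// computes the range size (for ConstantBuffer/TextureBuffer view arrays), and
// adds each member declaration as a constant.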
  2254. uint32_t CGMSHLSLRuntime::AddCBuffer(HLSLBufferDecl *D) {
  2255. unique_ptr<HLCBuffer> CB = llvm::make_unique<HLCBuffer>();
  2256. // setup the CB
  2257. CB->SetGlobalSymbol(nullptr);
  2258. CB->SetGlobalName(D->getNameAsString());
  2259. CB->SetLowerBound(UINT_MAX);
  2260. if (!D->isCBuffer()) {
  2261. CB->SetKind(DXIL::ResourceKind::TBuffer);
  2262. }
// The global variable will only be used once by createHandle?
// SetHandle(llvm::Value *pHandle);
  2265. for (hlsl::UnusualAnnotation *it : D->getUnusualAnnotations()) {
  2266. switch (it->getKind()) {
  2267. case hlsl::UnusualAnnotation::UA_RegisterAssignment: {
  2268. hlsl::RegisterAssignment *ra = cast<hlsl::RegisterAssignment>(it);
  2269. uint32_t regNum = ra->RegisterNumber;
  2270. uint32_t regSpace = ra->RegisterSpace;
  2271. CB->SetSpaceID(regSpace);
  2272. CB->SetLowerBound(regNum);
  2273. break;
  2274. }
  2275. case hlsl::UnusualAnnotation::UA_SemanticDecl:
  2276. // skip semantic on constant buffer
  2277. break;
  2278. case hlsl::UnusualAnnotation::UA_ConstantPacking:
  2279. llvm_unreachable("no packoffset on constant buffer");
  2280. break;
  2281. }
  2282. }
  2283. // Add constant
  2284. if (D->isConstantBufferView()) {
  2285. VarDecl *constDecl = cast<VarDecl>(*D->decls_begin());
  2286. CB->SetRangeSize(1);
  2287. QualType Ty = constDecl->getType();
  2288. if (Ty->isArrayType()) {
  2289. if (!Ty->isIncompleteArrayType()) {
  2290. unsigned arraySize = 1;
  2291. while (Ty->isArrayType()) {
  2292. Ty = Ty->getCanonicalTypeUnqualified();
  2293. const ConstantArrayType *AT = cast<ConstantArrayType>(Ty);
  2294. arraySize *= AT->getSize().getLimitedValue();
  2295. Ty = AT->getElementType();
  2296. }
  2297. CB->SetRangeSize(arraySize);
  2298. } else {
  2299. CB->SetRangeSize(UINT_MAX);
  2300. }
  2301. }
  2302. AddConstant(constDecl, *CB.get());
  2303. } else {
  2304. auto declsEnds = D->decls_end();
  2305. CB->SetRangeSize(1);
  2306. for (auto it = D->decls_begin(); it != declsEnds; it++) {
  2307. if (VarDecl *constDecl = dyn_cast<VarDecl>(*it)) {
  2308. AddConstant(constDecl, *CB.get());
  2309. } else if (isa<EmptyDecl>(*it)) {
  2310. // Nothing to do for this declaration.
  2311. } else if (isa<CXXRecordDecl>(*it)) {
  2312. // Nothing to do for this declaration.
  2313. } else if (isa<FunctionDecl>(*it)) {
// A function within a cbuffer is effectively a top-level function,
// as it only refers to globally scoped declarations.
  2316. this->CGM.EmitTopLevelDecl(*it);
  2317. } else {
  2318. HLSLBufferDecl *inner = cast<HLSLBufferDecl>(*it);
  2319. GetOrCreateCBuffer(inner);
  2320. }
  2321. }
  2322. }
  2323. CB->SetID(m_pHLModule->GetCBuffers().size());
  2324. return m_pHLModule->AddCBuffer(std::move(CB));
  2325. }
  2326. HLCBuffer &CGMSHLSLRuntime::GetOrCreateCBuffer(HLSLBufferDecl *D) {
  2327. if (constantBufMap.count(D) != 0) {
  2328. uint32_t cbIndex = constantBufMap[D];
  2329. return *static_cast<HLCBuffer*>(&(m_pHLModule->GetCBuffer(cbIndex)));
  2330. }
  2331. uint32_t cbID = AddCBuffer(D);
  2332. constantBufMap[D] = cbID;
  2333. return *static_cast<HLCBuffer*>(&(m_pHLModule->GetCBuffer(cbID)));
  2334. }
  2335. bool CGMSHLSLRuntime::IsPatchConstantFunction(const Function *F) {
  2336. DXASSERT_NOMSG(F != nullptr);
  2337. for (auto && p : patchConstantFunctionMap) {
  2338. if (p.second.Func == F) return true;
  2339. }
  2340. return false;
  2341. }
  2342. void CGMSHLSLRuntime::SetEntryFunction() {
  2343. if (Entry.Func == nullptr) {
  2344. DiagnosticsEngine &Diags = CGM.getDiags();
  2345. unsigned DiagID = Diags.getCustomDiagID(DiagnosticsEngine::Error,
  2346. "cannot find entry function %0");
  2347. Diags.Report(DiagID) << CGM.getCodeGenOpts().HLSLEntryFunction;
  2348. return;
  2349. }
  2350. m_pHLModule->SetEntryFunction(Entry.Func);
  2351. }
// Here the size is the CB size, so there is no need to check the type.
  2353. static unsigned AlignCBufferOffset(unsigned offset, unsigned size, llvm::Type *Ty) {
  2354. DXASSERT(!(offset & 1), "otherwise we have an invalid offset.");
  2355. bool bNeedNewRow = Ty->isArrayTy();
  2356. unsigned scalarSizeInBytes = Ty->getScalarSizeInBits() / 8;
  2357. return AlignBufferOffsetInLegacy(offset, size, scalarSizeInBytes, bNeedNewRow);
  2358. }
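// Lays out the constants of a cbuffer: user-specified offsets are honored
// first, then the remaining constants are packed after them using the legacy
// cbuffer alignment rules. Returns the resulting cbuffer size.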
  2359. static unsigned AllocateDxilConstantBuffer(HLCBuffer &CB) {
  2360. unsigned offset = 0;
  2361. // Scan user allocated constants first.
  2362. // Update offset.
  2363. for (const std::unique_ptr<DxilResourceBase> &C : CB.GetConstants()) {
  2364. if (C->GetLowerBound() == UINT_MAX)
  2365. continue;
  2366. unsigned size = C->GetRangeSize();
  2367. unsigned nextOffset = size + C->GetLowerBound();
  2368. if (offset < nextOffset)
  2369. offset = nextOffset;
  2370. }
  2371. // Alloc after user allocated constants.
  2372. for (const std::unique_ptr<DxilResourceBase> &C : CB.GetConstants()) {
  2373. if (C->GetLowerBound() != UINT_MAX)
  2374. continue;
  2375. unsigned size = C->GetRangeSize();
  2376. llvm::Type *Ty = C->GetGlobalSymbol()->getType()->getPointerElementType();
  2377. // Align offset.
  2378. offset = AlignCBufferOffset(offset, size, Ty);
  2379. if (C->GetLowerBound() == UINT_MAX) {
  2380. C->SetLowerBound(offset);
  2381. }
  2382. offset += size;
  2383. }
  2384. return offset;
  2385. }
  2386. static void AllocateDxilConstantBuffers(HLModule *pHLModule) {
  2387. for (unsigned i = 0; i < pHLModule->GetCBuffers().size(); i++) {
  2388. HLCBuffer &CB = *static_cast<HLCBuffer*>(&(pHLModule->GetCBuffer(i)));
  2389. unsigned size = AllocateDxilConstantBuffer(CB);
  2390. CB.SetSize(size);
  2391. }
  2392. }
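// Replaces uses of V with NewV, but only for uses inside function F. Constant
// operator users (GEP/bitcast) are recreated as instructions so the
// replacement can be scoped to F; global variable initializers are turned
// into stores.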
  2393. static void ReplaceUseInFunction(Value *V, Value *NewV, Function *F,
  2394. IRBuilder<> &Builder) {
  2395. for (auto U = V->user_begin(); U != V->user_end(); ) {
  2396. User *user = *(U++);
  2397. if (Instruction *I = dyn_cast<Instruction>(user)) {
  2398. if (I->getParent()->getParent() == F) {
  2399. // replace use with GEP if in F
  2400. for (unsigned i = 0; i < I->getNumOperands(); i++) {
  2401. if (I->getOperand(i) == V)
  2402. I->setOperand(i, NewV);
  2403. }
  2404. }
  2405. } else {
// For a constant operator, create a local clone which uses the GEP.
// Only GEP and bitcast are supported.
  2408. if (GEPOperator *GEPOp = dyn_cast<GEPOperator>(user)) {
  2409. std::vector<Value *> idxList(GEPOp->idx_begin(), GEPOp->idx_end());
  2410. Value *NewGEP = Builder.CreateInBoundsGEP(NewV, idxList);
  2411. ReplaceUseInFunction(GEPOp, NewGEP, F, Builder);
  2412. } else if (GlobalVariable *GV = dyn_cast<GlobalVariable>(user)) {
  2413. // Change the init val into NewV with Store.
  2414. GV->setInitializer(nullptr);
  2415. Builder.CreateStore(NewV, GV);
  2416. } else {
  2417. // Must be bitcast here.
  2418. BitCastOperator *BC = cast<BitCastOperator>(user);
  2419. Value *NewBC = Builder.CreateBitCast(NewV, BC->getType());
  2420. ReplaceUseInFunction(BC, NewBC, F, Builder);
  2421. }
  2422. }
  2423. }
  2424. }
  2425. void MarkUsedFunctionForConst(Value *V, std::unordered_set<Function*> &usedFunc) {
  2426. for (auto U = V->user_begin(); U != V->user_end();) {
  2427. User *user = *(U++);
  2428. if (Instruction *I = dyn_cast<Instruction>(user)) {
  2429. Function *F = I->getParent()->getParent();
  2430. usedFunc.insert(F);
  2431. } else {
// For a constant operator, recurse into its users.
// Only GEP and bitcast are supported.
  2434. if (GEPOperator *GEPOp = dyn_cast<GEPOperator>(user)) {
  2435. MarkUsedFunctionForConst(GEPOp, usedFunc);
  2436. } else if (GlobalVariable *GV = dyn_cast<GlobalVariable>(user)) {
  2437. MarkUsedFunctionForConst(GV, usedFunc);
  2438. } else {
  2439. // Must be bitcast here.
  2440. BitCastOperator *BC = cast<BitCastOperator>(user);
  2441. MarkUsedFunctionForConst(BC, usedFunc);
  2442. }
  2443. }
  2444. }
  2445. }
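// Gets or creates the HLCreateHandle helper function for the given handle
// type. The body simply returns undef; the resource metadata is attached to
// the function so the resource can be identified later.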
  2446. static Function * GetOrCreateHLCreateHandle(HLModule &HLM, llvm::Type *HandleTy,
  2447. ArrayRef<Value*> paramList, MDNode *MD) {
  2448. SmallVector<llvm::Type *, 4> paramTyList;
  2449. for (Value *param : paramList) {
  2450. paramTyList.emplace_back(param->getType());
  2451. }
  2452. llvm::FunctionType *funcTy =
  2453. llvm::FunctionType::get(HandleTy, paramTyList, false);
  2454. llvm::Module &M = *HLM.GetModule();
  2455. Function *CreateHandle = GetOrCreateHLFunctionWithBody(M, funcTy, HLOpcodeGroup::HLCreateHandle,
  2456. /*opcode*/ 0, "");
  2457. if (CreateHandle->empty()) {
  2458. // Add body.
  2459. BasicBlock *BB =
  2460. BasicBlock::Create(CreateHandle->getContext(), "Entry", CreateHandle);
  2461. IRBuilder<> Builder(BB);
  2462. // Just return undef to make a body.
  2463. Builder.CreateRet(UndefValue::get(HandleTy));
  2464. // Mark resource attribute.
  2465. HLM.MarkDxilResourceAttrib(CreateHandle, MD);
  2466. }
  2467. return CreateHandle;
  2468. }
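// Creates the global struct (or array of struct for ConstantBuffer arrays)
// that represents a cbuffer and rewrites every use of its member globals to
// go through HLCreateHandle + HLSubscript calls in each function. Returns
// false if the cbuffer is entirely unused.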
  2469. static bool CreateCBufferVariable(HLCBuffer &CB,
  2470. HLModule &HLM, llvm::Type *HandleTy) {
  2471. bool bUsed = false;
  2472. // Build Struct for CBuffer.
  2473. SmallVector<llvm::Type*, 4> Elements;
  2474. for (const std::unique_ptr<DxilResourceBase> &C : CB.GetConstants()) {
  2475. Value *GV = C->GetGlobalSymbol();
  2476. if (GV->hasNUsesOrMore(1))
  2477. bUsed = true;
  2478. // Global variable must be pointer type.
  2479. llvm::Type *Ty = GV->getType()->getPointerElementType();
  2480. Elements.emplace_back(Ty);
  2481. }
  2482. // Don't create CBuffer variable for unused cbuffer.
  2483. if (!bUsed)
  2484. return false;
  2485. llvm::Module &M = *HLM.GetModule();
  2486. bool isCBArray = CB.GetRangeSize() != 1;
  2487. llvm::GlobalVariable *cbGV = nullptr;
  2488. llvm::Type *cbTy = nullptr;
  2489. unsigned cbIndexDepth = 0;
  2490. if (!isCBArray) {
  2491. llvm::StructType *CBStructTy =
  2492. llvm::StructType::create(Elements, CB.GetGlobalName());
  2493. cbGV = new llvm::GlobalVariable(M, CBStructTy, /*IsConstant*/ true,
  2494. llvm::GlobalValue::ExternalLinkage,
  2495. /*InitVal*/ nullptr, CB.GetGlobalName());
  2496. cbTy = cbGV->getType();
  2497. } else {
  2498. // For array of ConstantBuffer, create array of struct instead of struct of
  2499. // array.
  2500. DXASSERT(CB.GetConstants().size() == 1,
  2501. "ConstantBuffer should have 1 constant");
  2502. Value *GV = CB.GetConstants()[0]->GetGlobalSymbol();
  2503. llvm::Type *CBEltTy =
  2504. GV->getType()->getPointerElementType()->getArrayElementType();
  2505. cbIndexDepth = 1;
  2506. while (CBEltTy->isArrayTy()) {
  2507. CBEltTy = CBEltTy->getArrayElementType();
  2508. cbIndexDepth++;
  2509. }
  2510. // Add one level struct type to match normal case.
  2511. llvm::StructType *CBStructTy =
  2512. llvm::StructType::create({CBEltTy}, CB.GetGlobalName());
  2513. llvm::ArrayType *CBArrayTy =
  2514. llvm::ArrayType::get(CBStructTy, CB.GetRangeSize());
  2515. cbGV = new llvm::GlobalVariable(M, CBArrayTy, /*IsConstant*/ true,
  2516. llvm::GlobalValue::ExternalLinkage,
  2517. /*InitVal*/ nullptr, CB.GetGlobalName());
  2518. cbTy = llvm::PointerType::get(CBStructTy,
  2519. cbGV->getType()->getPointerAddressSpace());
  2520. }
  2521. CB.SetGlobalSymbol(cbGV);
  2522. llvm::Type *opcodeTy = llvm::Type::getInt32Ty(M.getContext());
  2523. llvm::Type *idxTy = opcodeTy;
  2524. Constant *zeroIdx = ConstantInt::get(opcodeTy, 0);
  2525. MDNode *MD = HLM.DxilCBufferToMDNode(CB);
  2526. Value *HandleArgs[] = { zeroIdx, cbGV, zeroIdx };
  2527. Function *CreateHandleFunc = GetOrCreateHLCreateHandle(HLM, HandleTy, HandleArgs, MD);
  2528. llvm::FunctionType *SubscriptFuncTy =
  2529. llvm::FunctionType::get(cbTy, { opcodeTy, HandleTy, idxTy}, false);
  2530. Function *subscriptFunc =
  2531. GetOrCreateHLFunction(M, SubscriptFuncTy, HLOpcodeGroup::HLSubscript,
  2532. (unsigned)HLSubscriptOpcode::CBufferSubscript);
  2533. Constant *opArg = ConstantInt::get(opcodeTy, (unsigned)HLSubscriptOpcode::CBufferSubscript);
  2534. Value *args[] = { opArg, nullptr, zeroIdx };
  2535. llvm::LLVMContext &Context = M.getContext();
  2536. llvm::Type *i32Ty = llvm::Type::getInt32Ty(Context);
  2537. Value *zero = ConstantInt::get(i32Ty, (uint64_t)0);
  2538. std::vector<Value *> indexArray(CB.GetConstants().size());
  2539. std::vector<std::unordered_set<Function*>> constUsedFuncList(CB.GetConstants().size());
  2540. for (const std::unique_ptr<DxilResourceBase> &C : CB.GetConstants()) {
  2541. Value *idx = ConstantInt::get(i32Ty, C->GetID());
  2542. indexArray[C->GetID()] = idx;
  2543. Value *GV = C->GetGlobalSymbol();
  2544. MarkUsedFunctionForConst(GV, constUsedFuncList[C->GetID()]);
  2545. }
  2546. for (Function &F : M.functions()) {
  2547. if (F.isDeclaration())
  2548. continue;
  2549. if (GetHLOpcodeGroupByName(&F) != HLOpcodeGroup::NotHL)
  2550. continue;
  2551. IRBuilder<> Builder(F.getEntryBlock().getFirstInsertionPt());
// Create an HL subscript so that all uses of the cbuffer go through it.
  2553. HandleArgs[HLOperandIndex::kCreateHandleResourceOpIdx] = cbGV;
  2554. CallInst *Handle = Builder.CreateCall(CreateHandleFunc, HandleArgs);
  2555. args[HLOperandIndex::kSubscriptObjectOpIdx] = Handle;
  2556. Instruction *cbSubscript =
  2557. cast<Instruction>(Builder.CreateCall(subscriptFunc, {args}));
  2558. // Replace constant var with GEP pGV
  2559. for (const std::unique_ptr<DxilResourceBase> &C : CB.GetConstants()) {
  2560. Value *GV = C->GetGlobalSymbol();
  2561. if (constUsedFuncList[C->GetID()].count(&F) == 0)
  2562. continue;
  2563. Value *idx = indexArray[C->GetID()];
  2564. if (!isCBArray) {
  2565. Instruction *GEP = cast<Instruction>(
  2566. Builder.CreateInBoundsGEP(cbSubscript, {zero, idx}));
  2567. // TODO: make sure the debug info is synced to GEP.
  2568. // GEP->setDebugLoc(GV);
  2569. ReplaceUseInFunction(GV, GEP, &F, Builder);
  2570. // Delete if no use in F.
  2571. if (GEP->user_empty())
  2572. GEP->eraseFromParent();
  2573. } else {
  2574. for (auto U = GV->user_begin(); U != GV->user_end();) {
  2575. User *user = *(U++);
  2576. if (user->user_empty())
  2577. continue;
  2578. Instruction *I = dyn_cast<Instruction>(user);
  2579. if (I && I->getParent()->getParent() != &F)
  2580. continue;
  2581. IRBuilder<> *instBuilder = &Builder;
  2582. unique_ptr<IRBuilder<>> B;
  2583. if (I) {
  2584. B = llvm::make_unique<IRBuilder<>>(I);
  2585. instBuilder = B.get();
  2586. }
  2587. GEPOperator *GEPOp = cast<GEPOperator>(user);
  2588. std::vector<Value *> idxList;
DXASSERT(GEPOp->getNumIndices() >= 1 + cbIndexDepth,
         "must be indexing into the ConstantBuffer array");
  2591. idxList.reserve(GEPOp->getNumIndices() - (cbIndexDepth - 1));
  2592. gep_type_iterator GI = gep_type_begin(*GEPOp),
  2593. E = gep_type_end(*GEPOp);
  2594. idxList.push_back(GI.getOperand());
// Replace the array index with 0 to index the added struct level.
  2596. idxList.push_back(zero);
  2597. GI++;
  2598. Value *arrayIdx = GI.getOperand();
  2599. GI++;
  2600. for (unsigned curIndex = 1; GI != E && curIndex < cbIndexDepth;
  2601. ++GI, ++curIndex) {
  2602. arrayIdx = instBuilder->CreateMul(
  2603. arrayIdx, Builder.getInt32(GI->getArrayNumElements()));
  2604. arrayIdx = instBuilder->CreateAdd(arrayIdx, GI.getOperand());
  2605. }
  2606. for (; GI != E; ++GI) {
  2607. idxList.push_back(GI.getOperand());
  2608. }
  2609. HandleArgs[HLOperandIndex::kCreateHandleIndexOpIdx] = arrayIdx;
  2610. CallInst *Handle =
  2611. instBuilder->CreateCall(CreateHandleFunc, HandleArgs);
  2612. args[HLOperandIndex::kSubscriptObjectOpIdx] = Handle;
  2613. args[HLOperandIndex::kSubscriptIndexOpIdx] = arrayIdx;
  2614. Instruction *cbSubscript =
  2615. cast<Instruction>(instBuilder->CreateCall(subscriptFunc, {args}));
  2616. Instruction *NewGEP = cast<Instruction>(
  2617. instBuilder->CreateInBoundsGEP(cbSubscript, idxList));
  2618. ReplaceUseInFunction(GEPOp, NewGEP, &F, *instBuilder);
  2619. }
  2620. }
  2621. }
  2622. // Delete if no use in F.
  2623. if (cbSubscript->user_empty()) {
  2624. cbSubscript->eraseFromParent();
  2625. Handle->eraseFromParent();
  2626. } else {
  2627. // merge GEP use for cbSubscript.
  2628. HLModule::MergeGepUse(cbSubscript);
  2629. }
  2630. }
  2631. return true;
  2632. }
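// Records the struct annotation for a cbuffer after layout: the cbuffer size
// plus per-field name and offset taken from the allocated constants.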
  2633. static void ConstructCBufferAnnotation(
  2634. HLCBuffer &CB, DxilTypeSystem &dxilTypeSys,
  2635. std::unordered_map<Constant *, DxilFieldAnnotation> &AnnotationMap) {
  2636. Value *GV = CB.GetGlobalSymbol();
  2637. llvm::StructType *CBStructTy =
  2638. dyn_cast<llvm::StructType>(GV->getType()->getPointerElementType());
  2639. if (!CBStructTy) {
  2640. // For Array of ConstantBuffer.
  2641. llvm::ArrayType *CBArrayTy =
  2642. cast<llvm::ArrayType>(GV->getType()->getPointerElementType());
  2643. CBStructTy = cast<llvm::StructType>(CBArrayTy->getArrayElementType());
  2644. }
  2645. DxilStructAnnotation *CBAnnotation =
  2646. dxilTypeSys.AddStructAnnotation(CBStructTy);
  2647. CBAnnotation->SetCBufferSize(CB.GetSize());
  2648. // Set fieldAnnotation for each constant var.
  2649. for (const std::unique_ptr<DxilResourceBase> &C : CB.GetConstants()) {
  2650. Constant *GV = C->GetGlobalSymbol();
  2651. DxilFieldAnnotation &fieldAnnotation =
  2652. CBAnnotation->GetFieldAnnotation(C->GetID());
  2653. fieldAnnotation = AnnotationMap[GV];
  2654. // This is after CBuffer allocation.
  2655. fieldAnnotation.SetCBufferOffset(C->GetLowerBound());
  2656. fieldAnnotation.SetFieldName(C->GetGlobalName());
  2657. }
  2658. }
  2659. static void ConstructCBuffer(
  2660. HLModule *pHLModule,
  2661. llvm::Type *CBufferType,
  2662. std::unordered_map<Constant *, DxilFieldAnnotation> &AnnotationMap) {
  2663. DxilTypeSystem &dxilTypeSys = pHLModule->GetTypeSystem();
  2664. llvm::Type *HandleTy = pHLModule->GetOP()->GetHandleType();
  2665. for (unsigned i = 0; i < pHLModule->GetCBuffers().size(); i++) {
  2666. HLCBuffer &CB = *static_cast<HLCBuffer*>(&(pHLModule->GetCBuffer(i)));
  2667. if (CB.GetConstants().size() == 0) {
// Create a fake variable for an empty cbuffer.
  2669. llvm::GlobalVariable *pGV = new llvm::GlobalVariable(
  2670. *pHLModule->GetModule(), CBufferType, true,
  2671. llvm::GlobalValue::ExternalLinkage, nullptr, CB.GetGlobalName());
  2672. CB.SetGlobalSymbol(pGV);
  2673. } else {
  2674. bool bCreated =
  2675. CreateCBufferVariable(CB, *pHLModule, HandleTy);
  2676. if (bCreated)
  2677. ConstructCBufferAnnotation(CB, dxilTypeSys, AnnotationMap);
  2678. else {
// Create a fake variable for an unused cbuffer.
  2680. llvm::GlobalVariable *pGV = new llvm::GlobalVariable(
  2681. *pHLModule->GetModule(), CBufferType, true,
  2682. llvm::GlobalValue::ExternalLinkage, nullptr, CB.GetGlobalName());
  2683. CB.SetGlobalSymbol(pGV);
  2684. }
  2685. }
// Clear the constants, which are no longer needed.
  2687. CB.GetConstants().clear();
  2688. }
  2689. }
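// Rewrites a bool vector subscript call into a direct GEP on the i1 vector,
// adding a zext on loads and an icmp ne 0 on stores to convert between the
// i1 element type and the bool register representation.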
  2690. static void ReplaceBoolVectorSubscript(CallInst *CI) {
  2691. Value *Ptr = CI->getArgOperand(0);
  2692. Value *Idx = CI->getArgOperand(1);
  2693. Value *IdxList[] = {ConstantInt::get(Idx->getType(), 0), Idx};
  2694. for (auto It = CI->user_begin(), E = CI->user_end(); It != E;) {
  2695. Instruction *user = cast<Instruction>(*(It++));
  2696. IRBuilder<> Builder(user);
  2697. Value *GEP = Builder.CreateInBoundsGEP(Ptr, IdxList);
  2698. if (LoadInst *LI = dyn_cast<LoadInst>(user)) {
  2699. Value *NewLd = Builder.CreateLoad(GEP);
  2700. Value *cast = Builder.CreateZExt(NewLd, LI->getType());
  2701. LI->replaceAllUsesWith(cast);
  2702. LI->eraseFromParent();
  2703. } else {
  2704. // Must be a store inst here.
  2705. StoreInst *SI = cast<StoreInst>(user);
  2706. Value *V = SI->getValueOperand();
  2707. Value *cast =
  2708. Builder.CreateICmpNE(V, llvm::ConstantInt::get(V->getType(), 0));
  2709. Builder.CreateStore(cast, GEP);
  2710. SI->eraseFromParent();
  2711. }
  2712. }
  2713. CI->eraseFromParent();
  2714. }
  2715. static void ReplaceBoolVectorSubscript(Function *F) {
  2716. for (auto It = F->user_begin(), E = F->user_end(); It != E; ) {
  2717. User *user = *(It++);
  2718. CallInst *CI = cast<CallInst>(user);
  2719. ReplaceBoolVectorSubscript(CI);
  2720. }
  2721. }
  2722. // Add function body for intrinsic if possible.
  2723. static Function *CreateOpFunction(llvm::Module &M, Function *F,
  2724. llvm::FunctionType *funcTy,
  2725. HLOpcodeGroup group, unsigned opcode) {
  2726. Function *opFunc = nullptr;
  2727. llvm::Type *opcodeTy = llvm::Type::getInt32Ty(M.getContext());
  2728. if (group == HLOpcodeGroup::HLIntrinsic) {
  2729. IntrinsicOp intriOp = static_cast<IntrinsicOp>(opcode);
  2730. switch (intriOp) {
  2731. case IntrinsicOp::MOP_Append:
  2732. case IntrinsicOp::MOP_Consume: {
  2733. bool bAppend = intriOp == IntrinsicOp::MOP_Append;
  2734. llvm::Type *handleTy = funcTy->getParamType(HLOperandIndex::kHandleOpIdx);
  2735. // Don't generate body for OutputStream::Append.
  2736. if (bAppend && HLModule::IsStreamOutputPtrType(handleTy)) {
  2737. opFunc = GetOrCreateHLFunction(M, funcTy, group, opcode);
  2738. break;
  2739. }
  2740. opFunc = GetOrCreateHLFunctionWithBody(M, funcTy, group, opcode,
  2741. bAppend ? "append" : "consume");
  2742. llvm::Type *counterTy = llvm::Type::getInt32Ty(M.getContext());
  2743. llvm::FunctionType *IncCounterFuncTy =
  2744. llvm::FunctionType::get(counterTy, {opcodeTy, handleTy}, false);
  2745. unsigned counterOpcode = bAppend ? (unsigned)IntrinsicOp::MOP_IncrementCounter:
  2746. (unsigned)IntrinsicOp::MOP_DecrementCounter;
  2747. Function *incCounterFunc =
  2748. GetOrCreateHLFunction(M, IncCounterFuncTy, group,
  2749. counterOpcode);
  2750. llvm::Type *idxTy = counterTy;
  2751. llvm::Type *valTy = bAppend ?
  2752. funcTy->getParamType(HLOperandIndex::kAppendValOpIndex):funcTy->getReturnType();
  2753. llvm::Type *subscriptTy = valTy;
  2754. if (!valTy->isPointerTy()) {
  2755. // Return type for subscript should be pointer type.
  2756. subscriptTy = llvm::PointerType::get(valTy, 0);
  2757. }
  2758. llvm::FunctionType *SubscriptFuncTy =
  2759. llvm::FunctionType::get(subscriptTy, {opcodeTy, handleTy, idxTy}, false);
  2760. Function *subscriptFunc =
  2761. GetOrCreateHLFunction(M, SubscriptFuncTy, HLOpcodeGroup::HLSubscript,
  2762. (unsigned)HLSubscriptOpcode::DefaultSubscript);
  2763. BasicBlock *BB = BasicBlock::Create(opFunc->getContext(), "Entry", opFunc);
  2764. IRBuilder<> Builder(BB);
  2765. auto argIter = opFunc->args().begin();
  2766. // Skip the opcode arg.
  2767. argIter++;
  2768. Argument *thisArg = argIter++;
  2769. // int counter = IncrementCounter/DecrementCounter(Buf);
  2770. Value *incCounterOpArg =
  2771. ConstantInt::get(idxTy, counterOpcode);
  2772. Value *counter =
  2773. Builder.CreateCall(incCounterFunc, {incCounterOpArg, thisArg});
  2774. // Buf[counter];
  2775. Value *subscriptOpArg = ConstantInt::get(
  2776. idxTy, (unsigned)HLSubscriptOpcode::DefaultSubscript);
  2777. Value *subscript =
  2778. Builder.CreateCall(subscriptFunc, {subscriptOpArg, thisArg, counter});
  2779. if (bAppend) {
  2780. Argument *valArg = argIter;
  2781. // Buf[counter] = val;
  2782. if (valTy->isPointerTy()) {
  2783. unsigned size = M.getDataLayout().getTypeAllocSize(subscript->getType()->getPointerElementType());
  2784. Builder.CreateMemCpy(subscript, valArg, size, 1);
  2785. } else
  2786. Builder.CreateStore(valArg, subscript);
  2787. Builder.CreateRetVoid();
  2788. } else {
  2789. // return Buf[counter];
  2790. if (valTy->isPointerTy())
  2791. Builder.CreateRet(subscript);
  2792. else {
  2793. Value *retVal = Builder.CreateLoad(subscript);
  2794. Builder.CreateRet(retVal);
  2795. }
  2796. }
  2797. } break;
  2798. case IntrinsicOp::IOP_sincos: {
  2799. opFunc = GetOrCreateHLFunctionWithBody(M, funcTy, group, opcode, "sincos");
  2800. llvm::Type *valTy = funcTy->getParamType(HLOperandIndex::kTrinaryOpSrc0Idx);
  2801. llvm::FunctionType *sinFuncTy =
  2802. llvm::FunctionType::get(valTy, {opcodeTy, valTy}, false);
  2803. unsigned sinOp = static_cast<unsigned>(IntrinsicOp::IOP_sin);
  2804. unsigned cosOp = static_cast<unsigned>(IntrinsicOp::IOP_cos);
  2805. Function *sinFunc = GetOrCreateHLFunction(M, sinFuncTy, group, sinOp);
  2806. Function *cosFunc = GetOrCreateHLFunction(M, sinFuncTy, group, cosOp);
  2807. BasicBlock *BB = BasicBlock::Create(opFunc->getContext(), "Entry", opFunc);
  2808. IRBuilder<> Builder(BB);
  2809. auto argIter = opFunc->args().begin();
  2810. // Skip the opcode arg.
  2811. argIter++;
  2812. Argument *valArg = argIter++;
  2813. Argument *sinPtrArg = argIter++;
  2814. Argument *cosPtrArg = argIter++;
  2815. Value *sinOpArg =
  2816. ConstantInt::get(opcodeTy, sinOp);
  2817. Value *sinVal = Builder.CreateCall(sinFunc, {sinOpArg, valArg});
  2818. Builder.CreateStore(sinVal, sinPtrArg);
  2819. Value *cosOpArg =
  2820. ConstantInt::get(opcodeTy, cosOp);
  2821. Value *cosVal = Builder.CreateCall(cosFunc, {cosOpArg, valArg});
  2822. Builder.CreateStore(cosVal, cosPtrArg);
  2823. // Ret.
  2824. Builder.CreateRetVoid();
  2825. } break;
  2826. default:
  2827. opFunc = GetOrCreateHLFunction(M, funcTy, group, opcode);
  2828. break;
  2829. }
  2830. }
  2831. else if (group == HLOpcodeGroup::HLExtIntrinsic) {
  2832. llvm::StringRef fnName = F->getName();
  2833. llvm::StringRef groupName = GetHLOpcodeGroupNameByAttr(F);
  2834. opFunc = GetOrCreateHLFunction(M, funcTy, group, &groupName, &fnName, opcode);
  2835. }
  2836. else {
  2837. opFunc = GetOrCreateHLFunction(M, funcTy, group, opcode);
  2838. }
  2839. // Add attribute
  2840. if (F->hasFnAttribute(Attribute::ReadNone))
  2841. opFunc->addFnAttr(Attribute::ReadNone);
  2842. if (F->hasFnAttribute(Attribute::ReadOnly))
  2843. opFunc->addFnAttr(Attribute::ReadOnly);
  2844. return opFunc;
  2845. }
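// Loads the resource object from its pointer and wraps it in an
// HLCreateHandle call, using the metadata recorded for that resource type.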
  2846. static Value *CreateHandleFromResPtr(
  2847. Value *ResPtr, HLModule &HLM, llvm::Type *HandleTy,
  2848. std::unordered_map<llvm::Type *, MDNode *> &resMetaMap,
  2849. IRBuilder<> &Builder) {
  2850. llvm::Type *objTy = ResPtr->getType()->getPointerElementType();
  2851. DXASSERT(resMetaMap.count(objTy), "cannot find resource type");
  2852. MDNode *MD = resMetaMap[objTy];
// Load to make sure the resource only has load/store uses so mem2reg can
// remove the temp resource.
  2855. Value *ldObj = Builder.CreateLoad(ResPtr);
  2856. Value *opcode = Builder.getInt32(0);
  2857. Value *args[] = {opcode, ldObj};
  2858. Function *CreateHandle = GetOrCreateHLCreateHandle(HLM, HandleTy, args, MD);
  2859. CallInst *Handle = Builder.CreateCall(CreateHandle, args);
  2860. return Handle;
  2861. }
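// Rewrites all calls to the HL intrinsic function F so that they carry the
// opcode as an explicit first argument and pass resource objects as handles
// instead of object pointers. Double subscript and bool vector subscript get
// special handling. The original function is erased afterwards.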
  2862. static void AddOpcodeParamForIntrinsic(HLModule &HLM, Function *F,
  2863. unsigned opcode, llvm::Type *HandleTy,
  2864. std::unordered_map<llvm::Type *, MDNode*> &resMetaMap) {
  2865. llvm::Module &M = *HLM.GetModule();
  2866. llvm::FunctionType *oldFuncTy = F->getFunctionType();
  2867. SmallVector<llvm::Type *, 4> paramTyList;
  2868. // Add the opcode param
  2869. llvm::Type *opcodeTy = llvm::Type::getInt32Ty(M.getContext());
  2870. paramTyList.emplace_back(opcodeTy);
  2871. paramTyList.append(oldFuncTy->param_begin(), oldFuncTy->param_end());
  2872. for (unsigned i = 1; i < paramTyList.size(); i++) {
  2873. llvm::Type *Ty = paramTyList[i];
  2874. if (Ty->isPointerTy()) {
  2875. Ty = Ty->getPointerElementType();
  2876. if (HLModule::IsHLSLObjectType(Ty) &&
// StreamOutput doesn't need a handle.
!HLModule::IsStreamOutputType(Ty)) {
// Use the handle type for object types.
// This makes sure a temp object variable is only used by createHandle.
  2881. paramTyList[i] = HandleTy;
  2882. }
  2883. }
  2884. }
  2885. HLOpcodeGroup group = hlsl::GetHLOpcodeGroup(F);
  2886. if (group == HLOpcodeGroup::HLSubscript &&
  2887. opcode == static_cast<unsigned>(HLSubscriptOpcode::VectorSubscript)) {
  2888. llvm::FunctionType *FT = F->getFunctionType();
  2889. llvm::Type *VecArgTy = FT->getParamType(0);
  2890. llvm::VectorType *VType =
  2891. cast<llvm::VectorType>(VecArgTy->getPointerElementType());
  2892. llvm::Type *Ty = VType->getElementType();
  2893. DXASSERT(Ty->isIntegerTy(), "Only bool could use VectorSubscript");
  2894. llvm::IntegerType *ITy = cast<IntegerType>(Ty);
  2895. DXASSERT_LOCALVAR(ITy, ITy->getBitWidth() == 1, "Only bool could use VectorSubscript");
  2896. // The return type is i8*.
  2897. // Replace all uses with i1*.
  2898. ReplaceBoolVectorSubscript(F);
  2899. return;
  2900. }
  2901. bool isDoubleSubscriptFunc = group == HLOpcodeGroup::HLSubscript &&
  2902. opcode == static_cast<unsigned>(HLSubscriptOpcode::DoubleSubscript);
  2903. llvm::Type *RetTy = oldFuncTy->getReturnType();
  2904. if (isDoubleSubscriptFunc) {
  2905. CallInst *doubleSub = cast<CallInst>(*F->user_begin());
  2906. // Change currentIdx type into coord type.
  2907. auto U = doubleSub->user_begin();
  2908. Value *user = *U;
  2909. CallInst *secSub = cast<CallInst>(user);
  2910. unsigned coordIdx = HLOperandIndex::kSubscriptIndexOpIdx;
// The opcode operand has not been added yet, so the index needs -1.
  2912. if (GetHLOpcodeGroupByName(secSub->getCalledFunction()) == HLOpcodeGroup::NotHL)
  2913. coordIdx -= 1;
  2914. Value *coord = secSub->getArgOperand(coordIdx);
  2915. llvm::Type *coordTy = coord->getType();
  2916. paramTyList[HLOperandIndex::kSubscriptIndexOpIdx] = coordTy;
  2917. // Add the sampleIdx or mipLevel parameter to the end.
  2918. paramTyList.emplace_back(opcodeTy);
// Change the return type to the resource return type.
// The opcode operand has not been added yet, so the index needs -1.
  2921. Value *objPtr = doubleSub->getArgOperand(HLOperandIndex::kSubscriptObjectOpIdx-1);
  2922. // Must be a GEP
  2923. GEPOperator *objGEP = cast<GEPOperator>(objPtr);
  2924. gep_type_iterator GEPIt = gep_type_begin(objGEP), E = gep_type_end(objGEP);
  2925. llvm::Type *resTy = nullptr;
  2926. while (GEPIt != E) {
  2927. if (HLModule::IsHLSLObjectType(*GEPIt)) {
  2928. resTy = *GEPIt;
  2929. break;
  2930. }
  2931. GEPIt++;
  2932. }
  2933. DXASSERT(resTy, "must find the resource type");
  2934. // Change object type to handle type.
  2935. paramTyList[HLOperandIndex::kSubscriptObjectOpIdx] = HandleTy;
// Change RetTy into a pointer to the resource return type.
  2937. RetTy = cast<StructType>(resTy)->getElementType(0)->getPointerTo();
  2938. llvm::Type *sliceTy = objGEP->getType()->getPointerElementType();
  2939. DXIL::ResourceClass RC = HLM.GetResourceClass(sliceTy);
  2940. DXIL::ResourceKind RK = HLM.GetResourceKind(sliceTy);
  2941. HLM.AddResourceTypeAnnotation(resTy, RC, RK);
  2942. }
  2943. llvm::FunctionType *funcTy =
  2944. llvm::FunctionType::get(RetTy, paramTyList, false);
  2945. Function *opFunc = CreateOpFunction(M, F, funcTy, group, opcode);
  2946. StringRef lower = hlsl::GetHLLowerStrategy(F);
  2947. if (!lower.empty())
  2948. hlsl::SetHLLowerStrategy(opFunc, lower);
  2949. for (auto user = F->user_begin(); user != F->user_end();) {
  2950. // User must be a call.
  2951. CallInst *oldCI = cast<CallInst>(*(user++));
  2952. SmallVector<Value *, 4> opcodeParamList;
  2953. Value *opcodeConst = Constant::getIntegerValue(opcodeTy, APInt(32, opcode));
  2954. opcodeParamList.emplace_back(opcodeConst);
  2955. opcodeParamList.append(oldCI->arg_operands().begin(),
  2956. oldCI->arg_operands().end());
  2957. IRBuilder<> Builder(oldCI);
  2958. if (isDoubleSubscriptFunc) {
  2959. // Change obj to the resource pointer.
  2960. Value *objVal = opcodeParamList[HLOperandIndex::kSubscriptObjectOpIdx];
  2961. GEPOperator *objGEP = cast<GEPOperator>(objVal);
  2962. SmallVector<Value *, 8> IndexList;
  2963. IndexList.append(objGEP->idx_begin(), objGEP->idx_end());
  2964. Value *lastIndex = IndexList.back();
  2965. ConstantInt *constIndex = cast<ConstantInt>(lastIndex);
DXASSERT_LOCALVAR(constIndex, constIndex->getLimitedValue() == 1, "last index must be 1");
  2967. // Remove the last index.
  2968. IndexList.pop_back();
  2969. objVal = objGEP->getPointerOperand();
  2970. if (IndexList.size() > 1)
  2971. objVal = Builder.CreateInBoundsGEP(objVal, IndexList);
  2972. Value *Handle =
  2973. CreateHandleFromResPtr(objVal, HLM, HandleTy, resMetaMap, Builder);
  2974. // Change obj to the resource pointer.
  2975. opcodeParamList[HLOperandIndex::kSubscriptObjectOpIdx] = Handle;
  2976. // Set idx and mipIdx.
  2977. Value *mipIdx = opcodeParamList[HLOperandIndex::kSubscriptIndexOpIdx];
  2978. auto U = oldCI->user_begin();
  2979. Value *user = *U;
  2980. CallInst *secSub = cast<CallInst>(user);
  2981. unsigned idxOpIndex = HLOperandIndex::kSubscriptIndexOpIdx;
  2982. if (GetHLOpcodeGroupByName(secSub->getCalledFunction()) == HLOpcodeGroup::NotHL)
  2983. idxOpIndex--;
  2984. Value *idx = secSub->getArgOperand(idxOpIndex);
DXASSERT(secSub->hasOneUse(), "subscript should only have one use");
  2986. // Add the sampleIdx or mipLevel parameter to the end.
  2987. opcodeParamList[HLOperandIndex::kSubscriptIndexOpIdx] = idx;
  2988. opcodeParamList.emplace_back(mipIdx);
  2989. // Insert new call before secSub to make sure idx is ready to use.
  2990. Builder.SetInsertPoint(secSub);
  2991. }
  2992. for (unsigned i = 1; i < opcodeParamList.size(); i++) {
  2993. Value *arg = opcodeParamList[i];
  2994. llvm::Type *Ty = arg->getType();
  2995. if (Ty->isPointerTy()) {
  2996. Ty = Ty->getPointerElementType();
  2997. if (HLModule::IsHLSLObjectType(Ty) &&
// StreamOutput doesn't need a handle.
!HLModule::IsStreamOutputType(Ty)) {
// Use the object type directly, not by pointer.
// This makes sure a temp object variable is only used by load/store.
  3002. if (GEPOperator *argGEP = dyn_cast<GEPOperator>(arg)) {
  3003. std::vector<Value*> idxList(argGEP->idx_begin(), argGEP->idx_end());
  3004. // Create instruction to avoid GEPOperator.
  3005. GetElementPtrInst *GEP = GetElementPtrInst::CreateInBounds(argGEP->getPointerOperand(),
  3006. idxList);
  3007. Builder.Insert(GEP);
  3008. arg = GEP;
  3009. }
  3010. Value *Handle = CreateHandleFromResPtr(arg, HLM, HandleTy,
  3011. resMetaMap, Builder);
  3012. opcodeParamList[i] = Handle;
  3013. }
  3014. }
  3015. }
  3016. Value *CI = Builder.CreateCall(opFunc, opcodeParamList);
  3017. if (!isDoubleSubscriptFunc) {
// Replace uses with the new call and delete the old call.
  3019. oldCI->replaceAllUsesWith(CI);
  3020. oldCI->eraseFromParent();
  3021. } else {
// For double subscript:
// replace the single user's uses with the new CI.
  3024. auto U = oldCI->user_begin();
  3025. Value *user = *U;
  3026. CallInst *secSub = cast<CallInst>(user);
  3027. secSub->replaceAllUsesWith(CI);
  3028. secSub->eraseFromParent();
  3029. oldCI->eraseFromParent();
  3030. }
  3031. }
  3032. // delete the function
  3033. F->eraseFromParent();
  3034. }
  3035. static void AddOpcodeParamForIntrinsics(HLModule &HLM
  3036. , std::vector<std::pair<Function *, unsigned>> &intrinsicMap,
  3037. std::unordered_map<llvm::Type *, MDNode*> &resMetaMap) {
  3038. llvm::Type *HandleTy = HLM.GetOP()->GetHandleType();
  3039. for (auto mapIter : intrinsicMap) {
  3040. Function *F = mapIter.first;
  3041. if (F->user_empty()) {
  3042. // delete the function
  3043. F->eraseFromParent();
  3044. continue;
  3045. }
  3046. unsigned opcode = mapIter.second;
  3047. AddOpcodeParamForIntrinsic(HLM, F, opcode, HandleTy, resMetaMap);
  3048. }
  3049. }
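// Emits the load-side conversion for a simplified bitcast: scalar-to-vec1
// splat, vector truncation to vec1, array-to-vector element-wise loads, or
// bool (i1) zero extension. Returns null if the pattern is not handled.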
  3050. static Value *CastLdValue(Value *Ptr, llvm::Type *FromTy, llvm::Type *ToTy, IRBuilder<> &Builder) {
  3051. if (ToTy->isVectorTy()) {
  3052. unsigned vecSize = ToTy->getVectorNumElements();
  3053. if (vecSize == 1 && ToTy->getVectorElementType() == FromTy) {
  3054. Value *V = Builder.CreateLoad(Ptr);
  3055. // ScalarToVec1Splat
  3056. // Change scalar into vec1.
  3057. Value *Vec1 = UndefValue::get(ToTy);
  3058. return Builder.CreateInsertElement(Vec1, V, (uint64_t)0);
  3059. } else if (FromTy->isVectorTy() && vecSize == 1) {
  3060. Value *V = Builder.CreateLoad(Ptr);
  3061. // VectorTrunc
  3062. // Change vector into vec1.
  3063. int mask[] = {0};
  3064. return Builder.CreateShuffleVector(V, V, mask);
  3065. } else if (FromTy->isArrayTy()) {
  3066. llvm::Type *FromEltTy = FromTy->getArrayElementType();
  3067. llvm::Type *ToEltTy = ToTy->getVectorElementType();
  3068. if (FromTy->getArrayNumElements() == vecSize && FromEltTy == ToEltTy) {
  3069. // ArrayToVector.
  3070. Value *NewLd = UndefValue::get(ToTy);
  3071. Value *zeroIdx = Builder.getInt32(0);
  3072. for (unsigned i = 0; i < vecSize; i++) {
  3073. Value *GEP = Builder.CreateInBoundsGEP(
  3074. Ptr, {zeroIdx, Builder.getInt32(i)});
  3075. Value *Elt = Builder.CreateLoad(GEP);
  3076. NewLd = Builder.CreateInsertElement(NewLd, Elt, i);
  3077. }
  3078. return NewLd;
  3079. }
  3080. }
  3081. } else if (FromTy == Builder.getInt1Ty()) {
  3082. Value *V = Builder.CreateLoad(Ptr);
  3083. // BoolCast
  3084. DXASSERT_NOMSG(ToTy->isIntegerTy());
  3085. return Builder.CreateZExt(V, ToTy);
  3086. }
  3087. return nullptr;
  3088. }
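// Store-side counterpart of CastLdValue: converts the value being stored back
// to the source representation, or performs element-wise stores for the
// array-to-vector case (returning null because the stores are already done).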
  3089. static Value *CastStValue(Value *Ptr, Value *V, llvm::Type *FromTy, llvm::Type *ToTy, IRBuilder<> &Builder) {
  3090. if (ToTy->isVectorTy()) {
  3091. unsigned vecSize = ToTy->getVectorNumElements();
  3092. if (vecSize == 1 && ToTy->getVectorElementType() == FromTy) {
  3093. // ScalarToVec1Splat
  3094. // Change vec1 back to scalar.
  3095. Value *Elt = Builder.CreateExtractElement(V, (uint64_t)0);
  3096. return Elt;
  3097. } else if (FromTy->isVectorTy() && vecSize == 1) {
  3098. // VectorTrunc
  3099. // Change vec1 into vector.
  3100. // Should not happen.
  3101. // Reported error at Sema::ImpCastExprToType.
  3102. DXASSERT_NOMSG(0);
  3103. } else if (FromTy->isArrayTy()) {
  3104. llvm::Type *FromEltTy = FromTy->getArrayElementType();
  3105. llvm::Type *ToEltTy = ToTy->getVectorElementType();
  3106. if (FromTy->getArrayNumElements() == vecSize && FromEltTy == ToEltTy) {
  3107. // ArrayToVector.
  3108. Value *zeroIdx = Builder.getInt32(0);
  3109. for (unsigned i = 0; i < vecSize; i++) {
  3110. Value *Elt = Builder.CreateExtractElement(V, i);
  3111. Value *GEP = Builder.CreateInBoundsGEP(
  3112. Ptr, {zeroIdx, Builder.getInt32(i)});
  3113. Builder.CreateStore(Elt, GEP);
  3114. }
  3115. // The store already done.
  3116. // Return null to ignore use of the return value.
  3117. return nullptr;
  3118. }
  3119. }
  3120. } else if (FromTy == Builder.getInt1Ty()) {
  3121. // BoolCast
  3122. // Change i1 to ToTy.
  3123. DXASSERT_NOMSG(ToTy->isIntegerTy());
  3124. Value *CastV = Builder.CreateICmpNE(V, ConstantInt::get(V->getType(), 0));
  3125. return CastV;
  3126. }
  3127. return nullptr;
  3128. }
  3129. static bool SimplifyBitCastLoad(LoadInst *LI, llvm::Type *FromTy, llvm::Type *ToTy, Value *Ptr) {
  3130. IRBuilder<> Builder(LI);
  3131. // Cast FromLd to ToTy.
  3132. Value *CastV = CastLdValue(Ptr, FromTy, ToTy, Builder);
  3133. if (CastV) {
  3134. LI->replaceAllUsesWith(CastV);
  3135. return true;
  3136. } else {
  3137. return false;
  3138. }
  3139. }
  3140. static bool SimplifyBitCastStore(StoreInst *SI, llvm::Type *FromTy, llvm::Type *ToTy, Value *Ptr) {
  3141. IRBuilder<> Builder(SI);
  3142. Value *V = SI->getValueOperand();
  3143. // Cast Val to FromTy.
  3144. Value *CastV = CastStValue(Ptr, V, FromTy, ToTy, Builder);
  3145. if (CastV) {
  3146. Builder.CreateStore(CastV, Ptr);
  3147. return true;
  3148. } else {
  3149. return false;
  3150. }
  3151. }
  3152. static bool SimplifyBitCastGEP(GEPOperator *GEP, llvm::Type *FromTy, llvm::Type *ToTy, Value *Ptr) {
  3153. if (ToTy->isVectorTy()) {
  3154. unsigned vecSize = ToTy->getVectorNumElements();
  3155. if (vecSize == 1 && ToTy->getVectorElementType() == FromTy) {
  3156. // ScalarToVec1Splat
  3157. GEP->replaceAllUsesWith(Ptr);
  3158. return true;
  3159. } else if (FromTy->isVectorTy() && vecSize == 1) {
  3160. // VectorTrunc
  3161. DXASSERT_NOMSG(
  3162. !isa<llvm::VectorType>(GEP->getType()->getPointerElementType()));
  3163. IRBuilder<> Builder(FromTy->getContext());
  3164. if (Instruction *I = dyn_cast<Instruction>(GEP))
  3165. Builder.SetInsertPoint(I);
  3166. std::vector<Value *> idxList(GEP->idx_begin(), GEP->idx_end());
  3167. Value *NewGEP = Builder.CreateInBoundsGEP(Ptr, idxList);
  3168. GEP->replaceAllUsesWith(NewGEP);
  3169. return true;
  3170. } else if (FromTy->isArrayTy()) {
  3171. llvm::Type *FromEltTy = FromTy->getArrayElementType();
  3172. llvm::Type *ToEltTy = ToTy->getVectorElementType();
  3173. if (FromTy->getArrayNumElements() == vecSize && FromEltTy == ToEltTy) {
  3174. // ArrayToVector.
  3175. }
  3176. }
  3177. } else if (FromTy == llvm::Type::getInt1Ty(FromTy->getContext())) {
  3178. // BoolCast
  3179. }
  3180. return false;
  3181. }
  3182. typedef SmallPtrSet<Instruction *, 4> SmallInstSet;
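// Simplifies pointer bitcasts such as %struct.T* -> <1 x float>* by rewriting
// their load/store/GEP users to operate on the original pointer, collecting
// instructions that become dead in deadInsts.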
  3183. static void SimplifyBitCast(BitCastOperator *BC, SmallInstSet &deadInsts) {
  3184. Value *Ptr = BC->getOperand(0);
  3185. llvm::Type *FromTy = Ptr->getType();
  3186. llvm::Type *ToTy = BC->getType();
  3187. if (!FromTy->isPointerTy() || !ToTy->isPointerTy())
  3188. return;
  3189. FromTy = FromTy->getPointerElementType();
  3190. ToTy = ToTy->getPointerElementType();
// Handle cases like %2 = bitcast %struct.T* %1 to <1 x float>*.
  3192. if (FromTy->isStructTy()) {
  3193. IRBuilder<> Builder(FromTy->getContext());
  3194. if (Instruction *I = dyn_cast<Instruction>(BC))
  3195. Builder.SetInsertPoint(I);
  3196. Value *zeroIdx = Builder.getInt32(0);
  3197. unsigned nestLevel = 1;
  3198. while (llvm::StructType *ST = dyn_cast<llvm::StructType>(FromTy)) {
  3199. FromTy = ST->getElementType(0);
  3200. nestLevel++;
  3201. }
  3202. std::vector<Value *> idxList(nestLevel, zeroIdx);
  3203. Ptr = Builder.CreateGEP(Ptr, idxList);
  3204. }
  3205. for (User *U : BC->users()) {
  3206. if (LoadInst *LI = dyn_cast<LoadInst>(U)) {
  3207. if (SimplifyBitCastLoad(LI, FromTy, ToTy, Ptr)) {
  3208. LI->dropAllReferences();
  3209. deadInsts.insert(LI);
  3210. }
  3211. } else if (StoreInst *SI = dyn_cast<StoreInst>(U)) {
  3212. if (SimplifyBitCastStore(SI, FromTy, ToTy, Ptr)) {
  3213. SI->dropAllReferences();
  3214. deadInsts.insert(SI);
  3215. }
  3216. } else if (GEPOperator *GEP = dyn_cast<GEPOperator>(U)) {
  3217. if (SimplifyBitCastGEP(GEP, FromTy, ToTy, Ptr))
  3218. if (Instruction *I = dyn_cast<Instruction>(GEP)) {
  3219. I->dropAllReferences();
  3220. deadInsts.insert(I);
  3221. }
  3222. } else if (dyn_cast<CallInst>(U)) {
  3223. // Skip function call.
  3224. } else if (dyn_cast<BitCastInst>(U)) {
  3225. // Skip bitcast.
  3226. } else {
DXASSERT(0, "not supported yet");
  3228. }
  3229. }
  3230. }
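// Constant folding helpers for HLSL math intrinsics: each takes matching
// float and double evaluation functions and folds calls whose arguments are
// compile-time constants, element-wise for vector arguments.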
  3231. typedef float(__cdecl *FloatUnaryEvalFuncType)(float);
  3232. typedef double(__cdecl *DoubleUnaryEvalFuncType)(double);
  3233. typedef float(__cdecl *FloatBinaryEvalFuncType)(float, float);
  3234. typedef double(__cdecl *DoubleBinaryEvalFuncType)(double, double);
  3235. static Value * EvalUnaryIntrinsic(ConstantFP *fpV,
  3236. FloatUnaryEvalFuncType floatEvalFunc,
  3237. DoubleUnaryEvalFuncType doubleEvalFunc) {
  3238. llvm::Type *Ty = fpV->getType();
  3239. Value *Result = nullptr;
  3240. if (Ty->isDoubleTy()) {
  3241. double dV = fpV->getValueAPF().convertToDouble();
  3242. Value *dResult = ConstantFP::get(Ty, doubleEvalFunc(dV));
  3243. Result = dResult;
  3244. } else {
  3245. DXASSERT_NOMSG(Ty->isFloatTy());
  3246. float fV = fpV->getValueAPF().convertToFloat();
  3247. Value *dResult = ConstantFP::get(Ty, floatEvalFunc(fV));
  3248. Result = dResult;
  3249. }
  3250. return Result;
  3251. }
  3252. static Value * EvalBinaryIntrinsic(ConstantFP *fpV0, ConstantFP *fpV1,
  3253. FloatBinaryEvalFuncType floatEvalFunc,
  3254. DoubleBinaryEvalFuncType doubleEvalFunc) {
  3255. llvm::Type *Ty = fpV0->getType();
  3256. Value *Result = nullptr;
  3257. if (Ty->isDoubleTy()) {
  3258. double dV0 = fpV0->getValueAPF().convertToDouble();
  3259. double dV1 = fpV1->getValueAPF().convertToDouble();
  3260. Value *dResult = ConstantFP::get(Ty, doubleEvalFunc(dV0, dV1));
  3261. Result = dResult;
  3262. } else {
  3263. DXASSERT_NOMSG(Ty->isFloatTy());
  3264. float fV0 = fpV0->getValueAPF().convertToFloat();
  3265. float fV1 = fpV1->getValueAPF().convertToFloat();
  3266. Value *dResult = ConstantFP::get(Ty, floatEvalFunc(fV0, fV1));
  3267. Result = dResult;
  3268. }
  3269. return Result;
  3270. }
  3271. static Value * EvalUnaryIntrinsic(CallInst *CI,
  3272. FloatUnaryEvalFuncType floatEvalFunc,
  3273. DoubleUnaryEvalFuncType doubleEvalFunc) {
  3274. Value *V = CI->getArgOperand(0);
  3275. llvm::Type *Ty = CI->getType();
  3276. Value *Result = nullptr;
  3277. if (llvm::VectorType *VT = dyn_cast<llvm::VectorType>(Ty)) {
  3278. Result = UndefValue::get(Ty);
  3279. Constant *CV = cast<Constant>(V);
  3280. IRBuilder<> Builder(CI);
  3281. for (unsigned i=0;i<VT->getNumElements();i++) {
  3282. ConstantFP *fpV = cast<ConstantFP>(CV->getAggregateElement(i));
  3283. Value *EltResult = EvalUnaryIntrinsic(fpV, floatEvalFunc, doubleEvalFunc);
  3284. Result = Builder.CreateInsertElement(Result, EltResult, i);
  3285. }
  3286. } else {
  3287. ConstantFP *fpV = cast<ConstantFP>(V);
  3288. Result = EvalUnaryIntrinsic(fpV, floatEvalFunc, doubleEvalFunc);
  3289. }
  3290. CI->replaceAllUsesWith(Result);
  3291. CI->eraseFromParent();
  3292. return Result;
  3293. }
  3294. static Value * EvalBinaryIntrinsic(CallInst *CI,
  3295. FloatBinaryEvalFuncType floatEvalFunc,
  3296. DoubleBinaryEvalFuncType doubleEvalFunc) {
  3297. Value *V0 = CI->getArgOperand(0);
  3298. Value *V1 = CI->getArgOperand(1);
  3299. llvm::Type *Ty = CI->getType();
  3300. Value *Result = nullptr;
  3301. if (llvm::VectorType *VT = dyn_cast<llvm::VectorType>(Ty)) {
  3302. Result = UndefValue::get(Ty);
  3303. Constant *CV0 = cast<Constant>(V0);
  3304. Constant *CV1 = cast<Constant>(V1);
  3305. IRBuilder<> Builder(CI);
  3306. for (unsigned i=0;i<VT->getNumElements();i++) {
  3307. ConstantFP *fpV0 = cast<ConstantFP>(CV0->getAggregateElement(i));
  3308. ConstantFP *fpV1 = cast<ConstantFP>(CV1->getAggregateElement(i));
  3309. Value *EltResult = EvalBinaryIntrinsic(fpV0, fpV1, floatEvalFunc, doubleEvalFunc);
  3310. Result = Builder.CreateInsertElement(Result, EltResult, i);
  3311. }
  3312. } else {
  3313. ConstantFP *fpV0 = cast<ConstantFP>(V0);
  3314. ConstantFP *fpV1 = cast<ConstantFP>(V1);
  3315. Result = EvalBinaryIntrinsic(fpV0, fpV1, floatEvalFunc, doubleEvalFunc);
  3316. }
  3317. CI->replaceAllUsesWith(Result);
  3318. CI->eraseFromParent();
  3319. return Result;
  3322. }
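// Tries to fold an HLSL intrinsic call with constant arguments into a
// constant; returns the folded value or null if the intrinsic is not handled.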
  3323. static Value * TryEvalIntrinsic(CallInst *CI, IntrinsicOp intriOp) {
  3324. switch (intriOp) {
  3325. case IntrinsicOp::IOP_tan: {
  3326. return EvalUnaryIntrinsic(CI, tanf, tan);
  3327. } break;
  3328. case IntrinsicOp::IOP_tanh: {
  3329. return EvalUnaryIntrinsic(CI, tanhf, tanh);
  3330. } break;
  3331. case IntrinsicOp::IOP_sin: {
  3332. return EvalUnaryIntrinsic(CI, sinf, sin);
  3333. } break;
  3334. case IntrinsicOp::IOP_sinh: {
  3335. return EvalUnaryIntrinsic(CI, sinhf, sinh);
  3336. } break;
  3337. case IntrinsicOp::IOP_cos: {
  3338. return EvalUnaryIntrinsic(CI, cosf, cos);
  3339. } break;
  3340. case IntrinsicOp::IOP_cosh: {
  3341. return EvalUnaryIntrinsic(CI, coshf, cosh);
  3342. } break;
  3343. case IntrinsicOp::IOP_asin: {
  3344. return EvalUnaryIntrinsic(CI, asinf, asin);
  3345. } break;
  3346. case IntrinsicOp::IOP_acos: {
  3347. return EvalUnaryIntrinsic(CI, acosf, acos);
  3348. } break;
  3349. case IntrinsicOp::IOP_atan: {
  3350. return EvalUnaryIntrinsic(CI, atanf, atan);
  3351. } break;
  3352. case IntrinsicOp::IOP_atan2: {
  3353. Value *V0 = CI->getArgOperand(0);
  3354. ConstantFP *fpV0 = cast<ConstantFP>(V0);
  3355. Value *V1 = CI->getArgOperand(1);
  3356. ConstantFP *fpV1 = cast<ConstantFP>(V1);
  3357. llvm::Type *Ty = CI->getType();
  3358. Value *Result = nullptr;
  3359. if (Ty->isDoubleTy()) {
  3360. double dV0 = fpV0->getValueAPF().convertToDouble();
  3361. double dV1 = fpV1->getValueAPF().convertToDouble();
  3362. Value *atanV = ConstantFP::get(CI->getType(), atan(dV0 / dV1));
  3363. CI->replaceAllUsesWith(atanV);
  3364. Result = atanV;
  3365. } else {
  3366. DXASSERT_NOMSG(Ty->isFloatTy());
  3367. float fV0 = fpV0->getValueAPF().convertToFloat();
  3368. float fV1 = fpV1->getValueAPF().convertToFloat();
3369. Value *atanV = ConstantFP::get(CI->getType(), atan2f(fV0, fV1)); // use atan2f to preserve the quadrant
  3370. CI->replaceAllUsesWith(atanV);
  3371. Result = atanV;
  3372. }
  3373. CI->eraseFromParent();
  3374. return Result;
  3375. } break;
  3376. case IntrinsicOp::IOP_sqrt: {
  3377. return EvalUnaryIntrinsic(CI, sqrtf, sqrt);
  3378. } break;
  3379. case IntrinsicOp::IOP_rsqrt: {
  3380. auto rsqrtF = [](float v) -> float { return 1.0 / sqrtf(v); };
  3381. auto rsqrtD = [](double v) -> double { return 1.0 / sqrt(v); };
  3382. return EvalUnaryIntrinsic(CI, rsqrtF, rsqrtD);
  3383. } break;
  3384. case IntrinsicOp::IOP_exp: {
  3385. return EvalUnaryIntrinsic(CI, expf, exp);
  3386. } break;
  3387. case IntrinsicOp::IOP_exp2: {
  3388. return EvalUnaryIntrinsic(CI, exp2f, exp2);
  3389. } break;
  3390. case IntrinsicOp::IOP_log: {
  3391. return EvalUnaryIntrinsic(CI, logf, log);
  3392. } break;
  3393. case IntrinsicOp::IOP_log10: {
  3394. return EvalUnaryIntrinsic(CI, log10f, log10);
  3395. } break;
  3396. case IntrinsicOp::IOP_log2: {
  3397. return EvalUnaryIntrinsic(CI, log2f, log2);
  3398. } break;
  3399. case IntrinsicOp::IOP_pow: {
  3400. return EvalBinaryIntrinsic(CI, powf, pow);
  3401. } break;
  3402. case IntrinsicOp::IOP_max: {
  3403. auto maxF = [](float a, float b) -> float { return a > b ? a:b; };
  3404. auto maxD = [](double a, double b) -> double { return a > b ? a:b; };
  3405. return EvalBinaryIntrinsic(CI, maxF, maxD);
  3406. } break;
  3407. case IntrinsicOp::IOP_min: {
  3408. auto minF = [](float a, float b) -> float { return a < b ? a:b; };
  3409. auto minD = [](double a, double b) -> double { return a < b ? a:b; };
  3410. return EvalBinaryIntrinsic(CI, minF, minD);
  3411. } break;
  3412. case IntrinsicOp::IOP_rcp: {
  3413. auto rcpF = [](float v) -> float { return 1.0 / v; };
  3414. auto rcpD = [](double v) -> double { return 1.0 / v; };
  3415. return EvalUnaryIntrinsic(CI, rcpF, rcpD);
  3416. } break;
  3417. case IntrinsicOp::IOP_ceil: {
  3418. return EvalUnaryIntrinsic(CI, ceilf, ceil);
  3419. } break;
  3420. case IntrinsicOp::IOP_floor: {
  3421. return EvalUnaryIntrinsic(CI, floorf, floor);
  3422. } break;
  3423. case IntrinsicOp::IOP_round: {
  3424. return EvalUnaryIntrinsic(CI, roundf, round);
  3425. } break;
  3426. case IntrinsicOp::IOP_trunc: {
  3427. return EvalUnaryIntrinsic(CI, truncf, trunc);
  3428. } break;
  3429. case IntrinsicOp::IOP_frac: {
3430. // frac(x) = x - floor(x); e.g. frac(3.75f) == 0.75f.
3431. auto fracF = [](float v) -> float { return v - floorf(v); };
3432. auto fracD = [](double v) -> double { return v - floor(v); };
  3438. return EvalUnaryIntrinsic(CI, fracF, fracD);
  3439. } break;
  3440. case IntrinsicOp::IOP_isnan: {
  3441. Value *V = CI->getArgOperand(0);
  3442. ConstantFP *fV = cast<ConstantFP>(V);
  3443. bool isNan = fV->getValueAPF().isNaN();
  3444. Constant *cNan = ConstantInt::get(CI->getType(), isNan ? 1 : 0);
  3445. CI->replaceAllUsesWith(cNan);
  3446. CI->eraseFromParent();
  3447. return cNan;
  3448. } break;
  3449. default:
  3450. return nullptr;
  3451. }
  3452. }
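// Per-instruction cleanup before lowering: simplify bitcasts (including
// bitcast constant expressions feeding loads and stores) and mask shift
// amounts to the scalar bit width, e.g. for i32, x << 35 becomes x << 3.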
  3453. static void SimpleTransformForHLDXIR(Instruction *I,
  3454. SmallInstSet &deadInsts) {
  3455. unsigned opcode = I->getOpcode();
  3456. switch (opcode) {
  3457. case Instruction::BitCast: {
  3458. BitCastOperator *BCI = cast<BitCastOperator>(I);
  3459. SimplifyBitCast(BCI, deadInsts);
  3460. } break;
  3461. case Instruction::Load: {
  3462. LoadInst *ldInst = cast<LoadInst>(I);
  3463. DXASSERT(!HLMatrixLower::IsMatrixType(ldInst->getType()),
  3464. "matrix load should use HL LdStMatrix");
  3465. Value *Ptr = ldInst->getPointerOperand();
  3466. if (ConstantExpr *CE = dyn_cast_or_null<ConstantExpr>(Ptr)) {
  3467. if (BitCastOperator *BCO = dyn_cast<BitCastOperator>(CE)) {
  3468. SimplifyBitCast(BCO, deadInsts);
  3469. }
  3470. }
  3471. } break;
  3472. case Instruction::Store: {
  3473. StoreInst *stInst = cast<StoreInst>(I);
  3474. Value *V = stInst->getValueOperand();
  3475. DXASSERT_LOCALVAR(V, !HLMatrixLower::IsMatrixType(V->getType()),
  3476. "matrix store should use HL LdStMatrix");
  3477. Value *Ptr = stInst->getPointerOperand();
  3478. if (ConstantExpr *CE = dyn_cast<ConstantExpr>(Ptr)) {
  3479. if (BitCastOperator *BCO = dyn_cast<BitCastOperator>(CE)) {
  3480. SimplifyBitCast(BCO, deadInsts);
  3481. }
  3482. }
  3483. } break;
  3484. case Instruction::LShr:
  3485. case Instruction::AShr:
  3486. case Instruction::Shl: {
  3487. llvm::BinaryOperator *BO = cast<llvm::BinaryOperator>(I);
  3488. Value *op2 = BO->getOperand(1);
  3489. IntegerType *Ty = cast<IntegerType>(BO->getType()->getScalarType());
  3490. unsigned bitWidth = Ty->getBitWidth();
3491. // Mask op2 into the range 0 ~ bitWidth-1 (shift amount modulo bit width)
  3492. if (ConstantInt *cOp2 = dyn_cast<ConstantInt>(op2)) {
  3493. unsigned iOp2 = cOp2->getLimitedValue();
  3494. unsigned clampedOp2 = iOp2 & (bitWidth - 1);
  3495. if (iOp2 != clampedOp2) {
  3496. BO->setOperand(1, ConstantInt::get(op2->getType(), clampedOp2));
  3497. }
  3498. } else {
  3499. Value *mask = ConstantInt::get(op2->getType(), bitWidth - 1);
  3500. IRBuilder<> Builder(I);
  3501. op2 = Builder.CreateAnd(op2, mask);
  3502. BO->setOperand(1, op2);
  3503. }
  3504. } break;
  3505. }
  3506. }
  3507. // Do simple transform to make later lower pass easier.
  3508. static void SimpleTransformForHLDXIR(llvm::Module *pM) {
  3509. SmallInstSet deadInsts;
  3510. for (Function &F : pM->functions()) {
  3511. for (BasicBlock &BB : F.getBasicBlockList()) {
  3512. for (BasicBlock::iterator Iter = BB.begin(); Iter != BB.end(); ) {
  3513. Instruction *I = (Iter++);
  3514. if (deadInsts.count(I))
  3515. continue; // Skip dead instructions
  3516. SimpleTransformForHLDXIR(I, deadInsts);
  3517. }
  3518. }
  3519. }
  3520. for (Instruction * I : deadInsts)
  3521. I->dropAllReferences();
  3522. for (Instruction * I : deadInsts)
  3523. I->eraseFromParent();
  3524. deadInsts.clear();
  3525. for (GlobalVariable &GV : pM->globals()) {
  3526. if (dxilutil::IsStaticGlobal(&GV)) {
  3527. for (User *U : GV.users()) {
  3528. if (BitCastOperator *BCO = dyn_cast<BitCastOperator>(U)) {
  3529. SimplifyBitCast(BCO, deadInsts);
  3530. }
  3531. }
  3532. }
  3533. }
  3534. for (Instruction * I : deadInsts)
  3535. I->dropAllReferences();
  3536. for (Instruction * I : deadInsts)
  3537. I->eraseFromParent();
  3538. }
  3539. // Clone shader entry function to be called by other functions.
  3540. // The original function will be used as shader entry.
  3541. static void CloneShaderEntry(Function *ShaderF, StringRef EntryName,
  3542. HLModule &HLM) {
  3543. // Use mangled name for cloned one.
  3544. Function *F = Function::Create(ShaderF->getFunctionType(),
  3545. GlobalValue::LinkageTypes::ExternalLinkage,
  3546. "", HLM.GetModule());
  3547. F->takeName(ShaderF);
3548. // Rename the original shader entry to its unmangled name.
  3549. ShaderF->setName(EntryName);
  3550. SmallVector<ReturnInst *, 2> Returns;
  3551. ValueToValueMapTy vmap;
  3552. // Map params.
  3553. auto entryParamIt = F->arg_begin();
  3554. for (Argument &param : ShaderF->args()) {
  3555. vmap[&param] = (entryParamIt++);
  3556. }
3557. llvm::CloneFunctionInto(F, ShaderF, vmap, /*ModuleLevelChanges*/ false,
  3558. Returns);
  3559. // Copy function annotation.
  3560. DxilFunctionAnnotation *shaderAnnot = HLM.GetFunctionAnnotation(ShaderF);
  3561. DxilFunctionAnnotation *annot = HLM.AddFunctionAnnotation(F);
  3562. DxilParameterAnnotation &retAnnot = shaderAnnot->GetRetTypeAnnotation();
  3563. DxilParameterAnnotation &cloneRetAnnot = annot->GetRetTypeAnnotation();
  3564. cloneRetAnnot = retAnnot;
  3565. // Clear semantic for cloned one.
  3566. cloneRetAnnot.SetSemanticString("");
  3567. cloneRetAnnot.SetSemanticIndexVec({});
  3568. for (unsigned i = 0; i < shaderAnnot->GetNumParameters(); i++) {
  3569. DxilParameterAnnotation &cloneParamAnnot = annot->GetParameterAnnotation(i);
  3570. DxilParameterAnnotation &paramAnnot =
  3571. shaderAnnot->GetParameterAnnotation(i);
  3572. cloneParamAnnot = paramAnnot;
  3573. // Clear semantic for cloned one.
  3574. cloneParamAnnot.SetSemanticString("");
  3575. cloneParamAnnot.SetSemanticIndexVec({});
  3576. }
  3577. }
3578. // For a case like:
3579. //   cbuffer A {
3580. //     float a;
3581. //     int b;
3582. //   }
3583. //
3584. //   const static struct {
3585. //     float a;
3586. //     int b;
3587. //   } ST = { a, b };
3588. // Replace users of ST with a and b.
  3589. static bool ReplaceConstStaticGlobalUser(GEPOperator *GEP,
  3590. std::vector<Constant *> &InitList,
  3591. IRBuilder<> &Builder) {
  3592. if (GEP->getNumIndices() < 2) {
3593. // Doesn't index into a struct field; nothing to replace.
  3594. return false;
  3595. }
  3596. SmallVector<Value *, 4> idxList;
  3597. auto iter = GEP->idx_begin();
  3598. idxList.emplace_back(*(iter++));
  3599. ConstantInt *subIdx = dyn_cast<ConstantInt>(*(iter++));
  3600. DXASSERT(subIdx, "else dynamic indexing on struct field");
  3601. unsigned subIdxImm = subIdx->getLimitedValue();
  3602. DXASSERT(subIdxImm < InitList.size(), "else struct index out of bound");
  3603. Constant *subPtr = InitList[subIdxImm];
  3604. // Move every idx to idxList except idx for InitList.
  3605. while (iter != GEP->idx_end()) {
  3606. idxList.emplace_back(*(iter++));
  3607. }
  3608. Value *NewGEP = Builder.CreateGEP(subPtr, idxList);
  3609. GEP->replaceAllUsesWith(NewGEP);
  3610. return true;
  3611. }
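// For each const static global recorded during codegen, rewrite its GEP users
// to point directly at the constants captured from the cbuffer init list.
// Once every user has been rewritten, the matching constructor is emptied to a
// bare ret since it no longer has any work to do.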
  3612. static void ReplaceConstStaticGlobals(
  3613. std::unordered_map<GlobalVariable *, std::vector<Constant *>>
  3614. &staticConstGlobalInitListMap,
  3615. std::unordered_map<GlobalVariable *, Function *>
  3616. &staticConstGlobalCtorMap) {
  3617. for (auto &iter : staticConstGlobalInitListMap) {
  3618. GlobalVariable *GV = iter.first;
  3619. std::vector<Constant *> &InitList = iter.second;
  3620. LLVMContext &Ctx = GV->getContext();
  3621. // Do the replace.
  3622. bool bPass = true;
  3623. for (User *U : GV->users()) {
  3624. IRBuilder<> Builder(Ctx);
  3625. if (GetElementPtrInst *GEPInst = dyn_cast<GetElementPtrInst>(U)) {
  3626. Builder.SetInsertPoint(GEPInst);
  3627. bPass &= ReplaceConstStaticGlobalUser(cast<GEPOperator>(GEPInst), InitList, Builder);
  3628. } else if (GEPOperator *GEP = dyn_cast<GEPOperator>(U)) {
  3629. bPass &= ReplaceConstStaticGlobalUser(GEP, InitList, Builder);
  3630. } else {
  3631. DXASSERT(false, "invalid user of const static global");
  3632. }
  3633. }
  3634. // Clear the Ctor which is useless now.
  3635. if (bPass) {
  3636. Function *Ctor = staticConstGlobalCtorMap[GV];
  3637. Ctor->getBasicBlockList().clear();
  3638. BasicBlock *Entry = BasicBlock::Create(Ctx, "", Ctor);
  3639. IRBuilder<> Builder(Entry);
  3640. Builder.CreateRetVoid();
  3641. }
  3642. }
  3643. }
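// Scan a global constructor's body: if it consists only of constant stores
// into one array-typed global (plus a return), build that global's initializer
// from the stored constants and return true; otherwise return false and leave
// the constructor untouched.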
  3644. bool BuildImmInit(Function *Ctor) {
  3645. GlobalVariable *GV = nullptr;
  3646. SmallVector<Constant *, 4> ImmList;
  3647. bool allConst = true;
  3648. for (inst_iterator I = inst_begin(Ctor), E = inst_end(Ctor); I != E; ++I) {
  3649. if (StoreInst *SI = dyn_cast<StoreInst>(&(*I))) {
  3650. Value *V = SI->getValueOperand();
  3651. if (!isa<Constant>(V) || V->getType()->isPointerTy()) {
  3652. allConst = false;
  3653. break;
  3654. }
  3655. ImmList.emplace_back(cast<Constant>(V));
  3656. Value *Ptr = SI->getPointerOperand();
  3657. if (GEPOperator *GepOp = dyn_cast<GEPOperator>(Ptr)) {
  3658. Ptr = GepOp->getPointerOperand();
  3659. if (GlobalVariable *pGV = dyn_cast<GlobalVariable>(Ptr)) {
  3660. if (GV == nullptr)
  3661. GV = pGV;
  3662. else {
  3663. DXASSERT(GV == pGV, "else pointer mismatch");
  3664. }
  3665. }
  3666. }
  3667. } else {
  3668. if (!isa<ReturnInst>(*I)) {
  3669. allConst = false;
  3670. break;
  3671. }
  3672. }
  3673. }
  3674. if (!allConst)
  3675. return false;
  3676. if (!GV)
  3677. return false;
  3678. llvm::Type *Ty = GV->getType()->getElementType();
  3679. llvm::ArrayType *AT = dyn_cast<llvm::ArrayType>(Ty);
  3680. // TODO: support other types.
  3681. if (!AT)
  3682. return false;
  3683. if (ImmList.size() != AT->getNumElements())
  3684. return false;
  3685. Constant *Init = llvm::ConstantArray::get(AT, ImmList);
  3686. GV->setInitializer(Init);
  3687. return true;
  3688. }
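// Walk the named ctor list (llvm.global_ctors): for each initializer function
// called from a constructor, either fold it into an immediate initializer via
// BuildImmInit or, failing that, emit a call to it at the given insert point
// in the entry function; finally erase the ctor list global.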
  3689. void ProcessCtorFunctions(llvm::Module &M, StringRef globalName,
  3690. Instruction *InsertPt) {
  3691. // add global call to entry func
  3692. GlobalVariable *GV = M.getGlobalVariable(globalName);
  3693. if (GV) {
  3694. if (ConstantArray *CA = dyn_cast<ConstantArray>(GV->getInitializer())) {
  3695. IRBuilder<> Builder(InsertPt);
  3696. for (User::op_iterator i = CA->op_begin(), e = CA->op_end(); i != e;
  3697. ++i) {
  3698. if (isa<ConstantAggregateZero>(*i))
  3699. continue;
  3700. ConstantStruct *CS = cast<ConstantStruct>(*i);
  3701. if (isa<ConstantPointerNull>(CS->getOperand(1)))
  3702. continue;
  3703. // Must have a function or null ptr.
  3704. if (!isa<Function>(CS->getOperand(1)))
  3705. continue;
  3706. Function *Ctor = cast<Function>(CS->getOperand(1));
  3707. DXASSERT(Ctor->getReturnType()->isVoidTy() && Ctor->arg_size() == 0,
  3708. "function type must be void (void)");
  3709. for (inst_iterator I = inst_begin(Ctor), E = inst_end(Ctor); I != E;
  3710. ++I) {
  3711. if (CallInst *CI = dyn_cast<CallInst>(&(*I))) {
  3712. Function *F = CI->getCalledFunction();
3713. // Try to build an immediate initializer.
3714. // If that fails, add a global call to the entry function.
  3715. if (BuildImmInit(F) == false) {
  3716. Builder.CreateCall(F);
  3717. }
  3718. } else {
  3719. DXASSERT(isa<ReturnInst>(&(*I)),
  3720. "else invalid Global constructor function");
  3721. }
  3722. }
  3723. }
  3724. // remove the GV
  3725. GV->eraseFromParent();
  3726. }
  3727. }
  3728. }
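// Attach the patch constant function named by the HS entry's
// [patchconstantfunc] attribute to the entry's DXIL function properties.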
  3729. void CGMSHLSLRuntime::SetPatchConstantFunction(const EntryFunctionInfo &EntryFunc) {
  3730. auto AttrsIter = HSEntryPatchConstantFuncAttr.find(EntryFunc.Func);
  3731. DXASSERT(AttrsIter != HSEntryPatchConstantFuncAttr.end(),
  3732. "we have checked this in AddHLSLFunctionInfo()");
  3733. SetPatchConstantFunctionWithAttr(Entry, AttrsIter->second);
  3734. }
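// Resolve the named patch constant function, diagnosing a missing function or
// ambiguous overloads, rejecting inout parameters, and checking that its
// input/output control point counts match the hull shader entry.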
  3735. void CGMSHLSLRuntime::SetPatchConstantFunctionWithAttr(
  3736. const EntryFunctionInfo &EntryFunc,
  3737. const clang::HLSLPatchConstantFuncAttr *PatchConstantFuncAttr) {
  3738. StringRef funcName = PatchConstantFuncAttr->getFunctionName();
  3739. auto Entry = patchConstantFunctionMap.find(funcName);
  3740. if (Entry == patchConstantFunctionMap.end()) {
  3741. DiagnosticsEngine &Diags = CGM.getDiags();
  3742. unsigned DiagID =
  3743. Diags.getCustomDiagID(DiagnosticsEngine::Error,
  3744. "Cannot find patchconstantfunc %0.");
  3745. Diags.Report(PatchConstantFuncAttr->getLocation(), DiagID)
  3746. << funcName;
  3747. return;
  3748. }
  3749. if (Entry->second.NumOverloads != 1) {
  3750. DiagnosticsEngine &Diags = CGM.getDiags();
  3751. unsigned DiagID =
  3752. Diags.getCustomDiagID(DiagnosticsEngine::Warning,
  3753. "Multiple overloads of patchconstantfunc %0.");
  3754. unsigned NoteID =
  3755. Diags.getCustomDiagID(DiagnosticsEngine::Note,
  3756. "This overload was selected.");
  3757. Diags.Report(PatchConstantFuncAttr->getLocation(), DiagID)
  3758. << funcName;
  3759. Diags.Report(Entry->second.SL, NoteID);
  3760. }
  3761. Function *patchConstFunc = Entry->second.Func;
  3762. DxilFunctionProps *HSProps = &m_pHLModule->GetDxilFunctionProps(EntryFunc.Func);
  3763. DXASSERT(HSProps != nullptr,
  3764. " else AddHLSLFunctionInfo did not save the dxil function props for the "
  3765. "HS entry.");
  3766. HSProps->ShaderProps.HS.patchConstantFunc = patchConstFunc;
  3767. DXASSERT_NOMSG(patchConstantFunctionPropsMap.count(patchConstFunc));
  3768. // Check no inout parameter for patch constant function.
  3769. DxilFunctionAnnotation *patchConstFuncAnnotation =
  3770. m_pHLModule->GetFunctionAnnotation(patchConstFunc);
  3771. for (unsigned i = 0; i < patchConstFuncAnnotation->GetNumParameters(); i++) {
  3772. if (patchConstFuncAnnotation->GetParameterAnnotation(i)
  3773. .GetParamInputQual() == DxilParamInputQual::Inout) {
  3774. DiagnosticsEngine &Diags = CGM.getDiags();
  3775. unsigned DiagID = Diags.getCustomDiagID(
  3776. DiagnosticsEngine::Error,
  3777. "Patch Constant function %0 should not have inout param.");
  3778. Diags.Report(Entry->second.SL, DiagID) << funcName;
  3779. }
  3780. }
  3781. // Input/Output control point validation.
  3782. if (patchConstantFunctionPropsMap.count(patchConstFunc)) {
  3783. const DxilFunctionProps &patchProps =
  3784. *patchConstantFunctionPropsMap[patchConstFunc];
  3785. if (patchProps.ShaderProps.HS.inputControlPoints != 0 &&
  3786. patchProps.ShaderProps.HS.inputControlPoints !=
  3787. HSProps->ShaderProps.HS.inputControlPoints) {
  3788. DiagnosticsEngine &Diags = CGM.getDiags();
  3789. unsigned DiagID =
  3790. Diags.getCustomDiagID(DiagnosticsEngine::Error,
  3791. "Patch constant function's input patch input "
  3792. "should have %0 elements, but has %1.");
  3793. Diags.Report(Entry->second.SL, DiagID)
  3794. << HSProps->ShaderProps.HS.inputControlPoints
  3795. << patchProps.ShaderProps.HS.inputControlPoints;
  3796. }
  3797. if (patchProps.ShaderProps.HS.outputControlPoints != 0 &&
  3798. patchProps.ShaderProps.HS.outputControlPoints !=
  3799. HSProps->ShaderProps.HS.outputControlPoints) {
  3800. DiagnosticsEngine &Diags = CGM.getDiags();
  3801. unsigned DiagID = Diags.getCustomDiagID(
  3802. DiagnosticsEngine::Error,
  3803. "Patch constant function's output patch input "
  3804. "should have %0 elements, but has %1.");
  3805. Diags.Report(Entry->second.SL, DiagID)
  3806. << HSProps->ShaderProps.HS.outputControlPoints
  3807. << patchProps.ShaderProps.HS.outputControlPoints;
  3808. }
  3809. }
  3810. }
3811. // Returns true if a global value is being updated.
  3812. static bool GlobalHasStoreUserRec(Value *V, std::set<Value *> &visited) {
  3813. bool isWriteEnabled = false;
  3814. if (V && visited.find(V) == visited.end()) {
  3815. visited.insert(V);
  3816. for (User *U : V->users()) {
  3817. if (isa<StoreInst>(U)) {
  3818. return true;
  3819. } else if (CallInst* CI = dyn_cast<CallInst>(U)) {
  3820. Function *F = CI->getCalledFunction();
  3821. if (!F->isIntrinsic()) {
  3822. HLOpcodeGroup hlGroup = GetHLOpcodeGroup(F);
  3823. switch (hlGroup) {
  3824. case HLOpcodeGroup::NotHL:
  3825. return true;
  3826. case HLOpcodeGroup::HLMatLoadStore:
  3827. {
  3828. HLMatLoadStoreOpcode opCode = static_cast<HLMatLoadStoreOpcode>(hlsl::GetHLOpcode(CI));
  3829. if (opCode == HLMatLoadStoreOpcode::ColMatStore || opCode == HLMatLoadStoreOpcode::RowMatStore)
  3830. return true;
  3831. break;
  3832. }
  3833. case HLOpcodeGroup::HLCast:
  3834. case HLOpcodeGroup::HLSubscript:
  3835. if (GlobalHasStoreUserRec(U, visited))
  3836. return true;
  3837. break;
  3838. default:
  3839. break;
  3840. }
  3841. }
  3842. } else if (isa<GEPOperator>(U) || isa<PHINode>(U) || isa<SelectInst>(U)) {
  3843. if (GlobalHasStoreUserRec(U, visited))
  3844. return true;
  3845. }
  3846. }
  3847. }
  3848. return isWriteEnabled;
  3849. }
3850. // Returns true if any direct user of the global is a store instruction;
3851. // otherwise recurses through the remaining users and checks whether any
3852. // GEP (or cast/subscript) in turn has a store instruction as a user.
  3853. static bool GlobalHasStoreUser(GlobalVariable *GV) {
  3854. std::set<Value *> visited;
  3855. Value *V = cast<Value>(GV);
  3856. return GlobalHasStoreUserRec(V, visited);
  3857. }
  3858. static GlobalVariable *CreateStaticGlobal(llvm::Module *M, GlobalVariable *GV) {
  3859. Constant *GC = M->getOrInsertGlobal(GV->getName().str() + ".static.copy",
  3860. GV->getType()->getPointerElementType());
  3861. GlobalVariable *NGV = cast<GlobalVariable>(GC);
  3862. if (GV->hasInitializer()) {
  3863. NGV->setInitializer(GV->getInitializer());
  3864. }
  3865. // static global should have internal linkage
  3866. NGV->setLinkage(GlobalValue::InternalLinkage);
  3867. return NGV;
  3868. }
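// Back-compat (/Gec) support: for every writable, non-internal global (that is
// not an HLSL object or groupshared) which is actually stored to, create an
// internal ".static.copy" clone, redirect all uses to the clone, and memcpy the
// original value into it at entry; the original global is then marked constant.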
  3869. static void CreateWriteEnabledStaticGlobals(llvm::Module *M,
  3870. llvm::Function *EF) {
  3871. std::vector<GlobalVariable *> worklist;
  3872. for (GlobalVariable &GV : M->globals()) {
  3873. if (!GV.isConstant() && GV.getLinkage() != GlobalValue::InternalLinkage &&
  3874. // skip globals which are HLSL objects or group shared
  3875. !HLModule::IsHLSLObjectType(GV.getType()->getPointerElementType()) &&
  3876. !dxilutil::IsSharedMemoryGlobal(&GV)) {
  3877. if (GlobalHasStoreUser(&GV))
  3878. worklist.emplace_back(&GV);
  3879. // TODO: Ensure that constant globals aren't using initializer
  3880. GV.setConstant(true);
  3881. }
  3882. }
  3883. IRBuilder<> Builder(
  3884. dxilutil::FirstNonAllocaInsertionPt(&EF->getEntryBlock()));
  3885. for (GlobalVariable *GV : worklist) {
  3886. GlobalVariable *NGV = CreateStaticGlobal(M, GV);
  3887. GV->replaceAllUsesWith(NGV);
  3888. // insert memcpy in all entryblocks
  3889. uint64_t size = M->getDataLayout().getTypeAllocSize(
  3890. GV->getType()->getPointerElementType());
  3891. Builder.CreateMemCpy(NGV, GV, size, 1);
  3892. }
  3893. }
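// Finalize codegen for the module: pick or clone entry functions, resolve
// patch constant functions, replace const static globals, copy clip planes to
// globals, allocate and construct cbuffers, process global constructors, add
// opcode parameters to HL intrinsics, fix linkage and inlining attributes, run
// the simple HL DXIR transforms, and apply extension semantic defines and any
// custom root signature.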
  3894. void CGMSHLSLRuntime::FinishCodeGen() {
3895. // Libraries don't have a single entry point.
  3896. if (!m_bIsLib) {
  3897. SetEntryFunction();
  3898. // If at this point we haven't determined the entry function it's an error.
  3899. if (m_pHLModule->GetEntryFunction() == nullptr) {
  3900. assert(CGM.getDiags().hasErrorOccurred() &&
  3901. "else SetEntryFunction should have reported this condition");
  3902. return;
  3903. }
  3904. // In back-compat mode (with /Gec flag) create a static global for each const global
  3905. // to allow writing to it.
3906. // TODO: Verify the behavior of static globals in the hull shader
  3907. if(CGM.getLangOpts().EnableBackCompatMode && CGM.getLangOpts().HLSLVersion <= 2016)
  3908. CreateWriteEnabledStaticGlobals(m_pHLModule->GetModule(), m_pHLModule->GetEntryFunction());
  3909. if (m_pHLModule->GetShaderModel()->IsHS()) {
  3910. SetPatchConstantFunction(Entry);
  3911. }
  3912. } else {
  3913. for (auto &it : entryFunctionMap) {
  3914. CloneShaderEntry(it.second.Func, it.getKey(), *m_pHLModule);
  3915. auto AttrIter = HSEntryPatchConstantFuncAttr.find(it.second.Func);
  3916. if (AttrIter != HSEntryPatchConstantFuncAttr.end()) {
  3917. SetPatchConstantFunctionWithAttr(it.second, AttrIter->second);
  3918. }
  3919. }
  3920. }
  3921. ReplaceConstStaticGlobals(staticConstGlobalInitListMap,
  3922. staticConstGlobalCtorMap);
  3923. // Create copy for clip plane.
  3924. for (Function *F : clipPlaneFuncList) {
  3925. DxilFunctionProps &props = m_pHLModule->GetDxilFunctionProps(F);
  3926. IRBuilder<> Builder(F->getEntryBlock().getFirstInsertionPt());
  3927. for (unsigned i = 0; i < DXIL::kNumClipPlanes; i++) {
  3928. Value *clipPlane = props.ShaderProps.VS.clipPlanes[i];
  3929. if (!clipPlane)
  3930. continue;
  3931. if (m_bDebugInfo) {
  3932. Builder.SetCurrentDebugLocation(debugInfoMap[clipPlane]);
  3933. }
  3934. llvm::Type *Ty = clipPlane->getType()->getPointerElementType();
  3935. // Constant *zeroInit = ConstantFP::get(Ty, 0);
  3936. GlobalVariable *GV = new llvm::GlobalVariable(
3937. TheModule, Ty, /*IsConstant*/ false, // not constant, so it can be stored to.
  3938. llvm::GlobalValue::ExternalLinkage,
  3939. /*InitVal*/ nullptr, Twine("SV_ClipPlane") + Twine(i));
  3940. Value *initVal = Builder.CreateLoad(clipPlane);
  3941. Builder.CreateStore(initVal, GV);
  3942. props.ShaderProps.VS.clipPlanes[i] = GV;
  3943. }
  3944. }
  3945. // Allocate constant buffers.
  3946. AllocateDxilConstantBuffers(m_pHLModule);
  3947. // TODO: create temp variable for constant which has store use.
  3948. // Create Global variable and type annotation for each CBuffer.
  3949. ConstructCBuffer(m_pHLModule, CBufferType, m_ConstVarAnnotationMap);
  3950. if (!m_bIsLib) {
  3951. // need this for "llvm.global_dtors"?
3952. ProcessCtorFunctions(TheModule, "llvm.global_ctors",
  3953. Entry.Func->getEntryBlock().getFirstInsertionPt());
  3954. }
  3955. // translate opcode into parameter for intrinsic functions
  3956. AddOpcodeParamForIntrinsics(*m_pHLModule, m_IntrinsicMap, resMetadataMap);
  3957. // Pin entry point and constant buffers, mark everything else internal.
  3958. for (Function &f : m_pHLModule->GetModule()->functions()) {
  3959. if (!m_bIsLib) {
  3960. if (&f == m_pHLModule->GetEntryFunction() ||
  3961. IsPatchConstantFunction(&f) || f.isDeclaration()) {
  3962. f.setLinkage(GlobalValue::LinkageTypes::ExternalLinkage);
  3963. } else {
  3964. f.setLinkage(GlobalValue::LinkageTypes::InternalLinkage);
  3965. }
  3966. }
  3967. // Skip no inline functions.
  3968. if (f.hasFnAttribute(llvm::Attribute::NoInline))
  3969. continue;
  3970. // Always inline for used functions.
  3971. if (!f.user_empty() && !f.isDeclaration())
  3972. f.addFnAttr(llvm::Attribute::AlwaysInline);
  3973. }
  3974. // Do simple transform to make later lower pass easier.
  3975. SimpleTransformForHLDXIR(m_pHLModule->GetModule());
  3976. // Handle lang extensions if provided.
  3977. if (CGM.getCodeGenOpts().HLSLExtensionsCodegen) {
  3978. // Add semantic defines for extensions if any are available.
  3979. HLSLExtensionsCodegenHelper::SemanticDefineErrorList errors =
  3980. CGM.getCodeGenOpts().HLSLExtensionsCodegen->WriteSemanticDefines(m_pHLModule->GetModule());
  3981. DiagnosticsEngine &Diags = CGM.getDiags();
  3982. for (const HLSLExtensionsCodegenHelper::SemanticDefineError& error : errors) {
  3983. DiagnosticsEngine::Level level = DiagnosticsEngine::Error;
  3984. if (error.IsWarning())
  3985. level = DiagnosticsEngine::Warning;
  3986. unsigned DiagID = Diags.getCustomDiagID(level, "%0");
  3987. Diags.Report(SourceLocation::getFromRawEncoding(error.Location()), DiagID) << error.Message();
  3988. }
  3989. // Add root signature from a #define. Overrides root signature in function attribute.
  3990. {
  3991. using Status = HLSLExtensionsCodegenHelper::CustomRootSignature::Status;
  3992. HLSLExtensionsCodegenHelper::CustomRootSignature customRootSig;
  3993. Status status = CGM.getCodeGenOpts().HLSLExtensionsCodegen->GetCustomRootSignature(&customRootSig);
  3994. if (status == Status::FOUND) {
  3995. CompileRootSignature(customRootSig.RootSignature, Diags,
  3996. SourceLocation::getFromRawEncoding(customRootSig.EncodedSourceLocation),
  3997. rootSigVer, &m_pHLModule->GetRootSignature());
  3998. }
  3999. }
  4000. }
  4001. // At this point, we have a high-level DXIL module - record this.
  4002. SetPauseResumePasses(*m_pHLModule->GetModule(), "hlsl-hlemit", "hlsl-hlensure");
  4003. }
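// Emit an HLSL builtin call normally, then, if the result is an HL intrinsic
// call whose operands are all immediate constants (and not half precision),
// try to fold it to a constant with TryEvalIntrinsic.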
  4004. RValue CGMSHLSLRuntime::EmitHLSLBuiltinCallExpr(CodeGenFunction &CGF,
  4005. const FunctionDecl *FD,
  4006. const CallExpr *E,
  4007. ReturnValueSlot ReturnValue) {
  4008. const Decl *TargetDecl = E->getCalleeDecl();
  4009. llvm::Value *Callee = CGF.EmitScalarExpr(E->getCallee());
  4010. RValue RV = CGF.EmitCall(E->getCallee()->getType(), Callee, E, ReturnValue,
  4011. TargetDecl);
  4012. if (RV.isScalar() && RV.getScalarVal() != nullptr) {
  4013. if (CallInst *CI = dyn_cast<CallInst>(RV.getScalarVal())) {
  4014. Function *F = CI->getCalledFunction();
  4015. HLOpcodeGroup group = hlsl::GetHLOpcodeGroup(F);
  4016. if (group == HLOpcodeGroup::HLIntrinsic) {
  4017. bool allOperandImm = true;
  4018. for (auto &operand : CI->arg_operands()) {
  4019. bool isImm = isa<ConstantInt>(operand) || isa<ConstantFP>(operand) ||
  4020. isa<ConstantAggregateZero>(operand) || isa<ConstantDataVector>(operand);
  4021. if (!isImm) {
  4022. allOperandImm = false;
  4023. break;
  4024. } else if (operand->getType()->isHalfTy()) {
4025. // Evaluation of half operands is not supported yet.
  4026. allOperandImm = false;
  4027. break;
  4028. }
  4029. }
  4030. if (allOperandImm) {
  4031. unsigned intrinsicOpcode;
  4032. StringRef intrinsicGroup;
  4033. hlsl::GetIntrinsicOp(FD, intrinsicOpcode, intrinsicGroup);
  4034. IntrinsicOp opcode = static_cast<IntrinsicOp>(intrinsicOpcode);
  4035. if (Value *Result = TryEvalIntrinsic(CI, opcode)) {
  4036. RV = RValue::get(Result);
  4037. }
  4038. }
  4039. }
  4040. }
  4041. }
  4042. return RV;
  4043. }
  4044. static HLOpcodeGroup GetHLOpcodeGroup(const clang::Stmt::StmtClass stmtClass) {
  4045. switch (stmtClass) {
  4046. case Stmt::CStyleCastExprClass:
  4047. case Stmt::ImplicitCastExprClass:
  4048. case Stmt::CXXFunctionalCastExprClass:
  4049. return HLOpcodeGroup::HLCast;
  4050. case Stmt::InitListExprClass:
  4051. return HLOpcodeGroup::HLInit;
  4052. case Stmt::BinaryOperatorClass:
  4053. case Stmt::CompoundAssignOperatorClass:
  4054. return HLOpcodeGroup::HLBinOp;
  4055. case Stmt::UnaryOperatorClass:
  4056. return HLOpcodeGroup::HLUnOp;
  4057. case Stmt::ExtMatrixElementExprClass:
  4058. return HLOpcodeGroup::HLSubscript;
  4059. case Stmt::CallExprClass:
  4060. return HLOpcodeGroup::HLIntrinsic;
  4061. case Stmt::ConditionalOperatorClass:
  4062. return HLOpcodeGroup::HLSelect;
  4063. default:
4064. llvm_unreachable("unsupported operation");
  4065. }
  4066. }
  4067. // NOTE: This table must match BinaryOperator::Opcode
  4068. static const HLBinaryOpcode BinaryOperatorKindMap[] = {
  4069. HLBinaryOpcode::Invalid, // PtrMemD
  4070. HLBinaryOpcode::Invalid, // PtrMemI
  4071. HLBinaryOpcode::Mul, HLBinaryOpcode::Div, HLBinaryOpcode::Rem,
  4072. HLBinaryOpcode::Add, HLBinaryOpcode::Sub, HLBinaryOpcode::Shl,
  4073. HLBinaryOpcode::Shr, HLBinaryOpcode::LT, HLBinaryOpcode::GT,
  4074. HLBinaryOpcode::LE, HLBinaryOpcode::GE, HLBinaryOpcode::EQ,
  4075. HLBinaryOpcode::NE, HLBinaryOpcode::And, HLBinaryOpcode::Xor,
  4076. HLBinaryOpcode::Or, HLBinaryOpcode::LAnd, HLBinaryOpcode::LOr,
  4077. HLBinaryOpcode::Invalid, // Assign,
  4078. // The assign part is done by matrix store
  4079. HLBinaryOpcode::Mul, // MulAssign
  4080. HLBinaryOpcode::Div, // DivAssign
  4081. HLBinaryOpcode::Rem, // RemAssign
  4082. HLBinaryOpcode::Add, // AddAssign
  4083. HLBinaryOpcode::Sub, // SubAssign
  4084. HLBinaryOpcode::Shl, // ShlAssign
  4085. HLBinaryOpcode::Shr, // ShrAssign
  4086. HLBinaryOpcode::And, // AndAssign
  4087. HLBinaryOpcode::Xor, // XorAssign
  4088. HLBinaryOpcode::Or, // OrAssign
  4089. HLBinaryOpcode::Invalid, // Comma
  4090. };
  4091. // NOTE: This table must match UnaryOperator::Opcode
  4092. static const HLUnaryOpcode UnaryOperatorKindMap[] = {
  4093. HLUnaryOpcode::PostInc, HLUnaryOpcode::PostDec,
  4094. HLUnaryOpcode::PreInc, HLUnaryOpcode::PreDec,
  4095. HLUnaryOpcode::Invalid, // AddrOf,
  4096. HLUnaryOpcode::Invalid, // Deref,
  4097. HLUnaryOpcode::Plus, HLUnaryOpcode::Minus,
  4098. HLUnaryOpcode::Not, HLUnaryOpcode::LNot,
  4099. HLUnaryOpcode::Invalid, // Real,
  4100. HLUnaryOpcode::Invalid, // Imag,
  4101. HLUnaryOpcode::Invalid, // Extension
  4102. };
  4103. static bool IsRowMajorMatrix(QualType Ty, bool bDefaultRowMajor) {
  4104. if (const AttributedType *AT = Ty->getAs<AttributedType>()) {
  4105. if (AT->getAttrKind() == AttributedType::attr_hlsl_row_major)
  4106. return true;
  4107. else if (AT->getAttrKind() == AttributedType::attr_hlsl_column_major)
  4108. return false;
  4109. else
  4110. return bDefaultRowMajor;
  4111. } else {
  4112. return bDefaultRowMajor;
  4113. }
  4114. }
  4115. static bool IsUnsigned(QualType Ty) {
  4116. Ty = Ty.getCanonicalType().getNonReferenceType();
  4117. if (hlsl::IsHLSLVecMatType(Ty))
  4118. Ty = CGHLSLRuntime::GetHLSLVecMatElementType(Ty);
  4119. if (Ty->isExtVectorType())
  4120. Ty = Ty->getAs<clang::ExtVectorType>()->getElementType();
  4121. return Ty->isUnsignedIntegerType();
  4122. }
  4123. static unsigned GetHLOpcode(const Expr *E) {
  4124. switch (E->getStmtClass()) {
  4125. case Stmt::CompoundAssignOperatorClass:
  4126. case Stmt::BinaryOperatorClass: {
  4127. const clang::BinaryOperator *binOp = cast<clang::BinaryOperator>(E);
  4128. HLBinaryOpcode binOpcode = BinaryOperatorKindMap[binOp->getOpcode()];
  4129. if (HasUnsignedOpcode(binOpcode)) {
  4130. if (IsUnsigned(binOp->getLHS()->getType())) {
  4131. binOpcode = GetUnsignedOpcode(binOpcode);
  4132. }
  4133. }
  4134. return static_cast<unsigned>(binOpcode);
  4135. }
  4136. case Stmt::UnaryOperatorClass: {
  4137. const UnaryOperator *unOp = cast<clang::UnaryOperator>(E);
  4138. HLUnaryOpcode unOpcode = UnaryOperatorKindMap[unOp->getOpcode()];
  4139. return static_cast<unsigned>(unOpcode);
  4140. }
  4141. case Stmt::ImplicitCastExprClass:
  4142. case Stmt::CStyleCastExprClass: {
  4143. const CastExpr *CE = cast<CastExpr>(E);
  4144. bool toUnsigned = IsUnsigned(E->getType());
  4145. bool fromUnsigned = IsUnsigned(CE->getSubExpr()->getType());
  4146. if (toUnsigned && fromUnsigned)
  4147. return static_cast<unsigned>(HLCastOpcode::UnsignedUnsignedCast);
  4148. else if (toUnsigned)
  4149. return static_cast<unsigned>(HLCastOpcode::ToUnsignedCast);
  4150. else if (fromUnsigned)
  4151. return static_cast<unsigned>(HLCastOpcode::FromUnsignedCast);
  4152. else
  4153. return static_cast<unsigned>(HLCastOpcode::DefaultCast);
  4154. }
  4155. default:
  4156. return 0;
  4157. }
  4158. }
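// Emit a call to an HL opcode function for the given group/opcode: the
// function's first parameter is the i32 opcode, followed by the supplied
// arguments; the HL function itself is created on demand per signature.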
  4159. static Value *
  4160. EmitHLSLMatrixOperationCallImp(CGBuilderTy &Builder, HLOpcodeGroup group,
  4161. unsigned opcode, llvm::Type *RetType,
  4162. ArrayRef<Value *> paramList, llvm::Module &M) {
  4163. SmallVector<llvm::Type *, 4> paramTyList;
  4164. // Add the opcode param
  4165. llvm::Type *opcodeTy = llvm::Type::getInt32Ty(M.getContext());
  4166. paramTyList.emplace_back(opcodeTy);
  4167. for (Value *param : paramList) {
  4168. paramTyList.emplace_back(param->getType());
  4169. }
  4170. llvm::FunctionType *funcTy =
  4171. llvm::FunctionType::get(RetType, paramTyList, false);
  4172. Function *opFunc = GetOrCreateHLFunction(M, funcTy, group, opcode);
  4173. SmallVector<Value *, 4> opcodeParamList;
  4174. Value *opcodeConst = Constant::getIntegerValue(opcodeTy, APInt(32, opcode));
  4175. opcodeParamList.emplace_back(opcodeConst);
  4176. opcodeParamList.append(paramList.begin(), paramList.end());
  4177. return Builder.CreateCall(opFunc, opcodeParamList);
  4178. }
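// Lower an HLInit over an array. A non-void return type means this is really a
// matrix init and is forwarded unchanged. Otherwise, when the initializer count
// and element types match the array exactly, each element is stored through an
// inbounds GEP (loading aggregate elements through their pointers); anything
// else falls back to the generic HL init call to be lowered later.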
  4179. static Value *EmitHLSLArrayInit(CGBuilderTy &Builder, HLOpcodeGroup group,
  4180. unsigned opcode, llvm::Type *RetType,
  4181. ArrayRef<Value *> paramList, llvm::Module &M) {
  4182. // It's a matrix init.
  4183. if (!RetType->isVoidTy())
  4184. return EmitHLSLMatrixOperationCallImp(Builder, group, opcode, RetType,
  4185. paramList, M);
  4186. Value *arrayPtr = paramList[0];
  4187. llvm::ArrayType *AT =
  4188. cast<llvm::ArrayType>(arrayPtr->getType()->getPointerElementType());
4189. // Skip the arrayPtr (the first element of paramList).
  4190. unsigned paramSize = paramList.size() - 1;
  4191. // Support simple case here.
  4192. if (paramSize == AT->getArrayNumElements()) {
  4193. bool typeMatch = true;
  4194. llvm::Type *EltTy = AT->getArrayElementType();
  4195. if (EltTy->isAggregateType()) {
  4196. // Aggregate Type use pointer in initList.
  4197. EltTy = llvm::PointerType::get(EltTy, 0);
  4198. }
  4199. for (unsigned i = 1; i < paramList.size(); i++) {
  4200. if (paramList[i]->getType() != EltTy) {
  4201. typeMatch = false;
  4202. break;
  4203. }
  4204. }
  4205. // Both size and type match.
  4206. if (typeMatch) {
  4207. bool isPtr = EltTy->isPointerTy();
  4208. llvm::Type *i32Ty = llvm::Type::getInt32Ty(EltTy->getContext());
  4209. Constant *zero = ConstantInt::get(i32Ty, 0);
  4210. for (unsigned i = 1; i < paramList.size(); i++) {
  4211. Constant *idx = ConstantInt::get(i32Ty, i - 1);
  4212. Value *GEP = Builder.CreateInBoundsGEP(arrayPtr, {zero, idx});
  4213. Value *Elt = paramList[i];
  4214. if (isPtr) {
  4215. Elt = Builder.CreateLoad(Elt);
  4216. }
  4217. Builder.CreateStore(Elt, GEP);
  4218. }
  4219. // The return value will not be used.
  4220. return nullptr;
  4221. }
  4222. }
  4223. // Other case will be lowered in later pass.
  4224. return EmitHLSLMatrixOperationCallImp(Builder, group, opcode, RetType,
  4225. paramList, M);
  4226. }
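// Recursively flatten a value into scalar elements plus their QualTypes:
// pointers are loaded (matrices via the HL matrix load), arrays and structs
// (including base classes) are walked element by element, matrices are cast to
// a row-major vector, and vectors are split with extractelement.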
  4227. void CGMSHLSLRuntime::FlattenValToInitList(CodeGenFunction &CGF, SmallVector<Value *, 4> &elts,
  4228. SmallVector<QualType, 4> &eltTys,
  4229. QualType Ty, Value *val) {
  4230. CGBuilderTy &Builder = CGF.Builder;
  4231. llvm::Type *valTy = val->getType();
  4232. if (valTy->isPointerTy()) {
  4233. llvm::Type *valEltTy = valTy->getPointerElementType();
  4234. if (valEltTy->isVectorTy() ||
  4235. valEltTy->isSingleValueType()) {
  4236. Value *ldVal = Builder.CreateLoad(val);
  4237. FlattenValToInitList(CGF, elts, eltTys, Ty, ldVal);
  4238. } else if (HLMatrixLower::IsMatrixType(valEltTy)) {
  4239. Value *ldVal = EmitHLSLMatrixLoad(Builder, val, Ty);
  4240. FlattenValToInitList(CGF, elts, eltTys, Ty, ldVal);
  4241. } else {
  4242. llvm::Type *i32Ty = llvm::Type::getInt32Ty(valTy->getContext());
  4243. Value *zero = ConstantInt::get(i32Ty, 0);
  4244. if (llvm::ArrayType *AT = dyn_cast<llvm::ArrayType>(valEltTy)) {
  4245. QualType EltTy = Ty->getAsArrayTypeUnsafe()->getElementType();
  4246. for (unsigned i = 0; i < AT->getArrayNumElements(); i++) {
  4247. Value *gepIdx = ConstantInt::get(i32Ty, i);
  4248. Value *EltPtr = Builder.CreateInBoundsGEP(val, {zero, gepIdx});
  4249. FlattenValToInitList(CGF, elts, eltTys, EltTy,EltPtr);
  4250. }
  4251. } else {
  4252. // Struct.
  4253. StructType *ST = cast<StructType>(valEltTy);
  4254. if (HLModule::IsHLSLObjectType(ST)) {
  4255. // Save object directly like basic type.
  4256. elts.emplace_back(Builder.CreateLoad(val));
  4257. eltTys.emplace_back(Ty);
  4258. } else {
  4259. RecordDecl *RD = Ty->getAsStructureType()->getDecl();
  4260. const CGRecordLayout& RL = CGF.getTypes().getCGRecordLayout(RD);
4261. // Handle base classes first.
  4262. if (const CXXRecordDecl *CXXRD = dyn_cast<CXXRecordDecl>(RD)) {
  4263. if (CXXRD->getNumBases()) {
  4264. for (const auto &I : CXXRD->bases()) {
  4265. const CXXRecordDecl *BaseDecl = cast<CXXRecordDecl>(
  4266. I.getType()->castAs<RecordType>()->getDecl());
  4267. if (BaseDecl->field_empty())
  4268. continue;
  4269. QualType parentTy = QualType(BaseDecl->getTypeForDecl(), 0);
  4270. unsigned i = RL.getNonVirtualBaseLLVMFieldNo(BaseDecl);
  4271. Value *gepIdx = ConstantInt::get(i32Ty, i);
  4272. Value *EltPtr = Builder.CreateInBoundsGEP(val, {zero, gepIdx});
  4273. FlattenValToInitList(CGF, elts, eltTys, parentTy, EltPtr);
  4274. }
  4275. }
  4276. }
  4277. for (auto fieldIter = RD->field_begin(), fieldEnd = RD->field_end();
  4278. fieldIter != fieldEnd; ++fieldIter) {
  4279. unsigned i = RL.getLLVMFieldNo(*fieldIter);
  4280. Value *gepIdx = ConstantInt::get(i32Ty, i);
  4281. Value *EltPtr = Builder.CreateInBoundsGEP(val, {zero, gepIdx});
  4282. FlattenValToInitList(CGF, elts, eltTys, fieldIter->getType(), EltPtr);
  4283. }
  4284. }
  4285. }
  4286. }
  4287. } else {
  4288. if (HLMatrixLower::IsMatrixType(valTy)) {
  4289. unsigned col, row;
  4290. llvm::Type *EltTy = HLMatrixLower::GetMatrixInfo(valTy, col, row);
4291. // All matrix values should be row major.
4292. // Init list scalars are also row major,
4293. // so the order already matches; just cast to a vector.
  4294. unsigned matSize = col * row;
  4295. bool isRowMajor = IsRowMajorMatrix(Ty, m_pHLModule->GetHLOptions().bDefaultRowMajor);
  4296. HLCastOpcode opcode = isRowMajor ? HLCastOpcode::RowMatrixToVecCast
  4297. : HLCastOpcode::ColMatrixToVecCast;
  4298. // Cast to vector.
  4299. val = EmitHLSLMatrixOperationCallImp(
  4300. Builder, HLOpcodeGroup::HLCast,
  4301. static_cast<unsigned>(opcode),
  4302. llvm::VectorType::get(EltTy, matSize), {val}, TheModule);
  4303. valTy = val->getType();
  4304. }
  4305. if (valTy->isVectorTy()) {
  4306. QualType EltTy = GetHLSLVecMatElementType(Ty);
  4307. unsigned vecSize = valTy->getVectorNumElements();
  4308. for (unsigned i = 0; i < vecSize; i++) {
  4309. Value *Elt = Builder.CreateExtractElement(val, i);
  4310. elts.emplace_back(Elt);
  4311. eltTys.emplace_back(EltTy);
  4312. }
  4313. } else {
  4314. DXASSERT(valTy->isSingleValueType(), "must be single value type here");
  4315. elts.emplace_back(val);
  4316. eltTys.emplace_back(Ty);
  4317. }
  4318. }
  4319. }
4320. // Cast elements in the init list that don't match the target type.
4321. // idx is the current element index in the init list; Ty is the target type.
  4322. static void AddMissingCastOpsInInitList(SmallVector<Value *, 4> &elts, SmallVector<QualType, 4> &eltTys, unsigned &idx, QualType Ty, CodeGenFunction &CGF) {
  4323. if (Ty->isArrayType()) {
  4324. const clang::ArrayType *AT = Ty->getAsArrayTypeUnsafe();
  4325. // Must be ConstantArrayType here.
  4326. unsigned arraySize = cast<ConstantArrayType>(AT)->getSize().getLimitedValue();
  4327. QualType EltTy = AT->getElementType();
  4328. for (unsigned i = 0; i < arraySize; i++)
  4329. AddMissingCastOpsInInitList(elts, eltTys, idx, EltTy, CGF);
  4330. } else if (IsHLSLVecType(Ty)) {
  4331. QualType EltTy = GetHLSLVecElementType(Ty);
  4332. unsigned vecSize = GetHLSLVecSize(Ty);
4333. for (unsigned i = 0; i < vecSize; i++)
  4334. AddMissingCastOpsInInitList(elts, eltTys, idx, EltTy, CGF);
  4335. } else if (IsHLSLMatType(Ty)) {
  4336. QualType EltTy = GetHLSLMatElementType(Ty);
  4337. unsigned row, col;
  4338. GetHLSLMatRowColCount(Ty, row, col);
  4339. unsigned matSize = row*col;
  4340. for (unsigned i = 0; i < matSize; i++)
  4341. AddMissingCastOpsInInitList(elts, eltTys, idx, EltTy, CGF);
  4342. } else if (Ty->isRecordType()) {
  4343. if (HLModule::IsHLSLObjectType(CGF.ConvertType(Ty))) {
  4344. // Skip hlsl object.
  4345. idx++;
  4346. } else {
  4347. const RecordType *RT = Ty->getAsStructureType();
  4348. // For CXXRecord.
  4349. if (!RT)
  4350. RT = Ty->getAs<RecordType>();
  4351. RecordDecl *RD = RT->getDecl();
4352. // Handle base classes first.
  4353. if (const CXXRecordDecl *CXXRD = dyn_cast<CXXRecordDecl>(RD)) {
  4354. if (CXXRD->getNumBases()) {
  4355. for (const auto &I : CXXRD->bases()) {
  4356. const CXXRecordDecl *BaseDecl = cast<CXXRecordDecl>(
  4357. I.getType()->castAs<RecordType>()->getDecl());
  4358. if (BaseDecl->field_empty())
  4359. continue;
  4360. QualType parentTy = QualType(BaseDecl->getTypeForDecl(), 0);
  4361. AddMissingCastOpsInInitList(elts, eltTys, idx, parentTy, CGF);
  4362. }
  4363. }
  4364. }
  4365. for (FieldDecl *field : RD->fields())
  4366. AddMissingCastOpsInInitList(elts, eltTys, idx, field->getType(), CGF);
  4367. }
  4368. }
  4369. else {
  4370. // Basic type.
  4371. Value *val = elts[idx];
  4372. llvm::Type *srcTy = val->getType();
  4373. llvm::Type *dstTy = CGF.ConvertType(Ty);
  4374. if (srcTy != dstTy) {
  4375. Instruction::CastOps castOp =
  4376. static_cast<Instruction::CastOps>(HLModule::FindCastOp(
  4377. IsUnsigned(eltTys[idx]), IsUnsigned(Ty), srcTy, dstTy));
  4378. elts[idx] = CGF.Builder.CreateCast(castOp, val, dstTy);
  4379. }
  4380. idx++;
  4381. }
  4382. }
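// Store the flattened init-list elements back through DestPtr, walking the
// destination type: vectors are rebuilt with insertelement, matrices go
// through HLMatLoadStore (with a row-to-col cast when the destination is
// column major), structs and arrays recurse per field/element, and i1 scalars
// are zero-extended to the destination's bool representation.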
  4383. static void StoreInitListToDestPtr(Value *DestPtr,
  4384. SmallVector<Value *, 4> &elts, unsigned &idx,
  4385. QualType Type, CodeGenTypes &Types, bool bDefaultRowMajor,
  4386. CGBuilderTy &Builder, llvm::Module &M) {
  4387. llvm::Type *Ty = DestPtr->getType()->getPointerElementType();
  4388. llvm::Type *i32Ty = llvm::Type::getInt32Ty(Ty->getContext());
  4389. if (Ty->isVectorTy()) {
  4390. Value *Result = UndefValue::get(Ty);
  4391. for (unsigned i = 0; i < Ty->getVectorNumElements(); i++)
  4392. Result = Builder.CreateInsertElement(Result, elts[idx + i], i);
  4393. Builder.CreateStore(Result, DestPtr);
  4394. idx += Ty->getVectorNumElements();
  4395. } else if (HLMatrixLower::IsMatrixType(Ty)) {
  4396. bool isRowMajor =
  4397. IsRowMajorMatrix(Type, bDefaultRowMajor);
  4398. unsigned row, col;
  4399. HLMatrixLower::GetMatrixInfo(Ty, col, row);
  4400. std::vector<Value *> matInitList(col * row);
  4401. for (unsigned i = 0; i < col; i++) {
  4402. for (unsigned r = 0; r < row; r++) {
  4403. unsigned matIdx = i * row + r;
  4404. matInitList[matIdx] = elts[idx + matIdx];
  4405. }
  4406. }
  4407. idx += row * col;
  4408. Value *matVal =
  4409. EmitHLSLMatrixOperationCallImp(Builder, HLOpcodeGroup::HLInit,
  4410. /*opcode*/ 0, Ty, matInitList, M);
4411. // matVal returned from HLInit is row major.
  4412. // If DestPtr is row major, just store it directly.
  4413. if (!isRowMajor) {
4414. // ColMatStore needs a col major value: cast the row major matrix to col major, then store it.
  4417. Value *colMatVal = EmitHLSLMatrixOperationCallImp(
  4418. Builder, HLOpcodeGroup::HLCast,
  4419. static_cast<unsigned>(HLCastOpcode::RowMatrixToColMatrix), Ty,
  4420. {matVal}, M);
  4421. EmitHLSLMatrixOperationCallImp(
  4422. Builder, HLOpcodeGroup::HLMatLoadStore,
  4423. static_cast<unsigned>(HLMatLoadStoreOpcode::ColMatStore), Ty,
  4424. {DestPtr, colMatVal}, M);
  4425. } else {
  4426. EmitHLSLMatrixOperationCallImp(
  4427. Builder, HLOpcodeGroup::HLMatLoadStore,
  4428. static_cast<unsigned>(HLMatLoadStoreOpcode::RowMatStore), Ty,
  4429. {DestPtr, matVal}, M);
  4430. }
  4431. } else if (Ty->isStructTy()) {
  4432. if (HLModule::IsHLSLObjectType(Ty)) {
  4433. Builder.CreateStore(elts[idx], DestPtr);
  4434. idx++;
  4435. } else {
  4436. Constant *zero = ConstantInt::get(i32Ty, 0);
  4437. const RecordType *RT = Type->getAsStructureType();
  4438. // For CXXRecord.
  4439. if (!RT)
  4440. RT = Type->getAs<RecordType>();
  4441. RecordDecl *RD = RT->getDecl();
  4442. const CGRecordLayout &RL = Types.getCGRecordLayout(RD);
4443. // Handle base classes first.
  4444. if (const CXXRecordDecl *CXXRD = dyn_cast<CXXRecordDecl>(RD)) {
  4445. if (CXXRD->getNumBases()) {
  4446. for (const auto &I : CXXRD->bases()) {
  4447. const CXXRecordDecl *BaseDecl = cast<CXXRecordDecl>(
  4448. I.getType()->castAs<RecordType>()->getDecl());
  4449. if (BaseDecl->field_empty())
  4450. continue;
  4451. QualType parentTy = QualType(BaseDecl->getTypeForDecl(), 0);
  4452. unsigned i = RL.getNonVirtualBaseLLVMFieldNo(BaseDecl);
  4453. Constant *gepIdx = ConstantInt::get(i32Ty, i);
  4454. Value *GEP = Builder.CreateInBoundsGEP(DestPtr, {zero, gepIdx});
  4455. StoreInitListToDestPtr(GEP, elts, idx, parentTy, Types,
  4456. bDefaultRowMajor, Builder, M);
  4457. }
  4458. }
  4459. }
  4460. for (FieldDecl *field : RD->fields()) {
  4461. unsigned i = RL.getLLVMFieldNo(field);
  4462. Constant *gepIdx = ConstantInt::get(i32Ty, i);
  4463. Value *GEP = Builder.CreateInBoundsGEP(DestPtr, {zero, gepIdx});
  4464. StoreInitListToDestPtr(GEP, elts, idx, field->getType(), Types,
  4465. bDefaultRowMajor, Builder, M);
  4466. }
  4467. }
  4468. } else if (Ty->isArrayTy()) {
  4469. Constant *zero = ConstantInt::get(i32Ty, 0);
  4470. QualType EltType = Type->getAsArrayTypeUnsafe()->getElementType();
  4471. for (unsigned i = 0; i < Ty->getArrayNumElements(); i++) {
  4472. Constant *gepIdx = ConstantInt::get(i32Ty, i);
  4473. Value *GEP = Builder.CreateInBoundsGEP(DestPtr, {zero, gepIdx});
  4474. StoreInitListToDestPtr(GEP, elts, idx, EltType, Types, bDefaultRowMajor,
  4475. Builder, M);
  4476. }
  4477. } else {
  4478. DXASSERT(Ty->isSingleValueType(), "invalid type");
  4479. llvm::Type *i1Ty = Builder.getInt1Ty();
  4480. Value *V = elts[idx];
  4481. if (V->getType() == i1Ty &&
  4482. DestPtr->getType()->getPointerElementType() != i1Ty) {
  4483. V = Builder.CreateZExt(V, DestPtr->getType()->getPointerElementType());
  4484. }
  4485. Builder.CreateStore(V, DestPtr);
  4486. idx++;
  4487. }
  4488. }
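// Recursively evaluate every initializer expression in an init list and
// flatten the results into EltValList/EltTyList; aggregate initializers are
// materialized into a temporary slot first.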
  4489. void CGMSHLSLRuntime::ScanInitList(CodeGenFunction &CGF, InitListExpr *E,
  4490. SmallVector<Value *, 4> &EltValList,
  4491. SmallVector<QualType, 4> &EltTyList) {
  4492. unsigned NumInitElements = E->getNumInits();
  4493. for (unsigned i = 0; i != NumInitElements; ++i) {
  4494. Expr *init = E->getInit(i);
  4495. QualType iType = init->getType();
  4496. if (InitListExpr *initList = dyn_cast<InitListExpr>(init)) {
  4497. ScanInitList(CGF, initList, EltValList, EltTyList);
  4498. } else if (CodeGenFunction::hasScalarEvaluationKind(iType)) {
  4499. llvm::Value *initVal = CGF.EmitScalarExpr(init);
  4500. FlattenValToInitList(CGF, EltValList, EltTyList, iType, initVal);
  4501. } else {
  4502. AggValueSlot Slot =
  4503. CGF.CreateAggTemp(init->getType(), "Agg.InitList.tmp");
  4504. CGF.EmitAggExpr(init, Slot);
  4505. llvm::Value *aggPtr = Slot.getAddr();
  4506. FlattenValToInitList(CGF, EltValList, EltTyList, iType, aggPtr);
  4507. }
  4508. }
  4509. }
4510. // Does the type of E match Ty?
  4511. static bool ExpTypeMatch(Expr *E, QualType Ty, ASTContext &Ctx, CodeGenTypes &Types) {
  4512. if (InitListExpr *initList = dyn_cast<InitListExpr>(E)) {
  4513. unsigned NumInitElements = initList->getNumInits();
  4514. // Skip vector and matrix type.
  4515. if (Ty->isVectorType())
  4516. return false;
  4517. if (hlsl::IsHLSLVecMatType(Ty))
  4518. return false;
  4519. if (Ty->isStructureOrClassType()) {
  4520. RecordDecl *record = Ty->castAs<RecordType>()->getDecl();
  4521. bool bMatch = true;
  4522. unsigned i = 0;
  4523. for (auto it = record->field_begin(), end = record->field_end();
  4524. it != end; it++) {
  4525. if (i == NumInitElements) {
  4526. bMatch = false;
  4527. break;
  4528. }
  4529. Expr *init = initList->getInit(i++);
  4530. QualType EltTy = it->getType();
  4531. bMatch &= ExpTypeMatch(init, EltTy, Ctx, Types);
  4532. if (!bMatch)
  4533. break;
  4534. }
  4535. bMatch &= i == NumInitElements;
  4536. if (bMatch && initList->getType()->isVoidType()) {
  4537. initList->setType(Ty);
  4538. }
  4539. return bMatch;
  4540. } else if (Ty->isArrayType() && !Ty->isIncompleteArrayType()) {
  4541. const ConstantArrayType *AT = Ctx.getAsConstantArrayType(Ty);
  4542. QualType EltTy = AT->getElementType();
  4543. unsigned size = AT->getSize().getZExtValue();
  4544. if (size != NumInitElements)
  4545. return false;
  4546. bool bMatch = true;
  4547. for (unsigned i = 0; i != NumInitElements; ++i) {
  4548. Expr *init = initList->getInit(i);
  4549. bMatch &= ExpTypeMatch(init, EltTy, Ctx, Types);
  4550. if (!bMatch)
  4551. break;
  4552. }
  4553. if (bMatch && initList->getType()->isVoidType()) {
  4554. initList->setType(Ty);
  4555. }
  4556. return bMatch;
  4557. } else {
  4558. return false;
  4559. }
  4560. } else {
  4561. llvm::Type *ExpTy = Types.ConvertType(E->getType());
  4562. llvm::Type *TargetTy = Types.ConvertType(Ty);
  4563. return ExpTy == TargetTy;
  4564. }
  4565. }
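// An init list is "trivial" when its shape already matches its type exactly.
// For static const globals this also records the constant element pointers so
// ReplaceConstStaticGlobals can later rewrite users of the global.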
  4566. bool CGMSHLSLRuntime::IsTrivalInitListExpr(CodeGenFunction &CGF,
  4567. InitListExpr *E) {
  4568. QualType Ty = E->getType();
  4569. bool result = ExpTypeMatch(E, Ty, CGF.getContext(), CGF.getTypes());
  4570. if (result) {
  4571. auto iter = staticConstGlobalInitMap.find(E);
  4572. if (iter != staticConstGlobalInitMap.end()) {
  4573. GlobalVariable * GV = iter->second;
  4574. auto &InitConstants = staticConstGlobalInitListMap[GV];
  4575. // Add Constant to InitList.
  4576. for (unsigned i=0;i<E->getNumInits();i++) {
  4577. Expr *Expr = E->getInit(i);
  4578. LValue LV = CGF.EmitLValue(Expr);
  4579. if (LV.isSimple()) {
  4580. Constant *SrcPtr = dyn_cast<Constant>(LV.getAddress());
  4581. if (SrcPtr && !isa<UndefValue>(SrcPtr)) {
  4582. InitConstants.emplace_back(SrcPtr);
  4583. continue;
  4584. }
  4585. }
4586. // Only the simple LValue with constant pointer case is supported;
4587. // other cases just go through the normal path.
  4588. InitConstants.clear();
  4589. break;
  4590. }
  4591. if (InitConstants.empty())
  4592. staticConstGlobalInitListMap.erase(GV);
  4593. else
  4594. staticConstGlobalCtorMap[GV] = CGF.CurFn;
  4595. }
  4596. }
  4597. return result;
  4598. }
  4599. Value *CGMSHLSLRuntime::EmitHLSLInitListExpr(CodeGenFunction &CGF, InitListExpr *E,
4600. // DestPtr is the destination when emitting an aggregate init; in the normal case it is null.
  4601. Value *DestPtr) {
  4602. if (DestPtr && E->getNumInits() == 1) {
  4603. llvm::Type *ExpTy = CGF.ConvertType(E->getType());
  4604. llvm::Type *TargetTy = CGF.ConvertType(E->getInit(0)->getType());
  4605. if (ExpTy == TargetTy) {
  4606. Expr *Expr = E->getInit(0);
  4607. LValue LV = CGF.EmitLValue(Expr);
  4608. if (LV.isSimple()) {
  4609. Value *SrcPtr = LV.getAddress();
  4610. SmallVector<Value *, 4> idxList;
  4611. EmitHLSLAggregateCopy(CGF, SrcPtr, DestPtr, idxList, Expr->getType(),
  4612. E->getType(), SrcPtr->getType());
  4613. return nullptr;
  4614. }
  4615. }
  4616. }
  4617. SmallVector<Value *, 4> EltValList;
  4618. SmallVector<QualType, 4> EltTyList;
  4619. ScanInitList(CGF, E, EltValList, EltTyList);
  4620. QualType ResultTy = E->getType();
  4621. unsigned idx = 0;
4622. // Create casts if needed.
  4623. AddMissingCastOpsInInitList(EltValList, EltTyList, idx, ResultTy, CGF);
  4624. DXASSERT(idx == EltValList.size(), "size must match");
  4625. llvm::Type *RetTy = CGF.ConvertType(ResultTy);
  4626. if (DestPtr) {
  4627. SmallVector<Value *, 4> ParamList;
  4628. DXASSERT_NOMSG(RetTy->isAggregateType());
  4629. ParamList.emplace_back(DestPtr);
  4630. ParamList.append(EltValList.begin(), EltValList.end());
  4631. idx = 0;
  4632. bool bDefaultRowMajor = m_pHLModule->GetHLOptions().bDefaultRowMajor;
  4633. StoreInitListToDestPtr(DestPtr, EltValList, idx, ResultTy, CGF.getTypes(),
  4634. bDefaultRowMajor, CGF.Builder, TheModule);
  4635. return nullptr;
  4636. }
  4637. if (IsHLSLVecType(ResultTy)) {
  4638. Value *Result = UndefValue::get(RetTy);
  4639. for (unsigned i = 0; i < RetTy->getVectorNumElements(); i++)
  4640. Result = CGF.Builder.CreateInsertElement(Result, EltValList[i], i);
  4641. return Result;
  4642. } else {
  4643. // Must be matrix here.
  4644. DXASSERT(IsHLSLMatType(ResultTy), "must be matrix type here.");
  4645. return EmitHLSLMatrixOperationCallImp(CGF.Builder, HLOpcodeGroup::HLInit,
  4646. /*opcode*/ 0, RetTy, EltValList,
  4647. TheModule);
  4648. }
  4649. }
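// Flatten a constant into scalar constants following the same layout rules as
// FlattenValToInitList; column-major matrix constants are reordered to row
// major so they line up with init-list order.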
  4650. static void FlatConstToList(Constant *C, SmallVector<Constant *, 4> &EltValList,
  4651. QualType Type, CodeGenTypes &Types,
  4652. bool bDefaultRowMajor) {
  4653. llvm::Type *Ty = C->getType();
  4654. if (llvm::VectorType *VT = dyn_cast<llvm::VectorType>(Ty)) {
4655. // Type is only needed for matrices; keep passing Type down to the next level.
  4656. for (unsigned i = 0; i < VT->getNumElements(); i++) {
  4657. FlatConstToList(C->getAggregateElement(i), EltValList, Type, Types,
  4658. bDefaultRowMajor);
  4659. }
  4660. } else if (HLMatrixLower::IsMatrixType(Ty)) {
  4661. bool isRowMajor = IsRowMajorMatrix(Type, bDefaultRowMajor);
  4662. // matrix type is struct { vector<Ty, row> [col] };
  4663. // Strip the struct level here.
  4664. Constant *matVal = C->getAggregateElement((unsigned)0);
  4665. const RecordType *RT = Type->getAs<RecordType>();
  4666. RecordDecl *RD = RT->getDecl();
  4667. QualType EltTy = RD->field_begin()->getType();
4668. // When scanned, init list scalars are row major.
4669. if (isRowMajor) {
4670. // Don't reorder elements for a row major value.
  4671. FlatConstToList(matVal, EltValList, EltTy, Types, bDefaultRowMajor);
  4672. } else {
  4673. // Save to tmp list.
  4674. SmallVector<Constant *, 4> matEltList;
  4675. FlatConstToList(matVal, matEltList, EltTy, Types, bDefaultRowMajor);
  4676. unsigned row, col;
  4677. HLMatrixLower::GetMatrixInfo(Ty, col, row);
  4678. // Change col major value to row major.
  4679. for (unsigned r = 0; r < row; r++)
  4680. for (unsigned c = 0; c < col; c++) {
  4681. unsigned colMajorIdx = c * row + r;
  4682. EltValList.emplace_back(matEltList[colMajorIdx]);
  4683. }
  4684. }
  4685. } else if (llvm::ArrayType *AT = dyn_cast<llvm::ArrayType>(Ty)) {
  4686. QualType EltTy = Type->getAsArrayTypeUnsafe()->getElementType();
  4687. for (unsigned i = 0; i < AT->getNumElements(); i++) {
  4688. FlatConstToList(C->getAggregateElement(i), EltValList, EltTy, Types,
  4689. bDefaultRowMajor);
  4690. }
  4691. } else if (dyn_cast<llvm::StructType>(Ty)) {
  4692. RecordDecl *RD = Type->getAsStructureType()->getDecl();
  4693. const CGRecordLayout &RL = Types.getCGRecordLayout(RD);
4694. // Handle base classes first.
  4695. if (const CXXRecordDecl *CXXRD = dyn_cast<CXXRecordDecl>(RD)) {
  4696. if (CXXRD->getNumBases()) {
  4697. for (const auto &I : CXXRD->bases()) {
  4698. const CXXRecordDecl *BaseDecl =
  4699. cast<CXXRecordDecl>(I.getType()->castAs<RecordType>()->getDecl());
  4700. if (BaseDecl->field_empty())
  4701. continue;
  4702. QualType parentTy = QualType(BaseDecl->getTypeForDecl(), 0);
  4703. unsigned i = RL.getNonVirtualBaseLLVMFieldNo(BaseDecl);
  4704. FlatConstToList(C->getAggregateElement(i), EltValList, parentTy,
  4705. Types, bDefaultRowMajor);
  4706. }
  4707. }
  4708. }
  4709. for (auto fieldIter = RD->field_begin(), fieldEnd = RD->field_end();
  4710. fieldIter != fieldEnd; ++fieldIter) {
  4711. unsigned i = RL.getLLVMFieldNo(*fieldIter);
  4712. FlatConstToList(C->getAggregateElement(i), EltValList,
  4713. fieldIter->getType(), Types, bDefaultRowMajor);
  4714. }
  4715. } else {
  4716. EltValList.emplace_back(C);
  4717. }
  4718. }
  4719. static bool ScanConstInitList(CodeGenModule &CGM, InitListExpr *E,
  4720. SmallVector<Constant *, 4> &EltValList,
  4721. CodeGenTypes &Types, bool bDefaultRowMajor) {
  4722. unsigned NumInitElements = E->getNumInits();
  4723. for (unsigned i = 0; i != NumInitElements; ++i) {
  4724. Expr *init = E->getInit(i);
  4725. QualType iType = init->getType();
  4726. if (InitListExpr *initList = dyn_cast<InitListExpr>(init)) {
  4727. if (!ScanConstInitList(CGM, initList, EltValList, Types,
  4728. bDefaultRowMajor))
  4729. return false;
  4730. } else if (DeclRefExpr *ref = dyn_cast<DeclRefExpr>(init)) {
  4731. if (VarDecl *D = dyn_cast<VarDecl>(ref->getDecl())) {
  4732. if (!D->hasInit())
  4733. return false;
  4734. if (Constant *initVal = CGM.EmitConstantInit(*D)) {
  4735. FlatConstToList(initVal, EltValList, iType, Types, bDefaultRowMajor);
  4736. } else {
  4737. return false;
  4738. }
  4739. } else {
  4740. return false;
  4741. }
  4742. } else if (hlsl::IsHLSLMatType(iType)) {
  4743. return false;
  4744. } else if (CodeGenFunction::hasScalarEvaluationKind(iType)) {
  4745. if (Constant *initVal = CGM.EmitConstantExpr(init, iType)) {
  4746. FlatConstToList(initVal, EltValList, iType, Types, bDefaultRowMajor);
  4747. } else {
  4748. return false;
  4749. }
  4750. } else {
  4751. return false;
  4752. }
  4753. }
  4754. return true;
  4755. }
  4756. static Constant *BuildConstInitializer(QualType Type, unsigned &offset,
  4757. SmallVector<Constant *, 4> &EltValList,
  4758. CodeGenTypes &Types,
  4759. bool bDefaultRowMajor);
  4760. static Constant *BuildConstVector(llvm::VectorType *VT, unsigned &offset,
  4761. SmallVector<Constant *, 4> &EltValList,
  4762. QualType Type, CodeGenTypes &Types) {
  4763. SmallVector<Constant *, 4> Elts;
  4764. QualType EltTy = hlsl::GetHLSLVecElementType(Type);
  4765. for (unsigned i = 0; i < VT->getNumElements(); i++) {
  4766. Elts.emplace_back(BuildConstInitializer(EltTy, offset, EltValList, Types,
  4767. // Vector don't need major.
  4768. /*bDefaultRowMajor*/ false));
  4769. }
  4770. return llvm::ConstantVector::get(Elts);
  4771. }
  4772. static Constant *BuildConstMatrix(llvm::Type *Ty, unsigned &offset,
  4773. SmallVector<Constant *, 4> &EltValList,
  4774. QualType Type, CodeGenTypes &Types,
  4775. bool bDefaultRowMajor) {
  4776. QualType EltTy = hlsl::GetHLSLMatElementType(Type);
  4777. unsigned col, row;
  4778. HLMatrixLower::GetMatrixInfo(Ty, col, row);
  4779. llvm::ArrayType *AT = cast<llvm::ArrayType>(Ty->getStructElementType(0));
  4780. // Save initializer elements first.
  4781. // Matrix initializer is row major.
  4782. SmallVector<Constant *, 16> elts;
  4783. for (unsigned i = 0; i < col * row; i++) {
  4784. elts.emplace_back(BuildConstInitializer(EltTy, offset, EltValList, Types,
  4785. bDefaultRowMajor));
  4786. }
  4787. bool isRowMajor = IsRowMajorMatrix(Type, bDefaultRowMajor);
  4788. SmallVector<Constant *, 16> majorElts(elts.begin(), elts.end());
  4789. if (!isRowMajor) {
  4790. // cast row major to col major.
  4791. for (unsigned c = 0; c < col; c++) {
  4792. SmallVector<Constant *, 4> rows;
  4793. for (unsigned r = 0; r < row; r++) {
  4794. unsigned rowMajorIdx = r * col + c;
  4795. unsigned colMajorIdx = c * row + r;
  4796. majorElts[colMajorIdx] = elts[rowMajorIdx];
  4797. }
  4798. }
  4799. }
  4800. // The type is vector<element, col>[row].
  4801. SmallVector<Constant *, 4> rows;
  4802. unsigned idx = 0;
  4803. for (unsigned r = 0; r < row; r++) {
  4804. SmallVector<Constant *, 4> cols;
  4805. for (unsigned c = 0; c < col; c++) {
  4806. cols.emplace_back(majorElts[idx++]);
  4807. }
  4808. rows.emplace_back(llvm::ConstantVector::get(cols));
  4809. }
  4810. Constant *mat = llvm::ConstantArray::get(AT, rows);
  4811. return llvm::ConstantStruct::get(cast<llvm::StructType>(Ty), mat);
  4812. }
  4813. static Constant *BuildConstArray(llvm::ArrayType *AT, unsigned &offset,
  4814. SmallVector<Constant *, 4> &EltValList,
  4815. QualType Type, CodeGenTypes &Types,
  4816. bool bDefaultRowMajor) {
  4817. SmallVector<Constant *, 4> Elts;
  4818. QualType EltType = QualType(Type->getArrayElementTypeNoTypeQual(), 0);
  4819. for (unsigned i = 0; i < AT->getNumElements(); i++) {
  4820. Elts.emplace_back(BuildConstInitializer(EltType, offset, EltValList, Types,
  4821. bDefaultRowMajor));
  4822. }
  4823. return llvm::ConstantArray::get(AT, Elts);
  4824. }
  4825. static Constant *BuildConstStruct(llvm::StructType *ST, unsigned &offset,
  4826. SmallVector<Constant *, 4> &EltValList,
  4827. QualType Type, CodeGenTypes &Types,
  4828. bool bDefaultRowMajor) {
  4829. SmallVector<Constant *, 4> Elts;
  4830. const RecordType *RT = Type->getAsStructureType();
  4831. if (!RT)
  4832. RT = Type->getAs<RecordType>();
  4833. const RecordDecl *RD = RT->getDecl();
  4834. if (const CXXRecordDecl *CXXRD = dyn_cast<CXXRecordDecl>(RD)) {
  4835. if (CXXRD->getNumBases()) {
  4836. // Add base as field.
  4837. for (const auto &I : CXXRD->bases()) {
  4838. const CXXRecordDecl *BaseDecl =
  4839. cast<CXXRecordDecl>(I.getType()->castAs<RecordType>()->getDecl());
  4840. // Skip empty struct.
  4841. if (BaseDecl->field_empty())
  4842. continue;
  4843. // Add base as a whole constant. Not as element.
  4844. Elts.emplace_back(BuildConstInitializer(I.getType(), offset, EltValList,
  4845. Types, bDefaultRowMajor));
  4846. }
  4847. }
  4848. }
  4849. for (auto fieldIter = RD->field_begin(), fieldEnd = RD->field_end();
  4850. fieldIter != fieldEnd; ++fieldIter) {
  4851. Elts.emplace_back(BuildConstInitializer(
  4852. fieldIter->getType(), offset, EltValList, Types, bDefaultRowMajor));
  4853. }
  4854. return llvm::ConstantStruct::get(ST, Elts);
  4855. }
  4856. static Constant *BuildConstInitializer(QualType Type, unsigned &offset,
  4857. SmallVector<Constant *, 4> &EltValList,
  4858. CodeGenTypes &Types,
  4859. bool bDefaultRowMajor) {
  4860. llvm::Type *Ty = Types.ConvertType(Type);
  4861. if (llvm::VectorType *VT = dyn_cast<llvm::VectorType>(Ty)) {
  4862. return BuildConstVector(VT, offset, EltValList, Type, Types);
  4863. } else if (llvm::ArrayType *AT = dyn_cast<llvm::ArrayType>(Ty)) {
  4864. return BuildConstArray(AT, offset, EltValList, Type, Types,
  4865. bDefaultRowMajor);
  4866. } else if (HLMatrixLower::IsMatrixType(Ty)) {
  4867. return BuildConstMatrix(Ty, offset, EltValList, Type, Types,
  4868. bDefaultRowMajor);
  4869. } else if (StructType *ST = dyn_cast<llvm::StructType>(Ty)) {
  4870. return BuildConstStruct(ST, offset, EltValList, Type, Types,
  4871. bDefaultRowMajor);
  4872. } else {
  4873. // Scalar basic types.
  4874. Constant *Val = EltValList[offset++];
  4875. if (Val->getType() == Ty) {
  4876. return Val;
  4877. } else {
  4878. IRBuilder<> Builder(Ty->getContext());
  4879. // Don't cast int to bool. bool only for scalar.
  4880. if (Ty == Builder.getInt1Ty() && Val->getType() == Builder.getInt32Ty())
  4881. return Val;
  4882. Instruction::CastOps castOp =
  4883. static_cast<Instruction::CastOps>(HLModule::FindCastOp(
  4884. IsUnsigned(Type), IsUnsigned(Type), Val->getType(), Ty));
  4885. return cast<Constant>(Builder.CreateCast(castOp, Val, Ty));
  4886. }
  4887. }
  4888. }
  4889. Constant *CGMSHLSLRuntime::EmitHLSLConstInitListExpr(CodeGenModule &CGM,
  4890. InitListExpr *E) {
  4891. bool bDefaultRowMajor = m_pHLModule->GetHLOptions().bDefaultRowMajor;
  4892. SmallVector<Constant *, 4> EltValList;
  4893. if (!ScanConstInitList(CGM, E, EltValList, CGM.getTypes(), bDefaultRowMajor))
  4894. return nullptr;
  4895. QualType Type = E->getType();
  4896. unsigned offset = 0;
  4897. return BuildConstInitializer(Type, offset, EltValList, CGM.getTypes(),
  4898. bDefaultRowMajor);
  4899. }
  4900. Value *CGMSHLSLRuntime::EmitHLSLMatrixOperationCall(
  4901. CodeGenFunction &CGF, const clang::Expr *E, llvm::Type *RetType,
  4902. ArrayRef<Value *> paramList) {
  4903. HLOpcodeGroup group = GetHLOpcodeGroup(E->getStmtClass());
  4904. unsigned opcode = GetHLOpcode(E);
  4905. if (group == HLOpcodeGroup::HLInit)
  4906. return EmitHLSLArrayInit(CGF.Builder, group, opcode, RetType, paramList,
  4907. TheModule);
  4908. else
  4909. return EmitHLSLMatrixOperationCallImp(CGF.Builder, group, opcode, RetType,
  4910. paramList, TheModule);
  4911. }
  4912. void CGMSHLSLRuntime::EmitHLSLDiscard(CodeGenFunction &CGF) {
  4913. EmitHLSLMatrixOperationCallImp(
  4914. CGF.Builder, HLOpcodeGroup::HLIntrinsic,
  4915. static_cast<unsigned>(IntrinsicOp::IOP_clip),
  4916. llvm::Type::getVoidTy(CGF.getLLVMContext()),
  4917. {ConstantFP::get(llvm::Type::getFloatTy(CGF.getLLVMContext()), -1.0f)},
  4918. TheModule);
  4919. }
  4920. static llvm::Type *MergeIntType(llvm::IntegerType *T0, llvm::IntegerType *T1) {
  4921. if (T0->getBitWidth() > T1->getBitWidth())
  4922. return T0;
  4923. else
  4924. return T1;
  4925. }
  4926. static Value *CreateExt(CGBuilderTy &Builder, Value *Src, llvm::Type *DstTy,
  4927. bool bSigned) {
  4928. if (bSigned)
  4929. return Builder.CreateSExt(Src, DstTy);
  4930. else
  4931. return Builder.CreateZExt(Src, DstTy);
  4932. }
  4933. // For integer literal, try to get lowest precision.
  4934. static Value *CalcHLSLLiteralToLowestPrecision(CGBuilderTy &Builder, Value *Src,
  4935. bool bSigned) {
  4936. if (ConstantInt *CI = dyn_cast<ConstantInt>(Src)) {
  4937. APInt v = CI->getValue();
  4938. switch (v.getActiveWords()) {
  4939. case 4:
  4940. return Builder.getInt32(v.getLimitedValue());
  4941. case 8:
  4942. return Builder.getInt64(v.getLimitedValue());
  4943. case 2:
  4944. // TODO: use low precision type when support it in dxil.
  4945. // return Builder.getInt16(v.getLimitedValue());
  4946. return Builder.getInt32(v.getLimitedValue());
  4947. case 1:
  4948. // TODO: use precision type when support it in dxil.
  4949. // return Builder.getInt8(v.getLimitedValue());
  4950. return Builder.getInt32(v.getLimitedValue());
  4951. default:
  4952. return nullptr;
  4953. }
  4954. } else if (SelectInst *SI = dyn_cast<SelectInst>(Src)) {
  4955. if (SI->getType()->isIntegerTy()) {
  4956. Value *T = SI->getTrueValue();
  4957. Value *F = SI->getFalseValue();
  4958. Value *lowT = CalcHLSLLiteralToLowestPrecision(Builder, T, bSigned);
  4959. Value *lowF = CalcHLSLLiteralToLowestPrecision(Builder, F, bSigned);
  4960. if (lowT && lowF && lowT != T && lowF != F) {
  4961. llvm::IntegerType *TTy = cast<llvm::IntegerType>(lowT->getType());
  4962. llvm::IntegerType *FTy = cast<llvm::IntegerType>(lowF->getType());
  4963. llvm::Type *Ty = MergeIntType(TTy, FTy);
  4964. if (TTy != Ty) {
  4965. lowT = CreateExt(Builder, lowT, Ty, bSigned);
  4966. }
  4967. if (FTy != Ty) {
  4968. lowF = CreateExt(Builder, lowF, Ty, bSigned);
  4969. }
  4970. Value *Cond = SI->getCondition();
  4971. return Builder.CreateSelect(Cond, lowT, lowF);
  4972. }
  4973. }
  4974. } else if (llvm::BinaryOperator *BO = dyn_cast<llvm::BinaryOperator>(Src)) {
  4975. Value *Src0 = BO->getOperand(0);
  4976. Value *Src1 = BO->getOperand(1);
  4977. Value *CastSrc0 = CalcHLSLLiteralToLowestPrecision(Builder, Src0, bSigned);
  4978. Value *CastSrc1 = CalcHLSLLiteralToLowestPrecision(Builder, Src1, bSigned);
  4979. if (Src0 != CastSrc0 && Src1 != CastSrc1 && CastSrc0 && CastSrc1 &&
  4980. CastSrc0->getType() == CastSrc1->getType()) {
  4981. llvm::IntegerType *Ty0 = cast<llvm::IntegerType>(CastSrc0->getType());
  4982. llvm::IntegerType *Ty1 = cast<llvm::IntegerType>(CastSrc0->getType());
  4983. llvm::Type *Ty = MergeIntType(Ty0, Ty1);
  4984. if (Ty0 != Ty) {
  4985. CastSrc0 = CreateExt(Builder, CastSrc0, Ty, bSigned);
  4986. }
  4987. if (Ty1 != Ty) {
  4988. CastSrc1 = CreateExt(Builder, CastSrc1, Ty, bSigned);
  4989. }
  4990. return Builder.CreateBinOp(BO->getOpcode(), CastSrc0, CastSrc1);
  4991. }
  4992. }
  4993. return nullptr;
  4994. }
  4995. Value *CGMSHLSLRuntime::EmitHLSLLiteralCast(CodeGenFunction &CGF, Value *Src,
  4996. QualType SrcType,
  4997. QualType DstType) {
  4998. auto &Builder = CGF.Builder;
  4999. llvm::Type *DstTy = CGF.ConvertType(DstType);
  5000. bool bDstSigned = DstType->isSignedIntegerType();
  5001. if (ConstantInt *CI = dyn_cast<ConstantInt>(Src)) {
  5002. APInt v = CI->getValue();
  5003. if (llvm::IntegerType *IT = dyn_cast<llvm::IntegerType>(DstTy)) {
  5004. v = v.trunc(IT->getBitWidth());
  5005. switch (IT->getBitWidth()) {
  5006. case 32:
  5007. return Builder.getInt32(v.getLimitedValue());
  5008. case 64:
  5009. return Builder.getInt64(v.getLimitedValue());
  5010. case 16:
  5011. return Builder.getInt16(v.getLimitedValue());
  5012. case 8:
  5013. return Builder.getInt8(v.getLimitedValue());
  5014. default:
  5015. return nullptr;
  5016. }
  5017. } else {
  5018. DXASSERT_NOMSG(DstTy->isFloatingPointTy());
  5019. int64_t val = v.getLimitedValue();
  5020. if (v.isNegative())
  5021. val = 0-v.abs().getLimitedValue();
  5022. if (DstTy->isDoubleTy())
  5023. return ConstantFP::get(DstTy, (double)val);
  5024. else if (DstTy->isFloatTy())
  5025. return ConstantFP::get(DstTy, (float)val);
  5026. else {
  5027. if (bDstSigned)
  5028. return Builder.CreateSIToFP(Src, DstTy);
  5029. else
  5030. return Builder.CreateUIToFP(Src, DstTy);
  5031. }
  5032. }
  5033. } else if (ConstantFP *CF = dyn_cast<ConstantFP>(Src)) {
  5034. APFloat v = CF->getValueAPF();
  5035. double dv = v.convertToDouble();
  5036. if (llvm::IntegerType *IT = dyn_cast<llvm::IntegerType>(DstTy)) {
  5037. switch (IT->getBitWidth()) {
  5038. case 32:
  5039. return Builder.getInt32(dv);
  5040. case 64:
  5041. return Builder.getInt64(dv);
  5042. case 16:
  5043. return Builder.getInt16(dv);
  5044. case 8:
  5045. return Builder.getInt8(dv);
  5046. default:
  5047. return nullptr;
  5048. }
  5049. } else {
  5050. if (DstTy->isFloatTy()) {
  5051. float fv = dv;
  5052. return ConstantFP::get(DstTy->getContext(), APFloat(fv));
  5053. } else {
  5054. return Builder.CreateFPTrunc(Src, DstTy);
  5055. }
  5056. }
  5057. } else if (dyn_cast<UndefValue>(Src)) {
  5058. return UndefValue::get(DstTy);
  5059. } else {
  5060. Instruction *I = cast<Instruction>(Src);
  5061. if (SelectInst *SI = dyn_cast<SelectInst>(I)) {
  5062. Value *T = SI->getTrueValue();
  5063. Value *F = SI->getFalseValue();
  5064. Value *Cond = SI->getCondition();
  5065. if (isa<llvm::ConstantInt>(T) && isa<llvm::ConstantInt>(F)) {
  5066. llvm::APInt lhs = cast<llvm::ConstantInt>(T)->getValue();
  5067. llvm::APInt rhs = cast<llvm::ConstantInt>(F)->getValue();
  5068. if (DstTy == Builder.getInt32Ty()) {
  5069. T = Builder.getInt32(lhs.getLimitedValue());
  5070. F = Builder.getInt32(rhs.getLimitedValue());
  5071. Value *Sel = Builder.CreateSelect(Cond, T, F, "cond");
  5072. return Sel;
  5073. } else if (DstTy->isFloatingPointTy()) {
  5074. T = ConstantFP::get(DstTy, int64_t(lhs.getLimitedValue()));
  5075. F = ConstantFP::get(DstTy, int64_t(rhs.getLimitedValue()));
  5076. Value *Sel = Builder.CreateSelect(Cond, T, F, "cond");
  5077. return Sel;
  5078. }
  5079. } else if (isa<llvm::ConstantFP>(T) && isa<llvm::ConstantFP>(F)) {
  5080. llvm::APFloat lhs = cast<llvm::ConstantFP>(T)->getValueAPF();
  5081. llvm::APFloat rhs = cast<llvm::ConstantFP>(F)->getValueAPF();
  5082. double ld = lhs.convertToDouble();
  5083. double rd = rhs.convertToDouble();
  5084. if (DstTy->isFloatTy()) {
  5085. float lf = ld;
  5086. float rf = rd;
  5087. T = ConstantFP::get(DstTy->getContext(), APFloat(lf));
  5088. F = ConstantFP::get(DstTy->getContext(), APFloat(rf));
  5089. Value *Sel = Builder.CreateSelect(Cond, T, F, "cond");
  5090. return Sel;
  5091. } else if (DstTy == Builder.getInt32Ty()) {
  5092. T = Builder.getInt32(ld);
  5093. F = Builder.getInt32(rd);
  5094. Value *Sel = Builder.CreateSelect(Cond, T, F, "cond");
  5095. return Sel;
  5096. } else if (DstTy == Builder.getInt64Ty()) {
  5097. T = Builder.getInt64(ld);
  5098. F = Builder.getInt64(rd);
  5099. Value *Sel = Builder.CreateSelect(Cond, T, F, "cond");
  5100. return Sel;
  5101. }
  5102. }
  5103. } else if (llvm::BinaryOperator *BO = dyn_cast<llvm::BinaryOperator>(I)) {
  5104. // For integer binary operator, do the calc on lowest precision, then cast
  5105. // to dstTy.
  5106. if (I->getType()->isIntegerTy()) {
  5107. bool bSigned = DstType->isSignedIntegerType();
  5108. Value *CastResult =
  5109. CalcHLSLLiteralToLowestPrecision(Builder, BO, bSigned);
  5110. if (!CastResult)
  5111. return nullptr;
  5112. if (dyn_cast<llvm::IntegerType>(DstTy)) {
  5113. if (DstTy == CastResult->getType()) {
  5114. return CastResult;
  5115. } else {
  5116. if (bSigned)
  5117. return Builder.CreateSExtOrTrunc(CastResult, DstTy);
  5118. else
  5119. return Builder.CreateZExtOrTrunc(CastResult, DstTy);
  5120. }
  5121. } else {
  5122. if (bDstSigned)
  5123. return Builder.CreateSIToFP(CastResult, DstTy);
  5124. else
  5125. return Builder.CreateUIToFP(CastResult, DstTy);
  5126. }
  5127. }
  5128. }
  5129. // TODO: support other opcode if need.
  5130. return nullptr;
  5131. }
  5132. }
  5133. Value *CGMSHLSLRuntime::EmitHLSLMatrixSubscript(CodeGenFunction &CGF,
  5134. llvm::Type *RetType,
  5135. llvm::Value *Ptr,
  5136. llvm::Value *Idx,
  5137. clang::QualType Ty) {
  5138. bool isRowMajor =
  5139. IsRowMajorMatrix(Ty, m_pHLModule->GetHLOptions().bDefaultRowMajor);
  5140. unsigned opcode =
  5141. isRowMajor ? static_cast<unsigned>(HLSubscriptOpcode::RowMatSubscript)
  5142. : static_cast<unsigned>(HLSubscriptOpcode::ColMatSubscript);
  5143. Value *matBase = Ptr;
  5144. DXASSERT(matBase->getType()->isPointerTy(),
  5145. "matrix subscript should return pointer");
  5146. RetType =
  5147. llvm::PointerType::get(RetType->getPointerElementType(),
  5148. matBase->getType()->getPointerAddressSpace());
  5149. // Lower mat[Idx] into real idx.
  5150. SmallVector<Value *, 8> args;
  5151. args.emplace_back(Ptr);
  5152. unsigned row, col;
  5153. hlsl::GetHLSLMatRowColCount(Ty, row, col);
  5154. if (isRowMajor) {
  5155. Value *cCol = ConstantInt::get(Idx->getType(), col);
  5156. Value *Base = CGF.Builder.CreateMul(cCol, Idx);
  5157. for (unsigned i = 0; i < col; i++) {
  5158. Value *c = ConstantInt::get(Idx->getType(), i);
  5159. // r * col + c
  5160. Value *matIdx = CGF.Builder.CreateAdd(Base, c);
  5161. args.emplace_back(matIdx);
  5162. }
  5163. } else {
  5164. for (unsigned i = 0; i < col; i++) {
  5165. Value *cMulRow = ConstantInt::get(Idx->getType(), i * row);
  5166. // c * row + r
  5167. Value *matIdx = CGF.Builder.CreateAdd(cMulRow, Idx);
  5168. args.emplace_back(matIdx);
  5169. }
  5170. }
  5171. Value *matSub =
  5172. EmitHLSLMatrixOperationCallImp(CGF.Builder, HLOpcodeGroup::HLSubscript,
  5173. opcode, RetType, args, TheModule);
  5174. return matSub;
  5175. }
  5176. Value *CGMSHLSLRuntime::EmitHLSLMatrixElement(CodeGenFunction &CGF,
  5177. llvm::Type *RetType,
  5178. ArrayRef<Value *> paramList,
  5179. QualType Ty) {
  5180. bool isRowMajor =
  5181. IsRowMajorMatrix(Ty, m_pHLModule->GetHLOptions().bDefaultRowMajor);
  5182. unsigned opcode =
  5183. isRowMajor ? static_cast<unsigned>(HLSubscriptOpcode::RowMatElement)
  5184. : static_cast<unsigned>(HLSubscriptOpcode::ColMatElement);
  5185. Value *matBase = paramList[0];
  5186. DXASSERT(matBase->getType()->isPointerTy(),
  5187. "matrix element should return pointer");
  5188. RetType =
  5189. llvm::PointerType::get(RetType->getPointerElementType(),
  5190. matBase->getType()->getPointerAddressSpace());
  5191. Value *idx = paramList[HLOperandIndex::kMatSubscriptSubOpIdx-1];
  5192. // Lower _m00 into real idx.
  5193. // -1 to avoid opcode param which is added in EmitHLSLMatrixOperationCallImp.
  5194. Value *args[] = {paramList[HLOperandIndex::kMatSubscriptMatOpIdx - 1],
  5195. paramList[HLOperandIndex::kMatSubscriptSubOpIdx - 1]};
  5196. // For all zero idx. Still all zero idx.
  5197. if (ConstantAggregateZero *zeros = dyn_cast<ConstantAggregateZero>(idx)) {
  5198. Constant *zero = zeros->getAggregateElement((unsigned)0);
  5199. std::vector<Constant *> elts(zeros->getNumElements() >> 1, zero);
  5200. args[HLOperandIndex::kMatSubscriptSubOpIdx - 1] = ConstantVector::get(elts);
  5201. } else {
  5202. ConstantDataSequential *elts = cast<ConstantDataSequential>(idx);
  5203. unsigned count = elts->getNumElements();
  5204. unsigned row, col;
  5205. hlsl::GetHLSLMatRowColCount(Ty, row, col);
  5206. std::vector<Constant *> idxs(count >> 1);
  5207. for (unsigned i = 0; i < count; i += 2) {
  5208. unsigned rowIdx = elts->getElementAsInteger(i);
  5209. unsigned colIdx = elts->getElementAsInteger(i + 1);
  5210. unsigned matIdx = 0;
  5211. if (isRowMajor) {
  5212. matIdx = rowIdx * col + colIdx;
  5213. } else {
  5214. matIdx = colIdx * row + rowIdx;
  5215. }
  5216. idxs[i >> 1] = CGF.Builder.getInt32(matIdx);
  5217. }
  5218. args[HLOperandIndex::kMatSubscriptSubOpIdx - 1] = ConstantVector::get(idxs);
  5219. }
  5220. return EmitHLSLMatrixOperationCallImp(CGF.Builder, HLOpcodeGroup::HLSubscript,
  5221. opcode, RetType, args, TheModule);
  5222. }
  5223. Value *CGMSHLSLRuntime::EmitHLSLMatrixLoad(CGBuilderTy &Builder, Value *Ptr,
  5224. QualType Ty) {
  5225. bool isRowMajor =
  5226. IsRowMajorMatrix(Ty, m_pHLModule->GetHLOptions().bDefaultRowMajor);
  5227. unsigned opcode =
  5228. isRowMajor
  5229. ? static_cast<unsigned>(HLMatLoadStoreOpcode::RowMatLoad)
  5230. : static_cast<unsigned>(HLMatLoadStoreOpcode::ColMatLoad);
  5231. Value *matVal = EmitHLSLMatrixOperationCallImp(
  5232. Builder, HLOpcodeGroup::HLMatLoadStore, opcode,
  5233. Ptr->getType()->getPointerElementType(), {Ptr}, TheModule);
  5234. if (!isRowMajor) {
  5235. // ColMatLoad will return a col major matrix.
  5236. // All matrix Value should be row major.
  5237. // Cast it to row major.
  5238. matVal = EmitHLSLMatrixOperationCallImp(
  5239. Builder, HLOpcodeGroup::HLCast,
  5240. static_cast<unsigned>(HLCastOpcode::ColMatrixToRowMatrix),
  5241. matVal->getType(), {matVal}, TheModule);
  5242. }
  5243. return matVal;
  5244. }
  5245. void CGMSHLSLRuntime::EmitHLSLMatrixStore(CGBuilderTy &Builder, Value *Val,
  5246. Value *DestPtr, QualType Ty) {
  5247. bool isRowMajor =
  5248. IsRowMajorMatrix(Ty, m_pHLModule->GetHLOptions().bDefaultRowMajor);
  5249. unsigned opcode =
  5250. isRowMajor
  5251. ? static_cast<unsigned>(HLMatLoadStoreOpcode::RowMatStore)
  5252. : static_cast<unsigned>(HLMatLoadStoreOpcode::ColMatStore);
  5253. if (!isRowMajor) {
  5254. Value *ColVal = nullptr;
  5255. // If Val is casted from col major. Just use the original col major val.
  5256. if (CallInst *CI = dyn_cast<CallInst>(Val)) {
  5257. hlsl::HLOpcodeGroup group =
  5258. hlsl::GetHLOpcodeGroupByName(CI->getCalledFunction());
  5259. if (group == HLOpcodeGroup::HLCast) {
  5260. HLCastOpcode castOp = static_cast<HLCastOpcode>(hlsl::GetHLOpcode(CI));
  5261. if (castOp == HLCastOpcode::ColMatrixToRowMatrix) {
  5262. ColVal = CI->getArgOperand(HLOperandIndex::kUnaryOpSrc0Idx);
  5263. }
  5264. }
  5265. }
  5266. if (ColVal) {
  5267. Val = ColVal;
  5268. } else {
  5269. // All matrix Value should be row major.
  5270. // ColMatStore need a col major value.
  5271. // Cast it to row major.
  5272. Val = EmitHLSLMatrixOperationCallImp(
  5273. Builder, HLOpcodeGroup::HLCast,
  5274. static_cast<unsigned>(HLCastOpcode::RowMatrixToColMatrix),
  5275. Val->getType(), {Val}, TheModule);
  5276. }
  5277. }
  5278. EmitHLSLMatrixOperationCallImp(Builder, HLOpcodeGroup::HLMatLoadStore, opcode,
  5279. Val->getType(), {DestPtr, Val}, TheModule);
  5280. }
  5281. Value *CGMSHLSLRuntime::EmitHLSLMatrixLoad(CodeGenFunction &CGF, Value *Ptr,
  5282. QualType Ty) {
  5283. return EmitHLSLMatrixLoad(CGF.Builder, Ptr, Ty);
  5284. }
  5285. void CGMSHLSLRuntime::EmitHLSLMatrixStore(CodeGenFunction &CGF, Value *Val,
  5286. Value *DestPtr, QualType Ty) {
  5287. EmitHLSLMatrixStore(CGF.Builder, Val, DestPtr, Ty);
  5288. }
  5289. // Copy data from srcPtr to destPtr.
  5290. static void SimplePtrCopy(Value *DestPtr, Value *SrcPtr,
  5291. ArrayRef<Value *> idxList, CGBuilderTy &Builder) {
  5292. if (idxList.size() > 1) {
  5293. DestPtr = Builder.CreateInBoundsGEP(DestPtr, idxList);
  5294. SrcPtr = Builder.CreateInBoundsGEP(SrcPtr, idxList);
  5295. }
  5296. llvm::LoadInst *ld = Builder.CreateLoad(SrcPtr);
  5297. Builder.CreateStore(ld, DestPtr);
  5298. }
  5299. // Get Element val from SrvVal with extract value.
  5300. static Value *GetEltVal(Value *SrcVal, ArrayRef<Value*> idxList,
  5301. CGBuilderTy &Builder) {
  5302. Value *Val = SrcVal;
  5303. // Skip beginning pointer type.
  5304. for (unsigned i = 1; i < idxList.size(); i++) {
  5305. ConstantInt *idx = cast<ConstantInt>(idxList[i]);
  5306. llvm::Type *Ty = Val->getType();
  5307. if (Ty->isAggregateType()) {
  5308. Val = Builder.CreateExtractValue(Val, idx->getLimitedValue());
  5309. }
  5310. }
  5311. return Val;
  5312. }
  5313. // Copy srcVal to destPtr.
  5314. static void SimpleValCopy(Value *DestPtr, Value *SrcVal,
  5315. ArrayRef<Value*> idxList,
  5316. CGBuilderTy &Builder) {
  5317. Value *DestGEP = Builder.CreateInBoundsGEP(DestPtr, idxList);
  5318. Value *Val = GetEltVal(SrcVal, idxList, Builder);
  5319. Builder.CreateStore(Val, DestGEP);
  5320. }
  5321. static void SimpleCopy(Value *Dest, Value *Src,
  5322. ArrayRef<Value *> idxList,
  5323. CGBuilderTy &Builder) {
  5324. if (Src->getType()->isPointerTy())
  5325. SimplePtrCopy(Dest, Src, idxList, Builder);
  5326. else
  5327. SimpleValCopy(Dest, Src, idxList, Builder);
  5328. }
  5329. void CGMSHLSLRuntime::FlattenAggregatePtrToGepList(
  5330. CodeGenFunction &CGF, Value *Ptr, SmallVector<Value *, 4> &idxList,
  5331. clang::QualType Type, llvm::Type *Ty, SmallVector<Value *, 4> &GepList,
  5332. SmallVector<QualType, 4> &EltTyList) {
  5333. if (llvm::PointerType *PT = dyn_cast<llvm::PointerType>(Ty)) {
  5334. Constant *idx = Constant::getIntegerValue(
  5335. IntegerType::get(Ty->getContext(), 32), APInt(32, 0));
  5336. idxList.emplace_back(idx);
  5337. FlattenAggregatePtrToGepList(CGF, Ptr, idxList, Type, PT->getElementType(),
  5338. GepList, EltTyList);
  5339. idxList.pop_back();
  5340. } else if (HLMatrixLower::IsMatrixType(Ty)) {
  5341. // Use matLd/St for matrix.
  5342. unsigned col, row;
  5343. llvm::Type *EltTy = HLMatrixLower::GetMatrixInfo(Ty, col, row);
  5344. llvm::PointerType *EltPtrTy =
  5345. llvm::PointerType::get(EltTy, Ptr->getType()->getPointerAddressSpace());
  5346. QualType EltQualTy = hlsl::GetHLSLMatElementType(Type);
  5347. Value *matPtr = CGF.Builder.CreateInBoundsGEP(Ptr, idxList);
  5348. // Flatten matrix to elements.
  5349. for (unsigned r = 0; r < row; r++) {
  5350. for (unsigned c = 0; c < col; c++) {
  5351. ConstantInt *cRow = CGF.Builder.getInt32(r);
  5352. ConstantInt *cCol = CGF.Builder.getInt32(c);
  5353. Constant *CV = llvm::ConstantVector::get({cRow, cCol});
  5354. GepList.push_back(
  5355. EmitHLSLMatrixElement(CGF, EltPtrTy, {matPtr, CV}, Type));
  5356. EltTyList.push_back(EltQualTy);
  5357. }
  5358. }
  5359. } else if (StructType *ST = dyn_cast<StructType>(Ty)) {
  5360. if (HLModule::IsHLSLObjectType(ST)) {
  5361. // Avoid split HLSL object.
  5362. Value *GEP = CGF.Builder.CreateInBoundsGEP(Ptr, idxList);
  5363. GepList.push_back(GEP);
  5364. EltTyList.push_back(Type);
  5365. return;
  5366. }
  5367. const clang::RecordType *RT = Type->getAsStructureType();
  5368. RecordDecl *RD = RT->getDecl();
  5369. const CGRecordLayout &RL = CGF.getTypes().getCGRecordLayout(RD);
  5370. if (const CXXRecordDecl *CXXRD = dyn_cast<CXXRecordDecl>(RD)) {
  5371. if (CXXRD->getNumBases()) {
  5372. // Add base as field.
  5373. for (const auto &I : CXXRD->bases()) {
  5374. const CXXRecordDecl *BaseDecl =
  5375. cast<CXXRecordDecl>(I.getType()->castAs<RecordType>()->getDecl());
  5376. // Skip empty struct.
  5377. if (BaseDecl->field_empty())
  5378. continue;
  5379. QualType parentTy = QualType(BaseDecl->getTypeForDecl(), 0);
  5380. llvm::Type *parentType = CGF.ConvertType(parentTy);
  5381. unsigned i = RL.getNonVirtualBaseLLVMFieldNo(BaseDecl);
  5382. Constant *idx = llvm::Constant::getIntegerValue(
  5383. IntegerType::get(Ty->getContext(), 32), APInt(32, i));
  5384. idxList.emplace_back(idx);
  5385. FlattenAggregatePtrToGepList(CGF, Ptr, idxList, parentTy, parentType,
  5386. GepList, EltTyList);
  5387. idxList.pop_back();
  5388. }
  5389. }
  5390. }
  5391. for (auto fieldIter = RD->field_begin(), fieldEnd = RD->field_end();
  5392. fieldIter != fieldEnd; ++fieldIter) {
  5393. unsigned i = RL.getLLVMFieldNo(*fieldIter);
  5394. llvm::Type *ET = ST->getElementType(i);
  5395. Constant *idx = llvm::Constant::getIntegerValue(
  5396. IntegerType::get(Ty->getContext(), 32), APInt(32, i));
  5397. idxList.emplace_back(idx);
  5398. FlattenAggregatePtrToGepList(CGF, Ptr, idxList, fieldIter->getType(), ET,
  5399. GepList, EltTyList);
  5400. idxList.pop_back();
  5401. }
  5402. } else if (llvm::ArrayType *AT = dyn_cast<llvm::ArrayType>(Ty)) {
  5403. llvm::Type *ET = AT->getElementType();
  5404. QualType EltType = CGF.getContext().getBaseElementType(Type);
  5405. for (uint32_t i = 0; i < AT->getNumElements(); i++) {
  5406. Constant *idx = Constant::getIntegerValue(
  5407. IntegerType::get(Ty->getContext(), 32), APInt(32, i));
  5408. idxList.emplace_back(idx);
  5409. FlattenAggregatePtrToGepList(CGF, Ptr, idxList, EltType, ET, GepList,
  5410. EltTyList);
  5411. idxList.pop_back();
  5412. }
  5413. } else if (llvm::VectorType *VT = dyn_cast<llvm::VectorType>(Ty)) {
  5414. // Flatten vector too.
  5415. QualType EltTy = hlsl::GetHLSLVecElementType(Type);
  5416. for (uint32_t i = 0; i < VT->getNumElements(); i++) {
  5417. Constant *idx = CGF.Builder.getInt32(i);
  5418. idxList.emplace_back(idx);
  5419. Value *GEP = CGF.Builder.CreateInBoundsGEP(Ptr, idxList);
  5420. GepList.push_back(GEP);
  5421. EltTyList.push_back(EltTy);
  5422. idxList.pop_back();
  5423. }
  5424. } else {
  5425. Value *GEP = CGF.Builder.CreateInBoundsGEP(Ptr, idxList);
  5426. GepList.push_back(GEP);
  5427. EltTyList.push_back(Type);
  5428. }
  5429. }
  5430. void CGMSHLSLRuntime::LoadFlattenedGepList(CodeGenFunction &CGF,
  5431. ArrayRef<Value *> GepList,
  5432. ArrayRef<QualType> EltTyList,
  5433. SmallVector<Value *, 4> &EltList) {
  5434. unsigned eltSize = GepList.size();
  5435. for (unsigned i = 0; i < eltSize; i++) {
  5436. Value *Ptr = GepList[i];
  5437. // Everying is element type.
  5438. EltList.push_back(CGF.Builder.CreateLoad(Ptr));
  5439. }
  5440. }
  5441. void CGMSHLSLRuntime::StoreFlattenedGepList(CodeGenFunction &CGF, ArrayRef<Value *> GepList,
  5442. ArrayRef<QualType> GepTyList, ArrayRef<Value *> EltValList, ArrayRef<QualType> SrcTyList) {
  5443. unsigned eltSize = GepList.size();
  5444. for (unsigned i = 0; i < eltSize; i++) {
  5445. Value *Ptr = GepList[i];
  5446. QualType DestType = GepTyList[i];
  5447. Value *Val = EltValList[i];
  5448. QualType SrcType = SrcTyList[i];
  5449. llvm::Type *Ty = Ptr->getType()->getPointerElementType();
  5450. // Everything is element type.
  5451. if (Ty != Val->getType()) {
  5452. Instruction::CastOps castOp =
  5453. static_cast<Instruction::CastOps>(HLModule::FindCastOp(
  5454. IsUnsigned(SrcType), IsUnsigned(DestType), Val->getType(), Ty));
  5455. Val = CGF.Builder.CreateCast(castOp, Val, Ty);
  5456. }
  5457. CGF.Builder.CreateStore(Val, Ptr);
  5458. }
  5459. }
  5460. // Copy data from SrcPtr to DestPtr.
  5461. // For matrix, use MatLoad/MatStore.
  5462. // For matrix array, EmitHLSLAggregateCopy on each element.
  5463. // For struct or array, use memcpy.
  5464. // Other just load/store.
  5465. void CGMSHLSLRuntime::EmitHLSLAggregateCopy(
  5466. CodeGenFunction &CGF, llvm::Value *SrcPtr, llvm::Value *DestPtr,
  5467. SmallVector<Value *, 4> &idxList, clang::QualType SrcType,
  5468. clang::QualType DestType, llvm::Type *Ty) {
  5469. if (llvm::PointerType *PT = dyn_cast<llvm::PointerType>(Ty)) {
  5470. Constant *idx = Constant::getIntegerValue(
  5471. IntegerType::get(Ty->getContext(), 32), APInt(32, 0));
  5472. idxList.emplace_back(idx);
  5473. EmitHLSLAggregateCopy(CGF, SrcPtr, DestPtr, idxList, SrcType, DestType,
  5474. PT->getElementType());
  5475. idxList.pop_back();
  5476. } else if (HLMatrixLower::IsMatrixType(Ty)) {
  5477. // Use matLd/St for matrix.
  5478. Value *srcGEP = CGF.Builder.CreateInBoundsGEP(SrcPtr, idxList);
  5479. Value *dstGEP = CGF.Builder.CreateInBoundsGEP(DestPtr, idxList);
  5480. Value *ldMat = EmitHLSLMatrixLoad(CGF, srcGEP, SrcType);
  5481. EmitHLSLMatrixStore(CGF, ldMat, dstGEP, DestType);
  5482. } else if (StructType *ST = dyn_cast<StructType>(Ty)) {
  5483. if (HLModule::IsHLSLObjectType(ST)) {
  5484. // Avoid split HLSL object.
  5485. SimpleCopy(DestPtr, SrcPtr, idxList, CGF.Builder);
  5486. return;
  5487. }
  5488. Value *srcGEP = CGF.Builder.CreateInBoundsGEP(SrcPtr, idxList);
  5489. Value *dstGEP = CGF.Builder.CreateInBoundsGEP(DestPtr, idxList);
  5490. unsigned size = this->TheModule.getDataLayout().getTypeAllocSize(ST);
  5491. // Memcpy struct.
  5492. CGF.Builder.CreateMemCpy(dstGEP, srcGEP, size, 1);
  5493. } else if (llvm::ArrayType *AT = dyn_cast<llvm::ArrayType>(Ty)) {
  5494. if (!HLMatrixLower::IsMatrixArrayPointer(llvm::PointerType::get(Ty,0))) {
  5495. Value *srcGEP = CGF.Builder.CreateInBoundsGEP(SrcPtr, idxList);
  5496. Value *dstGEP = CGF.Builder.CreateInBoundsGEP(DestPtr, idxList);
  5497. unsigned size = this->TheModule.getDataLayout().getTypeAllocSize(AT);
  5498. // Memcpy non-matrix array.
  5499. CGF.Builder.CreateMemCpy(dstGEP, srcGEP, size, 1);
  5500. } else {
  5501. llvm::Type *ET = AT->getElementType();
  5502. QualType EltDestType = CGF.getContext().getBaseElementType(DestType);
  5503. QualType EltSrcType = CGF.getContext().getBaseElementType(SrcType);
  5504. for (uint32_t i = 0; i < AT->getNumElements(); i++) {
  5505. Constant *idx = Constant::getIntegerValue(
  5506. IntegerType::get(Ty->getContext(), 32), APInt(32, i));
  5507. idxList.emplace_back(idx);
  5508. EmitHLSLAggregateCopy(CGF, SrcPtr, DestPtr, idxList, EltSrcType,
  5509. EltDestType, ET);
  5510. idxList.pop_back();
  5511. }
  5512. }
  5513. } else {
  5514. SimpleCopy(DestPtr, SrcPtr, idxList, CGF.Builder);
  5515. }
  5516. }
  5517. void CGMSHLSLRuntime::EmitHLSLAggregateCopy(CodeGenFunction &CGF, llvm::Value *SrcPtr,
  5518. llvm::Value *DestPtr,
  5519. clang::QualType Ty) {
  5520. SmallVector<Value *, 4> idxList;
  5521. EmitHLSLAggregateCopy(CGF, SrcPtr, DestPtr, idxList, Ty, Ty, SrcPtr->getType());
  5522. }
  5523. // To memcpy, need element type match.
  5524. // For struct type, the layout should match in cbuffer layout.
  5525. // struct { float2 x; float3 y; } will not match struct { float3 x; float2 y; }.
  5526. // struct { float2 x; float3 y; } will not match array of float.
  5527. static bool IsTypeMatchForMemcpy(llvm::Type *SrcTy, llvm::Type *DestTy) {
  5528. llvm::Type *SrcEltTy = dxilutil::GetArrayEltTy(SrcTy);
  5529. llvm::Type *DestEltTy = dxilutil::GetArrayEltTy(DestTy);
  5530. if (SrcEltTy == DestEltTy)
  5531. return true;
  5532. llvm::StructType *SrcST = dyn_cast<llvm::StructType>(SrcEltTy);
  5533. llvm::StructType *DestST = dyn_cast<llvm::StructType>(DestEltTy);
  5534. if (SrcST && DestST) {
  5535. // Only allow identical struct.
  5536. return SrcST->isLayoutIdentical(DestST);
  5537. } else if (!SrcST && !DestST) {
  5538. // For basic type, if one is array, one is not array, layout is different.
  5539. // If both array, type mismatch. If both basic, copy should be fine.
  5540. // So all return false.
  5541. return false;
  5542. } else {
  5543. // One struct, one basic type.
  5544. // Make sure all struct element match the basic type and basic type is
  5545. // vector4.
  5546. llvm::StructType *ST = SrcST ? SrcST : DestST;
  5547. llvm::Type *Ty = SrcST ? DestEltTy : SrcEltTy;
  5548. if (!Ty->isVectorTy())
  5549. return false;
  5550. if (Ty->getVectorNumElements() != 4)
  5551. return false;
  5552. for (llvm::Type *EltTy : ST->elements()) {
  5553. if (EltTy != Ty)
  5554. return false;
  5555. }
  5556. return true;
  5557. }
  5558. }
  5559. void CGMSHLSLRuntime::EmitHLSLFlatConversionAggregateCopy(CodeGenFunction &CGF, llvm::Value *SrcPtr,
  5560. clang::QualType SrcTy,
  5561. llvm::Value *DestPtr,
  5562. clang::QualType DestTy) {
  5563. llvm::Type *SrcPtrTy = SrcPtr->getType()->getPointerElementType();
  5564. llvm::Type *DestPtrTy = DestPtr->getType()->getPointerElementType();
  5565. if (SrcPtrTy == DestPtrTy) {
  5566. // Memcpy if type is match.
  5567. unsigned size = TheModule.getDataLayout().getTypeAllocSize(SrcPtrTy);
  5568. CGF.Builder.CreateMemCpy(DestPtr, SrcPtr, size, 1);
  5569. return;
  5570. } else if (HLModule::IsHLSLObjectType(dxilutil::GetArrayEltTy(SrcPtrTy)) &&
  5571. HLModule::IsHLSLObjectType(dxilutil::GetArrayEltTy(DestPtrTy))) {
  5572. unsigned sizeSrc = TheModule.getDataLayout().getTypeAllocSize(SrcPtrTy);
  5573. unsigned sizeDest = TheModule.getDataLayout().getTypeAllocSize(DestPtrTy);
  5574. CGF.Builder.CreateMemCpy(DestPtr, SrcPtr, std::max(sizeSrc, sizeDest), 1);
  5575. return;
  5576. } else if (GlobalVariable *GV = dyn_cast<GlobalVariable>(DestPtr)) {
  5577. if (GV->isInternalLinkage(GV->getLinkage()) &&
  5578. IsTypeMatchForMemcpy(SrcPtrTy, DestPtrTy)) {
  5579. unsigned sizeSrc = TheModule.getDataLayout().getTypeAllocSize(SrcPtrTy);
  5580. unsigned sizeDest = TheModule.getDataLayout().getTypeAllocSize(DestPtrTy);
  5581. CGF.Builder.CreateMemCpy(DestPtr, SrcPtr, std::min(sizeSrc, sizeDest), 1);
  5582. return;
  5583. }
  5584. }
  5585. // It is possiable to implement EmitHLSLAggregateCopy, EmitHLSLAggregateStore
  5586. // the same way. But split value to scalar will generate many instruction when
  5587. // src type is same as dest type.
  5588. SmallVector<Value *, 4> idxList;
  5589. SmallVector<Value *, 4> SrcGEPList;
  5590. SmallVector<QualType, 4> SrcEltTyList;
  5591. FlattenAggregatePtrToGepList(CGF, SrcPtr, idxList, SrcTy, SrcPtr->getType(),
  5592. SrcGEPList, SrcEltTyList);
  5593. SmallVector<Value *, 4> LdEltList;
  5594. LoadFlattenedGepList(CGF, SrcGEPList, SrcEltTyList, LdEltList);
  5595. idxList.clear();
  5596. SmallVector<Value *, 4> DestGEPList;
  5597. SmallVector<QualType, 4> DestEltTyList;
  5598. FlattenAggregatePtrToGepList(CGF, DestPtr, idxList, DestTy,
  5599. DestPtr->getType(), DestGEPList, DestEltTyList);
  5600. StoreFlattenedGepList(CGF, DestGEPList, DestEltTyList, LdEltList,
  5601. SrcEltTyList);
  5602. }
  5603. void CGMSHLSLRuntime::EmitHLSLAggregateStore(CodeGenFunction &CGF, llvm::Value *SrcVal,
  5604. llvm::Value *DestPtr,
  5605. clang::QualType Ty) {
  5606. DXASSERT(0, "aggregate return type will use SRet, no aggregate store should exist");
  5607. }
  5608. static void SimpleFlatValCopy(Value *DestPtr, Value *SrcVal, QualType Ty,
  5609. QualType SrcTy, ArrayRef<Value *> idxList,
  5610. CGBuilderTy &Builder) {
  5611. Value *DestGEP = Builder.CreateInBoundsGEP(DestPtr, idxList);
  5612. llvm::Type *ToTy = DestGEP->getType()->getPointerElementType();
  5613. llvm::Type *EltToTy = ToTy;
  5614. if (llvm::VectorType *VT = dyn_cast<llvm::VectorType>(ToTy)) {
  5615. EltToTy = VT->getElementType();
  5616. }
  5617. if (EltToTy != SrcVal->getType()) {
  5618. Instruction::CastOps castOp =
  5619. static_cast<Instruction::CastOps>(HLModule::FindCastOp(
  5620. IsUnsigned(SrcTy), IsUnsigned(Ty), SrcVal->getType(), ToTy));
  5621. SrcVal = Builder.CreateCast(castOp, SrcVal, EltToTy);
  5622. }
  5623. if (llvm::VectorType *VT = dyn_cast<llvm::VectorType>(ToTy)) {
  5624. llvm::VectorType *VT1 = llvm::VectorType::get(EltToTy, 1);
  5625. Value *V1 =
  5626. Builder.CreateInsertElement(UndefValue::get(VT1), SrcVal, (uint64_t)0);
  5627. std::vector<int> shufIdx(VT->getNumElements(), 0);
  5628. Value *Vec = Builder.CreateShuffleVector(V1, V1, shufIdx);
  5629. Builder.CreateStore(Vec, DestGEP);
  5630. } else
  5631. Builder.CreateStore(SrcVal, DestGEP);
  5632. }
  5633. void CGMSHLSLRuntime::EmitHLSLFlatConversionToAggregate(
  5634. CodeGenFunction &CGF, Value *SrcVal, llvm::Value *DestPtr,
  5635. SmallVector<Value *, 4> &idxList, QualType Type, QualType SrcType,
  5636. llvm::Type *Ty) {
  5637. if (llvm::PointerType *PT = dyn_cast<llvm::PointerType>(Ty)) {
  5638. Constant *idx = Constant::getIntegerValue(
  5639. IntegerType::get(Ty->getContext(), 32), APInt(32, 0));
  5640. idxList.emplace_back(idx);
  5641. EmitHLSLFlatConversionToAggregate(CGF, SrcVal, DestPtr, idxList, Type,
  5642. SrcType, PT->getElementType());
  5643. idxList.pop_back();
  5644. } else if (HLMatrixLower::IsMatrixType(Ty)) {
  5645. // Use matLd/St for matrix.
  5646. Value *dstGEP = CGF.Builder.CreateInBoundsGEP(DestPtr, idxList);
  5647. unsigned row, col;
  5648. llvm::Type *EltTy = HLMatrixLower::GetMatrixInfo(Ty, col, row);
  5649. llvm::VectorType *VT1 = llvm::VectorType::get(EltTy, 1);
  5650. if (EltTy != SrcVal->getType()) {
  5651. Instruction::CastOps castOp =
  5652. static_cast<Instruction::CastOps>(HLModule::FindCastOp(
  5653. IsUnsigned(SrcType), IsUnsigned(Type), SrcVal->getType(), EltTy));
  5654. SrcVal = CGF.Builder.CreateCast(castOp, SrcVal, EltTy);
  5655. }
  5656. Value *V1 = CGF.Builder.CreateInsertElement(UndefValue::get(VT1), SrcVal,
  5657. (uint64_t)0);
  5658. std::vector<int> shufIdx(col * row, 0);
  5659. Value *VecMat = CGF.Builder.CreateShuffleVector(V1, V1, shufIdx);
  5660. Value *MatInit = EmitHLSLMatrixOperationCallImp(
  5661. CGF.Builder, HLOpcodeGroup::HLInit, 0, Ty, {VecMat}, TheModule);
  5662. EmitHLSLMatrixStore(CGF, MatInit, dstGEP, Type);
  5663. } else if (StructType *ST = dyn_cast<StructType>(Ty)) {
  5664. DXASSERT(!HLModule::IsHLSLObjectType(ST), "cannot cast to hlsl object, Sema should reject");
  5665. const clang::RecordType *RT = Type->getAsStructureType();
  5666. RecordDecl *RD = RT->getDecl();
  5667. const CGRecordLayout &RL = CGF.getTypes().getCGRecordLayout(RD);
  5668. // Take care base.
  5669. if (const CXXRecordDecl *CXXRD = dyn_cast<CXXRecordDecl>(RD)) {
  5670. if (CXXRD->getNumBases()) {
  5671. for (const auto &I : CXXRD->bases()) {
  5672. const CXXRecordDecl *BaseDecl =
  5673. cast<CXXRecordDecl>(I.getType()->castAs<RecordType>()->getDecl());
  5674. if (BaseDecl->field_empty())
  5675. continue;
  5676. QualType parentTy = QualType(BaseDecl->getTypeForDecl(), 0);
  5677. unsigned i = RL.getNonVirtualBaseLLVMFieldNo(BaseDecl);
  5678. llvm::Type *ET = ST->getElementType(i);
  5679. Constant *idx = llvm::Constant::getIntegerValue(
  5680. IntegerType::get(Ty->getContext(), 32), APInt(32, i));
  5681. idxList.emplace_back(idx);
  5682. EmitHLSLFlatConversionToAggregate(CGF, SrcVal, DestPtr, idxList,
  5683. parentTy, SrcType, ET);
  5684. idxList.pop_back();
  5685. }
  5686. }
  5687. }
  5688. for (auto fieldIter = RD->field_begin(), fieldEnd = RD->field_end();
  5689. fieldIter != fieldEnd; ++fieldIter) {
  5690. unsigned i = RL.getLLVMFieldNo(*fieldIter);
  5691. llvm::Type *ET = ST->getElementType(i);
  5692. Constant *idx = llvm::Constant::getIntegerValue(
  5693. IntegerType::get(Ty->getContext(), 32), APInt(32, i));
  5694. idxList.emplace_back(idx);
  5695. EmitHLSLFlatConversionToAggregate(CGF, SrcVal, DestPtr, idxList,
  5696. fieldIter->getType(), SrcType, ET);
  5697. idxList.pop_back();
  5698. }
  5699. } else if (llvm::ArrayType *AT = dyn_cast<llvm::ArrayType>(Ty)) {
  5700. llvm::Type *ET = AT->getElementType();
  5701. QualType EltType = CGF.getContext().getBaseElementType(Type);
  5702. for (uint32_t i = 0; i < AT->getNumElements(); i++) {
  5703. Constant *idx = Constant::getIntegerValue(
  5704. IntegerType::get(Ty->getContext(), 32), APInt(32, i));
  5705. idxList.emplace_back(idx);
  5706. EmitHLSLFlatConversionToAggregate(CGF, SrcVal, DestPtr, idxList, EltType,
  5707. SrcType, ET);
  5708. idxList.pop_back();
  5709. }
  5710. } else {
  5711. SimpleFlatValCopy(DestPtr, SrcVal, Type, SrcType, idxList, CGF.Builder);
  5712. }
  5713. }
  5714. void CGMSHLSLRuntime::EmitHLSLFlatConversionToAggregate(CodeGenFunction &CGF,
  5715. Value *Val,
  5716. Value *DestPtr,
  5717. QualType Ty,
  5718. QualType SrcTy) {
  5719. if (SrcTy->isBuiltinType()) {
  5720. SmallVector<Value *, 4> idxList;
  5721. // Add first 0 for DestPtr.
  5722. Constant *idx = Constant::getIntegerValue(
  5723. IntegerType::get(Val->getContext(), 32), APInt(32, 0));
  5724. idxList.emplace_back(idx);
  5725. EmitHLSLFlatConversionToAggregate(
  5726. CGF, Val, DestPtr, idxList, Ty, SrcTy,
  5727. DestPtr->getType()->getPointerElementType());
  5728. }
  5729. else {
  5730. SmallVector<Value *, 4> idxList;
  5731. SmallVector<Value *, 4> DestGEPList;
  5732. SmallVector<QualType, 4> DestEltTyList;
  5733. FlattenAggregatePtrToGepList(CGF, DestPtr, idxList, Ty, DestPtr->getType(), DestGEPList, DestEltTyList);
  5734. SmallVector<Value *, 4> EltList;
  5735. SmallVector<QualType, 4> EltTyList;
  5736. FlattenValToInitList(CGF, EltList, EltTyList, SrcTy, Val);
  5737. StoreFlattenedGepList(CGF, DestGEPList, DestEltTyList, EltList, EltTyList);
  5738. }
  5739. }
  5740. void CGMSHLSLRuntime::EmitHLSLRootSignature(CodeGenFunction &CGF,
  5741. HLSLRootSignatureAttr *RSA,
  5742. Function *Fn) {
  5743. // Only parse root signature for entry function.
  5744. if (Fn != Entry.Func)
  5745. return;
  5746. StringRef StrRef = RSA->getSignatureName();
  5747. DiagnosticsEngine &Diags = CGF.getContext().getDiagnostics();
  5748. SourceLocation SLoc = RSA->getLocation();
  5749. clang::CompileRootSignature(StrRef, Diags, SLoc, rootSigVer, &m_pHLModule->GetRootSignature());
  5750. }
  5751. void CGMSHLSLRuntime::EmitHLSLOutParamConversionInit(
  5752. CodeGenFunction &CGF, const FunctionDecl *FD, const CallExpr *E,
  5753. llvm::SmallVector<LValue, 8> &castArgList,
  5754. llvm::SmallVector<const Stmt *, 8> &argList,
  5755. const std::function<void(const VarDecl *, llvm::Value *)> &TmpArgMap) {
  5756. // Special case: skip first argument of CXXOperatorCall (it is "this").
  5757. unsigned ArgsToSkip = isa<CXXOperatorCallExpr>(E) ? 1 : 0;
  5758. for (uint32_t i = 0; i < FD->getNumParams(); i++) {
  5759. const ParmVarDecl *Param = FD->getParamDecl(i);
  5760. const Expr *Arg = E->getArg(i+ArgsToSkip);
  5761. QualType ParamTy = Param->getType().getNonReferenceType();
  5762. bool RValOnRef = false;
  5763. if (!Param->isModifierOut()) {
  5764. if (!ParamTy->isAggregateType() || hlsl::IsHLSLMatType(ParamTy)) {
  5765. if (Arg->isRValue() && Param->getType()->isReferenceType()) {
  5766. // RValue on a reference type.
  5767. if (const CStyleCastExpr *cCast = dyn_cast<CStyleCastExpr>(Arg)) {
  5768. // TODO: Evolving this to warn then fail in future language versions.
  5769. // Allow special case like cast uint to uint for back-compat.
  5770. if (cCast->getCastKind() == CastKind::CK_NoOp) {
  5771. if (const ImplicitCastExpr *cast =
  5772. dyn_cast<ImplicitCastExpr>(cCast->getSubExpr())) {
  5773. if (cast->getCastKind() == CastKind::CK_LValueToRValue) {
  5774. // update the arg
  5775. argList[i] = cast->getSubExpr();
  5776. continue;
  5777. }
  5778. }
  5779. }
  5780. }
  5781. // EmitLValue will report error.
  5782. // Mark RValOnRef to create tmpArg for it.
  5783. RValOnRef = true;
  5784. } else {
  5785. continue;
  5786. }
  5787. }
  5788. }
  5789. // get original arg
  5790. LValue argLV = CGF.EmitLValue(Arg);
  5791. if (!Param->isModifierOut() && !RValOnRef) {
  5792. bool isDefaultAddrSpace = true;
  5793. if (argLV.isSimple()) {
  5794. isDefaultAddrSpace =
  5795. argLV.getAddress()->getType()->getPointerAddressSpace() ==
  5796. DXIL::kDefaultAddrSpace;
  5797. }
  5798. bool isHLSLIntrinsic = false;
  5799. if (const FunctionDecl *Callee = E->getDirectCallee()) {
  5800. isHLSLIntrinsic = Callee->hasAttr<HLSLIntrinsicAttr>();
  5801. }
  5802. // Copy in arg which is not default address space and not on hlsl intrinsic.
  5803. if (isDefaultAddrSpace || isHLSLIntrinsic)
  5804. continue;
  5805. }
  5806. // create temp Var
  5807. VarDecl *tmpArg =
  5808. VarDecl::Create(CGF.getContext(), const_cast<FunctionDecl *>(FD),
  5809. SourceLocation(), SourceLocation(),
  5810. /*IdentifierInfo*/ nullptr, ParamTy,
  5811. CGF.getContext().getTrivialTypeSourceInfo(ParamTy),
  5812. StorageClass::SC_Auto);
  5813. // Aggregate type will be indirect param convert to pointer type.
  5814. // So don't update to ReferenceType, use RValue for it.
  5815. bool isAggregateType = (ParamTy->isArrayType() || ParamTy->isRecordType()) &&
  5816. !hlsl::IsHLSLVecMatType(ParamTy);
  5817. const DeclRefExpr *tmpRef = DeclRefExpr::Create(
  5818. CGF.getContext(), NestedNameSpecifierLoc(), SourceLocation(), tmpArg,
  5819. /*enclosing*/ false, tmpArg->getLocation(), ParamTy,
  5820. isAggregateType ? VK_RValue : VK_LValue);
  5821. // update the arg
  5822. argList[i] = tmpRef;
  5823. // create alloc for the tmp arg
  5824. Value *tmpArgAddr = nullptr;
  5825. BasicBlock *InsertBlock = CGF.Builder.GetInsertBlock();
  5826. Function *F = InsertBlock->getParent();
  5827. BasicBlock *EntryBlock = &F->getEntryBlock();
  5828. if (ParamTy->isBooleanType()) {
  5829. // Create i32 for bool.
  5830. ParamTy = CGM.getContext().IntTy;
  5831. }
  5832. // Make sure the alloca is in entry block to stop inline create stacksave.
  5833. IRBuilder<> Builder(EntryBlock->getFirstInsertionPt());
  5834. tmpArgAddr = Builder.CreateAlloca(CGF.ConvertType(ParamTy));
  5835. // add it to local decl map
  5836. TmpArgMap(tmpArg, tmpArgAddr);
  5837. LValue tmpLV = LValue::MakeAddr(tmpArgAddr, ParamTy, argLV.getAlignment(),
  5838. CGF.getContext());
  5839. // save for cast after call
  5840. if (Param->isModifierOut()) {
  5841. castArgList.emplace_back(tmpLV);
  5842. castArgList.emplace_back(argLV);
  5843. }
  5844. bool isObject = HLModule::IsHLSLObjectType(
  5845. tmpArgAddr->getType()->getPointerElementType());
  5846. // cast before the call
  5847. if (Param->isModifierIn() &&
  5848. // Don't copy object
  5849. !isObject) {
  5850. QualType ArgTy = Arg->getType();
  5851. Value *outVal = nullptr;
  5852. bool isAggrageteTy = ParamTy->isAggregateType();
  5853. isAggrageteTy &= !IsHLSLVecMatType(ParamTy);
  5854. if (!isAggrageteTy) {
  5855. if (!IsHLSLMatType(ParamTy)) {
  5856. RValue outRVal = CGF.EmitLoadOfLValue(argLV, SourceLocation());
  5857. outVal = outRVal.getScalarVal();
  5858. } else {
  5859. Value *argAddr = argLV.getAddress();
  5860. outVal = EmitHLSLMatrixLoad(CGF, argAddr, ArgTy);
  5861. }
  5862. llvm::Type *ToTy = tmpArgAddr->getType()->getPointerElementType();
  5863. Instruction::CastOps castOp =
  5864. static_cast<Instruction::CastOps>(HLModule::FindCastOp(
  5865. IsUnsigned(argLV.getType()), IsUnsigned(tmpLV.getType()),
  5866. outVal->getType(), ToTy));
  5867. Value *castVal = CGF.Builder.CreateCast(castOp, outVal, ToTy);
  5868. if (!HLMatrixLower::IsMatrixType(ToTy))
  5869. CGF.Builder.CreateStore(castVal, tmpArgAddr);
  5870. else
  5871. EmitHLSLMatrixStore(CGF, castVal, tmpArgAddr, ParamTy);
  5872. } else {
  5873. SmallVector<Value *, 4> idxList;
  5874. EmitHLSLAggregateCopy(CGF, argLV.getAddress(), tmpLV.getAddress(),
  5875. idxList, ArgTy, ParamTy,
  5876. argLV.getAddress()->getType());
  5877. }
  5878. }
  5879. }
  5880. }
  5881. void CGMSHLSLRuntime::EmitHLSLOutParamConversionCopyBack(
  5882. CodeGenFunction &CGF, llvm::SmallVector<LValue, 8> &castArgList) {
  5883. for (uint32_t i = 0; i < castArgList.size(); i += 2) {
  5884. // cast after the call
  5885. LValue tmpLV = castArgList[i];
  5886. LValue argLV = castArgList[i + 1];
  5887. QualType ArgTy = argLV.getType().getNonReferenceType();
  5888. QualType ParamTy = tmpLV.getType().getNonReferenceType();
  5889. Value *tmpArgAddr = tmpLV.getAddress();
  5890. Value *outVal = nullptr;
  5891. bool isAggrageteTy = ArgTy->isAggregateType();
  5892. isAggrageteTy &= !IsHLSLVecMatType(ArgTy);
  5893. bool isObject = HLModule::IsHLSLObjectType(
  5894. tmpArgAddr->getType()->getPointerElementType());
  5895. if (!isObject) {
  5896. if (!isAggrageteTy) {
  5897. if (!IsHLSLMatType(ParamTy))
  5898. outVal = CGF.Builder.CreateLoad(tmpArgAddr);
  5899. else
  5900. outVal = EmitHLSLMatrixLoad(CGF, tmpArgAddr, ParamTy);
  5901. llvm::Type *ToTy = CGF.ConvertType(ArgTy);
  5902. llvm::Type *FromTy = outVal->getType();
  5903. Value *castVal = outVal;
  5904. if (ToTy == FromTy) {
  5905. // Don't need cast.
  5906. } else if (ToTy->getScalarType() == FromTy->getScalarType()) {
  5907. if (ToTy->getScalarType() == ToTy) {
  5908. DXASSERT(FromTy->isVectorTy() &&
  5909. FromTy->getVectorNumElements() == 1,
  5910. "must be vector of 1 element");
  5911. castVal = CGF.Builder.CreateExtractElement(outVal, (uint64_t)0);
  5912. } else {
  5913. DXASSERT(!FromTy->isVectorTy(), "must be scalar type");
  5914. DXASSERT(ToTy->isVectorTy() && ToTy->getVectorNumElements() == 1,
  5915. "must be vector of 1 element");
  5916. castVal = UndefValue::get(ToTy);
  5917. castVal =
  5918. CGF.Builder.CreateInsertElement(castVal, outVal, (uint64_t)0);
  5919. }
  5920. } else {
  5921. Instruction::CastOps castOp =
  5922. static_cast<Instruction::CastOps>(HLModule::FindCastOp(
  5923. IsUnsigned(tmpLV.getType()), IsUnsigned(argLV.getType()),
  5924. outVal->getType(), ToTy));
  5925. castVal = CGF.Builder.CreateCast(castOp, outVal, ToTy);
  5926. }
  5927. if (!HLMatrixLower::IsMatrixType(ToTy))
  5928. CGF.EmitStoreThroughLValue(RValue::get(castVal), argLV);
  5929. else {
  5930. Value *destPtr = argLV.getAddress();
  5931. EmitHLSLMatrixStore(CGF, castVal, destPtr, ArgTy);
  5932. }
  5933. } else {
  5934. SmallVector<Value *, 4> idxList;
  5935. EmitHLSLAggregateCopy(CGF, tmpLV.getAddress(), argLV.getAddress(),
  5936. idxList, ParamTy, ArgTy,
  5937. argLV.getAddress()->getType());
  5938. }
  5939. } else
  5940. tmpArgAddr->replaceAllUsesWith(argLV.getAddress());
  5941. }
  5942. }
  5943. CGHLSLRuntime *CodeGen::CreateMSHLSLRuntime(CodeGenModule &CGM) {
  5944. return new CGMSHLSLRuntime(CGM);
  5945. }