llvm_backend_expr.cpp 142 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045104610471048104910501051105210531054105510561057105810591060106110621063106410651066106710681069107010711072107310741075107610771078107910801081108210831084108510861087108810891090109110921093109410951096109710981099110011011102110311041105110611071108110911101111111211131114111511161117111811191120112111221123112411251126112711281129113011311132113311341135113611371138113911401141114211431144114511461147114811491150115111521153115411551156115711581159116011611162116311641165116611671168116911701171117211731174117511761177117811791180118111821183118411851186118711881189119011911192119311941195119611971198119912001201120212031204120512061207120812091210121112121213121412151216121712181219122012211222122312241225122612271228122912301231123212331234123512361237123812391240124112421243124412451246124712481249125012511252125312541255125612571258125912601261126212631264126512661267126812691270127112721273127412751276127
71278127912801281128212831284128512861287128812891290129112921293129412951296129712981299130013011302130313041305130613071308130913101311131213131314131513161317131813191320132113221323132413251326132713281329133013311332133313341335133613371338133913401341134213431344134513461347134813491350135113521353135413551356135713581359136013611362136313641365136613671368136913701371137213731374137513761377137813791380138113821383138413851386138713881389139013911392139313941395139613971398139914001401140214031404140514061407140814091410141114121413141414151416141714181419142014211422142314241425142614271428142914301431143214331434143514361437143814391440144114421443144414451446144714481449145014511452145314541455145614571458145914601461146214631464146514661467146814691470147114721473147414751476147714781479148014811482148314841485148614871488148914901491149214931494149514961497149814991500150115021503150415051506150715081509151015111512151315141515151615171518151915201521152215231524152515261527152815291530153115321533153415351536153715381539154015411542154315441545154615471548154915501551155215531554155515561557155815591560156115621563156415651566156715681569157015711572157315741575157615771578157915801581158215831584158515861587158815891590159115921593159415951596159715981599160016011602160316041605160616071608160916101611161216131614161516161617161816191620162116221623162416251626162716281629163016311632163316341635163616371638163916401641164216431644164516461647164816491650165116521653165416551656165716581659166016611662166316641665166616671668166916701671167216731674167516761677167816791680168116821683168416851686168716881689169016911692169316941695169616971698169917001701170217031704170517061707170817091710171117121713171417151716171717181719172017211722172317241725172617271728172917301731173217331734173517361737173817391740174117421743174417451746174717481749175017511752175317541755175617571758175917601761176217631764176517661767176817691770177117721773177417751776177
71778177917801781178217831784178517861787178817891790179117921793179417951796179717981799180018011802180318041805180618071808180918101811181218131814181518161817181818191820182118221823182418251826182718281829183018311832183318341835183618371838183918401841184218431844184518461847184818491850185118521853185418551856185718581859186018611862186318641865186618671868186918701871187218731874187518761877187818791880188118821883188418851886188718881889189018911892189318941895189618971898189919001901190219031904190519061907190819091910191119121913191419151916191719181919192019211922192319241925192619271928192919301931193219331934193519361937193819391940194119421943194419451946194719481949195019511952195319541955195619571958195919601961196219631964196519661967196819691970197119721973197419751976197719781979198019811982198319841985198619871988198919901991199219931994199519961997199819992000200120022003200420052006200720082009201020112012201320142015201620172018201920202021202220232024202520262027202820292030203120322033203420352036203720382039204020412042204320442045204620472048204920502051205220532054205520562057205820592060206120622063206420652066206720682069207020712072207320742075207620772078207920802081208220832084208520862087208820892090209120922093209420952096209720982099210021012102210321042105210621072108210921102111211221132114211521162117211821192120212121222123212421252126212721282129213021312132213321342135213621372138213921402141214221432144214521462147214821492150215121522153215421552156215721582159216021612162216321642165216621672168216921702171217221732174217521762177217821792180218121822183218421852186218721882189219021912192219321942195219621972198219922002201220222032204220522062207220822092210221122122213221422152216221722182219222022212222222322242225222622272228222922302231223222332234223522362237223822392240224122422243224422452246224722482249225022512252225322542255225622572258225922602261226222632264226522662267226822692270227122722273227422752276227
72278227922802281228222832284228522862287228822892290229122922293229422952296229722982299230023012302230323042305230623072308230923102311231223132314231523162317231823192320232123222323232423252326232723282329233023312332233323342335233623372338233923402341234223432344234523462347234823492350235123522353235423552356235723582359236023612362236323642365236623672368236923702371237223732374237523762377237823792380238123822383238423852386238723882389239023912392239323942395239623972398239924002401240224032404240524062407240824092410241124122413241424152416241724182419242024212422242324242425242624272428242924302431243224332434243524362437243824392440244124422443244424452446244724482449245024512452245324542455245624572458245924602461246224632464246524662467246824692470247124722473247424752476247724782479248024812482248324842485248624872488248924902491249224932494249524962497249824992500250125022503250425052506250725082509251025112512251325142515251625172518251925202521252225232524252525262527252825292530253125322533253425352536253725382539254025412542254325442545254625472548254925502551255225532554255525562557255825592560256125622563256425652566256725682569257025712572257325742575257625772578257925802581258225832584258525862587258825892590259125922593259425952596259725982599260026012602260326042605260626072608260926102611261226132614261526162617261826192620262126222623262426252626262726282629263026312632263326342635263626372638263926402641264226432644264526462647264826492650265126522653265426552656265726582659266026612662266326642665266626672668266926702671267226732674267526762677267826792680268126822683268426852686268726882689269026912692269326942695269626972698269927002701270227032704270527062707270827092710271127122713271427152716271727182719272027212722272327242725272627272728272927302731273227332734273527362737273827392740274127422743274427452746274727482749275027512752275327542755275627572758275927602761276227632764276527662767276827692770277127722773277427752776277
72778277927802781278227832784278527862787278827892790279127922793279427952796279727982799280028012802280328042805280628072808280928102811281228132814281528162817281828192820282128222823282428252826282728282829283028312832283328342835283628372838283928402841284228432844284528462847284828492850285128522853285428552856285728582859286028612862286328642865286628672868286928702871287228732874287528762877287828792880288128822883288428852886288728882889289028912892289328942895289628972898289929002901290229032904290529062907290829092910291129122913291429152916291729182919292029212922292329242925292629272928292929302931293229332934293529362937293829392940294129422943294429452946294729482949295029512952295329542955295629572958295929602961296229632964296529662967296829692970297129722973297429752976297729782979298029812982298329842985298629872988298929902991299229932994299529962997299829993000300130023003300430053006300730083009301030113012301330143015301630173018301930203021302230233024302530263027302830293030303130323033303430353036303730383039304030413042304330443045304630473048304930503051305230533054305530563057305830593060306130623063306430653066306730683069307030713072307330743075307630773078307930803081308230833084308530863087308830893090309130923093309430953096309730983099310031013102310331043105310631073108310931103111311231133114311531163117311831193120312131223123312431253126312731283129313031313132313331343135313631373138313931403141314231433144314531463147314831493150315131523153315431553156315731583159316031613162316331643165316631673168316931703171317231733174317531763177317831793180318131823183318431853186318731883189319031913192319331943195319631973198319932003201320232033204320532063207320832093210321132123213321432153216321732183219322032213222322332243225322632273228322932303231323232333234323532363237323832393240324132423243324432453246324732483249325032513252325332543255325632573258325932603261326232633264326532663267326832693270327132723273327432753276327
73278327932803281328232833284328532863287328832893290329132923293329432953296329732983299330033013302330333043305330633073308330933103311331233133314331533163317331833193320332133223323332433253326332733283329333033313332333333343335333633373338333933403341334233433344334533463347334833493350335133523353335433553356335733583359336033613362336333643365336633673368336933703371337233733374337533763377337833793380338133823383338433853386338733883389339033913392339333943395339633973398339934003401340234033404340534063407340834093410341134123413341434153416341734183419342034213422342334243425342634273428342934303431343234333434343534363437343834393440344134423443344434453446344734483449345034513452345334543455345634573458345934603461346234633464346534663467346834693470347134723473347434753476347734783479348034813482348334843485348634873488348934903491349234933494349534963497349834993500350135023503350435053506350735083509351035113512351335143515351635173518351935203521352235233524352535263527352835293530353135323533353435353536353735383539354035413542354335443545354635473548354935503551355235533554355535563557355835593560356135623563356435653566356735683569357035713572357335743575357635773578357935803581358235833584358535863587358835893590359135923593359435953596359735983599360036013602360336043605360636073608360936103611361236133614361536163617361836193620362136223623362436253626362736283629363036313632363336343635363636373638363936403641364236433644364536463647364836493650365136523653365436553656365736583659366036613662366336643665366636673668366936703671367236733674367536763677367836793680368136823683368436853686368736883689369036913692369336943695369636973698369937003701370237033704370537063707370837093710371137123713371437153716371737183719372037213722372337243725372637273728372937303731373237333734373537363737373837393740374137423743374437453746374737483749375037513752375337543755375637573758375937603761376237633764376537663767376837693770377137723773377437753776377
73778377937803781378237833784378537863787378837893790379137923793379437953796379737983799380038013802380338043805380638073808380938103811381238133814381538163817381838193820382138223823382438253826382738283829383038313832383338343835383638373838383938403841384238433844384538463847384838493850385138523853385438553856385738583859386038613862386338643865386638673868386938703871387238733874387538763877387838793880388138823883388438853886388738883889389038913892389338943895389638973898389939003901390239033904390539063907390839093910391139123913391439153916391739183919392039213922392339243925392639273928392939303931393239333934393539363937393839393940394139423943394439453946394739483949395039513952395339543955395639573958395939603961396239633964396539663967396839693970397139723973397439753976397739783979398039813982398339843985398639873988398939903991399239933994399539963997399839994000400140024003400440054006400740084009401040114012401340144015401640174018401940204021402240234024402540264027402840294030403140324033403440354036403740384039404040414042404340444045404640474048404940504051405240534054405540564057405840594060406140624063406440654066406740684069407040714072407340744075407640774078407940804081408240834084408540864087408840894090409140924093409440954096409740984099410041014102410341044105410641074108410941104111411241134114411541164117411841194120412141224123412441254126412741284129413041314132413341344135413641374138413941404141414241434144414541464147414841494150415141524153415441554156415741584159416041614162416341644165416641674168416941704171417241734174417541764177417841794180418141824183418441854186418741884189419041914192419341944195419641974198419942004201420242034204420542064207420842094210421142124213421442154216421742184219422042214222422342244225422642274228422942304231423242334234423542364237423842394240424142424243424442454246424742484249425042514252425342544255425642574258425942604261426242634264426542664267426842694270427142724273427442754276427
742784279428042814282428342844285428642874288428942904291429242934294429542964297429842994300430143024303430443054306430743084309431043114312431343144315431643174318431943204321432243234324432543264327432843294330433143324333433443354336433743384339434043414342434343444345434643474348434943504351435243534354435543564357435843594360436143624363436443654366436743684369437043714372437343744375437643774378437943804381438243834384438543864387438843894390439143924393439443954396439743984399440044014402440344044405440644074408440944104411441244134414441544164417441844194420442144224423442444254426442744284429443044314432443344344435443644374438443944404441444244434444444544464447444844494450445144524453445444554456445744584459446044614462446344644465446644674468446944704471447244734474447544764477447844794480448144824483448444854486448744884489449044914492449344944495449644974498449945004501450245034504450545064507450845094510451145124513451445154516451745184519452045214522452345244525452645274528452945304531453245334534453545364537453845394540454145424543454445454546454745484549455045514552455345544555455645574558455945604561456245634564456545664567456845694570457145724573457445754576457745784579458045814582458345844585458645874588458945904591459245934594459545964597459845994600460146024603460446054606460746084609461046114612461346144615461646174618461946204621462246234624462546264627462846294630463146324633
  1. lbValue lb_emit_logical_binary_expr(lbProcedure *p, TokenKind op, Ast *left, Ast *right, Type *type) {
  2. lbModule *m = p->module;
  3. lbBlock *rhs = lb_create_block(p, "logical.cmp.rhs");
  4. lbBlock *done = lb_create_block(p, "logical.cmp.done");
  5. type = default_type(type);
  6. lbValue short_circuit = {};
  7. if (op == Token_CmpAnd) {
  8. lb_build_cond(p, left, rhs, done);
  9. short_circuit = lb_const_bool(m, type, false);
  10. } else if (op == Token_CmpOr) {
  11. lb_build_cond(p, left, done, rhs);
  12. short_circuit = lb_const_bool(m, type, true);
  13. }
  14. if (rhs->preds.count == 0) {
  15. lb_start_block(p, done);
  16. return short_circuit;
  17. }
  18. if (done->preds.count == 0) {
  19. lb_start_block(p, rhs);
  20. if (lb_is_expr_untyped_const(right)) {
  21. return lb_expr_untyped_const_to_typed(m, right, type);
  22. }
  23. return lb_build_expr(p, right);
  24. }
  25. Array<LLVMValueRef> incoming_values = {};
  26. Array<LLVMBasicBlockRef> incoming_blocks = {};
  27. array_init(&incoming_values, heap_allocator(), done->preds.count+1);
  28. array_init(&incoming_blocks, heap_allocator(), done->preds.count+1);
  29. for_array(i, done->preds) {
  30. incoming_values[i] = short_circuit.value;
  31. incoming_blocks[i] = done->preds[i]->block;
  32. }
  33. lb_start_block(p, rhs);
  34. lbValue edge = {};
  35. if (lb_is_expr_untyped_const(right)) {
  36. edge = lb_expr_untyped_const_to_typed(m, right, type);
  37. } else {
  38. edge = lb_build_expr(p, right);
  39. }
  40. incoming_values[done->preds.count] = edge.value;
  41. incoming_blocks[done->preds.count] = p->curr_block->block;
  42. lb_emit_jump(p, done);
  43. lb_start_block(p, done);
  44. LLVMTypeRef dst_type = lb_type(m, type);
  45. LLVMValueRef phi = nullptr;
  46. GB_ASSERT(incoming_values.count == incoming_blocks.count);
  47. GB_ASSERT(incoming_values.count > 0);
  48. LLVMTypeRef phi_type = nullptr;
  49. for_array(i, incoming_values) {
  50. LLVMValueRef incoming_value = incoming_values[i];
  51. if (!LLVMIsConstant(incoming_value)) {
  52. phi_type = LLVMTypeOf(incoming_value);
  53. break;
  54. }
  55. }
  56. if (phi_type == nullptr) {
  57. phi = LLVMBuildPhi(p->builder, dst_type, "");
  58. LLVMAddIncoming(phi, incoming_values.data, incoming_blocks.data, cast(unsigned)incoming_values.count);
  59. lbValue res = {};
  60. res.type = type;
  61. res.value = phi;
  62. return res;
  63. }
  64. for_array(i, incoming_values) {
  65. LLVMValueRef incoming_value = incoming_values[i];
  66. LLVMTypeRef incoming_type = LLVMTypeOf(incoming_value);
  67. if (phi_type != incoming_type) {
  68. GB_ASSERT_MSG(LLVMIsConstant(incoming_value), "%s vs %s", LLVMPrintTypeToString(phi_type), LLVMPrintTypeToString(incoming_type));
  69. bool ok = !!LLVMConstIntGetZExtValue(incoming_value);
  70. incoming_values[i] = LLVMConstInt(phi_type, ok, false);
  71. }
  72. }
  73. phi = LLVMBuildPhi(p->builder, phi_type, "");
  74. LLVMAddIncoming(phi, incoming_values.data, incoming_blocks.data, cast(unsigned)incoming_values.count);
  75. LLVMTypeRef i1 = LLVMInt1TypeInContext(m->ctx);
  76. if ((phi_type == i1) ^ (dst_type == i1)) {
  77. if (phi_type == i1) {
  78. phi = LLVMBuildZExt(p->builder, phi, dst_type, "");
  79. } else {
  80. phi = LLVMBuildTruncOrBitCast(p->builder, phi, dst_type, "");
  81. }
  82. } else if (lb_sizeof(phi_type) < lb_sizeof(dst_type)) {
  83. phi = LLVMBuildZExt(p->builder, phi, dst_type, "");
  84. } else {
  85. phi = LLVMBuildTruncOrBitCast(p->builder, phi, dst_type, "");
  86. }
  87. lbValue res = {};
  88. res.type = type;
  89. res.value = phi;
  90. return res;
  91. }
  92. lbValue lb_emit_unary_arith(lbProcedure *p, TokenKind op, lbValue x, Type *type) {
  93. switch (op) {
  94. case Token_Add:
  95. return x;
  96. case Token_Not: // Boolean not
  97. case Token_Xor: // Bitwise not
  98. case Token_Sub: // Number negation
  99. break;
  100. case Token_Pointer:
  101. GB_PANIC("This should be handled elsewhere");
  102. break;
  103. }
  104. if (is_type_array_like(x.type)) {
  105. // IMPORTANT TODO(bill): This is very wasteful with regards to stack memory
  106. Type *tl = base_type(x.type);
  107. lbValue val = lb_address_from_load_or_generate_local(p, x);
  108. GB_ASSERT(is_type_array_like(type));
  109. Type *elem_type = base_array_type(type);
  110. // NOTE(bill): Doesn't need to be zero because it will be initialized in the loops
  111. lbAddr res_addr = lb_add_local(p, type, nullptr, false, 0, true);
  112. lbValue res = lb_addr_get_ptr(p, res_addr);
  113. bool inline_array_arith = type_size_of(type) <= build_context.max_align;
  114. i32 count = cast(i32)get_array_type_count(tl);
  115. LLVMTypeRef vector_type = nullptr;
  116. if (op != Token_Not && lb_try_vector_cast(p->module, val, &vector_type)) {
  117. LLVMValueRef vp = LLVMBuildPointerCast(p->builder, val.value, LLVMPointerType(vector_type, 0), "");
  118. LLVMValueRef v = LLVMBuildLoad2(p->builder, vector_type, vp, "");
  119. LLVMValueRef opv = nullptr;
  120. switch (op) {
  121. case Token_Xor:
  122. opv = LLVMBuildNot(p->builder, v, "");
  123. break;
  124. case Token_Sub:
  125. if (is_type_float(elem_type)) {
  126. opv = LLVMBuildFNeg(p->builder, v, "");
  127. } else {
  128. opv = LLVMBuildNeg(p->builder, v, "");
  129. }
  130. break;
  131. }
  132. if (opv != nullptr) {
  133. LLVMSetAlignment(res.value, cast(unsigned)lb_alignof(vector_type));
  134. LLVMValueRef res_ptr = LLVMBuildPointerCast(p->builder, res.value, LLVMPointerType(vector_type, 0), "");
  135. LLVMBuildStore(p->builder, opv, res_ptr);
  136. return lb_emit_conv(p, lb_emit_load(p, res), type);
  137. }
  138. }
  139. if (inline_array_arith) {
  140. // inline
  141. for (i32 i = 0; i < count; i++) {
  142. lbValue e = lb_emit_load(p, lb_emit_array_epi(p, val, i));
  143. lbValue z = lb_emit_unary_arith(p, op, e, elem_type);
  144. lb_emit_store(p, lb_emit_array_epi(p, res, i), z);
  145. }
  146. } else {
  147. auto loop_data = lb_loop_start(p, count, t_i32);
  148. lbValue e = lb_emit_load(p, lb_emit_array_ep(p, val, loop_data.idx));
  149. lbValue z = lb_emit_unary_arith(p, op, e, elem_type);
  150. lb_emit_store(p, lb_emit_array_ep(p, res, loop_data.idx), z);
  151. lb_loop_end(p, loop_data);
  152. }
  153. return lb_emit_load(p, res);
  154. }
  155. if (op == Token_Xor) {
  156. lbValue cmp = {};
  157. cmp.value = LLVMBuildNot(p->builder, x.value, "");
  158. cmp.type = x.type;
  159. return lb_emit_conv(p, cmp, type);
  160. }
  161. if (op == Token_Not) {
  162. lbValue cmp = {};
  163. LLVMValueRef zero = LLVMConstInt(lb_type(p->module, x.type), 0, false);
  164. cmp.value = LLVMBuildICmp(p->builder, LLVMIntEQ, x.value, zero, "");
  165. cmp.type = t_llvm_bool;
  166. return lb_emit_conv(p, cmp, type);
  167. }
  168. if (op == Token_Sub && is_type_integer(type) && is_type_different_to_arch_endianness(type)) {
  169. Type *platform_type = integer_endian_type_to_platform_type(type);
  170. lbValue v = lb_emit_byte_swap(p, x, platform_type);
  171. lbValue res = {};
  172. res.value = LLVMBuildNeg(p->builder, v.value, "");
  173. res.type = platform_type;
  174. return lb_emit_byte_swap(p, res, type);
  175. }
  176. if (op == Token_Sub && is_type_float(type) && is_type_different_to_arch_endianness(type)) {
  177. Type *platform_type = integer_endian_type_to_platform_type(type);
  178. lbValue v = lb_emit_byte_swap(p, x, platform_type);
  179. lbValue res = {};
  180. res.value = LLVMBuildFNeg(p->builder, v.value, "");
  181. res.type = platform_type;
  182. return lb_emit_byte_swap(p, res, type);
  183. }
  184. lbValue res = {};
  185. switch (op) {
  186. case Token_Not: // Boolean not
  187. case Token_Xor: // Bitwise not
  188. res.value = LLVMBuildNot(p->builder, x.value, "");
  189. res.type = x.type;
  190. return res;
  191. case Token_Sub: // Number negation
  192. if (is_type_integer(x.type)) {
  193. res.value = LLVMBuildNeg(p->builder, x.value, "");
  194. } else if (is_type_float(x.type)) {
  195. res.value = LLVMBuildFNeg(p->builder, x.value, "");
  196. } else if (is_type_complex(x.type)) {
  197. LLVMValueRef v0 = LLVMBuildFNeg(p->builder, LLVMBuildExtractValue(p->builder, x.value, 0, ""), "");
  198. LLVMValueRef v1 = LLVMBuildFNeg(p->builder, LLVMBuildExtractValue(p->builder, x.value, 1, ""), "");
  199. lbAddr addr = lb_add_local_generated(p, x.type, false);
  200. LLVMBuildStore(p->builder, v0, LLVMBuildStructGEP(p->builder, addr.addr.value, 0, ""));
  201. LLVMBuildStore(p->builder, v1, LLVMBuildStructGEP(p->builder, addr.addr.value, 1, ""));
  202. return lb_addr_load(p, addr);
  203. } else if (is_type_quaternion(x.type)) {
  204. LLVMValueRef v0 = LLVMBuildFNeg(p->builder, LLVMBuildExtractValue(p->builder, x.value, 0, ""), "");
  205. LLVMValueRef v1 = LLVMBuildFNeg(p->builder, LLVMBuildExtractValue(p->builder, x.value, 1, ""), "");
  206. LLVMValueRef v2 = LLVMBuildFNeg(p->builder, LLVMBuildExtractValue(p->builder, x.value, 2, ""), "");
  207. LLVMValueRef v3 = LLVMBuildFNeg(p->builder, LLVMBuildExtractValue(p->builder, x.value, 3, ""), "");
  208. lbAddr addr = lb_add_local_generated(p, x.type, false);
  209. LLVMBuildStore(p->builder, v0, LLVMBuildStructGEP(p->builder, addr.addr.value, 0, ""));
  210. LLVMBuildStore(p->builder, v1, LLVMBuildStructGEP(p->builder, addr.addr.value, 1, ""));
  211. LLVMBuildStore(p->builder, v2, LLVMBuildStructGEP(p->builder, addr.addr.value, 2, ""));
  212. LLVMBuildStore(p->builder, v3, LLVMBuildStructGEP(p->builder, addr.addr.value, 3, ""));
  213. return lb_addr_load(p, addr);
  214. } else {
  215. GB_PANIC("Unhandled type %s", type_to_string(x.type));
  216. }
  217. res.type = x.type;
  218. return res;
  219. }
  220. return res;
  221. }
  222. bool lb_try_direct_vector_arith(lbProcedure *p, TokenKind op, lbValue lhs, lbValue rhs, Type *type, lbValue *res_) {
  223. GB_ASSERT(is_type_array_like(type));
  224. Type *elem_type = base_array_type(type);
  225. // NOTE(bill): Shift operations cannot be easily dealt with due to Odin's semantics
  226. if (op == Token_Shl || op == Token_Shr) {
  227. return false;
  228. }
  229. if (!LLVMIsALoadInst(lhs.value) || !LLVMIsALoadInst(rhs.value)) {
  230. return false;
  231. }
  232. lbValue lhs_ptr = {};
  233. lbValue rhs_ptr = {};
  234. lhs_ptr.value = LLVMGetOperand(lhs.value, 0);
  235. lhs_ptr.type = alloc_type_pointer(lhs.type);
  236. rhs_ptr.value = LLVMGetOperand(rhs.value, 0);
  237. rhs_ptr.type = alloc_type_pointer(rhs.type);
  238. LLVMTypeRef vector_type0 = nullptr;
  239. LLVMTypeRef vector_type1 = nullptr;
  240. if (lb_try_vector_cast(p->module, lhs_ptr, &vector_type0) &&
  241. lb_try_vector_cast(p->module, rhs_ptr, &vector_type1)) {
  242. GB_ASSERT(vector_type0 == vector_type1);
  243. LLVMTypeRef vector_type = vector_type0;
  244. LLVMValueRef lhs_vp = LLVMBuildPointerCast(p->builder, lhs_ptr.value, LLVMPointerType(vector_type, 0), "");
  245. LLVMValueRef rhs_vp = LLVMBuildPointerCast(p->builder, rhs_ptr.value, LLVMPointerType(vector_type, 0), "");
  246. LLVMValueRef x = LLVMBuildLoad2(p->builder, vector_type, lhs_vp, "");
  247. LLVMValueRef y = LLVMBuildLoad2(p->builder, vector_type, rhs_vp, "");
  248. LLVMValueRef z = nullptr;
  249. Type *integral_type = base_type(elem_type);
  250. if (is_type_simd_vector(integral_type)) {
  251. integral_type = core_array_type(integral_type);
  252. }
  253. if (is_type_bit_set(integral_type)) {
  254. switch (op) {
  255. case Token_Add: op = Token_Or; break;
  256. case Token_Sub: op = Token_AndNot; break;
  257. }
  258. }
  259. if (is_type_float(integral_type)) {
  260. switch (op) {
  261. case Token_Add:
  262. z = LLVMBuildFAdd(p->builder, x, y, "");
  263. break;
  264. case Token_Sub:
  265. z = LLVMBuildFSub(p->builder, x, y, "");
  266. break;
  267. case Token_Mul:
  268. z = LLVMBuildFMul(p->builder, x, y, "");
  269. break;
  270. case Token_Quo:
  271. z = LLVMBuildFDiv(p->builder, x, y, "");
  272. break;
  273. case Token_Mod:
  274. z = LLVMBuildFRem(p->builder, x, y, "");
  275. break;
  276. default:
  277. GB_PANIC("Unsupported vector operation %.*s", LIT(token_strings[op]));
  278. break;
  279. }
  280. } else {
  281. switch (op) {
  282. case Token_Add:
  283. z = LLVMBuildAdd(p->builder, x, y, "");
  284. break;
  285. case Token_Sub:
  286. z = LLVMBuildSub(p->builder, x, y, "");
  287. break;
  288. case Token_Mul:
  289. z = LLVMBuildMul(p->builder, x, y, "");
  290. break;
  291. case Token_Quo:
  292. if (is_type_unsigned(integral_type)) {
  293. z = LLVMBuildUDiv(p->builder, x, y, "");
  294. } else {
  295. z = LLVMBuildSDiv(p->builder, x, y, "");
  296. }
  297. break;
  298. case Token_Mod:
  299. if (is_type_unsigned(integral_type)) {
  300. z = LLVMBuildURem(p->builder, x, y, "");
  301. } else {
  302. z = LLVMBuildSRem(p->builder, x, y, "");
  303. }
  304. break;
  305. case Token_ModMod:
  306. if (is_type_unsigned(integral_type)) {
  307. z = LLVMBuildURem(p->builder, x, y, "");
  308. } else {
  309. LLVMValueRef a = LLVMBuildSRem(p->builder, x, y, "");
  310. LLVMValueRef b = LLVMBuildAdd(p->builder, a, y, "");
  311. z = LLVMBuildSRem(p->builder, b, y, "");
  312. }
  313. break;
  314. case Token_And:
  315. z = LLVMBuildAnd(p->builder, x, y, "");
  316. break;
  317. case Token_AndNot:
  318. z = LLVMBuildAnd(p->builder, x, LLVMBuildNot(p->builder, y, ""), "");
  319. break;
  320. case Token_Or:
  321. z = LLVMBuildOr(p->builder, x, y, "");
  322. break;
  323. case Token_Xor:
  324. z = LLVMBuildXor(p->builder, x, y, "");
  325. break;
  326. default:
  327. GB_PANIC("Unsupported vector operation");
  328. break;
  329. }
  330. }
  331. if (z != nullptr) {
  332. lbAddr res = lb_add_local_generated_temp(p, type, lb_alignof(vector_type));
  333. LLVMValueRef vp = LLVMBuildPointerCast(p->builder, res.addr.value, LLVMPointerType(vector_type, 0), "");
  334. LLVMBuildStore(p->builder, z, vp);
  335. lbValue v = lb_addr_load(p, res);
  336. if (res_) *res_ = v;
  337. return true;
  338. }
  339. }
  340. return false;
  341. }
// Emits element-wise binary arithmetic over array-like operands.
// Both operands are converted to `type` first; if the whole operation can be
// lowered as a single LLVM vector instruction that fast path is taken,
// otherwise small arrays are fully unrolled and large ones use a runtime loop.
lbValue lb_emit_arith_array(lbProcedure *p, TokenKind op, lbValue lhs, lbValue rhs, Type *type) {
	GB_ASSERT(is_type_array_like(lhs.type) || is_type_array_like(rhs.type));

	lhs = lb_emit_conv(p, lhs, type);
	rhs = lb_emit_conv(p, rhs, type);

	GB_ASSERT(is_type_array_like(type));
	Type *elem_type = base_array_type(type);

	i64 count = get_array_type_count(type);
	unsigned n = cast(unsigned)count;

	// NOTE(bill, 2021-06-12): Try to do a direct operation as a vector, if possible
	lbValue direct_vector_res = {};
	if (lb_try_direct_vector_arith(p, op, lhs, rhs, type, &direct_vector_res)) {
		return direct_vector_res;
	}

	// Unroll only when the whole array fits within the maximum alignment;
	// beyond that, emit a counted loop instead of n straight-line ops.
	bool inline_array_arith = type_size_of(type) <= build_context.max_align;

	if (inline_array_arith) {
		auto dst_ptrs = slice_make<lbValue>(temporary_allocator(), n);

		auto a_loads = slice_make<lbValue>(temporary_allocator(), n);
		auto b_loads = slice_make<lbValue>(temporary_allocator(), n);
		auto c_ops = slice_make<lbValue>(temporary_allocator(), n);

		// Extract every element of both operands by value.
		for (unsigned i = 0; i < n; i++) {
			a_loads[i].value = LLVMBuildExtractValue(p->builder, lhs.value, i, "");
			a_loads[i].type = elem_type;
		}
		for (unsigned i = 0; i < n; i++) {
			b_loads[i].value = LLVMBuildExtractValue(p->builder, rhs.value, i, "");
			b_loads[i].type = elem_type;
		}
		// Apply the scalar operation element-wise.
		for (unsigned i = 0; i < n; i++) {
			c_ops[i] = lb_emit_arith(p, op, a_loads[i], b_loads[i], elem_type);
		}

		// Store the results into a generated local and load it back as a value.
		lbAddr res = lb_add_local_generated(p, type, false);
		for (unsigned i = 0; i < n; i++) {
			dst_ptrs[i] = lb_emit_array_epi(p, res.addr, i);
		}
		for (unsigned i = 0; i < n; i++) {
			lb_emit_store(p, dst_ptrs[i], c_ops[i]);
		}
		return lb_addr_load(p, res);
	} else {
		// Runtime loop: operate through memory, one element per iteration.
		lbValue x = lb_address_from_load_or_generate_local(p, lhs);
		lbValue y = lb_address_from_load_or_generate_local(p, rhs);

		lbAddr res = lb_add_local_generated(p, type, false);

		auto loop_data = lb_loop_start(p, cast(isize)count, t_i32);

		lbValue a_ptr = lb_emit_array_ep(p, x, loop_data.idx);
		lbValue b_ptr = lb_emit_array_ep(p, y, loop_data.idx);
		lbValue dst_ptr = lb_emit_array_ep(p, res.addr, loop_data.idx);

		lbValue a = lb_emit_load(p, a_ptr);
		lbValue b = lb_emit_load(p, b_ptr);
		lbValue c = lb_emit_arith(p, op, a, b, elem_type);

		lb_emit_store(p, dst_ptr, c);

		lb_loop_end(p, loop_data);

		return lb_addr_load(p, res);
	}
}
  396. bool lb_is_matrix_simdable(Type *t) {
  397. Type *mt = base_type(t);
  398. GB_ASSERT(mt->kind == Type_Matrix);
  399. Type *elem = core_type(mt->Matrix.elem);
  400. if (is_type_complex(elem)) {
  401. return false;
  402. }
  403. if (is_type_different_to_arch_endianness(elem)) {
  404. return false;
  405. }
  406. switch (build_context.metrics.arch) {
  407. default:
  408. return false;
  409. case TargetArch_amd64:
  410. case TargetArch_arm64:
  411. break;
  412. }
  413. if (elem->kind == Type_Basic) {
  414. switch (elem->Basic.kind) {
  415. case Basic_f16:
  416. case Basic_f16le:
  417. case Basic_f16be:
  418. switch (build_context.metrics.arch) {
  419. case TargetArch_amd64:
  420. return false;
  421. case TargetArch_arm64:
  422. // TODO(bill): determine when this is fine
  423. return true;
  424. case TargetArch_i386:
  425. case TargetArch_wasm32:
  426. case TargetArch_wasm64:
  427. return false;
  428. }
  429. }
  430. }
  431. return true;
  432. }
// Reinterprets a matrix value as one flat LLVM vector containing all of its
// internal elements, including any stride padding between columns.
LLVMValueRef lb_matrix_to_vector(lbProcedure *p, lbValue matrix) {
	Type *mt = base_type(matrix.type);
	GB_ASSERT(mt->kind == Type_Matrix);
	LLVMTypeRef elem_type = lb_type(p->module, mt->Matrix.elem);
	unsigned total_count = cast(unsigned)matrix_type_total_internal_elems(mt);
	LLVMTypeRef total_matrix_type = LLVMVectorType(elem_type, total_count);
#if 1
	// Take the matrix's address, cast the pointer to the flat vector type,
	// and load through it. The load keeps the matrix's own alignment.
	LLVMValueRef ptr = lb_address_from_load_or_generate_local(p, matrix).value;
	LLVMValueRef matrix_vector_ptr = LLVMBuildPointerCast(p->builder, ptr, LLVMPointerType(total_matrix_type, 0), "");
	LLVMValueRef matrix_vector = LLVMBuildLoad(p->builder, matrix_vector_ptr, "");
	LLVMSetAlignment(matrix_vector, cast(unsigned)type_align_of(mt));
	return matrix_vector;
#else
	// Alternative (disabled): direct value bitcast without a memory round-trip.
	LLVMValueRef matrix_vector = LLVMBuildBitCast(p->builder, matrix.value, total_matrix_type, "");
	return matrix_vector;
#endif
}
  450. LLVMValueRef lb_matrix_trimmed_vector_mask(lbProcedure *p, Type *mt) {
  451. mt = base_type(mt);
  452. GB_ASSERT(mt->kind == Type_Matrix);
  453. unsigned stride = cast(unsigned)matrix_type_stride_in_elems(mt);
  454. unsigned row_count = cast(unsigned)mt->Matrix.row_count;
  455. unsigned column_count = cast(unsigned)mt->Matrix.column_count;
  456. unsigned mask_elems_index = 0;
  457. auto mask_elems = slice_make<LLVMValueRef>(permanent_allocator(), row_count*column_count);
  458. for (unsigned j = 0; j < column_count; j++) {
  459. for (unsigned i = 0; i < row_count; i++) {
  460. unsigned offset = stride*j + i;
  461. mask_elems[mask_elems_index++] = lb_const_int(p->module, t_u32, offset).value;
  462. }
  463. }
  464. LLVMValueRef mask = LLVMConstVector(mask_elems.data, cast(unsigned)mask_elems.count);
  465. return mask;
  466. }
  467. LLVMValueRef lb_matrix_to_trimmed_vector(lbProcedure *p, lbValue m) {
  468. LLVMValueRef vector = lb_matrix_to_vector(p, m);
  469. Type *mt = base_type(m.type);
  470. GB_ASSERT(mt->kind == Type_Matrix);
  471. unsigned stride = cast(unsigned)matrix_type_stride_in_elems(mt);
  472. unsigned row_count = cast(unsigned)mt->Matrix.row_count;
  473. if (stride == row_count) {
  474. return vector;
  475. }
  476. LLVMValueRef mask = lb_matrix_trimmed_vector_mask(p, mt);
  477. LLVMValueRef trimmed_vector = llvm_basic_shuffle(p, vector, mask);
  478. return trimmed_vector;
  479. }
// Emits the transpose of matrix value `m` into result type `type`.
// Plain arrays are a no-op retype; SIMD-able matrices are transposed with
// shuffle masks; everything else copies element by element with swapped
// indices.
lbValue lb_emit_matrix_tranpose(lbProcedure *p, lbValue m, Type *type) {
	if (is_type_array(m.type)) {
		// no-op
		m.type = type;
		return m;
	}
	Type *mt = base_type(m.type);
	GB_ASSERT(mt->kind == Type_Matrix);

	if (lb_is_matrix_simdable(mt)) {
		unsigned stride = cast(unsigned)matrix_type_stride_in_elems(mt);
		unsigned row_count = cast(unsigned)mt->Matrix.row_count;
		unsigned column_count = cast(unsigned)mt->Matrix.column_count;

		auto rows = slice_make<LLVMValueRef>(permanent_allocator(), row_count);
		auto mask_elems = slice_make<LLVMValueRef>(permanent_allocator(), column_count);

		LLVMValueRef vector = lb_matrix_to_vector(p, m);
		// Gather each source row (strided through the column-major storage)
		// into a contiguous vector; these become the columns of the result.
		for (unsigned i = 0; i < row_count; i++) {
			for (unsigned j = 0; j < column_count; j++) {
				unsigned offset = stride*j + i;
				mask_elems[j] = lb_const_int(p->module, t_u32, offset).value;
			}

			// transpose mask
			LLVMValueRef mask = LLVMConstVector(mask_elems.data, column_count);
			LLVMValueRef row = llvm_basic_shuffle(p, vector, mask);
			rows[i] = row;
		}

		lbAddr res = lb_add_local_generated(p, type, true);
		for_array(i, rows) {
			LLVMValueRef row = rows[i];
			// Store gathered source row i as column i of the result: point at
			// element (0, i) and write the whole vector through a cast pointer.
			lbValue dst_row_ptr = lb_emit_matrix_epi(p, res.addr, 0, i);
			LLVMValueRef ptr = dst_row_ptr.value;
			ptr = LLVMBuildPointerCast(p->builder, ptr, LLVMPointerType(LLVMTypeOf(row), 0), "");
			LLVMBuildStore(p->builder, row, ptr);
		}

		return lb_addr_load(p, res);
	}

	// Generic path: element-wise copy with swapped indices.
	lbAddr res = lb_add_local_generated(p, type, true);

	i64 row_count = mt->Matrix.row_count;
	i64 column_count = mt->Matrix.column_count;
	for (i64 j = 0; j < column_count; j++) {
		for (i64 i = 0; i < row_count; i++) {
			lbValue src = lb_emit_matrix_ev(p, m, i, j);
			lbValue dst = lb_emit_matrix_epi(p, res.addr, j, i);
			lb_emit_store(p, dst, src);
		}
	}
	return lb_addr_load(p, res);
}
  527. lbValue lb_matrix_cast_vector_to_type(lbProcedure *p, LLVMValueRef vector, Type *type) {
  528. lbAddr res = lb_add_local_generated(p, type, true);
  529. LLVMValueRef res_ptr = res.addr.value;
  530. unsigned alignment = cast(unsigned)gb_max(type_align_of(type), lb_alignof(LLVMTypeOf(vector)));
  531. LLVMSetAlignment(res_ptr, alignment);
  532. res_ptr = LLVMBuildPointerCast(p->builder, res_ptr, LLVMPointerType(LLVMTypeOf(vector), 0), "");
  533. LLVMBuildStore(p->builder, vector, res_ptr);
  534. return lb_addr_load(p, res);
  535. }
  536. lbValue lb_emit_matrix_flatten(lbProcedure *p, lbValue m, Type *type) {
  537. if (is_type_array(m.type)) {
  538. // no-op
  539. m.type = type;
  540. return m;
  541. }
  542. Type *mt = base_type(m.type);
  543. GB_ASSERT(mt->kind == Type_Matrix);
  544. if (lb_is_matrix_simdable(mt)) {
  545. LLVMValueRef vector = lb_matrix_to_trimmed_vector(p, m);
  546. return lb_matrix_cast_vector_to_type(p, vector, type);
  547. }
  548. lbAddr res = lb_add_local_generated(p, type, true);
  549. i64 row_count = mt->Matrix.row_count;
  550. i64 column_count = mt->Matrix.column_count;
  551. for (i64 j = 0; j < column_count; j++) {
  552. for (i64 i = 0; i < row_count; i++) {
  553. lbValue src = lb_emit_matrix_ev(p, m, i, j);
  554. lbValue dst = lb_emit_array_epi(p, res.addr, i + j*row_count);
  555. lb_emit_store(p, dst, src);
  556. }
  557. }
  558. return lb_addr_load(p, res);
  559. }
  560. lbValue lb_emit_outer_product(lbProcedure *p, lbValue a, lbValue b, Type *type) {
  561. Type *mt = base_type(type);
  562. Type *at = base_type(a.type);
  563. Type *bt = base_type(b.type);
  564. GB_ASSERT(mt->kind == Type_Matrix);
  565. GB_ASSERT(at->kind == Type_Array);
  566. GB_ASSERT(bt->kind == Type_Array);
  567. i64 row_count = mt->Matrix.row_count;
  568. i64 column_count = mt->Matrix.column_count;
  569. GB_ASSERT(row_count == at->Array.count);
  570. GB_ASSERT(column_count == bt->Array.count);
  571. lbAddr res = lb_add_local_generated(p, type, true);
  572. for (i64 j = 0; j < column_count; j++) {
  573. for (i64 i = 0; i < row_count; i++) {
  574. lbValue x = lb_emit_struct_ev(p, a, cast(i32)i);
  575. lbValue y = lb_emit_struct_ev(p, b, cast(i32)j);
  576. lbValue src = lb_emit_arith(p, Token_Mul, x, y, mt->Matrix.elem);
  577. lbValue dst = lb_emit_matrix_epi(p, res.addr, i, j);
  578. lb_emit_store(p, dst, src);
  579. }
  580. }
  581. return lb_addr_load(p, res);
  582. }
// Emits matrix * matrix multiplication: (outer_rows x inner) * (inner x
// outer_columns). SIMD-able element types use shuffles plus vector dot
// products; otherwise a scalar triple loop with fused multiply-adds is used.
lbValue lb_emit_matrix_mul(lbProcedure *p, lbValue lhs, lbValue rhs, Type *type) {
	// TODO(bill): Handle edge case for f16 types on x86(-64) platforms
	Type *xt = base_type(lhs.type);
	Type *yt = base_type(rhs.type);

	GB_ASSERT(is_type_matrix(type));
	GB_ASSERT(is_type_matrix(xt));
	GB_ASSERT(is_type_matrix(yt));
	GB_ASSERT(xt->Matrix.column_count == yt->Matrix.row_count);
	GB_ASSERT(are_types_identical(xt->Matrix.elem, yt->Matrix.elem));

	Type *elem = xt->Matrix.elem;

	unsigned outer_rows = cast(unsigned)xt->Matrix.row_count;
	unsigned inner = cast(unsigned)xt->Matrix.column_count;
	unsigned outer_columns = cast(unsigned)yt->Matrix.column_count;

	if (lb_is_matrix_simdable(xt)) {
		unsigned x_stride = cast(unsigned)matrix_type_stride_in_elems(xt);
		unsigned y_stride = cast(unsigned)matrix_type_stride_in_elems(yt);

		auto x_rows = slice_make<LLVMValueRef>(permanent_allocator(), outer_rows);
		auto y_columns = slice_make<LLVMValueRef>(permanent_allocator(), outer_columns);

		LLVMValueRef x_vector = lb_matrix_to_vector(p, lhs);
		LLVMValueRef y_vector = lb_matrix_to_vector(p, rhs);

		// Gather each row of lhs out of its column-major storage.
		auto mask_elems = slice_make<LLVMValueRef>(permanent_allocator(), inner);
		for (unsigned i = 0; i < outer_rows; i++) {
			for (unsigned j = 0; j < inner; j++) {
				unsigned offset = x_stride*j + i;
				mask_elems[j] = lb_const_int(p->module, t_u32, offset).value;
			}

			// transpose mask
			LLVMValueRef mask = LLVMConstVector(mask_elems.data, inner);
			LLVMValueRef row = llvm_basic_shuffle(p, x_vector, mask);
			x_rows[i] = row;
		}

		// Columns of rhs are contiguous (stride apart), so an iota mask
		// starting at y_stride*i selects column i.
		for (unsigned i = 0; i < outer_columns; i++) {
			LLVMValueRef mask = llvm_mask_iota(p->module, y_stride*i, inner);
			LLVMValueRef column = llvm_basic_shuffle(p, y_vector, mask);
			y_columns[i] = column;
		}

		// result[i][j] = dot(row_i(lhs), column_j(rhs))
		lbAddr res = lb_add_local_generated(p, type, true);
		for_array(i, x_rows) {
			LLVMValueRef x_row = x_rows[i];
			for_array(j, y_columns) {
				LLVMValueRef y_column = y_columns[j];
				LLVMValueRef elem = llvm_vector_dot(p, x_row, y_column);
				lbValue dst = lb_emit_matrix_epi(p, res.addr, i, j);
				LLVMBuildStore(p->builder, elem, dst.value);
			}
		}

		return lb_addr_load(p, res);
	}

	{
		// Scalar path: gather the k-th operand pair first, then reduce with
		// fused multiply-adds into each destination cell.
		lbAddr res = lb_add_local_generated(p, type, true);

		auto inners = slice_make<lbValue[2]>(permanent_allocator(), inner);

		for (unsigned j = 0; j < outer_columns; j++) {
			for (unsigned i = 0; i < outer_rows; i++) {
				lbValue dst = lb_emit_matrix_epi(p, res.addr, i, j);
				for (unsigned k = 0; k < inner; k++) {
					inners[k][0] = lb_emit_matrix_ev(p, lhs, i, k);
					inners[k][1] = lb_emit_matrix_ev(p, rhs, k, j);
				}

				lbValue sum = lb_const_nil(p->module, elem);
				for (unsigned k = 0; k < inner; k++) {
					lbValue a = inners[k][0];
					lbValue b = inners[k][1];
					sum = lb_emit_mul_add(p, a, b, sum, elem);
				}
				lb_emit_store(p, dst, sum);
			}
		}

		return lb_addr_load(p, res);
	}
}
// Emits matrix * vector multiplication, producing an array-like value of
// `type`. SIMD path: result = sum over j of column_j(lhs) * splat(rhs[j]),
// built as a chain of vector multiply-adds.
lbValue lb_emit_matrix_mul_vector(lbProcedure *p, lbValue lhs, lbValue rhs, Type *type) {
	// TODO(bill): Handle edge case for f16 types on x86(-64) platforms
	Type *mt = base_type(lhs.type);
	Type *vt = base_type(rhs.type);

	GB_ASSERT(is_type_matrix(mt));
	GB_ASSERT(is_type_array_like(vt));

	i64 vector_count = get_array_type_count(vt);

	GB_ASSERT(mt->Matrix.column_count == vector_count);
	GB_ASSERT(are_types_identical(mt->Matrix.elem, base_array_type(vt)));

	Type *elem = mt->Matrix.elem;

	if (lb_is_matrix_simdable(mt)) {
		unsigned stride = cast(unsigned)matrix_type_stride_in_elems(mt);
		unsigned row_count = cast(unsigned)mt->Matrix.row_count;
		unsigned column_count = cast(unsigned)mt->Matrix.column_count;

		auto m_columns = slice_make<LLVMValueRef>(permanent_allocator(), column_count);
		auto v_rows = slice_make<LLVMValueRef>(permanent_allocator(), column_count);

		LLVMValueRef matrix_vector = lb_matrix_to_vector(p, lhs);

		// Columns are contiguous in the column-major storage; an iota mask
		// starting at stride*column_index selects each one.
		for (unsigned column_index = 0; column_index < column_count; column_index++) {
			LLVMValueRef mask = llvm_mask_iota(p->module, stride*column_index, row_count);
			LLVMValueRef column = llvm_basic_shuffle(p, matrix_vector, mask);
			m_columns[column_index] = column;
		}

		// Broadcast each vector element across a row_count-wide vector.
		for (unsigned row_index = 0; row_index < column_count; row_index++) {
			LLVMValueRef value = lb_emit_struct_ev(p, rhs, row_index).value;
			LLVMValueRef row = llvm_vector_broadcast(p, value, row_count);
			v_rows[row_index] = row;
		}

		GB_ASSERT(column_count > 0);

		// Reduce: column_0*splat_0 then fused multiply-add the rest.
		LLVMValueRef vector = nullptr;
		for (i64 i = 0; i < column_count; i++) {
			if (i == 0) {
				vector = llvm_vector_mul(p, m_columns[i], v_rows[i]);
			} else {
				vector = llvm_vector_mul_add(p, m_columns[i], v_rows[i], vector);
			}
		}

		return lb_matrix_cast_vector_to_type(p, vector, type);
	}

	// Scalar path: accumulate result[i] += lhs[i][j] * rhs[j] in memory.
	lbAddr res = lb_add_local_generated(p, type, true);

	for (i64 i = 0; i < mt->Matrix.row_count; i++) {
		for (i64 j = 0; j < mt->Matrix.column_count; j++) {
			lbValue dst = lb_emit_matrix_epi(p, res.addr, i, 0);
			lbValue d0 = lb_emit_load(p, dst);

			lbValue a = lb_emit_matrix_ev(p, lhs, i, j);
			lbValue b = lb_emit_struct_ev(p, rhs, cast(i32)j);
			lbValue c = lb_emit_mul_add(p, a, b, d0, elem);

			lb_emit_store(p, dst, c);
		}
	}

	return lb_addr_load(p, res);
}
  704. lbValue lb_emit_vector_mul_matrix(lbProcedure *p, lbValue lhs, lbValue rhs, Type *type) {
  705. // TODO(bill): Handle edge case for f16 types on x86(-64) platforms
  706. Type *mt = base_type(rhs.type);
  707. Type *vt = base_type(lhs.type);
  708. GB_ASSERT(is_type_matrix(mt));
  709. GB_ASSERT(is_type_array_like(vt));
  710. i64 vector_count = get_array_type_count(vt);
  711. GB_ASSERT(vector_count == mt->Matrix.row_count);
  712. GB_ASSERT(are_types_identical(mt->Matrix.elem, base_array_type(vt)));
  713. Type *elem = mt->Matrix.elem;
  714. if (lb_is_matrix_simdable(mt)) {
  715. unsigned stride = cast(unsigned)matrix_type_stride_in_elems(mt);
  716. unsigned row_count = cast(unsigned)mt->Matrix.row_count;
  717. unsigned column_count = cast(unsigned)mt->Matrix.column_count; gb_unused(column_count);
  718. auto m_columns = slice_make<LLVMValueRef>(permanent_allocator(), row_count);
  719. auto v_rows = slice_make<LLVMValueRef>(permanent_allocator(), row_count);
  720. LLVMValueRef matrix_vector = lb_matrix_to_vector(p, rhs);
  721. auto mask_elems = slice_make<LLVMValueRef>(permanent_allocator(), column_count);
  722. for (unsigned row_index = 0; row_index < row_count; row_index++) {
  723. for (unsigned column_index = 0; column_index < column_count; column_index++) {
  724. unsigned offset = row_index + column_index*stride;
  725. mask_elems[column_index] = lb_const_int(p->module, t_u32, offset).value;
  726. }
  727. // transpose mask
  728. LLVMValueRef mask = LLVMConstVector(mask_elems.data, column_count);
  729. LLVMValueRef column = llvm_basic_shuffle(p, matrix_vector, mask);
  730. m_columns[row_index] = column;
  731. }
  732. for (unsigned column_index = 0; column_index < row_count; column_index++) {
  733. LLVMValueRef value = lb_emit_struct_ev(p, lhs, column_index).value;
  734. LLVMValueRef row = llvm_vector_broadcast(p, value, column_count);
  735. v_rows[column_index] = row;
  736. }
  737. GB_ASSERT(row_count > 0);
  738. LLVMValueRef vector = nullptr;
  739. for (i64 i = 0; i < row_count; i++) {
  740. if (i == 0) {
  741. vector = llvm_vector_mul(p, v_rows[i], m_columns[i]);
  742. } else {
  743. vector = llvm_vector_mul_add(p, v_rows[i], m_columns[i], vector);
  744. }
  745. }
  746. lbAddr res = lb_add_local_generated(p, type, true);
  747. LLVMValueRef res_ptr = res.addr.value;
  748. unsigned alignment = cast(unsigned)gb_max(type_align_of(type), lb_alignof(LLVMTypeOf(vector)));
  749. LLVMSetAlignment(res_ptr, alignment);
  750. res_ptr = LLVMBuildPointerCast(p->builder, res_ptr, LLVMPointerType(LLVMTypeOf(vector), 0), "");
  751. LLVMBuildStore(p->builder, vector, res_ptr);
  752. return lb_addr_load(p, res);
  753. }
  754. lbAddr res = lb_add_local_generated(p, type, true);
  755. for (i64 j = 0; j < mt->Matrix.column_count; j++) {
  756. for (i64 k = 0; k < mt->Matrix.row_count; k++) {
  757. lbValue dst = lb_emit_matrix_epi(p, res.addr, 0, j);
  758. lbValue d0 = lb_emit_load(p, dst);
  759. lbValue a = lb_emit_struct_ev(p, lhs, cast(i32)k);
  760. lbValue b = lb_emit_matrix_ev(p, rhs, k, j);
  761. lbValue c = lb_emit_mul_add(p, a, b, d0, elem);
  762. lb_emit_store(p, dst, c);
  763. }
  764. }
  765. return lb_addr_load(p, res);
  766. }
// Dispatches binary arithmetic where at least one operand is a matrix.
// Token_Mul (unless component_wise) routes to the proper matrix/vector
// multiply; everything else is element-wise, done by retyping the matrices as
// flat arrays of their internal elements.
lbValue lb_emit_arith_matrix(lbProcedure *p, TokenKind op, lbValue lhs, lbValue rhs, Type *type, bool component_wise=false) {
	GB_ASSERT(is_type_matrix(lhs.type) || is_type_matrix(rhs.type));

	if (op == Token_Mul && !component_wise) {
		Type *xt = base_type(lhs.type);
		Type *yt = base_type(rhs.type);

		if (xt->kind == Type_Matrix) {
			if (yt->kind == Type_Matrix) {
				return lb_emit_matrix_mul(p, lhs, rhs, type);
			} else if (is_type_array_like(yt)) {
				return lb_emit_matrix_mul_vector(p, lhs, rhs, type);
			}
		} else if (is_type_array_like(xt)) {
			GB_ASSERT(yt->kind == Type_Matrix);
			return lb_emit_vector_mul_matrix(p, lhs, rhs, type);
		}
	} else {
		// Convert the non-matrix operand to the matrix operand's type.
		if (is_type_matrix(lhs.type)) {
			rhs = lb_emit_conv(p, rhs, lhs.type);
		} else {
			lhs = lb_emit_conv(p, lhs, rhs.type);
		}

		Type *xt = base_type(lhs.type);
		Type *yt = base_type(rhs.type);
		GB_ASSERT_MSG(are_types_identical(xt, yt), "%s %.*s %s", type_to_string(lhs.type), LIT(token_strings[op]), type_to_string(rhs.type));
		GB_ASSERT(xt->kind == Type_Matrix);

		// element-wise arithmetic
		// pretend it is an array
		lbValue array_lhs = lhs;
		lbValue array_rhs = rhs;
		Type *array_type = alloc_type_array(xt->Matrix.elem, matrix_type_total_internal_elems(xt));
		GB_ASSERT(type_size_of(array_type) == type_size_of(xt));

		array_lhs.type = array_type;
		array_rhs.type = array_type;

		if (token_is_comparison(op)) {
			lbValue res = lb_emit_comp(p, op, array_lhs, array_rhs);
			return lb_emit_conv(p, res, type);
		} else {
			lbValue array = lb_emit_arith(p, op, array_lhs, array_rhs, array_type);
			array.type = type;
			return array;
		}
	}

	// Only reachable for a non-component-wise Token_Mul whose operand shapes
	// were not handled above.
	GB_PANIC("TODO: lb_emit_arith_matrix");
	return {};
}
// Core binary arithmetic emitter. Dispatches array-like and matrix operands
// to their dedicated paths, expands complex/quaternion arithmetic (partly via
// runtime calls), normalizes non-native-endian operands, remaps bit-set +/-
// to or/and-not, then lowers the scalar (or #simd vector) operation to the
// matching LLVM instruction.
lbValue lb_emit_arith(lbProcedure *p, TokenKind op, lbValue lhs, lbValue rhs, Type *type) {
	if (is_type_array_like(lhs.type) || is_type_array_like(rhs.type)) {
		return lb_emit_arith_array(p, op, lhs, rhs, type);
	} else if (is_type_matrix(lhs.type) || is_type_matrix(rhs.type)) {
		return lb_emit_arith_matrix(p, op, lhs, rhs, type);
	} else if (is_type_complex(type)) {
		lhs = lb_emit_conv(p, lhs, type);
		rhs = lb_emit_conv(p, rhs, type);

		Type *ft = base_complex_elem_type(type);

		// Complex division is delegated to the runtime.
		if (op == Token_Quo) {
			auto args = array_make<lbValue>(permanent_allocator(), 2);
			args[0] = lhs;
			args[1] = rhs;

			switch (type_size_of(ft)) {
			case 4: return lb_emit_runtime_call(p, "quo_complex64", args);
			case 8: return lb_emit_runtime_call(p, "quo_complex128", args);
			default: GB_PANIC("Unknown float type"); break;
			}
		}

		lbAddr res = lb_add_local_generated(p, type, false); // NOTE: initialized in full later
		lbValue a = lb_emit_struct_ev(p, lhs, 0);
		lbValue b = lb_emit_struct_ev(p, lhs, 1);
		lbValue c = lb_emit_struct_ev(p, rhs, 0);
		lbValue d = lb_emit_struct_ev(p, rhs, 1);

		lbValue real = {};
		lbValue imag = {};

		switch (op) {
		case Token_Add:
			real = lb_emit_arith(p, Token_Add, a, c, ft);
			imag = lb_emit_arith(p, Token_Add, b, d, ft);
			break;
		case Token_Sub:
			real = lb_emit_arith(p, Token_Sub, a, c, ft);
			imag = lb_emit_arith(p, Token_Sub, b, d, ft);
			break;
		case Token_Mul: {
			// (a+bi)(c+di) = (ac - bd) + (bc + ad)i
			lbValue x = lb_emit_arith(p, Token_Mul, a, c, ft);
			lbValue y = lb_emit_arith(p, Token_Mul, b, d, ft);
			real = lb_emit_arith(p, Token_Sub, x, y, ft);
			lbValue z = lb_emit_arith(p, Token_Mul, b, c, ft);
			lbValue w = lb_emit_arith(p, Token_Mul, a, d, ft);
			imag = lb_emit_arith(p, Token_Add, z, w, ft);
			break;
		}
		}

		lb_emit_store(p, lb_emit_struct_ep(p, res.addr, 0), real);
		lb_emit_store(p, lb_emit_struct_ep(p, res.addr, 1), imag);

		return lb_addr_load(p, res);
	} else if (is_type_quaternion(type)) {
		lhs = lb_emit_conv(p, lhs, type);
		rhs = lb_emit_conv(p, rhs, type);

		Type *ft = base_complex_elem_type(type);

		if (op == Token_Add || op == Token_Sub) {
			// Component-wise add/sub over the four scalar parts.
			lbAddr res = lb_add_local_generated(p, type, false); // NOTE: initialized in full later
			lbValue x0 = lb_emit_struct_ev(p, lhs, 0);
			lbValue x1 = lb_emit_struct_ev(p, lhs, 1);
			lbValue x2 = lb_emit_struct_ev(p, lhs, 2);
			lbValue x3 = lb_emit_struct_ev(p, lhs, 3);

			lbValue y0 = lb_emit_struct_ev(p, rhs, 0);
			lbValue y1 = lb_emit_struct_ev(p, rhs, 1);
			lbValue y2 = lb_emit_struct_ev(p, rhs, 2);
			lbValue y3 = lb_emit_struct_ev(p, rhs, 3);

			lbValue z0 = lb_emit_arith(p, op, x0, y0, ft);
			lbValue z1 = lb_emit_arith(p, op, x1, y1, ft);
			lbValue z2 = lb_emit_arith(p, op, x2, y2, ft);
			lbValue z3 = lb_emit_arith(p, op, x3, y3, ft);

			lb_emit_store(p, lb_emit_struct_ep(p, res.addr, 0), z0);
			lb_emit_store(p, lb_emit_struct_ep(p, res.addr, 1), z1);
			lb_emit_store(p, lb_emit_struct_ep(p, res.addr, 2), z2);
			lb_emit_store(p, lb_emit_struct_ep(p, res.addr, 3), z3);

			return lb_addr_load(p, res);
		} else if (op == Token_Mul) {
			// Quaternion multiplication is delegated to the runtime.
			auto args = array_make<lbValue>(permanent_allocator(), 2);
			args[0] = lhs;
			args[1] = rhs;

			switch (8*type_size_of(ft)) {
			case 32: return lb_emit_runtime_call(p, "mul_quaternion128", args);
			case 64: return lb_emit_runtime_call(p, "mul_quaternion256", args);
			default: GB_PANIC("Unknown float type"); break;
			}
		} else if (op == Token_Quo) {
			// Quaternion division is delegated to the runtime.
			auto args = array_make<lbValue>(permanent_allocator(), 2);
			args[0] = lhs;
			args[1] = rhs;

			switch (8*type_size_of(ft)) {
			case 32: return lb_emit_runtime_call(p, "quo_quaternion128", args);
			case 64: return lb_emit_runtime_call(p, "quo_quaternion256", args);
			default: GB_PANIC("Unknown float type"); break;
			}
		}
	}

	if (is_type_integer(type) && is_type_different_to_arch_endianness(type)) {
		switch (op) {
		case Token_AndNot:
		case Token_And:
		case Token_Or:
		case Token_Xor:
			// Pure bitwise ops are endianness-agnostic; no byte swap needed.
			goto handle_op;
		}

		// Byte-swap to native endianness, compute, then swap back.
		Type *platform_type = integer_endian_type_to_platform_type(type);
		lbValue x = lb_emit_byte_swap(p, lhs, integer_endian_type_to_platform_type(lhs.type));
		lbValue y = lb_emit_byte_swap(p, rhs, integer_endian_type_to_platform_type(rhs.type));

		lbValue res = lb_emit_arith(p, op, x, y, platform_type);

		return lb_emit_byte_swap(p, res, type);
	}

	if (is_type_float(type) && is_type_different_to_arch_endianness(type)) {
		// Same normalization for non-native-endian floats, via conversion.
		Type *platform_type = integer_endian_type_to_platform_type(type);
		lbValue x = lb_emit_conv(p, lhs, integer_endian_type_to_platform_type(lhs.type));
		lbValue y = lb_emit_conv(p, rhs, integer_endian_type_to_platform_type(rhs.type));

		lbValue res = lb_emit_arith(p, op, x, y, platform_type);

		return lb_emit_byte_swap(p, res, type);
	}

handle_op:
	lhs = lb_emit_conv(p, lhs, type);
	rhs = lb_emit_conv(p, rhs, type);

	lbValue res = {};
	res.type = type;

	// NOTE(bill): Bit Set Aliases for + and -
	if (is_type_bit_set(type)) {
		switch (op) {
		case Token_Add: op = Token_Or; break;
		case Token_Sub: op = Token_AndNot; break;
		}
	}

	// For #simd vectors, classify by the element type.
	Type *integral_type = type;
	if (is_type_simd_vector(integral_type)) {
		integral_type = core_array_type(integral_type);
	}

	switch (op) {
	case Token_Add:
		if (is_type_float(integral_type)) {
			res.value = LLVMBuildFAdd(p->builder, lhs.value, rhs.value, "");
			return res;
		}
		res.value = LLVMBuildAdd(p->builder, lhs.value, rhs.value, "");
		return res;
	case Token_Sub:
		if (is_type_float(integral_type)) {
			res.value = LLVMBuildFSub(p->builder, lhs.value, rhs.value, "");
			return res;
		}
		res.value = LLVMBuildSub(p->builder, lhs.value, rhs.value, "");
		return res;
	case Token_Mul:
		if (is_type_float(integral_type)) {
			res.value = LLVMBuildFMul(p->builder, lhs.value, rhs.value, "");
			return res;
		}
		res.value = LLVMBuildMul(p->builder, lhs.value, rhs.value, "");
		return res;
	case Token_Quo:
		if (is_type_float(integral_type)) {
			res.value = LLVMBuildFDiv(p->builder, lhs.value, rhs.value, "");
			return res;
		} else if (is_type_unsigned(integral_type)) {
			res.value = LLVMBuildUDiv(p->builder, lhs.value, rhs.value, "");
			return res;
		}
		res.value = LLVMBuildSDiv(p->builder, lhs.value, rhs.value, "");
		return res;
	case Token_Mod:
		if (is_type_float(integral_type)) {
			res.value = LLVMBuildFRem(p->builder, lhs.value, rhs.value, "");
			return res;
		} else if (is_type_unsigned(integral_type)) {
			res.value = LLVMBuildURem(p->builder, lhs.value, rhs.value, "");
			return res;
		}
		res.value = LLVMBuildSRem(p->builder, lhs.value, rhs.value, "");
		return res;
	case Token_ModMod:
		// Floored modulo: for signed operands, ((a % b) + b) % b keeps the
		// result's sign matching the divisor. Unsigned is a plain urem.
		if (is_type_unsigned(integral_type)) {
			res.value = LLVMBuildURem(p->builder, lhs.value, rhs.value, "");
			return res;
		} else {
			LLVMValueRef a = LLVMBuildSRem(p->builder, lhs.value, rhs.value, "");
			LLVMValueRef b = LLVMBuildAdd(p->builder, a, rhs.value, "");
			LLVMValueRef c = LLVMBuildSRem(p->builder, b, rhs.value, "");
			res.value = c;
			return res;
		}
	case Token_And:
		res.value = LLVMBuildAnd(p->builder, lhs.value, rhs.value, "");
		return res;
	case Token_Or:
		res.value = LLVMBuildOr(p->builder, lhs.value, rhs.value, "");
		return res;
	case Token_Xor:
		res.value = LLVMBuildXor(p->builder, lhs.value, rhs.value, "");
		return res;
	case Token_Shl:
		{
			rhs = lb_emit_conv(p, rhs, lhs.type);
			LLVMValueRef lhsval = lhs.value;
			LLVMValueRef bits = rhs.value;

			// Shifting by >= the bit width is poison in LLVM; select 0 in
			// that case so the language semantics stay well defined.
			LLVMValueRef bit_size = LLVMConstInt(lb_type(p->module, rhs.type), 8*type_size_of(lhs.type), false);

			LLVMValueRef width_test = LLVMBuildICmp(p->builder, LLVMIntULT, bits, bit_size, "");

			res.value = LLVMBuildShl(p->builder, lhsval, bits, "");
			LLVMValueRef zero = LLVMConstNull(lb_type(p->module, lhs.type));
			res.value = LLVMBuildSelect(p->builder, width_test, res.value, zero, "");
			return res;
		}
	case Token_Shr:
		{
			rhs = lb_emit_conv(p, rhs, lhs.type);
			LLVMValueRef lhsval = lhs.value;
			LLVMValueRef bits = rhs.value;

			bool is_unsigned = is_type_unsigned(integral_type);

			// Same over-shift guard as Token_Shl; logical vs arithmetic
			// shift is chosen by the operand's signedness.
			LLVMValueRef bit_size = LLVMConstInt(lb_type(p->module, rhs.type), 8*type_size_of(lhs.type), false);

			LLVMValueRef width_test = LLVMBuildICmp(p->builder, LLVMIntULT, bits, bit_size, "");

			if (is_unsigned) {
				res.value = LLVMBuildLShr(p->builder, lhsval, bits, "");
			} else {
				res.value = LLVMBuildAShr(p->builder, lhsval, bits, "");
			}

			LLVMValueRef zero = LLVMConstNull(lb_type(p->module, lhs.type));
			res.value = LLVMBuildSelect(p->builder, width_test, res.value, zero, "");
			return res;
		}
	case Token_AndNot:
		{
			// a &~ b  ==  a & ^b
			LLVMValueRef new_rhs = LLVMBuildNot(p->builder, rhs.value, "");
			res.value = LLVMBuildAnd(p->builder, lhs.value, new_rhs, "");
			return res;
		}
		break;
	}

	GB_PANIC("unhandled operator of lb_emit_arith");

	return {};
}
// Lower a binary expression AST node into an lbValue.
// Matrix operands are routed to lb_emit_arith_matrix first; otherwise the
// operator kind selects the lowering strategy:
//   - arithmetic/bitwise ops -> lb_emit_arith
//   - shifts                 -> lb_emit_arith, with an untyped-constant RHS
//                               converted directly to the result type
//   - comparisons            -> lb_emit_comp (with fast paths for comparison
//                               against untyped nil)
//   - && / ||                -> lb_emit_logical_binary_expr (short circuit)
//   - in / not_in            -> membership test on a map or bit_set
lbValue lb_build_binary_expr(lbProcedure *p, Ast *expr) {
	ast_node(be, BinaryExpr, expr);
	TypeAndValue tv = type_and_value_of_expr(expr);
	// Matrix arithmetic has its own dedicated path.
	if (is_type_matrix(be->left->tav.type) || is_type_matrix(be->right->tav.type)) {
		lbValue left = lb_build_expr(p, be->left);
		lbValue right = lb_build_expr(p, be->right);
		return lb_emit_arith_matrix(p, be->op.kind, left, right, default_type(tv.type));
	}
	switch (be->op.kind) {
	case Token_Add:
	case Token_Sub:
	case Token_Mul:
	case Token_Quo:
	case Token_Mod:
	case Token_ModMod:
	case Token_And:
	case Token_Or:
	case Token_Xor:
	case Token_AndNot: {
		Type *type = default_type(tv.type);
		lbValue left = lb_build_expr(p, be->left);
		lbValue right = lb_build_expr(p, be->right);
		return lb_emit_arith(p, be->op.kind, left, right, type);
	}
	case Token_Shl:
	case Token_Shr: {
		lbValue left, right;
		Type *type = default_type(tv.type);
		left = lb_build_expr(p, be->left);
		if (lb_is_expr_untyped_const(be->right)) {
			// NOTE(bill): RHS shift operands can still be untyped
			// Just bypass the standard lb_build_expr
			right = lb_expr_untyped_const_to_typed(p->module, be->right, type);
		} else {
			right = lb_build_expr(p, be->right);
		}
		return lb_emit_arith(p, be->op.kind, left, right, type);
	}
	case Token_CmpEq:
	case Token_NotEq:
		// ==/!= against an untyped nil literal: only the non-nil operand is
		// evaluated, and a dedicated nil-comparison emitter is used.
		if (is_type_untyped_nil(be->right->tav.type)) {
			lbValue left = lb_build_expr(p, be->left);
			lbValue cmp = lb_emit_comp_against_nil(p, be->op.kind, left);
			Type *type = default_type(tv.type);
			return lb_emit_conv(p, cmp, type);
		} else if (is_type_untyped_nil(be->left->tav.type)) {
			lbValue right = lb_build_expr(p, be->right);
			lbValue cmp = lb_emit_comp_against_nil(p, be->op.kind, right);
			Type *type = default_type(tv.type);
			return lb_emit_conv(p, cmp, type);
		}
		/*fallthrough*/
	case Token_Lt:
	case Token_LtEq:
	case Token_Gt:
	case Token_GtEq:
	{
		lbValue left = {};
		lbValue right = {};
		// An operand that names a type (Addressing_Type) is compared by its
		// typeid value rather than being evaluated as an expression.
		if (be->left->tav.mode == Addressing_Type) {
			left = lb_typeid(p->module, be->left->tav.type);
		}
		if (be->right->tav.mode == Addressing_Type) {
			right = lb_typeid(p->module, be->right->tav.type);
		}
		if (left.value == nullptr) left = lb_build_expr(p, be->left);
		if (right.value == nullptr) right = lb_build_expr(p, be->right);
		lbValue cmp = lb_emit_comp(p, be->op.kind, left, right);
		Type *type = default_type(tv.type);
		return lb_emit_conv(p, cmp, type);
	}
	case Token_CmpAnd:
	case Token_CmpOr:
		// Short-circuit evaluation is emitted as control flow elsewhere.
		return lb_emit_logical_binary_expr(p, be->op.kind, be->left, be->right, tv.type);
	case Token_in:
	case Token_not_in:
	{
		lbValue left = lb_build_expr(p, be->left);
		lbValue right = lb_build_expr(p, be->right);
		Type *rt = base_type(right.type);
		// Auto-dereference a pointer to the container.
		if (is_type_pointer(rt)) {
			right = lb_emit_load(p, right);
			rt = base_type(type_deref(rt));
		}
		switch (rt->kind) {
		case Type_Map:
			{
				// Map membership: perform the lookup and compare the returned
				// element pointer against nil.
				lbValue addr = lb_address_from_load_or_generate_local(p, right);
				lbValue h = lb_gen_map_header(p, addr, rt);
				lbValue key = lb_gen_map_hash(p, left, rt->Map.key);
				auto args = array_make<lbValue>(permanent_allocator(), 2);
				args[0] = h;
				args[1] = key;
				lbValue ptr = lb_emit_runtime_call(p, "__dynamic_map_get", args);
				if (be->op.kind == Token_in) {
					return lb_emit_conv(p, lb_emit_comp_against_nil(p, Token_NotEq, ptr), t_bool);
				} else {
					return lb_emit_conv(p, lb_emit_comp_against_nil(p, Token_CmpEq, ptr), t_bool);
				}
			}
			break;
		case Type_BitSet:
			{
				// Bit_set membership: test the bit `1 << (elem - lower)` in the
				// set's underlying integer representation.
				Type *key_type = rt->BitSet.elem;
				GB_ASSERT(are_types_identical(left.type, key_type));
				Type *it = bit_set_to_int(rt);
				left = lb_emit_conv(p, left, it);
				lbValue lower = lb_const_value(p->module, it, exact_value_i64(rt->BitSet.lower));
				lbValue key = lb_emit_arith(p, Token_Sub, left, lower, it);
				lbValue bit = lb_emit_arith(p, Token_Shl, lb_const_int(p->module, it, 1), key, it);
				bit = lb_emit_conv(p, bit, it);
				lbValue old_value = lb_emit_transmute(p, right, it);
				lbValue new_value = lb_emit_arith(p, Token_And, old_value, bit, it);
				if (be->op.kind == Token_in) {
					return lb_emit_conv(p, lb_emit_comp(p, Token_NotEq, new_value, lb_const_int(p->module, new_value.type, 0)), t_bool);
				} else {
					return lb_emit_conv(p, lb_emit_comp(p, Token_CmpEq, new_value, lb_const_int(p->module, new_value.type, 0)), t_bool);
				}
			}
			break;
		default:
			GB_PANIC("Invalid 'in' type");
		}
		break;
	}
	break;
	default:
		GB_PANIC("Invalid binary expression");
		break;
	}
	return {};
}
// Emit the conversion of `value` to type `t` and return the converted value.
// This is the central value-conversion routine of the backend. Conversion
// rules are tried in a fixed order and dispatch on the *core* types of source
// and destination; they cover: bool <-> llvm i1, integer/float resizing
// (including byte swaps for non-platform-endian types), complex/quaternion
// widening, pointer/uintptr/multi-pointer/proc casts, string <-> []u8 and
// cstring conversions, union variant wrapping, subtype-polymorphism (`using`)
// up-casts, array/matrix splats, `any` boxing, and same-size bit_set/typeid
// reinterpretation. Panics if no rule applies (the checker should have
// rejected such a conversion before codegen).
lbValue lb_emit_conv(lbProcedure *p, lbValue value, Type *t) {
	lbModule *m = p->module;
	t = reduce_tuple_to_single_type(t);
	Type *src_type = value.type;
	if (are_types_identical(t, src_type)) {
		// No-op conversion.
		return value;
	}
	// Dispatch on core (named/distinct-stripped) types.
	Type *src = core_type(src_type);
	Type *dst = core_type(t);
	GB_ASSERT(src != nullptr);
	GB_ASSERT(dst != nullptr);
	if (is_type_untyped_nil(src)) {
		return lb_const_nil(m, t);
	}
	if (is_type_untyped_undef(src)) {
		return lb_const_undef(m, t);
	}
	if (LLVMIsConstant(value.value)) {
		// Constant-operand fast paths.
		if (is_type_any(dst)) {
			// Box the constant into an `any`: spill it to a generated local,
			// then store {data pointer, typeid} into a generated `any` local.
			Type *st = default_type(src_type);
			lbAddr default_value = lb_add_local_generated(p, st, false);
			lb_addr_store(p, default_value, value);
			lbValue data = lb_emit_conv(p, default_value.addr, t_rawptr);
			lbValue id = lb_typeid(m, st);
			lbAddr res = lb_add_local_generated(p, t, false);
			lbValue a0 = lb_emit_struct_ep(p, res.addr, 0);
			lbValue a1 = lb_emit_struct_ep(p, res.addr, 1);
			lb_emit_store(p, a0, data);
			lb_emit_store(p, a1, id);
			return lb_addr_load(p, res);
		} else if (dst->kind == Type_Basic) {
			// Constant string -> cstring: materialize a NUL-terminated global.
			if (src->Basic.kind == Basic_string && dst->Basic.kind == Basic_cstring) {
				String str = lb_get_const_string(m, value);
				lbValue res = {};
				res.type = t;
				res.value = llvm_cstring(m, str);
				return res;
			}
			// if (is_type_float(dst)) {
			// 	return value;
			// } else if (is_type_integer(dst)) {
			// 	return value;
			// }
			// ExactValue ev = value->Constant.value;
			// if (is_type_float(dst)) {
			// 	ev = exact_value_to_float(ev);
			// } else if (is_type_complex(dst)) {
			// 	ev = exact_value_to_complex(ev);
			// } else if (is_type_quaternion(dst)) {
			// 	ev = exact_value_to_quaternion(ev);
			// } else if (is_type_string(dst)) {
			// 	// Handled elsewhere
			// 	GB_ASSERT_MSG(ev.kind == ExactValue_String, "%d", ev.kind);
			// } else if (is_type_integer(dst)) {
			// 	ev = exact_value_to_integer(ev);
			// } else if (is_type_pointer(dst)) {
			// 	// IMPORTANT NOTE(bill): LLVM doesn't support pointer constants expect 'null'
			// 	lbValue i = lb_add_module_constant(p->module, t_uintptr, ev);
			// 	return lb_emit(p, lb_instr_conv(p, irConv_inttoptr, i, t_uintptr, dst));
			// }
			// return lb_const_value(p->module, t, ev);
		}
	}
	if (are_types_identical(src, dst)) {
		// Same core type but different named types: a bit-preserving
		// transmute is sufficient.
		if (!are_types_identical(src_type, t)) {
			return lb_emit_transmute(p, value, t);
		}
		return value;
	}
	// bool <-> llvm bool (i1)
	if (is_type_boolean(src) && dst == t_llvm_bool) {
		lbValue res = {};
		res.value = LLVMBuildTrunc(p->builder, value.value, lb_type(m, dst), "");
		res.type = dst;
		return res;
	}
	if (src == t_llvm_bool && is_type_boolean(dst)) {
		lbValue res = {};
		res.value = LLVMBuildZExt(p->builder, value.value, lb_type(m, dst), "");
		res.type = dst;
		return res;
	}
	// integer -> integer
	if (is_type_integer(src) && is_type_integer(dst)) {
		GB_ASSERT(src->kind == Type_Basic &&
		          dst->kind == Type_Basic);
		i64 sz = type_size_of(default_type(src));
		i64 dz = type_size_of(default_type(dst));
		if (sz == dz) {
			// Same width: either a pure byte swap (endianness change) or a
			// no-op re-type.
			if (dz > 1 && !types_have_same_internal_endian(src, dst)) {
				return lb_emit_byte_swap(p, value, t);
			}
			lbValue res = {};
			res.value = value.value;
			res.type = t;
			return res;
		}
		// Normalize a non-platform-endian source to platform byte order
		// before resizing.
		if (sz > 1 && is_type_different_to_arch_endianness(src)) {
			Type *platform_src_type = integer_endian_type_to_platform_type(src);
			value = lb_emit_byte_swap(p, value, platform_src_type);
		}
		LLVMOpcode op = LLVMTrunc;
		if (dz < sz) {
			op = LLVMTrunc;
		} else if (dz == sz) {
			// NOTE(bill): In LLVM, all integers are signed and rely upon 2's complement
			// NOTE(bill): Copy the value just for type correctness
			op = LLVMBitCast;
		} else if (dz > sz) {
			op = is_type_unsigned(src) ? LLVMZExt : LLVMSExt; // zero/sign extend by source signedness
		}
		if (dz > 1 && is_type_different_to_arch_endianness(dst)) {
			// Resize in platform order, then swap into the destination's
			// byte order.
			Type *platform_dst_type = integer_endian_type_to_platform_type(dst);
			lbValue res = {};
			res.value = LLVMBuildCast(p->builder, op, value.value, lb_type(m, platform_dst_type), "");
			res.type = t;
			return lb_emit_byte_swap(p, res, t);
		} else {
			lbValue res = {};
			res.value = LLVMBuildCast(p->builder, op, value.value, lb_type(m, t), "");
			res.type = t;
			return res;
		}
	}
	// boolean -> boolean/integer: normalize to 0/1 via `!= 0`, then cast.
	if (is_type_boolean(src) && (is_type_boolean(dst) || is_type_integer(dst))) {
		LLVMValueRef b = LLVMBuildICmp(p->builder, LLVMIntNE, value.value, LLVMConstNull(lb_type(m, value.type)), "");
		lbValue res = {};
		res.value = LLVMBuildIntCast2(p->builder, b, lb_type(m, t), false, "");
		res.type = t;
		return res;
	}
	// cstring <-> ^u8 / [^]u8 / rawptr: all pointer-shaped, so transmute.
	if (is_type_cstring(src) && is_type_u8_ptr(dst)) {
		return lb_emit_transmute(p, value, dst);
	}
	if (is_type_u8_ptr(src) && is_type_cstring(dst)) {
		return lb_emit_transmute(p, value, dst);
	}
	if (is_type_cstring(src) && is_type_u8_multi_ptr(dst)) {
		return lb_emit_transmute(p, value, dst);
	}
	if (is_type_u8_multi_ptr(src) && is_type_cstring(dst)) {
		return lb_emit_transmute(p, value, dst);
	}
	if (is_type_cstring(src) && is_type_rawptr(dst)) {
		return lb_emit_transmute(p, value, dst);
	}
	if (is_type_rawptr(src) && is_type_cstring(dst)) {
		return lb_emit_transmute(p, value, dst);
	}
	// cstring -> string: runtime call measures the length.
	if (are_types_identical(src, t_cstring) && are_types_identical(dst, t_string)) {
		lbValue c = lb_emit_conv(p, value, t_cstring);
		auto args = array_make<lbValue>(permanent_allocator(), 1);
		args[0] = c;
		lbValue s = lb_emit_runtime_call(p, "cstring_to_string", args);
		return lb_emit_conv(p, s, dst);
	}
	// integer -> boolean: `value != 0`.
	if (is_type_integer(src) && is_type_boolean(dst)) {
		lbValue res = {};
		res.value = LLVMBuildICmp(p->builder, LLVMIntNE, value.value, LLVMConstNull(lb_type(m, value.type)), "");
		res.type = t_llvm_bool;
		return lb_emit_conv(p, res, t);
	}
	// float -> float
	if (is_type_float(src) && is_type_float(dst)) {
		i64 sz = type_size_of(src);
		i64 dz = type_size_of(dst);
		if (dz == sz) {
			// Same width: identity or endianness swap only.
			if (types_have_same_internal_endian(src, dst)) {
				lbValue res = {};
				res.type = t;
				res.value = value.value;
				return res;
			} else {
				return lb_emit_byte_swap(p, value, t);
			}
		}
		if (is_type_different_to_arch_endianness(src) || is_type_different_to_arch_endianness(dst)) {
			// Convert via the platform-endian equivalents, swapping at the end
			// if the destination is non-platform-endian.
			Type *platform_src_type = integer_endian_type_to_platform_type(src);
			Type *platform_dst_type = integer_endian_type_to_platform_type(dst);
			lbValue res = {};
			res = lb_emit_conv(p, value, platform_src_type);
			res = lb_emit_conv(p, res, platform_dst_type);
			if (is_type_different_to_arch_endianness(dst)) {
				res = lb_emit_byte_swap(p, res, t);
			}
			return lb_emit_conv(p, res, t);
		}
		lbValue res = {};
		res.type = t;
		if (dz >= sz) {
			res.value = LLVMBuildFPExt(p->builder, value.value, lb_type(m, t), "");
		} else {
			res.value = LLVMBuildFPTrunc(p->builder, value.value, lb_type(m, t), "");
		}
		return res;
	}
	// complex -> complex: convert each element to the new element type.
	if (is_type_complex(src) && is_type_complex(dst)) {
		Type *ft = base_complex_elem_type(dst);
		lbAddr gen = lb_add_local_generated(p, t, false);
		lbValue gp = lb_addr_get_ptr(p, gen);
		lbValue real = lb_emit_conv(p, lb_emit_struct_ev(p, value, 0), ft);
		lbValue imag = lb_emit_conv(p, lb_emit_struct_ev(p, value, 1), ft);
		lb_emit_store(p, lb_emit_struct_ep(p, gp, 0), real);
		lb_emit_store(p, lb_emit_struct_ep(p, gp, 1), imag);
		return lb_addr_load(p, gen);
	}
	// quaternion -> quaternion: element-wise conversion of all four parts.
	if (is_type_quaternion(src) && is_type_quaternion(dst)) {
		// @QuaternionLayout
		Type *ft = base_complex_elem_type(dst);
		lbAddr gen = lb_add_local_generated(p, t, false);
		lbValue gp = lb_addr_get_ptr(p, gen);
		lbValue q0 = lb_emit_conv(p, lb_emit_struct_ev(p, value, 0), ft);
		lbValue q1 = lb_emit_conv(p, lb_emit_struct_ev(p, value, 1), ft);
		lbValue q2 = lb_emit_conv(p, lb_emit_struct_ev(p, value, 2), ft);
		lbValue q3 = lb_emit_conv(p, lb_emit_struct_ev(p, value, 3), ft);
		lb_emit_store(p, lb_emit_struct_ep(p, gp, 0), q0);
		lb_emit_store(p, lb_emit_struct_ep(p, gp, 1), q1);
		lb_emit_store(p, lb_emit_struct_ep(p, gp, 2), q2);
		lb_emit_store(p, lb_emit_struct_ep(p, gp, 3), q3);
		return lb_addr_load(p, gen);
	}
	// scalar -> complex: real part set, imaginary part zeroed (zeroed local).
	if (is_type_integer(src) && is_type_complex(dst)) {
		Type *ft = base_complex_elem_type(dst);
		lbAddr gen = lb_add_local_generated(p, t, true);
		lbValue gp = lb_addr_get_ptr(p, gen);
		lbValue real = lb_emit_conv(p, value, ft);
		lb_emit_store(p, lb_emit_struct_ep(p, gp, 0), real);
		return lb_addr_load(p, gen);
	}
	if (is_type_float(src) && is_type_complex(dst)) {
		Type *ft = base_complex_elem_type(dst);
		lbAddr gen = lb_add_local_generated(p, t, true);
		lbValue gp = lb_addr_get_ptr(p, gen);
		lbValue real = lb_emit_conv(p, value, ft);
		lb_emit_store(p, lb_emit_struct_ep(p, gp, 0), real);
		return lb_addr_load(p, gen);
	}
	// scalar -> quaternion: the real (w) component lives at index 3.
	if (is_type_integer(src) && is_type_quaternion(dst)) {
		Type *ft = base_complex_elem_type(dst);
		lbAddr gen = lb_add_local_generated(p, t, true);
		lbValue gp = lb_addr_get_ptr(p, gen);
		lbValue real = lb_emit_conv(p, value, ft);
		// @QuaternionLayout
		lb_emit_store(p, lb_emit_struct_ep(p, gp, 3), real);
		return lb_addr_load(p, gen);
	}
	if (is_type_float(src) && is_type_quaternion(dst)) {
		Type *ft = base_complex_elem_type(dst);
		lbAddr gen = lb_add_local_generated(p, t, true);
		lbValue gp = lb_addr_get_ptr(p, gen);
		lbValue real = lb_emit_conv(p, value, ft);
		// @QuaternionLayout
		lb_emit_store(p, lb_emit_struct_ep(p, gp, 3), real);
		return lb_addr_load(p, gen);
	}
	// complex -> quaternion: w <- real, x <- imag; y/z stay zero.
	if (is_type_complex(src) && is_type_quaternion(dst)) {
		Type *ft = base_complex_elem_type(dst);
		lbAddr gen = lb_add_local_generated(p, t, true);
		lbValue gp = lb_addr_get_ptr(p, gen);
		lbValue real = lb_emit_conv(p, lb_emit_struct_ev(p, value, 0), ft);
		lbValue imag = lb_emit_conv(p, lb_emit_struct_ev(p, value, 1), ft);
		// @QuaternionLayout
		lb_emit_store(p, lb_emit_struct_ep(p, gp, 3), real);
		lb_emit_store(p, lb_emit_struct_ep(p, gp, 0), imag);
		return lb_addr_load(p, gen);
	}
	// float <-> integer
	if (is_type_float(src) && is_type_integer(dst)) {
		if (is_type_different_to_arch_endianness(src) || is_type_different_to_arch_endianness(dst)) {
			// Route through platform-endian equivalents.
			Type *platform_src_type = integer_endian_type_to_platform_type(src);
			Type *platform_dst_type = integer_endian_type_to_platform_type(dst);
			lbValue res = {};
			res = lb_emit_conv(p, value, platform_src_type);
			res = lb_emit_conv(p, res, platform_dst_type);
			if (is_type_different_to_arch_endianness(dst)) {
				res = lb_emit_byte_swap(p, res, platform_dst_type);
			}
			return lb_emit_conv(p, res, t);
		}
		if (is_type_integer_128bit(dst)) {
			// 128-bit destinations go through a compiler-rt style runtime call.
			auto args = array_make<lbValue>(temporary_allocator(), 1);
			args[0] = value;
			char const *call = "fixunsdfdi";
			// NOTE(review): the default name "fixunsdfdi" (unsigned, 64-bit
			// result) is used for the signed 128-bit case and "fixunsdfti"
			// for unsigned — presumably the signed path should call a signed
			// 128-bit conversion; verify against the runtime's exported names.
			if (is_type_unsigned(dst)) {
				call = "fixunsdfti";
			}
			lbValue res_i128 = lb_emit_runtime_call(p, call, args);
			return lb_emit_conv(p, res_i128, t);
		}
		lbValue res = {};
		res.type = t;
		if (is_type_unsigned(dst)) {
			res.value = LLVMBuildFPToUI(p->builder, value.value, lb_type(m, t), "");
		} else {
			res.value = LLVMBuildFPToSI(p->builder, value.value, lb_type(m, t), "");
		}
		return res;
	}
	if (is_type_integer(src) && is_type_float(dst)) {
		if (is_type_different_to_arch_endianness(src) || is_type_different_to_arch_endianness(dst)) {
			// Route through platform-endian equivalents.
			Type *platform_src_type = integer_endian_type_to_platform_type(src);
			Type *platform_dst_type = integer_endian_type_to_platform_type(dst);
			lbValue res = {};
			res = lb_emit_conv(p, value, platform_src_type);
			res = lb_emit_conv(p, res, platform_dst_type);
			if (is_type_different_to_arch_endianness(dst)) {
				res = lb_emit_byte_swap(p, res, t);
			}
			return lb_emit_conv(p, res, t);
		}
		if (is_type_integer_128bit(src)) {
			// 128-bit sources use runtime conversion helpers.
			auto args = array_make<lbValue>(temporary_allocator(), 1);
			args[0] = value;
			char const *call = "floattidf";
			if (is_type_unsigned(src)) {
				call = "floattidf_unsigned";
			}
			lbValue res_f64 = lb_emit_runtime_call(p, call, args);
			return lb_emit_conv(p, res_f64, t);
		}
		lbValue res = {};
		res.type = t;
		if (is_type_unsigned(src)) {
			res.value = LLVMBuildUIToFP(p->builder, value.value, lb_type(m, t), "");
		} else {
			res.value = LLVMBuildSIToFP(p->builder, value.value, lb_type(m, t), "");
		}
		return res;
	}
	// Pointer <-> uintptr
	if (is_type_pointer(src) && is_type_uintptr(dst)) {
		lbValue res = {};
		res.type = t;
		res.value = LLVMBuildPtrToInt(p->builder, value.value, lb_type(m, t), "");
		return res;
	}
	if (is_type_uintptr(src) && is_type_pointer(dst)) {
		lbValue res = {};
		res.type = t;
		res.value = LLVMBuildIntToPtr(p->builder, value.value, lb_type(m, t), "");
		return res;
	}
	if (is_type_multi_pointer(src) && is_type_uintptr(dst)) {
		lbValue res = {};
		res.type = t;
		res.value = LLVMBuildPtrToInt(p->builder, value.value, lb_type(m, t), "");
		return res;
	}
	if (is_type_uintptr(src) && is_type_multi_pointer(dst)) {
		lbValue res = {};
		res.type = t;
		res.value = LLVMBuildIntToPtr(p->builder, value.value, lb_type(m, t), "");
		return res;
	}
	// value -> union containing that exact variant type: wrap it.
	if (is_type_union(dst)) {
		for_array(i, dst->Union.variants) {
			Type *vt = dst->Union.variants[i];
			if (are_types_identical(vt, src_type)) {
				lbAddr parent = lb_add_local_generated(p, t, true);
				lb_emit_store_union_variant(p, parent.addr, value, vt);
				return lb_addr_load(p, parent);
			}
		}
	}
	// NOTE(bill): This has to be done before 'Pointer <-> Pointer' as it's
	// subtype polymorphism casting
	if (check_is_assignable_to_using_subtype(src_type, t)) {
		// `using` subtype up-cast: select the embedded field (or its address)
		// that has the destination type.
		Type *st = type_deref(src_type);
		st = type_deref(st);
		bool st_is_ptr = is_type_pointer(src_type);
		st = base_type(st);
		Type *dt = t;
		GB_ASSERT(is_type_struct(st) || is_type_raw_union(st));
		String field_name = lookup_subtype_polymorphic_field(t, src_type);
		if (field_name.len > 0) {
			// NOTE(bill): It can be casted
			Selection sel = lookup_field(st, field_name, false, true);
			if (sel.entity != nullptr) {
				if (st_is_ptr) {
					lbValue res = lb_emit_deep_field_gep(p, value, sel);
					Type *rt = res.type;
					if (!are_types_identical(rt, dt) && are_types_identical(type_deref(rt), dt)) {
						res = lb_emit_load(p, res);
					}
					return res;
				} else {
					if (is_type_pointer(value.type)) {
						Type *rt = value.type;
						if (!are_types_identical(rt, dt) && are_types_identical(type_deref(rt), dt)) {
							value = lb_emit_load(p, value);
						} else {
							value = lb_emit_deep_field_gep(p, value, sel);
							return lb_emit_load(p, value);
						}
					}
					return lb_emit_deep_field_ev(p, value, sel);
				}
			} else {
				GB_PANIC("invalid subtype cast %s.%.*s", type_to_string(src_type), LIT(field_name));
			}
		}
	}
	// Pointer <-> Pointer (including multi-pointer combinations): bitcast.
	if (is_type_pointer(src) && is_type_pointer(dst)) {
		lbValue res = {};
		res.type = t;
		res.value = LLVMBuildPointerCast(p->builder, value.value, lb_type(m, t), "");
		return res;
	}
	if (is_type_multi_pointer(src) && is_type_pointer(dst)) {
		lbValue res = {};
		res.type = t;
		res.value = LLVMBuildPointerCast(p->builder, value.value, lb_type(m, t), "");
		return res;
	}
	if (is_type_pointer(src) && is_type_multi_pointer(dst)) {
		lbValue res = {};
		res.type = t;
		res.value = LLVMBuildPointerCast(p->builder, value.value, lb_type(m, t), "");
		return res;
	}
	if (is_type_multi_pointer(src) && is_type_multi_pointer(dst)) {
		lbValue res = {};
		res.type = t;
		res.value = LLVMBuildPointerCast(p->builder, value.value, lb_type(m, t), "");
		return res;
	}
	// proc <-> proc
	if (is_type_proc(src) && is_type_proc(dst)) {
		lbValue res = {};
		res.type = t;
		res.value = LLVMBuildPointerCast(p->builder, value.value, lb_type(m, t), "");
		return res;
	}
	// pointer -> proc
	if (is_type_pointer(src) && is_type_proc(dst)) {
		lbValue res = {};
		res.type = t;
		res.value = LLVMBuildPointerCast(p->builder, value.value, lb_type(m, t), "");
		return res;
	}
	// proc -> pointer
	if (is_type_proc(src) && is_type_pointer(dst)) {
		lbValue res = {};
		res.type = t;
		res.value = LLVMBuildPointerCast(p->builder, value.value, lb_type(m, t), "");
		return res;
	}
	// []byte/[]u8 <-> string: identical layout, so transmute.
	if (is_type_u8_slice(src) && is_type_string(dst)) {
		return lb_emit_transmute(p, value, t);
	}
	if (is_type_string(src) && is_type_u8_slice(dst)) {
		return lb_emit_transmute(p, value, t);
	}
	// scalar -> array-like: splat the converted element into every slot.
	if (is_type_array_like(dst)) {
		Type *elem = base_array_type(dst);
		lbValue e = lb_emit_conv(p, value, elem);
		// NOTE(bill): Doesn't need to be zero because it will be initialized in the loops
		lbAddr v = lb_add_local_generated(p, t, false);
		isize index_count = cast(isize)get_array_type_count(dst);
		for (isize i = 0; i < index_count; i++) {
			lbValue elem = lb_emit_array_epi(p, v.addr, i);
			lb_emit_store(p, elem, e);
		}
		return lb_addr_load(p, v);
	}
	// scalar -> square matrix: place the converted element on the diagonal.
	if (is_type_matrix(dst) && !is_type_matrix(src)) {
		GB_ASSERT_MSG(dst->Matrix.row_count == dst->Matrix.column_count, "%s <- %s", type_to_string(dst), type_to_string(src));
		Type *elem = base_array_type(dst);
		lbValue e = lb_emit_conv(p, value, elem);
		lbAddr v = lb_add_local_generated(p, t, false);
		for (i64 i = 0; i < dst->Matrix.row_count; i++) {
			isize j = cast(isize)i;
			lbValue ptr = lb_emit_matrix_epi(p, v.addr, j, j);
			lb_emit_store(p, ptr, e);
		}
		return lb_addr_load(p, v);
	}
	// matrix -> matrix
	if (is_type_matrix(dst) && is_type_matrix(src)) {
		GB_ASSERT(dst->kind == Type_Matrix);
		GB_ASSERT(src->kind == Type_Matrix);
		lbAddr v = lb_add_local_generated(p, t, true);
		// NOTE(review): the condition below tests `dst` twice; the second
		// operand was presumably meant to be `is_matrix_square(src)` —
		// verify against upstream before relying on this branch selection.
		if (is_matrix_square(dst) && is_matrix_square(dst)) {
			// Square-to-square: copy overlapping cells, pad new diagonal
			// cells with 1 (identity extension).
			for (i64 j = 0; j < dst->Matrix.column_count; j++) {
				for (i64 i = 0; i < dst->Matrix.row_count; i++) {
					if (i < src->Matrix.row_count && j < src->Matrix.column_count) {
						lbValue d = lb_emit_matrix_epi(p, v.addr, i, j);
						lbValue s = lb_emit_matrix_ev(p, value, i, j);
						lb_emit_store(p, d, s);
					} else if (i == j) {
						lbValue d = lb_emit_matrix_epi(p, v.addr, i, j);
						lbValue s = lb_const_value(p->module, dst->Matrix.elem, exact_value_i64(1), true);
						lb_emit_store(p, d, s);
					}
				}
			}
		} else {
			// Reshape between matrices with the same total element count.
			i64 dst_count = dst->Matrix.row_count*dst->Matrix.column_count;
			i64 src_count = src->Matrix.row_count*src->Matrix.column_count;
			GB_ASSERT(dst_count == src_count);
			lbValue pdst = v.addr;
			lbValue psrc = lb_address_from_load_or_generate_local(p, value);
			bool same_elem_base_types = are_types_identical(
				base_type(dst->Matrix.elem),
				base_type(src->Matrix.elem)
			);
			if (same_elem_base_types && type_size_of(dst) == type_size_of(src)) {
				// Identical layout: plain memory copy.
				lb_mem_copy_overlapping(p, v.addr, psrc, lb_const_int(p->module, t_int, type_size_of(dst)));
			} else {
				// Element-wise copy in column-major order, converting each
				// element to the destination element type.
				for (i64 i = 0; i < src_count; i++) {
					lbValue dp = lb_emit_array_epi(p, v.addr, matrix_column_major_index_to_offset(dst, i));
					lbValue sp = lb_emit_array_epi(p, psrc, matrix_column_major_index_to_offset(src, i));
					lbValue s = lb_emit_load(p, sp);
					s = lb_emit_conv(p, s, dst->Matrix.elem);
					lb_emit_store(p, dp, s);
				}
			}
		}
		return lb_addr_load(p, v);
	}
	// value -> any: box as {rawptr data, typeid id}.
	if (is_type_any(dst)) {
		if (is_type_untyped_nil(src)) {
			return lb_const_nil(p->module, t);
		}
		if (is_type_untyped_undef(src)) {
			return lb_const_undef(p->module, t);
		}
		lbAddr result = lb_add_local_generated(p, t, true);
		Type *st = default_type(src_type);
		lbValue data = lb_address_from_load_or_generate_local(p, value);
		GB_ASSERT_MSG(is_type_pointer(data.type), "%s", type_to_string(data.type));
		GB_ASSERT_MSG(is_type_typed(st), "%s", type_to_string(st));
		data = lb_emit_conv(p, data, t_rawptr);
		lbValue id = lb_typeid(p->module, st);
		lbValue any_data = lb_emit_struct_ep(p, result.addr, 0);
		lbValue any_id = lb_emit_struct_ep(p, result.addr, 1);
		lb_emit_store(p, any_data, data);
		lb_emit_store(p, any_id, id);
		return lb_addr_load(p, result);
	}
	i64 src_sz = type_size_of(src);
	i64 dst_sz = type_size_of(dst);
	if (src_sz == dst_sz) {
		// Same-size reinterpretations.
		// bit_set <-> integer
		if (is_type_integer(src) && is_type_bit_set(dst)) {
			lbValue res = lb_emit_conv(p, value, bit_set_to_int(dst));
			res.type = dst;
			return res;
		}
		if (is_type_bit_set(src) && is_type_integer(dst)) {
			lbValue bs = value;
			bs.type = bit_set_to_int(src);
			return lb_emit_conv(p, bs, dst);
		}
		// typeid <-> integer
		if (is_type_integer(src) && is_type_typeid(dst)) {
			return lb_emit_transmute(p, value, dst);
		}
		if (is_type_typeid(src) && is_type_integer(dst)) {
			return lb_emit_transmute(p, value, dst);
		}
	}
	// Untyped string constant -> string: spill through a local to re-type it.
	if (is_type_untyped(src)) {
		if (is_type_string(src) && is_type_string(dst)) {
			lbAddr result = lb_add_local_generated(p, t, false);
			lb_addr_store(p, result, value);
			return lb_addr_load(p, result);
		}
	}
	// No rule matched: dump diagnostics and abort — the checker should have
	// rejected this conversion before reaching codegen.
	gb_printf_err("%.*s\n", LIT(p->name));
	gb_printf_err("lb_emit_conv: src -> dst\n");
	gb_printf_err("Not Identical %s != %s\n", type_to_string(src_type), type_to_string(t));
	gb_printf_err("Not Identical %s != %s\n", type_to_string(src), type_to_string(dst));
	gb_printf_err("Not Identical %p != %p\n", src_type, t);
	gb_printf_err("Not Identical %p != %p\n", src, dst);
	GB_PANIC("Invalid type conversion: '%s' to '%s' for procedure '%.*s'",
	         type_to_string(src_type), type_to_string(t),
	         LIT(p->name));
	return {};
}
  1757. lbValue lb_compare_records(lbProcedure *p, TokenKind op_kind, lbValue left, lbValue right, Type *type) {
  1758. GB_ASSERT((is_type_struct(type) || is_type_union(type)) && is_type_comparable(type));
  1759. lbValue left_ptr = lb_address_from_load_or_generate_local(p, left);
  1760. lbValue right_ptr = lb_address_from_load_or_generate_local(p, right);
  1761. lbValue res = {};
  1762. if (is_type_simple_compare(type)) {
  1763. // TODO(bill): Test to see if this is actually faster!!!!
  1764. auto args = array_make<lbValue>(permanent_allocator(), 3);
  1765. args[0] = lb_emit_conv(p, left_ptr, t_rawptr);
  1766. args[1] = lb_emit_conv(p, right_ptr, t_rawptr);
  1767. args[2] = lb_const_int(p->module, t_int, type_size_of(type));
  1768. res = lb_emit_runtime_call(p, "memory_equal", args);
  1769. } else {
  1770. lbValue value = lb_get_equal_proc_for_type(p->module, type);
  1771. auto args = array_make<lbValue>(permanent_allocator(), 2);
  1772. args[0] = lb_emit_conv(p, left_ptr, t_rawptr);
  1773. args[1] = lb_emit_conv(p, right_ptr, t_rawptr);
  1774. res = lb_emit_call(p, value, args);
  1775. }
  1776. if (op_kind == Token_NotEq) {
  1777. res = lb_emit_unary_arith(p, Token_Not, res, res.type);
  1778. }
  1779. return res;
  1780. }
  1781. lbValue lb_emit_comp(lbProcedure *p, TokenKind op_kind, lbValue left, lbValue right) {
  1782. Type *a = core_type(left.type);
  1783. Type *b = core_type(right.type);
  1784. GB_ASSERT(gb_is_between(op_kind, Token__ComparisonBegin+1, Token__ComparisonEnd-1));
  1785. lbValue nil_check = {};
  1786. if (is_type_untyped_nil(left.type)) {
  1787. nil_check = lb_emit_comp_against_nil(p, op_kind, right);
  1788. } else if (is_type_untyped_nil(right.type)) {
  1789. nil_check = lb_emit_comp_against_nil(p, op_kind, left);
  1790. }
  1791. if (nil_check.value != nullptr) {
  1792. return nil_check;
  1793. }
  1794. if (are_types_identical(a, b)) {
  1795. // NOTE(bill): No need for a conversion
  1796. } else if (lb_is_const(left) || lb_is_const_nil(left)) {
  1797. left = lb_emit_conv(p, left, right.type);
  1798. } else if (lb_is_const(right) || lb_is_const_nil(right)) {
  1799. right = lb_emit_conv(p, right, left.type);
  1800. } else {
  1801. Type *lt = left.type;
  1802. Type *rt = right.type;
  1803. lt = left.type;
  1804. rt = right.type;
  1805. i64 ls = type_size_of(lt);
  1806. i64 rs = type_size_of(rt);
  1807. // NOTE(bill): Quick heuristic, larger types are usually the target type
  1808. if (ls < rs) {
  1809. left = lb_emit_conv(p, left, rt);
  1810. } else if (ls > rs) {
  1811. right = lb_emit_conv(p, right, lt);
  1812. } else {
  1813. if (is_type_union(rt)) {
  1814. left = lb_emit_conv(p, left, rt);
  1815. } else {
  1816. right = lb_emit_conv(p, right, lt);
  1817. }
  1818. }
  1819. }
  1820. if (is_type_array(a) || is_type_enumerated_array(a)) {
  1821. Type *tl = base_type(a);
  1822. lbValue lhs = lb_address_from_load_or_generate_local(p, left);
  1823. lbValue rhs = lb_address_from_load_or_generate_local(p, right);
  1824. TokenKind cmp_op = Token_And;
  1825. lbValue res = lb_const_bool(p->module, t_llvm_bool, true);
  1826. if (op_kind == Token_NotEq) {
  1827. res = lb_const_bool(p->module, t_llvm_bool, false);
  1828. cmp_op = Token_Or;
  1829. } else if (op_kind == Token_CmpEq) {
  1830. res = lb_const_bool(p->module, t_llvm_bool, true);
  1831. cmp_op = Token_And;
  1832. }
  1833. bool inline_array_arith = type_size_of(tl) <= build_context.max_align;
  1834. i32 count = 0;
  1835. switch (tl->kind) {
  1836. case Type_Array: count = cast(i32)tl->Array.count; break;
  1837. case Type_EnumeratedArray: count = cast(i32)tl->EnumeratedArray.count; break;
  1838. }
  1839. if (inline_array_arith) {
  1840. // inline
  1841. lbAddr val = lb_add_local_generated(p, t_bool, false);
  1842. lb_addr_store(p, val, res);
  1843. for (i32 i = 0; i < count; i++) {
  1844. lbValue x = lb_emit_load(p, lb_emit_array_epi(p, lhs, i));
  1845. lbValue y = lb_emit_load(p, lb_emit_array_epi(p, rhs, i));
  1846. lbValue cmp = lb_emit_comp(p, op_kind, x, y);
  1847. lbValue new_res = lb_emit_arith(p, cmp_op, lb_addr_load(p, val), cmp, t_bool);
  1848. lb_addr_store(p, val, lb_emit_conv(p, new_res, t_bool));
  1849. }
  1850. return lb_addr_load(p, val);
  1851. } else {
  1852. if (is_type_simple_compare(tl) && (op_kind == Token_CmpEq || op_kind == Token_NotEq)) {
  1853. // TODO(bill): Test to see if this is actually faster!!!!
  1854. auto args = array_make<lbValue>(permanent_allocator(), 3);
  1855. args[0] = lb_emit_conv(p, lhs, t_rawptr);
  1856. args[1] = lb_emit_conv(p, rhs, t_rawptr);
  1857. args[2] = lb_const_int(p->module, t_int, type_size_of(tl));
  1858. lbValue val = lb_emit_runtime_call(p, "memory_compare", args);
  1859. lbValue res = lb_emit_comp(p, op_kind, val, lb_const_nil(p->module, val.type));
  1860. return lb_emit_conv(p, res, t_bool);
  1861. } else {
  1862. lbAddr val = lb_add_local_generated(p, t_bool, false);
  1863. lb_addr_store(p, val, res);
  1864. auto loop_data = lb_loop_start(p, count, t_i32);
  1865. {
  1866. lbValue i = loop_data.idx;
  1867. lbValue x = lb_emit_load(p, lb_emit_array_ep(p, lhs, i));
  1868. lbValue y = lb_emit_load(p, lb_emit_array_ep(p, rhs, i));
  1869. lbValue cmp = lb_emit_comp(p, op_kind, x, y);
  1870. lbValue new_res = lb_emit_arith(p, cmp_op, lb_addr_load(p, val), cmp, t_bool);
  1871. lb_addr_store(p, val, lb_emit_conv(p, new_res, t_bool));
  1872. }
  1873. lb_loop_end(p, loop_data);
  1874. return lb_addr_load(p, val);
  1875. }
  1876. }
  1877. }
  1878. if ((is_type_struct(a) || is_type_union(a)) && is_type_comparable(a)) {
  1879. return lb_compare_records(p, op_kind, left, right, a);
  1880. }
  1881. if ((is_type_struct(b) || is_type_union(b)) && is_type_comparable(b)) {
  1882. return lb_compare_records(p, op_kind, left, right, b);
  1883. }
  1884. if (is_type_string(a)) {
  1885. if (is_type_cstring(a)) {
  1886. left = lb_emit_conv(p, left, t_string);
  1887. right = lb_emit_conv(p, right, t_string);
  1888. }
  1889. char const *runtime_procedure = nullptr;
  1890. switch (op_kind) {
  1891. case Token_CmpEq: runtime_procedure = "string_eq"; break;
  1892. case Token_NotEq: runtime_procedure = "string_ne"; break;
  1893. case Token_Lt: runtime_procedure = "string_lt"; break;
  1894. case Token_Gt: runtime_procedure = "string_gt"; break;
  1895. case Token_LtEq: runtime_procedure = "string_le"; break;
  1896. case Token_GtEq: runtime_procedure = "string_gt"; break;
  1897. }
  1898. GB_ASSERT(runtime_procedure != nullptr);
  1899. auto args = array_make<lbValue>(permanent_allocator(), 2);
  1900. args[0] = left;
  1901. args[1] = right;
  1902. return lb_emit_runtime_call(p, runtime_procedure, args);
  1903. }
  1904. if (is_type_complex(a)) {
  1905. char const *runtime_procedure = "";
  1906. i64 sz = 8*type_size_of(a);
  1907. switch (sz) {
  1908. case 32:
  1909. switch (op_kind) {
  1910. case Token_CmpEq: runtime_procedure = "complex32_eq"; break;
  1911. case Token_NotEq: runtime_procedure = "complex32_ne"; break;
  1912. }
  1913. break;
  1914. case 64:
  1915. switch (op_kind) {
  1916. case Token_CmpEq: runtime_procedure = "complex64_eq"; break;
  1917. case Token_NotEq: runtime_procedure = "complex64_ne"; break;
  1918. }
  1919. break;
  1920. case 128:
  1921. switch (op_kind) {
  1922. case Token_CmpEq: runtime_procedure = "complex128_eq"; break;
  1923. case Token_NotEq: runtime_procedure = "complex128_ne"; break;
  1924. }
  1925. break;
  1926. }
  1927. GB_ASSERT(runtime_procedure != nullptr);
  1928. auto args = array_make<lbValue>(permanent_allocator(), 2);
  1929. args[0] = left;
  1930. args[1] = right;
  1931. return lb_emit_runtime_call(p, runtime_procedure, args);
  1932. }
  1933. if (is_type_quaternion(a)) {
  1934. char const *runtime_procedure = "";
  1935. i64 sz = 8*type_size_of(a);
  1936. switch (sz) {
  1937. case 64:
  1938. switch (op_kind) {
  1939. case Token_CmpEq: runtime_procedure = "quaternion64_eq"; break;
  1940. case Token_NotEq: runtime_procedure = "quaternion64_ne"; break;
  1941. }
  1942. break;
  1943. case 128:
  1944. switch (op_kind) {
  1945. case Token_CmpEq: runtime_procedure = "quaternion128_eq"; break;
  1946. case Token_NotEq: runtime_procedure = "quaternion128_ne"; break;
  1947. }
  1948. break;
  1949. case 256:
  1950. switch (op_kind) {
  1951. case Token_CmpEq: runtime_procedure = "quaternion256_eq"; break;
  1952. case Token_NotEq: runtime_procedure = "quaternion256_ne"; break;
  1953. }
  1954. break;
  1955. }
  1956. GB_ASSERT(runtime_procedure != nullptr);
  1957. auto args = array_make<lbValue>(permanent_allocator(), 2);
  1958. args[0] = left;
  1959. args[1] = right;
  1960. return lb_emit_runtime_call(p, runtime_procedure, args);
  1961. }
  1962. if (is_type_bit_set(a)) {
  1963. switch (op_kind) {
  1964. case Token_Lt:
  1965. case Token_LtEq:
  1966. case Token_Gt:
  1967. case Token_GtEq:
  1968. {
  1969. Type *it = bit_set_to_int(a);
  1970. lbValue lhs = lb_emit_transmute(p, left, it);
  1971. lbValue rhs = lb_emit_transmute(p, right, it);
  1972. lbValue res = lb_emit_arith(p, Token_And, lhs, rhs, it);
  1973. if (op_kind == Token_Lt || op_kind == Token_LtEq) {
  1974. // (lhs & rhs) == lhs
  1975. res.value = LLVMBuildICmp(p->builder, LLVMIntEQ, res.value, lhs.value, "");
  1976. res.type = t_llvm_bool;
  1977. } else if (op_kind == Token_Gt || op_kind == Token_GtEq) {
  1978. // (lhs & rhs) == rhs
  1979. res.value = LLVMBuildICmp(p->builder, LLVMIntEQ, res.value, rhs.value, "");
  1980. res.type = t_llvm_bool;
  1981. }
  1982. // NOTE(bill): Strict subsets
  1983. if (op_kind == Token_Lt || op_kind == Token_Gt) {
  1984. // res &~ (lhs == rhs)
  1985. lbValue eq = {};
  1986. eq.value = LLVMBuildICmp(p->builder, LLVMIntEQ, lhs.value, rhs.value, "");
  1987. eq.type = t_llvm_bool;
  1988. res = lb_emit_arith(p, Token_AndNot, res, eq, t_llvm_bool);
  1989. }
  1990. return res;
  1991. }
  1992. case Token_CmpEq:
  1993. case Token_NotEq:
  1994. {
  1995. LLVMIntPredicate pred = {};
  1996. switch (op_kind) {
  1997. case Token_CmpEq: pred = LLVMIntEQ; break;
  1998. case Token_NotEq: pred = LLVMIntNE; break;
  1999. }
  2000. lbValue res = {};
  2001. res.type = t_llvm_bool;
  2002. res.value = LLVMBuildICmp(p->builder, pred, left.value, right.value, "");
  2003. return res;
  2004. }
  2005. }
  2006. }
  2007. if (op_kind != Token_CmpEq && op_kind != Token_NotEq) {
  2008. Type *t = left.type;
  2009. if (is_type_integer(t) && is_type_different_to_arch_endianness(t)) {
  2010. Type *platform_type = integer_endian_type_to_platform_type(t);
  2011. lbValue x = lb_emit_byte_swap(p, left, platform_type);
  2012. lbValue y = lb_emit_byte_swap(p, right, platform_type);
  2013. left = x;
  2014. right = y;
  2015. } else if (is_type_float(t) && is_type_different_to_arch_endianness(t)) {
  2016. Type *platform_type = integer_endian_type_to_platform_type(t);
  2017. lbValue x = lb_emit_conv(p, left, platform_type);
  2018. lbValue y = lb_emit_conv(p, right, platform_type);
  2019. left = x;
  2020. right = y;
  2021. }
  2022. }
  2023. a = core_type(left.type);
  2024. b = core_type(right.type);
  2025. lbValue res = {};
  2026. res.type = t_llvm_bool;
  2027. if (is_type_integer(a) ||
  2028. is_type_boolean(a) ||
  2029. is_type_pointer(a) ||
  2030. is_type_multi_pointer(a) ||
  2031. is_type_proc(a) ||
  2032. is_type_enum(a)) {
  2033. LLVMIntPredicate pred = {};
  2034. if (is_type_unsigned(left.type)) {
  2035. switch (op_kind) {
  2036. case Token_Gt: pred = LLVMIntUGT; break;
  2037. case Token_GtEq: pred = LLVMIntUGE; break;
  2038. case Token_Lt: pred = LLVMIntULT; break;
  2039. case Token_LtEq: pred = LLVMIntULE; break;
  2040. }
  2041. } else {
  2042. switch (op_kind) {
  2043. case Token_Gt: pred = LLVMIntSGT; break;
  2044. case Token_GtEq: pred = LLVMIntSGE; break;
  2045. case Token_Lt: pred = LLVMIntSLT; break;
  2046. case Token_LtEq: pred = LLVMIntSLE; break;
  2047. }
  2048. }
  2049. switch (op_kind) {
  2050. case Token_CmpEq: pred = LLVMIntEQ; break;
  2051. case Token_NotEq: pred = LLVMIntNE; break;
  2052. }
  2053. LLVMValueRef lhs = left.value;
  2054. LLVMValueRef rhs = right.value;
  2055. if (LLVMTypeOf(lhs) != LLVMTypeOf(rhs)) {
  2056. if (lb_is_type_kind(LLVMTypeOf(lhs), LLVMPointerTypeKind)) {
  2057. rhs = LLVMBuildPointerCast(p->builder, rhs, LLVMTypeOf(lhs), "");
  2058. }
  2059. }
  2060. res.value = LLVMBuildICmp(p->builder, pred, lhs, rhs, "");
  2061. } else if (is_type_float(a)) {
  2062. LLVMRealPredicate pred = {};
  2063. switch (op_kind) {
  2064. case Token_CmpEq: pred = LLVMRealOEQ; break;
  2065. case Token_Gt: pred = LLVMRealOGT; break;
  2066. case Token_GtEq: pred = LLVMRealOGE; break;
  2067. case Token_Lt: pred = LLVMRealOLT; break;
  2068. case Token_LtEq: pred = LLVMRealOLE; break;
  2069. case Token_NotEq: pred = LLVMRealONE; break;
  2070. }
  2071. res.value = LLVMBuildFCmp(p->builder, pred, left.value, right.value, "");
  2072. } else if (is_type_typeid(a)) {
  2073. LLVMIntPredicate pred = {};
  2074. switch (op_kind) {
  2075. case Token_Gt: pred = LLVMIntUGT; break;
  2076. case Token_GtEq: pred = LLVMIntUGE; break;
  2077. case Token_Lt: pred = LLVMIntULT; break;
  2078. case Token_LtEq: pred = LLVMIntULE; break;
  2079. case Token_CmpEq: pred = LLVMIntEQ; break;
  2080. case Token_NotEq: pred = LLVMIntNE; break;
  2081. }
  2082. res.value = LLVMBuildICmp(p->builder, pred, left.value, right.value, "");
  2083. } else {
  2084. GB_PANIC("Unhandled comparison kind %s (%s) %.*s %s (%s)", type_to_string(left.type), type_to_string(base_type(left.type)), LIT(token_strings[op_kind]), type_to_string(right.type), type_to_string(base_type(right.type)));
  2085. }
  2086. return res;
  2087. }
  2088. lbValue lb_emit_comp_against_nil(lbProcedure *p, TokenKind op_kind, lbValue x) {
  2089. lbValue res = {};
  2090. res.type = t_llvm_bool;
  2091. Type *t = x.type;
  2092. Type *bt = base_type(t);
  2093. TypeKind type_kind = bt->kind;
  2094. switch (type_kind) {
  2095. case Type_Basic:
  2096. switch (bt->Basic.kind) {
  2097. case Basic_rawptr:
  2098. case Basic_cstring:
  2099. if (op_kind == Token_CmpEq) {
  2100. res.value = LLVMBuildIsNull(p->builder, x.value, "");
  2101. } else if (op_kind == Token_NotEq) {
  2102. res.value = LLVMBuildIsNotNull(p->builder, x.value, "");
  2103. }
  2104. return res;
  2105. case Basic_any:
  2106. {
  2107. // TODO(bill): is this correct behaviour for nil comparison for any?
  2108. lbValue data = lb_emit_struct_ev(p, x, 0);
  2109. lbValue ti = lb_emit_struct_ev(p, x, 1);
  2110. if (op_kind == Token_CmpEq) {
  2111. LLVMValueRef a = LLVMBuildIsNull(p->builder, data.value, "");
  2112. LLVMValueRef b = LLVMBuildIsNull(p->builder, ti.value, "");
  2113. res.value = LLVMBuildOr(p->builder, a, b, "");
  2114. return res;
  2115. } else if (op_kind == Token_NotEq) {
  2116. LLVMValueRef a = LLVMBuildIsNotNull(p->builder, data.value, "");
  2117. LLVMValueRef b = LLVMBuildIsNotNull(p->builder, ti.value, "");
  2118. res.value = LLVMBuildAnd(p->builder, a, b, "");
  2119. return res;
  2120. }
  2121. }
  2122. break;
  2123. case Basic_typeid:
  2124. lbValue invalid_typeid = lb_const_value(p->module, t_typeid, exact_value_i64(0));
  2125. return lb_emit_comp(p, op_kind, x, invalid_typeid);
  2126. }
  2127. break;
  2128. case Type_Enum:
  2129. case Type_Pointer:
  2130. case Type_MultiPointer:
  2131. case Type_Proc:
  2132. case Type_BitSet:
  2133. if (op_kind == Token_CmpEq) {
  2134. res.value = LLVMBuildIsNull(p->builder, x.value, "");
  2135. } else if (op_kind == Token_NotEq) {
  2136. res.value = LLVMBuildIsNotNull(p->builder, x.value, "");
  2137. }
  2138. return res;
  2139. case Type_Slice:
  2140. {
  2141. lbValue data = lb_emit_struct_ev(p, x, 0);
  2142. if (op_kind == Token_CmpEq) {
  2143. res.value = LLVMBuildIsNull(p->builder, data.value, "");
  2144. return res;
  2145. } else if (op_kind == Token_NotEq) {
  2146. res.value = LLVMBuildIsNotNull(p->builder, data.value, "");
  2147. return res;
  2148. }
  2149. }
  2150. break;
  2151. case Type_DynamicArray:
  2152. {
  2153. lbValue data = lb_emit_struct_ev(p, x, 0);
  2154. if (op_kind == Token_CmpEq) {
  2155. res.value = LLVMBuildIsNull(p->builder, data.value, "");
  2156. return res;
  2157. } else if (op_kind == Token_NotEq) {
  2158. res.value = LLVMBuildIsNotNull(p->builder, data.value, "");
  2159. return res;
  2160. }
  2161. }
  2162. break;
  2163. case Type_Map:
  2164. {
  2165. lbValue map_ptr = lb_address_from_load_or_generate_local(p, x);
  2166. unsigned indices[2] = {0, 0};
  2167. lbValue hashes_data = lb_emit_struct_ep(p, map_ptr, 0);
  2168. lbValue hashes_data_ptr_ptr = lb_emit_struct_ep(p, hashes_data, 0);
  2169. LLVMValueRef hashes_data_ptr = LLVMBuildLoad(p->builder, hashes_data_ptr_ptr.value, "");
  2170. if (op_kind == Token_CmpEq) {
  2171. res.value = LLVMBuildIsNull(p->builder, hashes_data_ptr, "");
  2172. return res;
  2173. } else {
  2174. res.value = LLVMBuildIsNotNull(p->builder, hashes_data_ptr, "");
  2175. return res;
  2176. }
  2177. }
  2178. break;
  2179. case Type_Union:
  2180. {
  2181. if (type_size_of(t) == 0) {
  2182. if (op_kind == Token_CmpEq) {
  2183. return lb_const_bool(p->module, t_llvm_bool, true);
  2184. } else if (op_kind == Token_NotEq) {
  2185. return lb_const_bool(p->module, t_llvm_bool, false);
  2186. }
  2187. } else if (is_type_union_maybe_pointer(t)) {
  2188. lbValue tag = lb_emit_transmute(p, x, t_rawptr);
  2189. return lb_emit_comp_against_nil(p, op_kind, tag);
  2190. } else {
  2191. lbValue tag = lb_emit_union_tag_value(p, x);
  2192. return lb_emit_comp(p, op_kind, tag, lb_zero(p->module, tag.type));
  2193. }
  2194. }
  2195. case Type_Struct:
  2196. if (is_type_soa_struct(t)) {
  2197. Type *bt = base_type(t);
  2198. if (bt->Struct.soa_kind == StructSoa_Slice) {
  2199. LLVMValueRef the_value = {};
  2200. if (bt->Struct.fields.count == 0) {
  2201. lbValue len = lb_soa_struct_len(p, x);
  2202. the_value = len.value;
  2203. } else {
  2204. lbValue first_field = lb_emit_struct_ev(p, x, 0);
  2205. the_value = first_field.value;
  2206. }
  2207. if (op_kind == Token_CmpEq) {
  2208. res.value = LLVMBuildIsNull(p->builder, the_value, "");
  2209. return res;
  2210. } else if (op_kind == Token_NotEq) {
  2211. res.value = LLVMBuildIsNotNull(p->builder, the_value, "");
  2212. return res;
  2213. }
  2214. } else if (bt->Struct.soa_kind == StructSoa_Dynamic) {
  2215. LLVMValueRef the_value = {};
  2216. if (bt->Struct.fields.count == 0) {
  2217. lbValue cap = lb_soa_struct_cap(p, x);
  2218. the_value = cap.value;
  2219. } else {
  2220. lbValue first_field = lb_emit_struct_ev(p, x, 0);
  2221. the_value = first_field.value;
  2222. }
  2223. if (op_kind == Token_CmpEq) {
  2224. res.value = LLVMBuildIsNull(p->builder, the_value, "");
  2225. return res;
  2226. } else if (op_kind == Token_NotEq) {
  2227. res.value = LLVMBuildIsNotNull(p->builder, the_value, "");
  2228. return res;
  2229. }
  2230. }
  2231. } else if (is_type_struct(t) && type_has_nil(t)) {
  2232. auto args = array_make<lbValue>(permanent_allocator(), 2);
  2233. lbValue lhs = lb_address_from_load_or_generate_local(p, x);
  2234. args[0] = lb_emit_conv(p, lhs, t_rawptr);
  2235. args[1] = lb_const_int(p->module, t_int, type_size_of(t));
  2236. lbValue val = lb_emit_runtime_call(p, "memory_compare_zero", args);
  2237. lbValue res = lb_emit_comp(p, op_kind, val, lb_const_int(p->module, t_int, 0));
  2238. return res;
  2239. }
  2240. break;
  2241. }
  2242. GB_PANIC("Unknown handled type: %s -> %s", type_to_string(t), type_to_string(bt));
  2243. return {};
  2244. }
  2245. lbValue lb_build_unary_and(lbProcedure *p, Ast *expr) {
  2246. ast_node(ue, UnaryExpr, expr);
  2247. auto tv = type_and_value_of_expr(expr);
  2248. Ast *ue_expr = unparen_expr(ue->expr);
  2249. if (ue_expr->kind == Ast_IndexExpr && tv.mode == Addressing_OptionalOkPtr && is_type_tuple(tv.type)) {
  2250. Type *tuple = tv.type;
  2251. Type *map_type = type_of_expr(ue_expr->IndexExpr.expr);
  2252. Type *ot = base_type(map_type);
  2253. Type *t = base_type(type_deref(ot));
  2254. bool deref = t != ot;
  2255. GB_ASSERT(t->kind == Type_Map);
  2256. ast_node(ie, IndexExpr, ue_expr);
  2257. lbValue map_val = lb_build_addr_ptr(p, ie->expr);
  2258. if (deref) {
  2259. map_val = lb_emit_load(p, map_val);
  2260. }
  2261. lbValue key = lb_build_expr(p, ie->index);
  2262. key = lb_emit_conv(p, key, t->Map.key);
  2263. lbAddr addr = lb_addr_map(map_val, key, t, alloc_type_pointer(t->Map.value));
  2264. lbValue ptr = lb_addr_get_ptr(p, addr);
  2265. lbValue ok = lb_emit_comp_against_nil(p, Token_NotEq, ptr);
  2266. ok = lb_emit_conv(p, ok, tuple->Tuple.variables[1]->type);
  2267. lbAddr res = lb_add_local_generated(p, tuple, false);
  2268. lbValue gep0 = lb_emit_struct_ep(p, res.addr, 0);
  2269. lbValue gep1 = lb_emit_struct_ep(p, res.addr, 1);
  2270. lb_emit_store(p, gep0, ptr);
  2271. lb_emit_store(p, gep1, ok);
  2272. return lb_addr_load(p, res);
  2273. } if (ue_expr->kind == Ast_CompoundLit) {
  2274. lbValue v = lb_build_expr(p, ue->expr);
  2275. Type *type = v.type;
  2276. lbAddr addr = {};
  2277. if (p->is_startup) {
  2278. addr = lb_add_global_generated(p->module, type, v);
  2279. } else {
  2280. addr = lb_add_local_generated(p, type, false);
  2281. }
  2282. lb_addr_store(p, addr, v);
  2283. return addr.addr;
  2284. } else if (ue_expr->kind == Ast_TypeAssertion) {
  2285. if (is_type_tuple(tv.type)) {
  2286. Type *tuple = tv.type;
  2287. Type *ptr_type = tuple->Tuple.variables[0]->type;
  2288. Type *ok_type = tuple->Tuple.variables[1]->type;
  2289. ast_node(ta, TypeAssertion, ue_expr);
  2290. TokenPos pos = ast_token(expr).pos;
  2291. Type *type = type_of_expr(ue_expr);
  2292. GB_ASSERT(!is_type_tuple(type));
  2293. lbValue e = lb_build_expr(p, ta->expr);
  2294. Type *t = type_deref(e.type);
  2295. if (is_type_union(t)) {
  2296. lbValue v = e;
  2297. if (!is_type_pointer(v.type)) {
  2298. v = lb_address_from_load_or_generate_local(p, v);
  2299. }
  2300. Type *src_type = type_deref(v.type);
  2301. Type *dst_type = type;
  2302. lbValue src_tag = {};
  2303. lbValue dst_tag = {};
  2304. if (is_type_union_maybe_pointer(src_type)) {
  2305. src_tag = lb_emit_comp_against_nil(p, Token_NotEq, v);
  2306. dst_tag = lb_const_bool(p->module, t_bool, true);
  2307. } else {
  2308. src_tag = lb_emit_load(p, lb_emit_union_tag_ptr(p, v));
  2309. dst_tag = lb_const_union_tag(p->module, src_type, dst_type);
  2310. }
  2311. lbValue ok = lb_emit_comp(p, Token_CmpEq, src_tag, dst_tag);
  2312. lbValue data_ptr = lb_emit_conv(p, v, ptr_type);
  2313. lbAddr res = lb_add_local_generated(p, tuple, true);
  2314. lbValue gep0 = lb_emit_struct_ep(p, res.addr, 0);
  2315. lbValue gep1 = lb_emit_struct_ep(p, res.addr, 1);
  2316. lb_emit_store(p, gep0, lb_emit_select(p, ok, data_ptr, lb_const_nil(p->module, ptr_type)));
  2317. lb_emit_store(p, gep1, lb_emit_conv(p, ok, ok_type));
  2318. return lb_addr_load(p, res);
  2319. } else if (is_type_any(t)) {
  2320. lbValue v = e;
  2321. if (is_type_pointer(v.type)) {
  2322. v = lb_emit_load(p, v);
  2323. }
  2324. lbValue data_ptr = lb_emit_conv(p, lb_emit_struct_ev(p, v, 0), ptr_type);
  2325. lbValue any_id = lb_emit_struct_ev(p, v, 1);
  2326. lbValue id = lb_typeid(p->module, type);
  2327. lbValue ok = lb_emit_comp(p, Token_CmpEq, any_id, id);
  2328. lbAddr res = lb_add_local_generated(p, tuple, false);
  2329. lbValue gep0 = lb_emit_struct_ep(p, res.addr, 0);
  2330. lbValue gep1 = lb_emit_struct_ep(p, res.addr, 1);
  2331. lb_emit_store(p, gep0, lb_emit_select(p, ok, data_ptr, lb_const_nil(p->module, ptr_type)));
  2332. lb_emit_store(p, gep1, lb_emit_conv(p, ok, ok_type));
  2333. return lb_addr_load(p, res);
  2334. } else {
  2335. GB_PANIC("TODO(bill): type assertion %s", type_to_string(type));
  2336. }
  2337. } else {
  2338. GB_ASSERT(is_type_pointer(tv.type));
  2339. ast_node(ta, TypeAssertion, ue_expr);
  2340. TokenPos pos = ast_token(expr).pos;
  2341. Type *type = type_of_expr(ue_expr);
  2342. GB_ASSERT(!is_type_tuple(type));
  2343. lbValue e = lb_build_expr(p, ta->expr);
  2344. Type *t = type_deref(e.type);
  2345. if (is_type_union(t)) {
  2346. lbValue v = e;
  2347. if (!is_type_pointer(v.type)) {
  2348. v = lb_address_from_load_or_generate_local(p, v);
  2349. }
  2350. Type *src_type = type_deref(v.type);
  2351. Type *dst_type = type;
  2352. if ((p->state_flags & StateFlag_no_type_assert) == 0) {
  2353. lbValue src_tag = {};
  2354. lbValue dst_tag = {};
  2355. if (is_type_union_maybe_pointer(src_type)) {
  2356. src_tag = lb_emit_comp_against_nil(p, Token_NotEq, v);
  2357. dst_tag = lb_const_bool(p->module, t_bool, true);
  2358. } else {
  2359. src_tag = lb_emit_load(p, lb_emit_union_tag_ptr(p, v));
  2360. dst_tag = lb_const_union_tag(p->module, src_type, dst_type);
  2361. }
  2362. lbValue ok = lb_emit_comp(p, Token_CmpEq, src_tag, dst_tag);
  2363. auto args = array_make<lbValue>(permanent_allocator(), 6);
  2364. args[0] = ok;
  2365. args[1] = lb_find_or_add_entity_string(p->module, get_file_path_string(pos.file_id));
  2366. args[2] = lb_const_int(p->module, t_i32, pos.line);
  2367. args[3] = lb_const_int(p->module, t_i32, pos.column);
  2368. args[4] = lb_typeid(p->module, src_type);
  2369. args[5] = lb_typeid(p->module, dst_type);
  2370. lb_emit_runtime_call(p, "type_assertion_check", args);
  2371. }
  2372. lbValue data_ptr = v;
  2373. return lb_emit_conv(p, data_ptr, tv.type);
  2374. } else if (is_type_any(t)) {
  2375. lbValue v = e;
  2376. if (is_type_pointer(v.type)) {
  2377. v = lb_emit_load(p, v);
  2378. }
  2379. lbValue data_ptr = lb_emit_struct_ev(p, v, 0);
  2380. if ((p->state_flags & StateFlag_no_type_assert) == 0) {
  2381. lbValue any_id = lb_emit_struct_ev(p, v, 1);
  2382. lbValue id = lb_typeid(p->module, type);
  2383. lbValue ok = lb_emit_comp(p, Token_CmpEq, any_id, id);
  2384. auto args = array_make<lbValue>(permanent_allocator(), 6);
  2385. args[0] = ok;
  2386. args[1] = lb_find_or_add_entity_string(p->module, get_file_path_string(pos.file_id));
  2387. args[2] = lb_const_int(p->module, t_i32, pos.line);
  2388. args[3] = lb_const_int(p->module, t_i32, pos.column);
  2389. args[4] = any_id;
  2390. args[5] = id;
  2391. lb_emit_runtime_call(p, "type_assertion_check", args);
  2392. }
  2393. return lb_emit_conv(p, data_ptr, tv.type);
  2394. } else {
  2395. GB_PANIC("TODO(bill): type assertion %s", type_to_string(type));
  2396. }
  2397. }
  2398. }
  2399. return lb_build_addr_ptr(p, ue->expr);
  2400. }
// Builds the LLVM IR for an rvalue expression and returns the resulting value.
// Constant expressions short-circuit to lb_const_value; everything else is
// dispatched on the AST node kind. The procedure's state flags (bounds checks,
// type-assert checks) are overridden from the expression's own flags for the
// duration of this call and restored on exit via `defer`.
lbValue lb_build_expr(lbProcedure *p, Ast *expr) {
	lbModule *m = p->module;

	// Save/restore state flags: the expression may locally enable or disable
	// bounds checking and type-assertion checking.
	u16 prev_state_flags = p->state_flags;
	defer (p->state_flags = prev_state_flags);

	if (expr->state_flags != 0) {
		u16 in = expr->state_flags;
		u16 out = p->state_flags;

		if (in & StateFlag_bounds_check) {
			out |= StateFlag_bounds_check;
			out &= ~StateFlag_no_bounds_check;
		} else if (in & StateFlag_no_bounds_check) {
			out |= StateFlag_no_bounds_check;
			out &= ~StateFlag_bounds_check;
		}
		if (in & StateFlag_type_assert) {
			out |= StateFlag_type_assert;
			out &= ~StateFlag_no_type_assert;
		} else if (in & StateFlag_no_type_assert) {
			out |= StateFlag_no_type_assert;
			out &= ~StateFlag_type_assert;
		}

		p->state_flags = out;
	}

	expr = unparen_expr(expr);

	TokenPos expr_pos = ast_token(expr).pos;
	TypeAndValue tv = type_and_value_of_expr(expr);
	Type *type = type_of_expr(expr);
	GB_ASSERT_MSG(tv.mode != Addressing_Invalid, "invalid expression '%s' (tv.mode = %d, tv.type = %s) @ %s\n Current Proc: %.*s : %s", expr_to_string(expr), tv.mode, type_to_string(tv.type), token_pos_to_string(expr_pos), LIT(p->name), type_to_string(p->type));

	if (tv.value.kind != ExactValue_Invalid) {
		// NOTE(bill): The commented out code below is just for debug purposes only
		// if (is_type_untyped(type)) {
		// 	gb_printf_err("%s %s : %s @ %p\n", token_pos_to_string(expr_pos), expr_to_string(expr), type_to_string(expr->tav.type), expr);
		// 	GB_PANIC("%s\n", type_to_string(tv.type));
		// }

		// NOTE(bill): Short on constant values
		return lb_const_value(p->module, type, tv.value);
	}

	#if 0
	LLVMMetadataRef prev_debug_location = nullptr;
	if (p->debug_info != nullptr) {
		prev_debug_location = LLVMGetCurrentDebugLocation2(p->builder);
		LLVMSetCurrentDebugLocation2(p->builder, lb_debug_location_from_ast(p, expr));
	}
	defer (if (prev_debug_location != nullptr) {
		LLVMSetCurrentDebugLocation2(p->builder, prev_debug_location);
	});
	#endif

	switch (expr->kind) {
	case_ast_node(bl, BasicLit, expr);
		// Basic literals should always have been constant-folded above.
		TokenPos pos = bl->token.pos;
		GB_PANIC("Non-constant basic literal %s - %.*s", token_pos_to_string(pos), LIT(token_strings[bl->token.kind]));
	case_end;

	case_ast_node(bd, BasicDirective, expr);
		TokenPos pos = bd->token.pos;
		GB_PANIC("Non-constant basic literal %s - %.*s", token_pos_to_string(pos), LIT(bd->name.string));
	case_end;

	case_ast_node(i, Implicit, expr);
		// e.g. `context` — build as an address, then load the value.
		return lb_addr_load(p, lb_build_addr(p, expr));
	case_end;

	case_ast_node(u, Undef, expr)
		// `---` undefined value; untyped undef has no LLVM value.
		lbValue res = {};
		if (is_type_untyped(type)) {
			res.value = nullptr;
			res.type  = t_untyped_undef;
		} else {
			res.value = LLVMGetUndef(lb_type(m, type));
			res.type  = type;
		}
		return res;
	case_end;

	case_ast_node(i, Ident, expr);
		Entity *e = entity_from_expr(expr);
		e = strip_entity_wrapping(e);

		GB_ASSERT_MSG(e != nullptr, "%s", expr_to_string(expr));
		if (e->kind == Entity_Builtin) {
			// Builtin procedures cannot be used as first-class values here.
			Token token = ast_token(expr);
			GB_PANIC("TODO(bill): lb_build_expr Entity_Builtin '%.*s'\n"
			         "\t at %s", LIT(builtin_procs[e->Builtin.id].name),
			         token_pos_to_string(token.pos));
			return {};
		} else if (e->kind == Entity_Nil) {
			// `nil` has no LLVM value; its type carries the meaning.
			lbValue res = {};
			res.value = nullptr;
			res.type = e->type;
			return res;
		}
		GB_ASSERT(e->kind != Entity_ProcGroup);

		return lb_find_ident(p, m, e, expr);
	case_end;

	case_ast_node(de, DerefExpr, expr);
		return lb_addr_load(p, lb_build_addr(p, expr));
	case_end;

	case_ast_node(se, SelectorExpr, expr);
		TypeAndValue tav = type_and_value_of_expr(expr);
		GB_ASSERT(tav.mode != Addressing_Invalid);
		return lb_addr_load(p, lb_build_addr(p, expr));
	case_end;

	case_ast_node(ise, ImplicitSelectorExpr, expr);
		// `.Foo` enum shorthand — always a constant.
		TypeAndValue tav = type_and_value_of_expr(expr);
		GB_ASSERT(tav.mode == Addressing_Constant);
		return lb_const_value(p->module, type, tv.value);
	case_end;

	case_ast_node(se, SelectorCallExpr, expr);
		// `x->f(...)` rewritten call; cache the result on the call node so the
		// selector machinery can reuse it.
		GB_ASSERT(se->modified_call);
		TypeAndValue tav = type_and_value_of_expr(expr);
		GB_ASSERT(tav.mode != Addressing_Invalid);
		lbValue res = lb_build_call_expr(p, se->call);

		ast_node(ce, CallExpr, se->call);
		ce->sce_temp_data = gb_alloc_copy(permanent_allocator(), &res, gb_size_of(res));

		return res;
	case_end;

	case_ast_node(te, TernaryIfExpr, expr);
		// `x if cond else y`: two branches merged with a phi node.
		LLVMValueRef incoming_values[2] = {};
		LLVMBasicBlockRef incoming_blocks[2] = {};

		GB_ASSERT(te->y != nullptr);
		lbBlock *then  = lb_create_block(p, "if.then");
		lbBlock *done  = lb_create_block(p, "if.done"); // NOTE(bill): Append later
		lbBlock *else_ = lb_create_block(p, "if.else");

		lbValue cond = lb_build_cond(p, te->cond, then, else_);
		lb_start_block(p, then);

		Type *type = default_type(type_of_expr(expr));

		incoming_values[0] = lb_emit_conv(p, lb_build_expr(p, te->x), type).value;

		lb_emit_jump(p, done);
		lb_start_block(p, else_);

		incoming_values[1] = lb_emit_conv(p, lb_build_expr(p, te->y), type).value;

		lb_emit_jump(p, done);
		lb_start_block(p, done);

		lbValue res = {};
		res.value = LLVMBuildPhi(p->builder, lb_type(p->module, type), "");
		res.type = type;

		// The predecessors of `done` are the final blocks of each branch
		// (each branch may have created further blocks of its own).
		GB_ASSERT(p->curr_block->preds.count >= 2);
		incoming_blocks[0] = p->curr_block->preds[0]->block;
		incoming_blocks[1] = p->curr_block->preds[1]->block;

		LLVMAddIncoming(res.value, incoming_values, incoming_blocks, 2);

		return res;
	case_end;

	case_ast_node(te, TernaryWhenExpr, expr);
		// `x when COND else y`: compile-time selection, only one side is built.
		TypeAndValue tav = type_and_value_of_expr(te->cond);
		GB_ASSERT(tav.mode == Addressing_Constant);
		GB_ASSERT(tav.value.kind == ExactValue_Bool);
		if (tav.value.value_bool) {
			return lb_build_expr(p, te->x);
		} else {
			return lb_build_expr(p, te->y);
		}
	case_end;

	case_ast_node(oe, OrElseExpr, expr);
		return lb_emit_or_else(p, oe->x, oe->y, tv);
	case_end;

	case_ast_node(oe, OrReturnExpr, expr);
		return lb_emit_or_return(p, oe->expr, tv);
	case_end;

	case_ast_node(ta, TypeAssertion, expr);
		// `x.(T)` on a union or any value.
		TokenPos pos = ast_token(expr).pos;
		lbValue e = lb_build_expr(p, ta->expr);
		Type *t = type_deref(e.type);
		if (is_type_union(t)) {
			if (ta->ignores[0]) {
				// NOTE(bill): This is not needed for optimization levels other than 0
				return lb_emit_union_cast_only_ok_check(p, e, type, pos);
			}
			return lb_emit_union_cast(p, e, type, pos);
		} else if (is_type_any(t)) {
			return lb_emit_any_cast(p, e, type, pos);
		} else {
			GB_PANIC("TODO(bill): type assertion %s", type_to_string(e.type));
		}
	case_end;

	case_ast_node(tc, TypeCast, expr);
		lbValue e = lb_build_expr(p, tc->expr);
		switch (tc->token.kind) {
		case Token_cast:
			return lb_emit_conv(p, e, type);
		case Token_transmute:
			return lb_emit_transmute(p, e, type);
		}
		GB_PANIC("Invalid AST TypeCast");
	case_end;

	case_ast_node(ac, AutoCast, expr);
		lbValue value = lb_build_expr(p, ac->expr);
		return lb_emit_conv(p, value, type);
	case_end;

	case_ast_node(ue, UnaryExpr, expr);
		switch (ue->op.kind) {
		case Token_And:
			// Address-of has its own dedicated builder.
			return lb_build_unary_and(p, expr);
		default:
			{
				lbValue v = lb_build_expr(p, ue->expr);
				return lb_emit_unary_arith(p, ue->op.kind, v, type);
			}
		}
	case_end;

	case_ast_node(be, BinaryExpr, expr);
		return lb_build_binary_expr(p, expr);
	case_end;

	case_ast_node(pl, ProcLit, expr);
		return lb_generate_anonymous_proc_lit(p->module, p->name, expr, p);
	case_end;

	case_ast_node(cl, CompoundLit, expr);
		return lb_addr_load(p, lb_build_addr(p, expr));
	case_end;

	case_ast_node(ce, CallExpr, expr);
		return lb_build_call_expr(p, expr);
	case_end;

	case_ast_node(se, SliceExpr, expr);
		if (is_type_slice(type_of_expr(se->expr))) {
			// NOTE(bill): Quick optimization
			// Slicing a slice with no high bound and a zero (or absent) low
			// bound is the identity; reuse the operand value directly.
			if (se->high == nullptr &&
			    (se->low == nullptr || lb_is_expr_constant_zero(se->low))) {
				return lb_build_expr(p, se->expr);
			}
		}
		return lb_addr_load(p, lb_build_addr(p, expr));
	case_end;

	case_ast_node(ie, IndexExpr, expr);
		return lb_addr_load(p, lb_build_addr(p, expr));
	case_end;

	case_ast_node(ie, MatrixIndexExpr, expr);
		return lb_addr_load(p, lb_build_addr(p, expr));
	case_end;

	case_ast_node(ia, InlineAsmExpr, expr);
		Type *t = type_of_expr(expr);
		GB_ASSERT(is_type_asm_proc(t));

		// Both the assembly template and the constraint list must be
		// compile-time string constants.
		String asm_string = {};
		String constraints_string = {};

		TypeAndValue tav;
		tav = type_and_value_of_expr(ia->asm_string);
		GB_ASSERT(is_type_string(tav.type));
		GB_ASSERT(tav.value.kind == ExactValue_String);
		asm_string = tav.value.value_string;

		tav = type_and_value_of_expr(ia->constraints_string);
		GB_ASSERT(is_type_string(tav.type));
		GB_ASSERT(tav.value.kind == ExactValue_String);
		constraints_string = tav.value.value_string;

		LLVMInlineAsmDialect dialect = LLVMInlineAsmDialectATT;
		switch (ia->dialect) {
		case InlineAsmDialect_Default: dialect = LLVMInlineAsmDialectATT;   break;
		case InlineAsmDialect_ATT:     dialect = LLVMInlineAsmDialectATT;   break;
		case InlineAsmDialect_Intel:   dialect = LLVMInlineAsmDialectIntel; break;
		default: GB_PANIC("Unhandled inline asm dialect"); break;
		}

		LLVMTypeRef func_type = LLVMGetElementType(lb_type(p->module, t));
		// NOTE(review): `ia->has_side_effects` is passed twice here; the second
		// argument is plausibly meant to be an align-stack flag — TODO confirm
		// against llvm_get_inline_asm's parameter list.
		LLVMValueRef the_asm = llvm_get_inline_asm(func_type, asm_string, constraints_string, ia->has_side_effects, ia->has_side_effects, dialect);
		GB_ASSERT(the_asm != nullptr);
		return {the_asm, t};
	case_end;
	}

	GB_PANIC("lb_build_expr: %.*s", LIT(ast_strings[expr->kind]));
	return {};
}
  2652. lbAddr lb_get_soa_variable_addr(lbProcedure *p, Entity *e) {
  2653. return map_must_get(&p->module->soa_values, e);
  2654. }
  2655. lbValue lb_get_using_variable(lbProcedure *p, Entity *e) {
  2656. GB_ASSERT(e->kind == Entity_Variable && e->flags & EntityFlag_Using);
  2657. String name = e->token.string;
  2658. Entity *parent = e->using_parent;
  2659. Selection sel = lookup_field(parent->type, name, false);
  2660. GB_ASSERT(sel.entity != nullptr);
  2661. lbValue *pv = map_get(&p->module->values, parent);
  2662. lbValue v = {};
  2663. if (pv == nullptr && parent->flags & EntityFlag_SoaPtrField) {
  2664. // NOTE(bill): using SOA value (probably from for-in statement)
  2665. lbAddr parent_addr = lb_get_soa_variable_addr(p, parent);
  2666. v = lb_addr_get_ptr(p, parent_addr);
  2667. } else if (pv != nullptr) {
  2668. v = *pv;
  2669. } else {
  2670. GB_ASSERT_MSG(e->using_expr != nullptr, "%.*s", LIT(name));
  2671. v = lb_build_addr_ptr(p, e->using_expr);
  2672. }
  2673. GB_ASSERT(v.value != nullptr);
  2674. GB_ASSERT_MSG(parent->type == type_deref(v.type), "%s %s", type_to_string(parent->type), type_to_string(v.type));
  2675. lbValue ptr = lb_emit_deep_field_gep(p, v, sel);
  2676. if (parent->scope) {
  2677. if ((parent->scope->flags & (ScopeFlag_File|ScopeFlag_Pkg)) == 0) {
  2678. lb_add_debug_local_variable(p, ptr.value, e->type, e->token);
  2679. }
  2680. } else {
  2681. lb_add_debug_local_variable(p, ptr.value, e->type, e->token);
  2682. }
  2683. return ptr;
  2684. }
  2685. lbAddr lb_build_addr_from_entity(lbProcedure *p, Entity *e, Ast *expr) {
  2686. GB_ASSERT(e != nullptr);
  2687. if (e->kind == Entity_Constant) {
  2688. Type *t = default_type(type_of_expr(expr));
  2689. lbValue v = lb_const_value(p->module, t, e->Constant.value);
  2690. lbAddr g = lb_add_global_generated(p->module, t, v);
  2691. return g;
  2692. }
  2693. lbValue v = {};
  2694. lbValue *found = map_get(&p->module->values, e);
  2695. if (found) {
  2696. v = *found;
  2697. } else if (e->kind == Entity_Variable && e->flags & EntityFlag_Using) {
  2698. // NOTE(bill): Calculate the using variable every time
  2699. v = lb_get_using_variable(p, e);
  2700. } else if (e->flags & EntityFlag_SoaPtrField) {
  2701. return lb_get_soa_variable_addr(p, e);
  2702. }
  2703. if (v.value == nullptr) {
  2704. return lb_addr(lb_find_value_from_entity(p->module, e));
  2705. // error(expr, "%.*s Unknown value: %.*s, entity: %p %.*s",
  2706. // LIT(p->name),
  2707. // LIT(e->token.string), e, LIT(entity_strings[e->kind]));
  2708. // GB_PANIC("Unknown value");
  2709. }
  2710. return lb_addr(v);
  2711. }
  2712. lbAddr lb_build_array_swizzle_addr(lbProcedure *p, AstCallExpr *ce, TypeAndValue const &tv) {
  2713. isize index_count = ce->args.count-1;
  2714. lbAddr addr = lb_build_addr(p, ce->args[0]);
  2715. if (index_count == 0) {
  2716. return addr;
  2717. }
  2718. Type *type = base_type(lb_addr_type(addr));
  2719. GB_ASSERT(type->kind == Type_Array);
  2720. i64 count = type->Array.count;
  2721. if (count <= 4) {
  2722. u8 indices[4] = {};
  2723. u8 index_count = 0;
  2724. for (i32 i = 1; i < ce->args.count; i++) {
  2725. TypeAndValue tv = type_and_value_of_expr(ce->args[i]);
  2726. GB_ASSERT(is_type_integer(tv.type));
  2727. GB_ASSERT(tv.value.kind == ExactValue_Integer);
  2728. i64 src_index = big_int_to_i64(&tv.value.value_integer);
  2729. indices[index_count++] = cast(u8)src_index;
  2730. }
  2731. return lb_addr_swizzle(lb_addr_get_ptr(p, addr), tv.type, index_count, indices);
  2732. }
  2733. auto indices = slice_make<i32>(permanent_allocator(), ce->args.count-1);
  2734. isize index_index = 0;
  2735. for (i32 i = 1; i < ce->args.count; i++) {
  2736. TypeAndValue tv = type_and_value_of_expr(ce->args[i]);
  2737. GB_ASSERT(is_type_integer(tv.type));
  2738. GB_ASSERT(tv.value.kind == ExactValue_Integer);
  2739. i64 src_index = big_int_to_i64(&tv.value.value_integer);
  2740. indices[index_index++] = cast(i32)src_index;
  2741. }
  2742. return lb_addr_swizzle_large(lb_addr_get_ptr(p, addr), tv.type, indices);
  2743. }
  2744. lbAddr lb_build_addr(lbProcedure *p, Ast *expr) {
  2745. expr = unparen_expr(expr);
  2746. switch (expr->kind) {
  2747. case_ast_node(i, Implicit, expr);
  2748. lbAddr v = {};
  2749. switch (i->kind) {
  2750. case Token_context:
  2751. v = lb_find_or_generate_context_ptr(p);
  2752. break;
  2753. }
  2754. GB_ASSERT(v.addr.value != nullptr);
  2755. return v;
  2756. case_end;
  2757. case_ast_node(i, Ident, expr);
  2758. if (is_blank_ident(expr)) {
  2759. lbAddr val = {};
  2760. return val;
  2761. }
  2762. String name = i->token.string;
  2763. Entity *e = entity_of_node(expr);
  2764. return lb_build_addr_from_entity(p, e, expr);
  2765. case_end;
  2766. case_ast_node(se, SelectorExpr, expr);
  2767. Ast *sel = unparen_expr(se->selector);
  2768. if (sel->kind == Ast_Ident) {
  2769. String selector = sel->Ident.token.string;
  2770. TypeAndValue tav = type_and_value_of_expr(se->expr);
  2771. if (tav.mode == Addressing_Invalid) {
  2772. // NOTE(bill): Imports
  2773. Entity *imp = entity_of_node(se->expr);
  2774. if (imp != nullptr) {
  2775. GB_ASSERT(imp->kind == Entity_ImportName);
  2776. }
  2777. return lb_build_addr(p, unparen_expr(se->selector));
  2778. }
  2779. Type *type = base_type(tav.type);
  2780. if (tav.mode == Addressing_Type) { // Addressing_Type
  2781. GB_PANIC("Unreachable");
  2782. }
  2783. if (se->swizzle_count > 0) {
  2784. Type *array_type = base_type(type_deref(tav.type));
  2785. GB_ASSERT(array_type->kind == Type_Array);
  2786. u8 swizzle_count = se->swizzle_count;
  2787. u8 swizzle_indices_raw = se->swizzle_indices;
  2788. u8 swizzle_indices[4] = {};
  2789. for (u8 i = 0; i < swizzle_count; i++) {
  2790. u8 index = swizzle_indices_raw>>(i*2) & 3;
  2791. swizzle_indices[i] = index;
  2792. }
  2793. lbValue a = {};
  2794. if (is_type_pointer(tav.type)) {
  2795. a = lb_build_expr(p, se->expr);
  2796. } else {
  2797. lbAddr addr = lb_build_addr(p, se->expr);
  2798. a = lb_addr_get_ptr(p, addr);
  2799. }
  2800. GB_ASSERT(is_type_array(expr->tav.type));
  2801. return lb_addr_swizzle(a, expr->tav.type, swizzle_count, swizzle_indices);
  2802. }
  2803. Selection sel = lookup_field(type, selector, false);
  2804. GB_ASSERT(sel.entity != nullptr);
  2805. {
  2806. lbAddr addr = lb_build_addr(p, se->expr);
  2807. if (addr.kind == lbAddr_Map) {
  2808. lbValue v = lb_addr_load(p, addr);
  2809. lbValue a = lb_address_from_load_or_generate_local(p, v);
  2810. a = lb_emit_deep_field_gep(p, a, sel);
  2811. return lb_addr(a);
  2812. } else if (addr.kind == lbAddr_Context) {
  2813. GB_ASSERT(sel.index.count > 0);
  2814. if (addr.ctx.sel.index.count >= 0) {
  2815. sel = selection_combine(addr.ctx.sel, sel);
  2816. }
  2817. addr.ctx.sel = sel;
  2818. addr.kind = lbAddr_Context;
  2819. return addr;
  2820. } else if (addr.kind == lbAddr_SoaVariable) {
  2821. lbValue index = addr.soa.index;
  2822. i32 first_index = sel.index[0];
  2823. Selection sub_sel = sel;
  2824. sub_sel.index.data += 1;
  2825. sub_sel.index.count -= 1;
  2826. lbValue arr = lb_emit_struct_ep(p, addr.addr, first_index);
  2827. Type *t = base_type(type_deref(addr.addr.type));
  2828. GB_ASSERT(is_type_soa_struct(t));
  2829. if (addr.soa.index_expr != nullptr && (!lb_is_const(addr.soa.index) || t->Struct.soa_kind != StructSoa_Fixed)) {
  2830. lbValue len = lb_soa_struct_len(p, addr.addr);
  2831. lb_emit_bounds_check(p, ast_token(addr.soa.index_expr), addr.soa.index, len);
  2832. }
  2833. lbValue item = {};
  2834. if (t->Struct.soa_kind == StructSoa_Fixed) {
  2835. item = lb_emit_array_ep(p, arr, index);
  2836. } else {
  2837. item = lb_emit_ptr_offset(p, lb_emit_load(p, arr), index);
  2838. }
  2839. if (sub_sel.index.count > 0) {
  2840. item = lb_emit_deep_field_gep(p, item, sub_sel);
  2841. }
  2842. return lb_addr(item);
  2843. } else if (addr.kind == lbAddr_Swizzle) {
  2844. GB_ASSERT(sel.index.count > 0);
  2845. // NOTE(bill): just patch the index in place
  2846. sel.index[0] = addr.swizzle.indices[sel.index[0]];
  2847. } else if (addr.kind == lbAddr_SwizzleLarge) {
  2848. GB_ASSERT(sel.index.count > 0);
  2849. // NOTE(bill): just patch the index in place
  2850. sel.index[0] = addr.swizzle.indices[sel.index[0]];
  2851. }
  2852. lbValue a = lb_addr_get_ptr(p, addr);
  2853. a = lb_emit_deep_field_gep(p, a, sel);
  2854. return lb_addr(a);
  2855. }
  2856. } else {
  2857. GB_PANIC("Unsupported selector expression");
  2858. }
  2859. case_end;
  2860. case_ast_node(se, SelectorCallExpr, expr);
  2861. GB_ASSERT(se->modified_call);
  2862. TypeAndValue tav = type_and_value_of_expr(expr);
  2863. GB_ASSERT(tav.mode != Addressing_Invalid);
  2864. lbValue e = lb_build_expr(p, expr);
  2865. return lb_addr(lb_address_from_load_or_generate_local(p, e));
  2866. case_end;
  2867. case_ast_node(ta, TypeAssertion, expr);
  2868. TokenPos pos = ast_token(expr).pos;
  2869. lbValue e = lb_build_expr(p, ta->expr);
  2870. Type *t = type_deref(e.type);
  2871. if (is_type_union(t)) {
  2872. Type *type = type_of_expr(expr);
  2873. lbAddr v = lb_add_local_generated(p, type, false);
  2874. lb_addr_store(p, v, lb_emit_union_cast(p, lb_build_expr(p, ta->expr), type, pos));
  2875. return v;
  2876. } else if (is_type_any(t)) {
  2877. Type *type = type_of_expr(expr);
  2878. return lb_emit_any_cast_addr(p, lb_build_expr(p, ta->expr), type, pos);
  2879. } else {
  2880. GB_PANIC("TODO(bill): type assertion %s", type_to_string(e.type));
  2881. }
  2882. case_end;
  2883. case_ast_node(ue, UnaryExpr, expr);
  2884. switch (ue->op.kind) {
  2885. case Token_And: {
  2886. lbValue ptr = lb_build_expr(p, expr);
  2887. return lb_addr(lb_address_from_load_or_generate_local(p, ptr));
  2888. }
  2889. default:
  2890. GB_PANIC("Invalid unary expression for lb_build_addr");
  2891. }
  2892. case_end;
  2893. case_ast_node(be, BinaryExpr, expr);
  2894. lbValue v = lb_build_expr(p, expr);
  2895. Type *t = v.type;
  2896. if (is_type_pointer(t)) {
  2897. return lb_addr(v);
  2898. }
  2899. return lb_addr(lb_address_from_load_or_generate_local(p, v));
  2900. case_end;
  2901. case_ast_node(ie, IndexExpr, expr);
  2902. Type *t = base_type(type_of_expr(ie->expr));
  2903. bool deref = is_type_pointer(t);
  2904. t = base_type(type_deref(t));
  2905. if (is_type_soa_struct(t)) {
  2906. // SOA STRUCTURES!!!!
  2907. lbValue val = lb_build_addr_ptr(p, ie->expr);
  2908. if (deref) {
  2909. val = lb_emit_load(p, val);
  2910. }
  2911. lbValue index = lb_build_expr(p, ie->index);
  2912. return lb_addr_soa_variable(val, index, ie->index);
  2913. }
  2914. if (ie->expr->tav.mode == Addressing_SoaVariable) {
  2915. // SOA Structures for slices/dynamic arrays
  2916. GB_ASSERT(is_type_pointer(type_of_expr(ie->expr)));
  2917. lbValue field = lb_build_expr(p, ie->expr);
  2918. lbValue index = lb_build_expr(p, ie->index);
  2919. if (!build_context.no_bounds_check) {
  2920. // TODO HACK(bill): Clean up this hack to get the length for bounds checking
  2921. // GB_ASSERT(LLVMIsALoadInst(field.value));
  2922. // lbValue a = {};
  2923. // a.value = LLVMGetOperand(field.value, 0);
  2924. // a.type = alloc_type_pointer(field.type);
  2925. // irInstr *b = &a->Instr;
  2926. // GB_ASSERT(b->kind == irInstr_StructElementPtr);
  2927. // lbValue base_struct = b->StructElementPtr.address;
  2928. // GB_ASSERT(is_type_soa_struct(type_deref(ir_type(base_struct))));
  2929. // lbValue len = ir_soa_struct_len(p, base_struct);
  2930. // lb_emit_bounds_check(p, ast_token(ie->index), index, len);
  2931. }
  2932. lbValue val = lb_emit_ptr_offset(p, field, index);
  2933. return lb_addr(val);
  2934. }
  2935. GB_ASSERT_MSG(is_type_indexable(t), "%s %s", type_to_string(t), expr_to_string(expr));
  2936. if (is_type_map(t)) {
  2937. lbAddr map_addr = lb_build_addr(p, ie->expr);
  2938. lbValue map_val = lb_addr_load(p, map_addr);
  2939. if (deref) {
  2940. map_val = lb_emit_load(p, map_val);
  2941. }
  2942. lbValue key = lb_build_expr(p, ie->index);
  2943. key = lb_emit_conv(p, key, t->Map.key);
  2944. Type *result_type = type_of_expr(expr);
  2945. lbValue map_ptr = lb_address_from_load_or_generate_local(p, map_val);
  2946. return lb_addr_map(map_ptr, key, t, result_type);
  2947. }
  2948. switch (t->kind) {
  2949. case Type_Array: {
  2950. lbValue array = {};
  2951. array = lb_build_addr_ptr(p, ie->expr);
  2952. if (deref) {
  2953. array = lb_emit_load(p, array);
  2954. }
  2955. lbValue index = lb_build_expr(p, ie->index);
  2956. index = lb_emit_conv(p, index, t_int);
  2957. lbValue elem = lb_emit_array_ep(p, array, index);
  2958. auto index_tv = type_and_value_of_expr(ie->index);
  2959. if (index_tv.mode != Addressing_Constant) {
  2960. lbValue len = lb_const_int(p->module, t_int, t->Array.count);
  2961. lb_emit_bounds_check(p, ast_token(ie->index), index, len);
  2962. }
  2963. return lb_addr(elem);
  2964. }
  2965. case Type_EnumeratedArray: {
  2966. lbValue array = {};
  2967. array = lb_build_addr_ptr(p, ie->expr);
  2968. if (deref) {
  2969. array = lb_emit_load(p, array);
  2970. }
  2971. Type *index_type = t->EnumeratedArray.index;
  2972. auto index_tv = type_and_value_of_expr(ie->index);
  2973. lbValue index = {};
  2974. if (compare_exact_values(Token_NotEq, *t->EnumeratedArray.min_value, exact_value_i64(0))) {
  2975. if (index_tv.mode == Addressing_Constant) {
  2976. ExactValue idx = exact_value_sub(index_tv.value, *t->EnumeratedArray.min_value);
  2977. index = lb_const_value(p->module, index_type, idx);
  2978. } else {
  2979. index = lb_emit_conv(p, lb_build_expr(p, ie->index), t_int);
  2980. index = lb_emit_arith(p, Token_Sub, index, lb_const_value(p->module, index_type, *t->EnumeratedArray.min_value), index_type);
  2981. }
  2982. } else {
  2983. index = lb_emit_conv(p, lb_build_expr(p, ie->index), t_int);
  2984. }
  2985. lbValue elem = lb_emit_array_ep(p, array, index);
  2986. if (index_tv.mode != Addressing_Constant) {
  2987. lbValue len = lb_const_int(p->module, t_int, t->EnumeratedArray.count);
  2988. lb_emit_bounds_check(p, ast_token(ie->index), index, len);
  2989. }
  2990. return lb_addr(elem);
  2991. }
  2992. case Type_Slice: {
  2993. lbValue slice = {};
  2994. slice = lb_build_expr(p, ie->expr);
  2995. if (deref) {
  2996. slice = lb_emit_load(p, slice);
  2997. }
  2998. lbValue elem = lb_slice_elem(p, slice);
  2999. lbValue index = lb_emit_conv(p, lb_build_expr(p, ie->index), t_int);
  3000. lbValue len = lb_slice_len(p, slice);
  3001. lb_emit_bounds_check(p, ast_token(ie->index), index, len);
  3002. lbValue v = lb_emit_ptr_offset(p, elem, index);
  3003. return lb_addr(v);
  3004. }
  3005. case Type_MultiPointer: {
  3006. lbValue multi_ptr = {};
  3007. multi_ptr = lb_build_expr(p, ie->expr);
  3008. if (deref) {
  3009. multi_ptr = lb_emit_load(p, multi_ptr);
  3010. }
  3011. lbValue index = lb_build_expr(p, ie->index);
  3012. lbValue v = {};
  3013. LLVMValueRef indices[1] = {index.value};
  3014. v.value = LLVMBuildGEP(p->builder, multi_ptr.value, indices, 1, "");
  3015. v.type = alloc_type_pointer(t->MultiPointer.elem);
  3016. return lb_addr(v);
  3017. }
  3018. case Type_RelativeSlice: {
  3019. lbAddr slice_addr = {};
  3020. if (deref) {
  3021. slice_addr = lb_addr(lb_build_expr(p, ie->expr));
  3022. } else {
  3023. slice_addr = lb_build_addr(p, ie->expr);
  3024. }
  3025. lbValue slice = lb_addr_load(p, slice_addr);
  3026. lbValue elem = lb_slice_elem(p, slice);
  3027. lbValue index = lb_emit_conv(p, lb_build_expr(p, ie->index), t_int);
  3028. lbValue len = lb_slice_len(p, slice);
  3029. lb_emit_bounds_check(p, ast_token(ie->index), index, len);
  3030. lbValue v = lb_emit_ptr_offset(p, elem, index);
  3031. return lb_addr(v);
  3032. }
  3033. case Type_DynamicArray: {
  3034. lbValue dynamic_array = {};
  3035. dynamic_array = lb_build_expr(p, ie->expr);
  3036. if (deref) {
  3037. dynamic_array = lb_emit_load(p, dynamic_array);
  3038. }
  3039. lbValue elem = lb_dynamic_array_elem(p, dynamic_array);
  3040. lbValue len = lb_dynamic_array_len(p, dynamic_array);
  3041. lbValue index = lb_emit_conv(p, lb_build_expr(p, ie->index), t_int);
  3042. lb_emit_bounds_check(p, ast_token(ie->index), index, len);
  3043. lbValue v = lb_emit_ptr_offset(p, elem, index);
  3044. return lb_addr(v);
  3045. }
  3046. case Type_Matrix: {
  3047. lbValue matrix = {};
  3048. matrix = lb_build_addr_ptr(p, ie->expr);
  3049. if (deref) {
  3050. matrix = lb_emit_load(p, matrix);
  3051. }
  3052. lbValue index = lb_build_expr(p, ie->index);
  3053. index = lb_emit_conv(p, index, t_int);
  3054. lbValue elem = lb_emit_matrix_ep(p, matrix, lb_const_int(p->module, t_int, 0), index);
  3055. elem = lb_emit_conv(p, elem, alloc_type_pointer(type_of_expr(expr)));
  3056. auto index_tv = type_and_value_of_expr(ie->index);
  3057. if (index_tv.mode != Addressing_Constant) {
  3058. lbValue len = lb_const_int(p->module, t_int, t->Matrix.column_count);
  3059. lb_emit_bounds_check(p, ast_token(ie->index), index, len);
  3060. }
  3061. return lb_addr(elem);
  3062. }
  3063. case Type_Basic: { // Basic_string
  3064. lbValue str;
  3065. lbValue elem;
  3066. lbValue len;
  3067. lbValue index;
  3068. str = lb_build_expr(p, ie->expr);
  3069. if (deref) {
  3070. str = lb_emit_load(p, str);
  3071. }
  3072. elem = lb_string_elem(p, str);
  3073. len = lb_string_len(p, str);
  3074. index = lb_emit_conv(p, lb_build_expr(p, ie->index), t_int);
  3075. lb_emit_bounds_check(p, ast_token(ie->index), index, len);
  3076. return lb_addr(lb_emit_ptr_offset(p, elem, index));
  3077. }
  3078. }
  3079. case_end;
  3080. case_ast_node(ie, MatrixIndexExpr, expr);
  3081. Type *t = base_type(type_of_expr(ie->expr));
  3082. bool deref = is_type_pointer(t);
  3083. t = base_type(type_deref(t));
  3084. lbValue m = {};
  3085. m = lb_build_addr_ptr(p, ie->expr);
  3086. if (deref) {
  3087. m = lb_emit_load(p, m);
  3088. }
  3089. lbValue row_index = lb_build_expr(p, ie->row_index);
  3090. lbValue column_index = lb_build_expr(p, ie->column_index);
  3091. row_index = lb_emit_conv(p, row_index, t_int);
  3092. column_index = lb_emit_conv(p, column_index, t_int);
  3093. lbValue elem = lb_emit_matrix_ep(p, m, row_index, column_index);
  3094. auto row_index_tv = type_and_value_of_expr(ie->row_index);
  3095. auto column_index_tv = type_and_value_of_expr(ie->column_index);
  3096. if (row_index_tv.mode != Addressing_Constant || column_index_tv.mode != Addressing_Constant) {
  3097. lbValue row_count = lb_const_int(p->module, t_int, t->Matrix.row_count);
  3098. lbValue column_count = lb_const_int(p->module, t_int, t->Matrix.column_count);
  3099. lb_emit_matrix_bounds_check(p, ast_token(ie->row_index), row_index, column_index, row_count, column_count);
  3100. }
  3101. return lb_addr(elem);
  3102. case_end;
  3103. case_ast_node(se, SliceExpr, expr);
  3104. lbValue low = lb_const_int(p->module, t_int, 0);
  3105. lbValue high = {};
  3106. if (se->low != nullptr) {
  3107. low = lb_correct_endianness(p, lb_build_expr(p, se->low));
  3108. }
  3109. if (se->high != nullptr) {
  3110. high = lb_correct_endianness(p, lb_build_expr(p, se->high));
  3111. }
  3112. bool no_indices = se->low == nullptr && se->high == nullptr;
  3113. lbAddr addr = lb_build_addr(p, se->expr);
  3114. lbValue base = lb_addr_load(p, addr);
  3115. Type *type = base_type(base.type);
  3116. if (is_type_pointer(type)) {
  3117. type = base_type(type_deref(type));
  3118. addr = lb_addr(base);
  3119. base = lb_addr_load(p, addr);
  3120. }
  3121. switch (type->kind) {
  3122. case Type_Slice: {
  3123. Type *slice_type = type;
  3124. lbValue len = lb_slice_len(p, base);
  3125. if (high.value == nullptr) high = len;
  3126. if (!no_indices) {
  3127. lb_emit_slice_bounds_check(p, se->open, low, high, len, se->low != nullptr);
  3128. }
  3129. lbValue elem = lb_emit_ptr_offset(p, lb_slice_elem(p, base), low);
  3130. lbValue new_len = lb_emit_arith(p, Token_Sub, high, low, t_int);
  3131. lbAddr slice = lb_add_local_generated(p, slice_type, false);
  3132. lb_fill_slice(p, slice, elem, new_len);
  3133. return slice;
  3134. }
  3135. case Type_RelativeSlice:
  3136. GB_PANIC("TODO(bill): Type_RelativeSlice should be handled above already on the lb_addr_load");
  3137. break;
  3138. case Type_DynamicArray: {
  3139. Type *elem_type = type->DynamicArray.elem;
  3140. Type *slice_type = alloc_type_slice(elem_type);
  3141. lbValue len = lb_dynamic_array_len(p, base);
  3142. if (high.value == nullptr) high = len;
  3143. if (!no_indices) {
  3144. lb_emit_slice_bounds_check(p, se->open, low, high, len, se->low != nullptr);
  3145. }
  3146. lbValue elem = lb_emit_ptr_offset(p, lb_dynamic_array_elem(p, base), low);
  3147. lbValue new_len = lb_emit_arith(p, Token_Sub, high, low, t_int);
  3148. lbAddr slice = lb_add_local_generated(p, slice_type, false);
  3149. lb_fill_slice(p, slice, elem, new_len);
  3150. return slice;
  3151. }
  3152. case Type_MultiPointer: {
  3153. lbAddr res = lb_add_local_generated(p, type_of_expr(expr), false);
  3154. if (se->high == nullptr) {
  3155. lbValue offset = base;
  3156. LLVMValueRef indices[1] = {low.value};
  3157. offset.value = LLVMBuildGEP(p->builder, offset.value, indices, 1, "");
  3158. lb_addr_store(p, res, offset);
  3159. } else {
  3160. low = lb_emit_conv(p, low, t_int);
  3161. high = lb_emit_conv(p, high, t_int);
  3162. lb_emit_multi_pointer_slice_bounds_check(p, se->open, low, high);
  3163. LLVMValueRef indices[1] = {low.value};
  3164. LLVMValueRef ptr = LLVMBuildGEP(p->builder, base.value, indices, 1, "");
  3165. LLVMValueRef len = LLVMBuildSub(p->builder, high.value, low.value, "");
  3166. LLVMValueRef gep0 = lb_emit_struct_ep(p, res.addr, 0).value;
  3167. LLVMValueRef gep1 = lb_emit_struct_ep(p, res.addr, 1).value;
  3168. LLVMBuildStore(p->builder, ptr, gep0);
  3169. LLVMBuildStore(p->builder, len, gep1);
  3170. }
  3171. return res;
  3172. }
  3173. case Type_Array: {
  3174. Type *slice_type = alloc_type_slice(type->Array.elem);
  3175. lbValue len = lb_const_int(p->module, t_int, type->Array.count);
  3176. if (high.value == nullptr) high = len;
  3177. bool low_const = type_and_value_of_expr(se->low).mode == Addressing_Constant;
  3178. bool high_const = type_and_value_of_expr(se->high).mode == Addressing_Constant;
  3179. if (!low_const || !high_const) {
  3180. if (!no_indices) {
  3181. lb_emit_slice_bounds_check(p, se->open, low, high, len, se->low != nullptr);
  3182. }
  3183. }
  3184. lbValue elem = lb_emit_ptr_offset(p, lb_array_elem(p, lb_addr_get_ptr(p, addr)), low);
  3185. lbValue new_len = lb_emit_arith(p, Token_Sub, high, low, t_int);
  3186. lbAddr slice = lb_add_local_generated(p, slice_type, false);
  3187. lb_fill_slice(p, slice, elem, new_len);
  3188. return slice;
  3189. }
  3190. case Type_Basic: {
  3191. GB_ASSERT(type == t_string);
  3192. lbValue len = lb_string_len(p, base);
  3193. if (high.value == nullptr) high = len;
  3194. if (!no_indices) {
  3195. lb_emit_slice_bounds_check(p, se->open, low, high, len, se->low != nullptr);
  3196. }
  3197. lbValue elem = lb_emit_ptr_offset(p, lb_string_elem(p, base), low);
  3198. lbValue new_len = lb_emit_arith(p, Token_Sub, high, low, t_int);
  3199. lbAddr str = lb_add_local_generated(p, t_string, false);
  3200. lb_fill_string(p, str, elem, new_len);
  3201. return str;
  3202. }
  3203. case Type_Struct:
  3204. if (is_type_soa_struct(type)) {
  3205. lbValue len = lb_soa_struct_len(p, lb_addr_get_ptr(p, addr));
  3206. if (high.value == nullptr) high = len;
  3207. if (!no_indices) {
  3208. lb_emit_slice_bounds_check(p, se->open, low, high, len, se->low != nullptr);
  3209. }
  3210. #if 1
  3211. lbAddr dst = lb_add_local_generated(p, type_of_expr(expr), true);
  3212. if (type->Struct.soa_kind == StructSoa_Fixed) {
  3213. i32 field_count = cast(i32)type->Struct.fields.count;
  3214. for (i32 i = 0; i < field_count; i++) {
  3215. lbValue field_dst = lb_emit_struct_ep(p, dst.addr, i);
  3216. lbValue field_src = lb_emit_struct_ep(p, lb_addr_get_ptr(p, addr), i);
  3217. field_src = lb_emit_array_ep(p, field_src, low);
  3218. lb_emit_store(p, field_dst, field_src);
  3219. }
  3220. lbValue len_dst = lb_emit_struct_ep(p, dst.addr, field_count);
  3221. lbValue new_len = lb_emit_arith(p, Token_Sub, high, low, t_int);
  3222. lb_emit_store(p, len_dst, new_len);
  3223. } else if (type->Struct.soa_kind == StructSoa_Slice) {
  3224. if (no_indices) {
  3225. lb_addr_store(p, dst, base);
  3226. } else {
  3227. i32 field_count = cast(i32)type->Struct.fields.count - 1;
  3228. for (i32 i = 0; i < field_count; i++) {
  3229. lbValue field_dst = lb_emit_struct_ep(p, dst.addr, i);
  3230. lbValue field_src = lb_emit_struct_ev(p, base, i);
  3231. field_src = lb_emit_ptr_offset(p, field_src, low);
  3232. lb_emit_store(p, field_dst, field_src);
  3233. }
  3234. lbValue len_dst = lb_emit_struct_ep(p, dst.addr, field_count);
  3235. lbValue new_len = lb_emit_arith(p, Token_Sub, high, low, t_int);
  3236. lb_emit_store(p, len_dst, new_len);
  3237. }
  3238. } else if (type->Struct.soa_kind == StructSoa_Dynamic) {
  3239. i32 field_count = cast(i32)type->Struct.fields.count - 3;
  3240. for (i32 i = 0; i < field_count; i++) {
  3241. lbValue field_dst = lb_emit_struct_ep(p, dst.addr, i);
  3242. lbValue field_src = lb_emit_struct_ev(p, base, i);
  3243. field_src = lb_emit_ptr_offset(p, field_src, low);
  3244. lb_emit_store(p, field_dst, field_src);
  3245. }
  3246. lbValue len_dst = lb_emit_struct_ep(p, dst.addr, field_count);
  3247. lbValue new_len = lb_emit_arith(p, Token_Sub, high, low, t_int);
  3248. lb_emit_store(p, len_dst, new_len);
  3249. }
  3250. return dst;
  3251. #endif
  3252. }
  3253. break;
  3254. }
  3255. GB_PANIC("Unknown slicable type");
  3256. case_end;
  3257. case_ast_node(de, DerefExpr, expr);
  3258. if (is_type_relative_pointer(type_of_expr(de->expr))) {
  3259. lbAddr addr = lb_build_addr(p, de->expr);
  3260. addr.relative.deref = true;
  3261. return addr;\
  3262. }
  3263. lbValue addr = lb_build_expr(p, de->expr);
  3264. return lb_addr(addr);
  3265. case_end;
  3266. case_ast_node(ce, CallExpr, expr);
  3267. BuiltinProcId builtin_id = BuiltinProc_Invalid;
  3268. if (ce->proc->tav.mode == Addressing_Builtin) {
  3269. Entity *e = entity_of_node(ce->proc);
  3270. if (e != nullptr) {
  3271. builtin_id = cast(BuiltinProcId)e->Builtin.id;
  3272. } else {
  3273. builtin_id = BuiltinProc_DIRECTIVE;
  3274. }
  3275. }
  3276. auto const &tv = expr->tav;
  3277. if (builtin_id == BuiltinProc_swizzle &&
  3278. is_type_array(tv.type)) {
  3279. // NOTE(bill, 2021-08-09): `swizzle` has some bizarre semantics so it needs to be
  3280. // specialized here for to be addressable
  3281. return lb_build_array_swizzle_addr(p, ce, tv);
  3282. }
  3283. // NOTE(bill): This is make sure you never need to have an 'array_ev'
  3284. lbValue e = lb_build_expr(p, expr);
  3285. #if 1
  3286. return lb_addr(lb_address_from_load_or_generate_local(p, e));
  3287. #else
  3288. lbAddr v = lb_add_local_generated(p, e.type, false);
  3289. lb_addr_store(p, v, e);
  3290. return v;
  3291. #endif
  3292. case_end;
  3293. case_ast_node(cl, CompoundLit, expr);
  3294. Type *type = type_of_expr(expr);
  3295. Type *bt = base_type(type);
  3296. lbAddr v = lb_add_local_generated(p, type, true);
  3297. Type *et = nullptr;
  3298. switch (bt->kind) {
  3299. case Type_Array: et = bt->Array.elem; break;
  3300. case Type_EnumeratedArray: et = bt->EnumeratedArray.elem; break;
  3301. case Type_Slice: et = bt->Slice.elem; break;
  3302. case Type_BitSet: et = bt->BitSet.elem; break;
  3303. case Type_SimdVector: et = bt->SimdVector.elem; break;
  3304. case Type_Matrix: et = bt->Matrix.elem; break;
  3305. }
  3306. String proc_name = {};
  3307. if (p->entity) {
  3308. proc_name = p->entity->token.string;
  3309. }
  3310. TokenPos pos = ast_token(expr).pos;
  3311. switch (bt->kind) {
  // Any compound-literal base type not handled below is a compiler bug, not user error.
  3312. default: GB_PANIC("Unknown CompoundLit type: %s", type_to_string(type)); break;
  3313. case Type_Struct: {
  // Strategy (shared by most cases below): bulk-store the literal's compile-time
  // constant value into `v` first, then emit explicit runtime stores only for the
  // elements that are not constant.
  3314. // TODO(bill): "constant" '#raw_union's are not initialized constantly at the moment.
  3315. // NOTE(bill): This is due to the layout of the unions when printed to LLVM-IR
  3316. bool is_raw_union = is_type_raw_union(bt);
  3317. GB_ASSERT(is_type_struct(bt) || is_raw_union);
  3318. TypeStruct *st = &bt->Struct;
  3319. if (cl->elems.count > 0) {
  3320. lb_addr_store(p, v, lb_const_value(p->module, type, exact_value_compound(expr)));
  3321. lbValue comp_lit_ptr = lb_addr_get_ptr(p, v);
  3322. for_array(field_index, cl->elems) {
  3323. Ast *elem = cl->elems[field_index];
  3324. lbValue field_expr = {};
  3325. Entity *field = nullptr;
  3326. isize index = field_index;
  3327. if (elem->kind == Ast_FieldValue) {
  // `name = value` element: resolve the destination field by name.
  3328. ast_node(fv, FieldValue, elem);
  3329. String name = fv->field->Ident.token.string;
  3330. Selection sel = lookup_field(bt, name, false);
  3331. index = sel.index[0];
  3332. elem = fv->value;
  // NOTE(review): this `tav` (and the one in the else branch) is never read —
  // looks like dead code; confirm before removing.
  3333. TypeAndValue tav = type_and_value_of_expr(elem);
  3334. } else {
  3335. TypeAndValue tav = type_and_value_of_expr(elem);
  // Positional element: map source order to the struct's actual field index.
  3336. Selection sel = lookup_field_from_index(bt, st->fields[field_index]->Variable.field_index);
  3337. index = sel.index[0];
  3338. }
  3339. field = st->fields[index];
  3340. Type *ft = field->type;
  // Constant elements are already covered by the bulk constant store above.
  3341. if (!is_raw_union && !is_type_typeid(ft) && lb_is_elem_const(elem, ft)) {
  3342. continue;
  3343. }
  3344. field_expr = lb_build_expr(p, elem);
  3345. lbValue gep = {};
  3346. if (is_raw_union) {
  // Raw-union fields share storage: write through a pointer cast, not a struct GEP.
  3347. gep = lb_emit_conv(p, comp_lit_ptr, alloc_type_pointer(ft));
  3348. } else {
  3349. gep = lb_emit_struct_ep(p, comp_lit_ptr, cast(i32)index);
  3350. }
  3351. Type *fet = field_expr.type;
  3352. GB_ASSERT(fet->kind != Type_Tuple);
  3353. // HACK TODO(bill): THIS IS A MASSIVE HACK!!!!
  // Storing a concrete variant into a union-typed field needs the tagged-store path.
  3354. if (is_type_union(ft) && !are_types_identical(fet, ft) && !is_type_untyped(fet)) {
  3355. GB_ASSERT_MSG(union_variant_index(ft, fet) > 0, "%s", type_to_string(fet));
  3356. lb_emit_store_union_variant(p, gep, field_expr, fet);
  3357. } else {
  3358. lbValue fv = lb_emit_conv(p, field_expr, ft);
  3359. lb_emit_store(p, gep, fv);
  3360. }
  3361. }
  3362. }
  3363. break;
  3364. }
  3365. case Type_Map: {
  // Map literals cannot be constant-folded: reserve capacity, then insert each
  // key/value pair at runtime.
  3366. if (cl->elems.count == 0) {
  3367. break;
  3368. }
  3369. {
  // Reserve 2x the element count up front — presumably headroom to limit
  // rehashing during insertion; TODO confirm the factor's intent.
  3370. auto args = array_make<lbValue>(permanent_allocator(), 3);
  3371. args[0] = lb_gen_map_header(p, v.addr, type);
  3372. args[1] = lb_const_int(p->module, t_int, 2*cl->elems.count);
  3373. args[2] = lb_emit_source_code_location(p, proc_name, pos);
  3374. lb_emit_runtime_call(p, "__dynamic_map_reserve", args);
  3375. }
  // Every map-literal element is a `key = value` FieldValue node.
  3376. for_array(field_index, cl->elems) {
  3377. Ast *elem = cl->elems[field_index];
  3378. ast_node(fv, FieldValue, elem);
  3379. lbValue key = lb_build_expr(p, fv->field);
  3380. lbValue value = lb_build_expr(p, fv->value);
  3381. lb_insert_dynamic_map_key_and_value(p, v, type, key, value, elem);
  3382. }
  3383. break;
  3384. }
  3385. case Type_Array: {
  // Bulk constant store, then three passes over the non-constant elements:
  // collect (value, index) pairs, compute GEPs, evaluate, and finally store.
  3386. if (cl->elems.count > 0) {
  3387. lb_addr_store(p, v, lb_const_value(p->module, type, exact_value_compound(expr)));
  3388. auto temp_data = array_make<lbCompoundLitElemTempData>(temporary_allocator(), 0, cl->elems.count);
  3389. // NOTE(bill): Separate value, gep, store into their own chunks
  3390. for_array(i, cl->elems) {
  3391. Ast *elem = cl->elems[i];
  3392. if (elem->kind == Ast_FieldValue) {
  3393. ast_node(fv, FieldValue, elem);
  3394. if (lb_is_elem_const(fv->value, et)) {
  3395. continue;
  3396. }
  3397. if (is_ast_range(fv->field)) {
  // `lo..hi = value` / `lo..<hi = value`: evaluate the value once and
  // fan it out to every index in the (constant) range.
  3398. ast_node(ie, BinaryExpr, fv->field);
  3399. TypeAndValue lo_tav = ie->left->tav;
  3400. TypeAndValue hi_tav = ie->right->tav;
  3401. GB_ASSERT(lo_tav.mode == Addressing_Constant);
  3402. GB_ASSERT(hi_tav.mode == Addressing_Constant);
  3403. TokenKind op = ie->op.kind;
  3404. i64 lo = exact_value_to_i64(lo_tav.value);
  3405. i64 hi = exact_value_to_i64(hi_tav.value);
  // Full ranges are inclusive of the upper bound; half ranges are not.
  3406. if (op != Token_RangeHalf) {
  3407. hi += 1;
  3408. }
  3409. lbValue value = lb_build_expr(p, fv->value);
  3410. for (i64 k = lo; k < hi; k++) {
  3411. lbCompoundLitElemTempData data = {};
  3412. data.value = value;
  3413. data.elem_index = cast(i32)k;
  3414. array_add(&temp_data, data);
  3415. }
  3416. } else {
  // `index = value` with a single constant index.
  3417. auto tav = fv->field->tav;
  3418. GB_ASSERT(tav.mode == Addressing_Constant);
  3419. i64 index = exact_value_to_i64(tav.value);
  3420. lbValue value = lb_build_expr(p, fv->value);
  3421. lbCompoundLitElemTempData data = {};
  3422. data.value = lb_emit_conv(p, value, et);
  3423. data.expr = fv->value;
  3424. data.elem_index = cast(i32)index;
  3425. array_add(&temp_data, data);
  3426. }
  3427. } else {
  // Positional element; defer evaluation (data.value stays null).
  3428. if (lb_is_elem_const(elem, et)) {
  3429. continue;
  3430. }
  3431. lbCompoundLitElemTempData data = {};
  3432. data.expr = elem;
  3433. data.elem_index = cast(i32)i;
  3434. array_add(&temp_data, data);
  3435. }
  3436. }
  3437. for_array(i, temp_data) {
  3438. temp_data[i].gep = lb_emit_array_epi(p, lb_addr_get_ptr(p, v), temp_data[i].elem_index);
  3439. }
  3440. for_array(i, temp_data) {
  3441. lbValue field_expr = temp_data[i].value;
  3442. Ast *expr = temp_data[i].expr;
  // Hint the destination so the element expression may construct in place.
  3443. auto prev_hint = lb_set_copy_elision_hint(p, lb_addr(temp_data[i].gep), expr);
  3444. if (field_expr.value == nullptr) {
  3445. field_expr = lb_build_expr(p, expr);
  3446. }
  3447. Type *t = field_expr.type;
  3448. GB_ASSERT(t->kind != Type_Tuple);
  3449. lbValue ev = lb_emit_conv(p, field_expr, et);
  // If the hint was consumed, the value was already written; skip the store pass.
  3450. if (!p->copy_elision_hint.used) {
  3451. temp_data[i].value = ev;
  3452. }
  3453. lb_reset_copy_elision_hint(p, prev_hint);
  3454. }
  3455. for_array(i, temp_data) {
  3456. if (temp_data[i].value.value != nullptr) {
  3457. lb_emit_store(p, temp_data[i].gep, temp_data[i].value);
  3458. }
  3459. }
  3460. }
  3461. break;
  3462. }
  3463. case Type_EnumeratedArray: {
  // Same three-pass scheme as Type_Array; the only difference is that element
  // indices are enum values and must be rebased by the enum's minimum value
  // before computing GEPs (see index_offset below).
  3464. if (cl->elems.count > 0) {
  3465. lb_addr_store(p, v, lb_const_value(p->module, type, exact_value_compound(expr)));
  3466. auto temp_data = array_make<lbCompoundLitElemTempData>(temporary_allocator(), 0, cl->elems.count);
  3467. // NOTE(bill): Separate value, gep, store into their own chunks
  3468. for_array(i, cl->elems) {
  3469. Ast *elem = cl->elems[i];
  3470. if (elem->kind == Ast_FieldValue) {
  3471. ast_node(fv, FieldValue, elem);
  3472. if (lb_is_elem_const(fv->value, et)) {
  3473. continue;
  3474. }
  3475. if (is_ast_range(fv->field)) {
  // Constant range key: evaluate once, fan out to each index.
  3476. ast_node(ie, BinaryExpr, fv->field);
  3477. TypeAndValue lo_tav = ie->left->tav;
  3478. TypeAndValue hi_tav = ie->right->tav;
  3479. GB_ASSERT(lo_tav.mode == Addressing_Constant);
  3480. GB_ASSERT(hi_tav.mode == Addressing_Constant);
  3481. TokenKind op = ie->op.kind;
  3482. i64 lo = exact_value_to_i64(lo_tav.value);
  3483. i64 hi = exact_value_to_i64(hi_tav.value);
  3484. if (op != Token_RangeHalf) {
  3485. hi += 1;
  3486. }
  3487. lbValue value = lb_build_expr(p, fv->value);
  3488. for (i64 k = lo; k < hi; k++) {
  3489. lbCompoundLitElemTempData data = {};
  3490. data.value = value;
  3491. data.elem_index = cast(i32)k;
  3492. array_add(&temp_data, data);
  3493. }
  3494. } else {
  // Single constant (enum-valued) key.
  3495. auto tav = fv->field->tav;
  3496. GB_ASSERT(tav.mode == Addressing_Constant);
  3497. i64 index = exact_value_to_i64(tav.value);
  3498. lbValue value = lb_build_expr(p, fv->value);
  3499. lbCompoundLitElemTempData data = {};
  3500. data.value = lb_emit_conv(p, value, et);
  3501. data.expr = fv->value;
  3502. data.elem_index = cast(i32)index;
  3503. array_add(&temp_data, data);
  3504. }
  3505. } else {
  3506. if (lb_is_elem_const(elem, et)) {
  3507. continue;
  3508. }
  3509. lbCompoundLitElemTempData data = {};
  3510. data.expr = elem;
  3511. data.elem_index = cast(i32)i;
  3512. array_add(&temp_data, data);
  3513. }
  3514. }
  // Rebase enum-valued indices to zero-based storage offsets.
  3515. i32 index_offset = cast(i32)exact_value_to_i64(*bt->EnumeratedArray.min_value);
  3516. for_array(i, temp_data) {
  3517. i32 index = temp_data[i].elem_index - index_offset;
  3518. temp_data[i].gep = lb_emit_array_epi(p, lb_addr_get_ptr(p, v), index);
  3519. }
  3520. for_array(i, temp_data) {
  3521. lbValue field_expr = temp_data[i].value;
  3522. Ast *expr = temp_data[i].expr;
  3523. auto prev_hint = lb_set_copy_elision_hint(p, lb_addr(temp_data[i].gep), expr);
  3524. if (field_expr.value == nullptr) {
  3525. field_expr = lb_build_expr(p, expr);
  3526. }
  3527. Type *t = field_expr.type;
  3528. GB_ASSERT(t->kind != Type_Tuple);
  3529. lbValue ev = lb_emit_conv(p, field_expr, et);
  3530. if (!p->copy_elision_hint.used) {
  3531. temp_data[i].value = ev;
  3532. }
  3533. lb_reset_copy_elision_hint(p, prev_hint);
  3534. }
  3535. for_array(i, temp_data) {
  3536. if (temp_data[i].value.value != nullptr) {
  3537. lb_emit_store(p, temp_data[i].gep, temp_data[i].value);
  3538. }
  3539. }
  3540. }
  3541. break;
  3542. }
  3543. case Type_Slice: {
  // Slice literal: build the constant slice (which owns backing storage),
  // patch the non-constant elements through the data pointer, then fill the
  // destination slice header (data + count).
  3544. if (cl->elems.count > 0) {
  3545. lbValue slice = lb_const_value(p->module, type, exact_value_compound(expr));
  3546. lbValue data = lb_slice_elem(p, slice);
  3547. auto temp_data = array_make<lbCompoundLitElemTempData>(temporary_allocator(), 0, cl->elems.count);
  3548. for_array(i, cl->elems) {
  3549. Ast *elem = cl->elems[i];
  3550. if (elem->kind == Ast_FieldValue) {
  3551. ast_node(fv, FieldValue, elem);
  3552. if (lb_is_elem_const(fv->value, et)) {
  3553. continue;
  3554. }
  3555. if (is_ast_range(fv->field)) {
  // Constant range key: evaluate once, fan out to each index.
  3556. ast_node(ie, BinaryExpr, fv->field);
  3557. TypeAndValue lo_tav = ie->left->tav;
  3558. TypeAndValue hi_tav = ie->right->tav;
  3559. GB_ASSERT(lo_tav.mode == Addressing_Constant);
  3560. GB_ASSERT(hi_tav.mode == Addressing_Constant);
  3561. TokenKind op = ie->op.kind;
  3562. i64 lo = exact_value_to_i64(lo_tav.value);
  3563. i64 hi = exact_value_to_i64(hi_tav.value);
  3564. if (op != Token_RangeHalf) {
  3565. hi += 1;
  3566. }
  3567. lbValue value = lb_emit_conv(p, lb_build_expr(p, fv->value), et);
  3568. for (i64 k = lo; k < hi; k++) {
  3569. lbCompoundLitElemTempData data = {};
  3570. data.value = value;
  3571. data.elem_index = cast(i32)k;
  3572. array_add(&temp_data, data);
  3573. }
  3574. } else {
  3575. GB_ASSERT(fv->field->tav.mode == Addressing_Constant);
  3576. i64 index = exact_value_to_i64(fv->field->tav.value);
  3577. lbValue field_expr = lb_build_expr(p, fv->value);
  3578. GB_ASSERT(!is_type_tuple(field_expr.type));
  3579. lbValue ev = lb_emit_conv(p, field_expr, et);
  3580. lbCompoundLitElemTempData data = {};
  3581. data.value = ev;
  3582. data.elem_index = cast(i32)index;
  3583. array_add(&temp_data, data);
  3584. }
  3585. } else {
  3586. if (lb_is_elem_const(elem, et)) {
  3587. continue;
  3588. }
  3589. lbValue field_expr = lb_build_expr(p, elem);
  3590. GB_ASSERT(!is_type_tuple(field_expr.type));
  3591. lbValue ev = lb_emit_conv(p, field_expr, et);
  3592. lbCompoundLitElemTempData data = {};
  3593. data.value = ev;
  3594. data.elem_index = cast(i32)i;
  3595. array_add(&temp_data, data);
  3596. }
  3597. }
  // Slice elements are addressed through the data pointer, not an array GEP.
  3598. for_array(i, temp_data) {
  3599. temp_data[i].gep = lb_emit_ptr_offset(p, data, lb_const_int(p->module, t_int, temp_data[i].elem_index));
  3600. }
  3601. for_array(i, temp_data) {
  3602. lb_emit_store(p, temp_data[i].gep, temp_data[i].value);
  3603. }
  3604. {
  // Extract the count (field 1) from the slice aggregate; the constant and
  // instruction forms need different LLVM APIs.
  3605. lbValue count = {};
  3606. count.type = t_int;
  3607. if (lb_is_const(slice)) {
  3608. unsigned indices[1] = {1};
  3609. count.value = LLVMConstExtractValue(slice.value, indices, gb_count_of(indices));
  3610. } else {
  3611. count.value = LLVMBuildExtractValue(p->builder, slice.value, 1, "");
  3612. }
  3613. lb_fill_slice(p, v, data, count);
  3614. }
  3615. }
  3616. break;
  3617. }
  3618. case Type_DynamicArray: {
  // Dynamic-array literal: reserve capacity, stage all elements into a local
  // fixed-size array, then append them in one runtime call.
  3619. if (cl->elems.count == 0) {
  3620. break;
  3621. }
  3622. Type *et = bt->DynamicArray.elem;
  3623. lbValue size = lb_const_int(p->module, t_int, type_size_of(et));
  3624. lbValue align = lb_const_int(p->module, t_int, type_align_of(et));
  // max_count accounts for range keys expanding to more slots than elems.count.
  3625. i64 item_count = gb_max(cl->max_count, cl->elems.count);
  3626. {
  3627. auto args = array_make<lbValue>(permanent_allocator(), 5);
  3628. args[0] = lb_emit_conv(p, lb_addr_get_ptr(p, v), t_rawptr);
  3629. args[1] = size;
  3630. args[2] = align;
  3631. args[3] = lb_const_int(p->module, t_int, 2*item_count); // TODO(bill): Is this too much waste?
  3632. args[4] = lb_emit_source_code_location(p, proc_name, pos);
  3633. lb_emit_runtime_call(p, "__dynamic_array_reserve", args);
  3634. }
  3635. lbValue items = lb_generate_local_array(p, et, item_count);
  3636. // lbValue items = lb_generate_global_array(p->module, et, item_count, str_lit("dacl$"), cast(i64)cast(intptr)expr);
  3637. for_array(i, cl->elems) {
  3638. Ast *elem = cl->elems[i];
  3639. if (elem->kind == Ast_FieldValue) {
  3640. ast_node(fv, FieldValue, elem);
  3641. if (is_ast_range(fv->field)) {
  // Constant range key: evaluate once, store into each staged slot.
  3642. ast_node(ie, BinaryExpr, fv->field);
  3643. TypeAndValue lo_tav = ie->left->tav;
  3644. TypeAndValue hi_tav = ie->right->tav;
  3645. GB_ASSERT(lo_tav.mode == Addressing_Constant);
  3646. GB_ASSERT(hi_tav.mode == Addressing_Constant);
  3647. TokenKind op = ie->op.kind;
  3648. i64 lo = exact_value_to_i64(lo_tav.value);
  3649. i64 hi = exact_value_to_i64(hi_tav.value);
  3650. if (op != Token_RangeHalf) {
  3651. hi += 1;
  3652. }
  3653. lbValue value = lb_emit_conv(p, lb_build_expr(p, fv->value), et);
  3654. for (i64 k = lo; k < hi; k++) {
  3655. lbValue ep = lb_emit_array_epi(p, items, cast(i32)k);
  3656. lb_emit_store(p, ep, value);
  3657. }
  3658. } else {
  3659. GB_ASSERT(fv->field->tav.mode == Addressing_Constant);
  3660. i64 field_index = exact_value_to_i64(fv->field->tav.value);
  3661. lbValue ev = lb_build_expr(p, fv->value);
  3662. lbValue value = lb_emit_conv(p, ev, et);
  3663. lbValue ep = lb_emit_array_epi(p, items, cast(i32)field_index);
  3664. lb_emit_store(p, ep, value);
  3665. }
  3666. } else {
  3667. lbValue value = lb_emit_conv(p, lb_build_expr(p, elem), et);
  3668. lbValue ep = lb_emit_array_epi(p, items, cast(i32)i);
  3669. lb_emit_store(p, ep, value);
  3670. }
  3671. }
  3672. {
  // Append the fully staged local buffer in a single runtime call.
  3673. auto args = array_make<lbValue>(permanent_allocator(), 6);
  3674. args[0] = lb_emit_conv(p, v.addr, t_rawptr);
  3675. args[1] = size;
  3676. args[2] = align;
  3677. args[3] = lb_emit_conv(p, items, t_rawptr);
  3678. args[4] = lb_const_int(p->module, t_int, item_count);
  3679. args[5] = lb_emit_source_code_location(p, proc_name, pos);
  3680. lb_emit_runtime_call(p, "__dynamic_array_append", args);
  3681. }
  3682. break;
  3683. }
  3684. case Type_Basic: {
  // The only basic type with a compound literal is `any`, which has exactly
  // two fields: data (rawptr) and id (typeid).
  3685. GB_ASSERT(is_type_any(bt));
  3686. if (cl->elems.count > 0) {
  3687. lb_addr_store(p, v, lb_const_value(p->module, type, exact_value_compound(expr)));
  3688. String field_names[2] = {
  3689. str_lit("data"),
  3690. str_lit("id"),
  3691. };
  3692. Type *field_types[2] = {
  3693. t_rawptr,
  3694. t_typeid,
  3695. };
  3696. for_array(field_index, cl->elems) {
  3697. Ast *elem = cl->elems[field_index];
  3698. lbValue field_expr = {};
  3699. isize index = field_index;
  3700. if (elem->kind == Ast_FieldValue) {
  // Named element: resolve "data"/"id" by name.
  3701. ast_node(fv, FieldValue, elem);
  3702. Selection sel = lookup_field(bt, fv->field->Ident.token.string, false);
  3703. index = sel.index[0];
  3704. elem = fv->value;
  3705. } else {
  // NOTE(review): `tav` is unused here — apparent dead code; confirm.
  3706. TypeAndValue tav = type_and_value_of_expr(elem);
  3707. Selection sel = lookup_field(bt, field_names[field_index], false);
  3708. index = sel.index[0];
  3709. }
  3710. field_expr = lb_build_expr(p, elem);
  3711. GB_ASSERT(field_expr.type->kind != Type_Tuple);
  3712. Type *ft = field_types[index];
  3713. lbValue fv = lb_emit_conv(p, field_expr, ft);
  3714. lbValue gep = lb_emit_struct_ep(p, lb_addr_get_ptr(p, v), cast(i32)index);
  3715. lb_emit_store(p, gep, fv);
  3716. }
  3717. }
  3718. break;
  3719. }
  3720. case Type_BitSet: {
  // Bit-set literal: start from the constant bits, then OR in each
  // non-constant element via integer bit arithmetic on the underlying type.
  3721. i64 sz = type_size_of(type);
  3722. if (cl->elems.count > 0 && sz > 0) {
  3723. lb_addr_store(p, v, lb_const_value(p->module, type, exact_value_compound(expr)));
  3724. lbValue lower = lb_const_value(p->module, t_int, exact_value_i64(bt->BitSet.lower));
  3725. for_array(i, cl->elems) {
  3726. Ast *elem = cl->elems[i];
  // Bit-set literals never use `key = value` elements.
  3727. GB_ASSERT(elem->kind != Ast_FieldValue);
  3728. if (lb_is_elem_const(elem, et)) {
  3729. continue;
  3730. }
  3731. lbValue expr = lb_build_expr(p, elem);
  3732. GB_ASSERT(expr.type->kind != Type_Tuple);
  3733. Type *it = bit_set_to_int(bt);
  3734. lbValue one = lb_const_value(p->module, it, exact_value_i64(1));
  3735. lbValue e = lb_emit_conv(p, expr, it);
  // bit = 1 << (elem - lower); current |= bit — done via transmute round-trip
  // because the stored value has bit-set type, not integer type.
  3736. e = lb_emit_arith(p, Token_Sub, e, lower, it);
  3737. e = lb_emit_arith(p, Token_Shl, one, e, it);
  3738. lbValue old_value = lb_emit_transmute(p, lb_addr_load(p, v), it);
  3739. lbValue new_value = lb_emit_arith(p, Token_Or, old_value, e, it);
  3740. new_value = lb_emit_transmute(p, new_value, type);
  3741. lb_addr_store(p, v, new_value);
  3742. }
  3743. }
  3744. break;
  3745. }
  3746. case Type_Matrix: {
  // Same three-pass scheme as Type_Array, except logical element indices are
  // translated to padded storage offsets via matrix_row_major_index_to_offset.
  3747. if (cl->elems.count > 0) {
  3748. lb_addr_store(p, v, lb_const_value(p->module, type, exact_value_compound(expr)));
  3749. auto temp_data = array_make<lbCompoundLitElemTempData>(temporary_allocator(), 0, cl->elems.count);
  3750. // NOTE(bill): Separate value, gep, store into their own chunks
  3751. for_array(i, cl->elems) {
  3752. Ast *elem = cl->elems[i];
  3753. if (elem->kind == Ast_FieldValue) {
  3754. ast_node(fv, FieldValue, elem);
  3755. if (lb_is_elem_const(fv->value, et)) {
  3756. continue;
  3757. }
  3758. if (is_ast_range(fv->field)) {
  // Constant range key: evaluate once, fan out to each offset.
  3759. ast_node(ie, BinaryExpr, fv->field);
  3760. TypeAndValue lo_tav = ie->left->tav;
  3761. TypeAndValue hi_tav = ie->right->tav;
  3762. GB_ASSERT(lo_tav.mode == Addressing_Constant);
  3763. GB_ASSERT(hi_tav.mode == Addressing_Constant);
  3764. TokenKind op = ie->op.kind;
  3765. i64 lo = exact_value_to_i64(lo_tav.value);
  3766. i64 hi = exact_value_to_i64(hi_tav.value);
  3767. if (op != Token_RangeHalf) {
  3768. hi += 1;
  3769. }
  3770. lbValue value = lb_build_expr(p, fv->value);
  3771. for (i64 k = lo; k < hi; k++) {
  3772. lbCompoundLitElemTempData data = {};
  3773. data.value = value;
  3774. data.elem_index = cast(i32)matrix_row_major_index_to_offset(bt, k);
  3775. array_add(&temp_data, data);
  3776. }
  3777. } else {
  3778. auto tav = fv->field->tav;
  3779. GB_ASSERT(tav.mode == Addressing_Constant);
  3780. i64 index = exact_value_to_i64(tav.value);
  3781. lbValue value = lb_build_expr(p, fv->value);
  3782. lbCompoundLitElemTempData data = {};
  3783. data.value = lb_emit_conv(p, value, et);
  3784. data.expr = fv->value;
  3785. data.elem_index = cast(i32)matrix_row_major_index_to_offset(bt, index);
  3786. array_add(&temp_data, data);
  3787. }
  3788. } else {
  3789. if (lb_is_elem_const(elem, et)) {
  3790. continue;
  3791. }
  3792. lbCompoundLitElemTempData data = {};
  3793. data.expr = elem;
  3794. data.elem_index = cast(i32)matrix_row_major_index_to_offset(bt, i);
  3795. array_add(&temp_data, data);
  3796. }
  3797. }
  3798. for_array(i, temp_data) {
  3799. temp_data[i].gep = lb_emit_array_epi(p, lb_addr_get_ptr(p, v), temp_data[i].elem_index);
  3800. }
  3801. for_array(i, temp_data) {
  3802. lbValue field_expr = temp_data[i].value;
  3803. Ast *expr = temp_data[i].expr;
  3804. auto prev_hint = lb_set_copy_elision_hint(p, lb_addr(temp_data[i].gep), expr);
  3805. if (field_expr.value == nullptr) {
  3806. field_expr = lb_build_expr(p, expr);
  3807. }
  3808. Type *t = field_expr.type;
  3809. GB_ASSERT(t->kind != Type_Tuple);
  3810. lbValue ev = lb_emit_conv(p, field_expr, et);
  3811. if (!p->copy_elision_hint.used) {
  3812. temp_data[i].value = ev;
  3813. }
  3814. lb_reset_copy_elision_hint(p, prev_hint);
  3815. }
  3816. for_array(i, temp_data) {
  3817. if (temp_data[i].value.value != nullptr) {
  3818. lb_emit_store(p, temp_data[i].gep, temp_data[i].value);
  3819. }
  3820. }
  3821. }
  3822. break;
  3823. }
  3824. }
  // The compound literal's address is the local `v` populated above.
  3825. return v;
  3826. case_end;
  3827. case_ast_node(tc, TypeCast, expr);
  // Taking the address of a cast expression: evaluate, convert or transmute,
  // and spill the result into a fresh local so it has an address.
  3828. Type *type = type_of_expr(expr);
  3829. lbValue x = lb_build_expr(p, tc->expr);
  3830. lbValue e = {};
  3831. switch (tc->token.kind) {
  3832. case Token_cast:
  3833. e = lb_emit_conv(p, x, type);
  3834. break;
  3835. case Token_transmute:
  // transmute reinterprets the bits without value conversion.
  3836. e = lb_emit_transmute(p, x, type);
  3837. break;
  3838. default:
  3839. GB_PANIC("Invalid AST TypeCast");
  3840. }
  3841. lbAddr v = lb_add_local_generated(p, type, false);
  3842. lb_addr_store(p, v, e);
  3843. return v;
  3844. case_end;
  3845. case_ast_node(ac, AutoCast, expr);
  // auto_cast is transparent for addressing: take the operand's address.
  3846. return lb_build_addr(p, ac->expr);
  3847. case_end;
  3848. case_ast_node(te, TernaryIfExpr, expr);
  // Address of `cond ? x : y`: branch, take each arm's address (converted to a
  // common pointer type), and merge the two pointers with a phi node.
  3849. LLVMValueRef incoming_values[2] = {};
  3850. LLVMBasicBlockRef incoming_blocks[2] = {};
  3851. GB_ASSERT(te->y != nullptr);
  3852. lbBlock *then = lb_create_block(p, "if.then");
  3853. lbBlock *done = lb_create_block(p, "if.done"); // NOTE(bill): Append later
  3854. lbBlock *else_ = lb_create_block(p, "if.else");
  // NOTE(review): `cond` is unused; lb_build_cond presumably emits the
  // conditional branch as a side effect — confirm before removing the local.
  3855. lbValue cond = lb_build_cond(p, te->cond, then, else_);
  3856. lb_start_block(p, then);
  3857. Type *ptr_type = alloc_type_pointer(default_type(type_of_expr(expr)));
  3858. incoming_values[0] = lb_emit_conv(p, lb_build_addr_ptr(p, te->x), ptr_type).value;
  3859. lb_emit_jump(p, done);
  3860. lb_start_block(p, else_);
  3861. incoming_values[1] = lb_emit_conv(p, lb_build_addr_ptr(p, te->y), ptr_type).value;
  3862. lb_emit_jump(p, done);
  3863. lb_start_block(p, done);
  3864. lbValue res = {};
  3865. res.value = LLVMBuildPhi(p->builder, lb_type(p->module, ptr_type), "");
  3866. res.type = ptr_type;
  // The phi's incoming blocks come from the merge block's predecessor list.
  3867. GB_ASSERT(p->curr_block->preds.count >= 2);
  3868. incoming_blocks[0] = p->curr_block->preds[0]->block;
  3869. incoming_blocks[1] = p->curr_block->preds[1]->block;
  3870. LLVMAddIncoming(res.value, incoming_values, incoming_blocks, 2);
  3871. return lb_addr(res);
  3872. case_end;
  3873. case_ast_node(oe, OrElseExpr, expr);
  // or_else/or_return produce rvalues: evaluate, then materialize an address
  // (reusing the source lvalue when the result is a load, else a new local).
  3874. lbValue ptr = lb_address_from_load_or_generate_local(p, lb_build_expr(p, expr));
  3875. return lb_addr(ptr);
  3876. case_end;
  3877. case_ast_node(oe, OrReturnExpr, expr);
  3878. lbValue ptr = lb_address_from_load_or_generate_local(p, lb_build_expr(p, expr));
  3879. return lb_addr(ptr);
  3880. case_end;
  3881. }
  // Reaching here means the expression kind is not addressable — compiler bug.
  3882. TokenPos token_pos = ast_token(expr).pos;
  3883. GB_PANIC("Unexpected address expression\n"
  3884. "\tAst: %.*s @ "
  3885. "%s\n",
  3886. LIT(ast_strings[expr->kind]),
  3887. token_pos_to_string(token_pos));
  3888. return {};
  3889. }
  3889. }