tilde_expr.cpp 125 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045104610471048104910501051105210531054105510561057105810591060106110621063106410651066106710681069107010711072107310741075107610771078107910801081108210831084108510861087108810891090109110921093109410951096109710981099110011011102110311041105110611071108110911101111111211131114111511161117111811191120112111221123112411251126112711281129113011311132113311341135113611371138113911401141114211431144114511461147114811491150115111521153115411551156115711581159116011611162116311641165116611671168116911701171117211731174117511761177117811791180118111821183118411851186118711881189119011911192119311941195119611971198119912001201120212031204120512061207120812091210121112121213121412151216121712181219122012211222122312241225122612271228122912301231123212331234123512361237123812391240124112421243124412451246124712481249125012511252125312541255125612571258125912601261126212631264126512661267126812691270127112721273127412751276127
71278127912801281128212831284128512861287128812891290129112921293129412951296129712981299130013011302130313041305130613071308130913101311131213131314131513161317131813191320132113221323132413251326132713281329133013311332133313341335133613371338133913401341134213431344134513461347134813491350135113521353135413551356135713581359136013611362136313641365136613671368136913701371137213731374137513761377137813791380138113821383138413851386138713881389139013911392139313941395139613971398139914001401140214031404140514061407140814091410141114121413141414151416141714181419142014211422142314241425142614271428142914301431143214331434143514361437143814391440144114421443144414451446144714481449145014511452145314541455145614571458145914601461146214631464146514661467146814691470147114721473147414751476147714781479148014811482148314841485148614871488148914901491149214931494149514961497149814991500150115021503150415051506150715081509151015111512151315141515151615171518151915201521152215231524152515261527152815291530153115321533153415351536153715381539154015411542154315441545154615471548154915501551155215531554155515561557155815591560156115621563156415651566156715681569157015711572157315741575157615771578157915801581158215831584158515861587158815891590159115921593159415951596159715981599160016011602160316041605160616071608160916101611161216131614161516161617161816191620162116221623162416251626162716281629163016311632163316341635163616371638163916401641164216431644164516461647164816491650165116521653165416551656165716581659166016611662166316641665166616671668166916701671167216731674167516761677167816791680168116821683168416851686168716881689169016911692169316941695169616971698169917001701170217031704170517061707170817091710171117121713171417151716171717181719172017211722172317241725172617271728172917301731173217331734173517361737173817391740174117421743174417451746174717481749175017511752175317541755175617571758175917601761176217631764176517661767176817691770177117721773177417751776177
71778177917801781178217831784178517861787178817891790179117921793179417951796179717981799180018011802180318041805180618071808180918101811181218131814181518161817181818191820182118221823182418251826182718281829183018311832183318341835183618371838183918401841184218431844184518461847184818491850185118521853185418551856185718581859186018611862186318641865186618671868186918701871187218731874187518761877187818791880188118821883188418851886188718881889189018911892189318941895189618971898189919001901190219031904190519061907190819091910191119121913191419151916191719181919192019211922192319241925192619271928192919301931193219331934193519361937193819391940194119421943194419451946194719481949195019511952195319541955195619571958195919601961196219631964196519661967196819691970197119721973197419751976197719781979198019811982198319841985198619871988198919901991199219931994199519961997199819992000200120022003200420052006200720082009201020112012201320142015201620172018201920202021202220232024202520262027202820292030203120322033203420352036203720382039204020412042204320442045204620472048204920502051205220532054205520562057205820592060206120622063206420652066206720682069207020712072207320742075207620772078207920802081208220832084208520862087208820892090209120922093209420952096209720982099210021012102210321042105210621072108210921102111211221132114211521162117211821192120212121222123212421252126212721282129213021312132213321342135213621372138213921402141214221432144214521462147214821492150215121522153215421552156215721582159216021612162216321642165216621672168216921702171217221732174217521762177217821792180218121822183218421852186218721882189219021912192219321942195219621972198219922002201220222032204220522062207220822092210221122122213221422152216221722182219222022212222222322242225222622272228222922302231223222332234223522362237223822392240224122422243224422452246224722482249225022512252225322542255225622572258225922602261226222632264226522662267226822692270227122722273227422752276227
72278227922802281228222832284228522862287228822892290229122922293229422952296229722982299230023012302230323042305230623072308230923102311231223132314231523162317231823192320232123222323232423252326232723282329233023312332233323342335233623372338233923402341234223432344234523462347234823492350235123522353235423552356235723582359236023612362236323642365236623672368236923702371237223732374237523762377237823792380238123822383238423852386238723882389239023912392239323942395239623972398239924002401240224032404240524062407240824092410241124122413241424152416241724182419242024212422242324242425242624272428242924302431243224332434243524362437243824392440244124422443244424452446244724482449245024512452245324542455245624572458245924602461246224632464246524662467246824692470247124722473247424752476247724782479248024812482248324842485248624872488248924902491249224932494249524962497249824992500250125022503250425052506250725082509251025112512251325142515251625172518251925202521252225232524252525262527252825292530253125322533253425352536253725382539254025412542254325442545254625472548254925502551255225532554255525562557255825592560256125622563256425652566256725682569257025712572257325742575257625772578257925802581258225832584258525862587258825892590259125922593259425952596259725982599260026012602260326042605260626072608260926102611261226132614261526162617261826192620262126222623262426252626262726282629263026312632263326342635263626372638263926402641264226432644264526462647264826492650265126522653265426552656265726582659266026612662266326642665266626672668266926702671267226732674267526762677267826792680268126822683268426852686268726882689269026912692269326942695269626972698269927002701270227032704270527062707270827092710271127122713271427152716271727182719272027212722272327242725272627272728272927302731273227332734273527362737273827392740274127422743274427452746274727482749275027512752275327542755275627572758275927602761276227632764276527662767276827692770277127722773277427752776277
72778277927802781278227832784278527862787278827892790279127922793279427952796279727982799280028012802280328042805280628072808280928102811281228132814281528162817281828192820282128222823282428252826282728282829283028312832283328342835283628372838283928402841284228432844284528462847284828492850285128522853285428552856285728582859286028612862286328642865286628672868286928702871287228732874287528762877287828792880288128822883288428852886288728882889289028912892289328942895289628972898289929002901290229032904290529062907290829092910291129122913291429152916291729182919292029212922292329242925292629272928292929302931293229332934293529362937293829392940294129422943294429452946294729482949295029512952295329542955295629572958295929602961296229632964296529662967296829692970297129722973297429752976297729782979298029812982298329842985298629872988298929902991299229932994299529962997299829993000300130023003300430053006300730083009301030113012301330143015301630173018301930203021302230233024302530263027302830293030303130323033303430353036303730383039304030413042304330443045304630473048304930503051305230533054305530563057305830593060306130623063306430653066306730683069307030713072307330743075307630773078307930803081308230833084308530863087308830893090309130923093309430953096309730983099310031013102310331043105310631073108310931103111311231133114311531163117311831193120312131223123312431253126312731283129313031313132313331343135313631373138313931403141314231433144314531463147314831493150315131523153315431553156315731583159316031613162316331643165316631673168316931703171317231733174317531763177317831793180318131823183318431853186318731883189319031913192319331943195319631973198319932003201320232033204320532063207320832093210321132123213321432153216321732183219322032213222322332243225322632273228322932303231323232333234323532363237323832393240324132423243324432453246324732483249325032513252325332543255325632573258325932603261326232633264326532663267326832693270327132723273327432753276327
73278327932803281328232833284328532863287328832893290329132923293329432953296329732983299330033013302330333043305330633073308330933103311331233133314331533163317331833193320332133223323332433253326332733283329333033313332333333343335333633373338333933403341334233433344334533463347334833493350335133523353335433553356335733583359336033613362336333643365336633673368336933703371337233733374337533763377337833793380338133823383338433853386338733883389339033913392339333943395339633973398339934003401340234033404340534063407340834093410341134123413341434153416341734183419342034213422342334243425342634273428342934303431343234333434343534363437343834393440344134423443344434453446344734483449345034513452345334543455345634573458345934603461346234633464346534663467346834693470347134723473347434753476347734783479348034813482348334843485348634873488348934903491349234933494349534963497349834993500350135023503350435053506350735083509351035113512351335143515351635173518351935203521352235233524352535263527352835293530353135323533353435353536353735383539354035413542354335443545354635473548354935503551355235533554355535563557355835593560356135623563356435653566356735683569357035713572357335743575357635773578357935803581358235833584358535863587358835893590359135923593359435953596359735983599360036013602360336043605360636073608360936103611361236133614361536163617361836193620362136223623362436253626362736283629363036313632363336343635363636373638363936403641364236433644364536463647364836493650365136523653365436553656365736583659366036613662366336643665366636673668366936703671367236733674367536763677367836793680368136823683368436853686368736883689369036913692369336943695369636973698369937003701370237033704370537063707370837093710371137123713371437153716371737183719372037213722372337243725372637273728372937303731373237333734373537363737373837393740374137423743374437453746374737483749375037513752375337543755375637573758375937603761376237633764376537663767376837693770377137723773377437753776377
73778377937803781378237833784378537863787378837893790379137923793379437953796379737983799380038013802380338043805380638073808380938103811381238133814381538163817381838193820382138223823382438253826382738283829383038313832383338343835383638373838383938403841384238433844384538463847384838493850385138523853385438553856385738583859386038613862386338643865386638673868386938703871387238733874387538763877387838793880388138823883388438853886388738883889389038913892389338943895389638973898389939003901390239033904390539063907390839093910391139123913391439153916391739183919392039213922392339243925392639273928392939303931393239333934393539363937393839393940394139423943394439453946394739483949
  1. gb_internal cgValue cg_flatten_value(cgProcedure *p, cgValue value) {
  2. GB_ASSERT(value.kind != cgValue_Multi);
  3. if (value.kind == cgValue_Symbol) {
  4. GB_ASSERT(is_type_internally_pointer_like(value.type));
  5. return cg_value(tb_inst_get_symbol_address(p->func, value.symbol), value.type);
  6. } else if (value.kind == cgValue_Addr) {
  7. // TODO(bill): Is this a good idea?
  8. // this converts an lvalue to an rvalue if trivially possible
  9. TB_DataType dt = cg_data_type(value.type);
  10. if (!TB_IS_VOID_TYPE(dt)) {
  11. TB_CharUnits align = cast(TB_CharUnits)type_align_of(value.type);
  12. return cg_value(tb_inst_load(p->func, dt, value.node, align, false), value.type);
  13. }
  14. }
  15. return value;
  16. }
  17. gb_internal cgValue cg_emit_select(cgProcedure *p, cgValue const &cond, cgValue const &x, cgValue const &y) {
  18. GB_ASSERT(x.kind == y.kind);
  19. GB_ASSERT(cond.kind == cgValue_Value);
  20. cgValue res = x;
  21. res.node = tb_inst_select(p->func, cond.node, x.node, y.node);
  22. return res;
  23. }
  24. gb_internal bool cg_is_expr_untyped_const(Ast *expr) {
  25. auto const &tv = type_and_value_of_expr(expr);
  26. if (is_type_untyped(tv.type)) {
  27. return tv.value.kind != ExactValue_Invalid;
  28. }
  29. return false;
  30. }
  31. gb_internal cgValue cg_expr_untyped_const_to_typed(cgProcedure *p, Ast *expr, Type *t) {
  32. GB_ASSERT(is_type_typed(t));
  33. auto const &tv = type_and_value_of_expr(expr);
  34. return cg_const_value(p, t, tv.value);
  35. }
  36. gb_internal cgContextData *cg_push_context_onto_stack(cgProcedure *p, cgAddr ctx) {
  37. ctx.kind = cgAddr_Context;
  38. cgContextData *cd = array_add_and_get(&p->context_stack);
  39. cd->ctx = ctx;
  40. cd->scope_index = p->scope_index;
  41. return cd;
  42. }
  43. gb_internal cgAddr cg_find_or_generate_context_ptr(cgProcedure *p) {
  44. if (p->context_stack.count > 0) {
  45. return p->context_stack[p->context_stack.count-1].ctx;
  46. }
  47. Type *pt = base_type(p->type);
  48. GB_ASSERT(pt->kind == Type_Proc);
  49. GB_ASSERT(pt->Proc.calling_convention != ProcCC_Odin);
  50. cgAddr c = cg_add_local(p, t_context, nullptr, true);
  51. tb_node_append_attrib(c.addr.node, tb_function_attrib_variable(p->func, -1, "context", cg_debug_type(p->module, t_context)));
  52. c.kind = cgAddr_Context;
  53. // lb_emit_init_context(p, c);
  54. cg_push_context_onto_stack(p, c);
  55. // lb_add_debug_context_variable(p, c);
  56. return c;
  57. }
  58. gb_internal cgValue cg_find_value_from_entity(cgModule *m, Entity *e) {
  59. e = strip_entity_wrapping(e);
  60. GB_ASSERT(e != nullptr);
  61. GB_ASSERT(e->token.string != "_");
  62. if (e->kind == Entity_Procedure) {
  63. return cg_find_procedure_value_from_entity(m, e);
  64. }
  65. cgValue *found = nullptr;
  66. rw_mutex_shared_lock(&m->values_mutex);
  67. found = map_get(&m->values, e);
  68. rw_mutex_shared_unlock(&m->values_mutex);
  69. if (found) {
  70. return *found;
  71. }
  72. GB_PANIC("\n\tError in: %s, missing value '%.*s'\n", token_pos_to_string(e->token.pos), LIT(e->token.string));
  73. return {};
  74. }
// Resolves a variable made visible through `using`: finds the storage of the
// `using` parent and emits a GEP down to the field named by `e`.
// Returns a pointer to the field (same deref-type invariant asserted below).
gb_internal cgValue cg_get_using_variable(cgProcedure *p, Entity *e) {
	GB_ASSERT(e->kind == Entity_Variable && e->flags & EntityFlag_Using);
	String name = e->token.string;
	Entity *parent = e->using_parent;
	// Locate the field within the parent's type (no indirection through `any`).
	Selection sel = lookup_field(parent->type, name, false);
	GB_ASSERT(sel.entity != nullptr);
	// NOTE(review): this reads the module value map without taking
	// values_mutex, unlike cg_find_value_from_entity — confirm intended.
	cgValue *pv = map_get(&p->module->values, parent);
	cgValue v = {};
	if (pv == nullptr && parent->flags & EntityFlag_SoaPtrField) {
		// NOTE(bill): using SOA value (probably from for-in statement)
		GB_PANIC("TODO(bill): cg_get_soa_variable_addr");
		// cgAddr parent_addr = cg_get_soa_variable_addr(p, parent);
		// v = cg_addr_get_ptr(p, parent_addr);
	} else if (pv != nullptr) {
		v = *pv;
	} else {
		// Parent has no stored value: it must come from a `using` expression.
		GB_ASSERT_MSG(e->using_expr != nullptr, "%.*s %.*s", LIT(e->token.string), LIT(name));
		v = cg_build_addr_ptr(p, e->using_expr);
	}
	GB_ASSERT(v.node != nullptr);
	GB_ASSERT_MSG(parent->type == type_deref(v.type), "%s %s", type_to_string(parent->type), type_to_string(v.type));
	// Walk the whole selection path to the field's address.
	cgValue ptr = cg_emit_deep_field_gep(p, v, sel);
	// if (parent->scope) {
	// 	if ((parent->scope->flags & (ScopeFlag_File|ScopeFlag_Pkg)) == 0) {
	// 		cg_add_debug_local_variable(p, ptr.value, e->type, e->token);
	// 	}
	// } else {
	// 	cg_add_debug_local_variable(p, ptr.value, e->type, e->token);
	// }
	return ptr;
}
  106. gb_internal cgAddr cg_build_addr_from_entity(cgProcedure *p, Entity *e, Ast *expr) {
  107. GB_ASSERT(e != nullptr);
  108. if (e->kind == Entity_Constant) {
  109. Type *t = default_type(type_of_expr(expr));
  110. cgValue v = cg_const_value(p, t, e->Constant.value);
  111. GB_PANIC("TODO(bill): cg_add_global_generated");
  112. // return cg_add_global_generated(p->module, t, v);
  113. return {};
  114. }
  115. cgAddr *local_found = map_get(&p->variable_map, e);
  116. if (local_found) {
  117. return *local_found;
  118. }
  119. cgValue v = {};
  120. cgModule *m = p->module;
  121. rw_mutex_lock(&m->values_mutex);
  122. cgValue *found = map_get(&m->values, e);
  123. rw_mutex_unlock(&m->values_mutex);
  124. if (found) {
  125. v = *found;
  126. } else if (e->kind == Entity_Variable && e->flags & EntityFlag_Using) {
  127. // NOTE(bill): Calculate the using variable every time
  128. v = cg_get_using_variable(p, e);
  129. } else if (e->flags & EntityFlag_SoaPtrField) {
  130. return map_must_get(&p->soa_values_map, e);
  131. }
  132. if (v.node == nullptr) {
  133. cgValue v = cg_find_value_from_entity(m, e);
  134. v = cg_flatten_value(p, v);
  135. return cg_addr(v);
  136. }
  137. return cg_addr(v);
  138. }
  139. gb_internal cgValue cg_emit_union_tag_ptr(cgProcedure *p, cgValue const &parent_ptr) {
  140. Type *t = parent_ptr.type;
  141. Type *ut = base_type(type_deref(t));
  142. GB_ASSERT_MSG(is_type_pointer(t), "%s", type_to_string(t));
  143. GB_ASSERT_MSG(ut->kind == Type_Union, "%s", type_to_string(t));
  144. GB_ASSERT(!is_type_union_maybe_pointer_original_alignment(ut));
  145. GB_ASSERT(!is_type_union_maybe_pointer(ut));
  146. GB_ASSERT(type_size_of(ut) > 0);
  147. Type *tag_type = union_tag_type(ut);
  148. i64 tag_offset = ut->Union.variant_block_size;
  149. GB_ASSERT(parent_ptr.kind == cgValue_Value);
  150. TB_Node *ptr = parent_ptr.node;
  151. TB_Node *tag_ptr = tb_inst_member_access(p->func, ptr, tag_offset);
  152. return cg_value(tag_ptr, alloc_type_pointer(tag_type));
  153. }
  154. gb_internal cgValue cg_correct_endianness(cgProcedure *p, cgValue value) {
  155. Type *src = core_type(value.type);
  156. GB_ASSERT(is_type_integer(src) || is_type_float(src));
  157. if (is_type_different_to_arch_endianness(src)) {
  158. GB_PANIC("TODO(bill): cg_correct_endianness");
  159. // Type *platform_src_type = integer_endian_type_to_platform_type(src);
  160. // value = cg_emit_byte_swap(p, value, platform_src_type);
  161. }
  162. return value;
  163. }
  164. gb_internal cgValue cg_emit_transmute(cgProcedure *p, cgValue value, Type *type) {
  165. GB_ASSERT(type_size_of(value.type) == type_size_of(type));
  166. value = cg_flatten_value(p, value);
  167. if (are_types_identical(value.type, type)) {
  168. return value;
  169. }
  170. if (are_types_identical(core_type(value.type), core_type(type))) {
  171. value.type = type;
  172. return value;
  173. }
  174. i64 src_align = type_align_of(value.type);
  175. i64 dst_align = type_align_of(type);
  176. if (dst_align > src_align) {
  177. cgAddr local = cg_add_local(p, type, nullptr, false);
  178. cgValue dst = local.addr;
  179. dst.type = alloc_type_pointer(value.type);
  180. cg_emit_store(p, dst, value);
  181. return cg_addr_load(p, local);
  182. }
  183. TB_DataType dt = cg_data_type(type);
  184. switch (value.kind) {
  185. case cgValue_Value:
  186. GB_ASSERT_MSG(!TB_IS_VOID_TYPE(dt), "%d %s -> %s", dt.type, type_to_string(value.type), type_to_string(type));
  187. value.type = type;
  188. if (value.node->dt.raw != dt.raw) {
  189. switch (value.node->dt.type) {
  190. case TB_INT:
  191. switch (value.node->dt.type) {
  192. case TB_INT:
  193. break;
  194. case TB_FLOAT:
  195. value.node = tb_inst_bitcast(p->func, value.node, dt);
  196. break;
  197. case TB_PTR:
  198. value.node = tb_inst_int2ptr(p->func, value.node);
  199. break;
  200. }
  201. break;
  202. case TB_FLOAT:
  203. switch (value.node->dt.type) {
  204. case TB_INT:
  205. value.node = tb_inst_bitcast(p->func, value.node, dt);
  206. break;
  207. case TB_FLOAT:
  208. break;
  209. case TB_PTR:
  210. value.node = tb_inst_bitcast(p->func, value.node, TB_TYPE_INTPTR);
  211. value.node = tb_inst_int2ptr(p->func, value.node);
  212. break;
  213. }
  214. break;
  215. case TB_PTR:
  216. switch (value.node->dt.type) {
  217. case TB_INT:
  218. value.node = tb_inst_ptr2int(p->func, value.node, dt);
  219. break;
  220. case TB_FLOAT:
  221. value.node = tb_inst_ptr2int(p->func, value.node, TB_TYPE_INTPTR);
  222. value.node = tb_inst_bitcast(p->func, value.node, dt);
  223. break;
  224. case TB_PTR:
  225. break;
  226. }
  227. break;
  228. }
  229. }
  230. return value;
  231. case cgValue_Addr:
  232. value.type = type;
  233. return value;
  234. case cgValue_Symbol:
  235. GB_PANIC("should be handled above");
  236. break;
  237. case cgValue_Multi:
  238. GB_PANIC("cannot transmute multiple values at once");
  239. break;
  240. }
  241. return value;
  242. }
  243. gb_internal cgValue cg_emit_byte_swap(cgProcedure *p, cgValue value, Type *end_type) {
  244. GB_ASSERT(type_size_of(value.type) == type_size_of(end_type));
  245. if (type_size_of(value.type) < 2) {
  246. return value;
  247. }
  248. if (is_type_float(value.type)) {
  249. i64 sz = type_size_of(value.type);
  250. Type *integer_type = nullptr;
  251. switch (sz) {
  252. case 2: integer_type = t_u16; break;
  253. case 4: integer_type = t_u32; break;
  254. case 8: integer_type = t_u64; break;
  255. }
  256. GB_ASSERT(integer_type != nullptr);
  257. value = cg_emit_transmute(p, value, integer_type);
  258. }
  259. GB_ASSERT(value.kind == cgValue_Value);
  260. // TODO(bill): bswap
  261. // value.node = tb_inst_bswap(p->func, value.node);
  262. return cg_emit_transmute(p, value, end_type);
  263. }
// Emits `==`/`!=` between two struct or union values. Both operands are
// spilled to memory and compared either bytewise via the runtime's
// `memory_equal` (for simple-compare types) or via a generated per-type
// equality procedure. `!=` is computed as the negation of `==`.
gb_internal cgValue cg_emit_comp_records(cgProcedure *p, TokenKind op_kind, cgValue left, cgValue right, Type *type) {
	GB_ASSERT((is_type_struct(type) || is_type_union(type)) && is_type_comparable(type));
	cgValue left_ptr  = cg_address_from_load_or_generate_local(p, left);
	cgValue right_ptr = cg_address_from_load_or_generate_local(p, right);
	cgValue res = {};
	if (type_size_of(type) == 0) {
		// Zero-sized records are always equal.
		switch (op_kind) {
		case Token_CmpEq:
			return cg_const_bool(p, t_bool, true);
		case Token_NotEq:
			return cg_const_bool(p, t_bool, false);
		}
		GB_PANIC("invalid operator");
	}
	TEMPORARY_ALLOCATOR_GUARD();
	if (is_type_simple_compare(type)) {
		// TODO(bill): Test to see if this is actually faster!!!!
		auto args = slice_make<cgValue>(temporary_allocator(), 3);
		args[0] = cg_emit_conv(p, left_ptr, t_rawptr);
		args[1] = cg_emit_conv(p, right_ptr, t_rawptr);
		args[2] = cg_const_int(p, t_int, type_size_of(type));
		res = cg_emit_runtime_call(p, "memory_equal", args);
	} else {
		// Non-trivially-comparable: call the equality procedure generated
		// specifically for this type.
		cgProcedure *equal_proc = cg_equal_proc_for_type(p->module, type);
		cgValue value = cg_value(tb_inst_get_symbol_address(p->func, equal_proc->symbol), equal_proc->type);
		auto args = slice_make<cgValue>(temporary_allocator(), 2);
		args[0] = cg_emit_conv(p, left_ptr, t_rawptr);
		args[1] = cg_emit_conv(p, right_ptr, t_rawptr);
		res = cg_emit_call(p, value, args);
	}
	if (op_kind == Token_NotEq) {
		// Only equality is computed directly; negate the result for `!=`.
		res = cg_emit_unary_arith(p, Token_Not, res, res.type);
	}
	return res;
}
  299. gb_internal cgValue cg_emit_comp(cgProcedure *p, TokenKind op_kind, cgValue left, cgValue right) {
  300. GB_ASSERT(gb_is_between(op_kind, Token__ComparisonBegin+1, Token__ComparisonEnd-1));
  301. Type *a = core_type(left.type);
  302. Type *b = core_type(right.type);
  303. cgValue nil_check = {};
  304. if (is_type_array_like(left.type) || is_type_array_like(right.type)) {
  305. // don't do `nil` check if it is array-like
  306. } else if (is_type_untyped_nil(left.type)) {
  307. nil_check = cg_emit_comp_against_nil(p, op_kind, right);
  308. } else if (is_type_untyped_nil(right.type)) {
  309. nil_check = cg_emit_comp_against_nil(p, op_kind, left);
  310. }
  311. if (nil_check.node != nullptr) {
  312. return nil_check;
  313. }
  314. if (are_types_identical(a, b)) {
  315. // NOTE(bill): No need for a conversion
  316. } /*else if (cg_is_const(left) || cg_is_const_nil(left)) {
  317. left = cg_emit_conv(p, left, right.type);
  318. } else if (cg_is_const(right) || cg_is_const_nil(right)) {
  319. right = cg_emit_conv(p, right, left.type);
  320. }*/ else {
  321. Type *lt = left.type;
  322. Type *rt = right.type;
  323. lt = left.type;
  324. rt = right.type;
  325. i64 ls = type_size_of(lt);
  326. i64 rs = type_size_of(rt);
  327. // NOTE(bill): Quick heuristic, larger types are usually the target type
  328. if (ls < rs) {
  329. left = cg_emit_conv(p, left, rt);
  330. } else if (ls > rs) {
  331. right = cg_emit_conv(p, right, lt);
  332. } else {
  333. if (is_type_union(rt)) {
  334. left = cg_emit_conv(p, left, rt);
  335. } else {
  336. right = cg_emit_conv(p, right, lt);
  337. }
  338. }
  339. }
  340. a = core_type(left.type);
  341. b = core_type(right.type);
  342. left = cg_flatten_value(p, left);
  343. right = cg_flatten_value(p, right);
  344. if (is_type_matrix(a) && (op_kind == Token_CmpEq || op_kind == Token_NotEq)) {
  345. GB_PANIC("TODO(bill): cg_emit_comp matrix");
  346. // Type *tl = base_type(a);
  347. // lbValue lhs = lb_address_from_load_or_generate_local(p, left);
  348. // lbValue rhs = lb_address_from_load_or_generate_local(p, right);
  349. // // TODO(bill): Test to see if this is actually faster!!!!
  350. // auto args = array_make<lbValue>(permanent_allocator(), 3);
  351. // args[0] = lb_emit_conv(p, lhs, t_rawptr);
  352. // args[1] = lb_emit_conv(p, rhs, t_rawptr);
  353. // args[2] = lb_const_int(p->module, t_int, type_size_of(tl));
  354. // lbValue val = lb_emit_runtime_call(p, "memory_compare", args);
  355. // lbValue res = lb_emit_comp(p, op_kind, val, lb_const_nil(p->module, val.type));
  356. // return lb_emit_conv(p, res, t_bool);
  357. }
  358. if (is_type_array_like(a)) {
  359. GB_PANIC("TODO(bill): cg_emit_comp is_type_array_like");
  360. // Type *tl = base_type(a);
  361. // lbValue lhs = lb_address_from_load_or_generate_local(p, left);
  362. // lbValue rhs = lb_address_from_load_or_generate_local(p, right);
  363. // TokenKind cmp_op = Token_And;
  364. // lbValue res = lb_const_bool(p->module, t_bool, true);
  365. // if (op_kind == Token_NotEq) {
  366. // res = lb_const_bool(p->module, t_bool, false);
  367. // cmp_op = Token_Or;
  368. // } else if (op_kind == Token_CmpEq) {
  369. // res = lb_const_bool(p->module, t_bool, true);
  370. // cmp_op = Token_And;
  371. // }
  372. // bool inline_array_arith = lb_can_try_to_inline_array_arith(tl);
  373. // i32 count = 0;
  374. // switch (tl->kind) {
  375. // case Type_Array: count = cast(i32)tl->Array.count; break;
  376. // case Type_EnumeratedArray: count = cast(i32)tl->EnumeratedArray.count; break;
  377. // }
  378. // if (inline_array_arith) {
  379. // // inline
  380. // lbAddr val = lb_add_local_generated(p, t_bool, false);
  381. // lb_addr_store(p, val, res);
  382. // for (i32 i = 0; i < count; i++) {
  383. // lbValue x = lb_emit_load(p, lb_emit_array_epi(p, lhs, i));
  384. // lbValue y = lb_emit_load(p, lb_emit_array_epi(p, rhs, i));
  385. // lbValue cmp = lb_emit_comp(p, op_kind, x, y);
  386. // lbValue new_res = lb_emit_arith(p, cmp_op, lb_addr_load(p, val), cmp, t_bool);
  387. // lb_addr_store(p, val, lb_emit_conv(p, new_res, t_bool));
  388. // }
  389. // return lb_addr_load(p, val);
  390. // } else {
  391. // if (is_type_simple_compare(tl) && (op_kind == Token_CmpEq || op_kind == Token_NotEq)) {
  392. // // TODO(bill): Test to see if this is actually faster!!!!
  393. // auto args = array_make<lbValue>(permanent_allocator(), 3);
  394. // args[0] = lb_emit_conv(p, lhs, t_rawptr);
  395. // args[1] = lb_emit_conv(p, rhs, t_rawptr);
  396. // args[2] = lb_const_int(p->module, t_int, type_size_of(tl));
  397. // lbValue val = lb_emit_runtime_call(p, "memory_compare", args);
  398. // lbValue res = lb_emit_comp(p, op_kind, val, lb_const_nil(p->module, val.type));
  399. // return lb_emit_conv(p, res, t_bool);
  400. // } else {
  401. // lbAddr val = lb_add_local_generated(p, t_bool, false);
  402. // lb_addr_store(p, val, res);
  403. // auto loop_data = lb_loop_start(p, count, t_i32);
  404. // {
  405. // lbValue i = loop_data.idx;
  406. // lbValue x = lb_emit_load(p, lb_emit_array_ep(p, lhs, i));
  407. // lbValue y = lb_emit_load(p, lb_emit_array_ep(p, rhs, i));
  408. // lbValue cmp = lb_emit_comp(p, op_kind, x, y);
  409. // lbValue new_res = lb_emit_arith(p, cmp_op, lb_addr_load(p, val), cmp, t_bool);
  410. // lb_addr_store(p, val, lb_emit_conv(p, new_res, t_bool));
  411. // }
  412. // lb_loop_end(p, loop_data);
  413. // return lb_addr_load(p, val);
  414. // }
  415. // }
  416. }
  417. if ((is_type_struct(a) || is_type_union(a)) && is_type_comparable(a)) {
  418. return cg_emit_comp_records(p, op_kind, left, right, a);
  419. }
  420. if ((is_type_struct(b) || is_type_union(b)) && is_type_comparable(b)) {
  421. return cg_emit_comp_records(p, op_kind, left, right, b);
  422. }
  423. if (is_type_string(a)) {
  424. if (is_type_cstring(a)) {
  425. left = cg_emit_conv(p, left, t_string);
  426. right = cg_emit_conv(p, right, t_string);
  427. }
  428. char const *runtime_procedure = nullptr;
  429. switch (op_kind) {
  430. case Token_CmpEq: runtime_procedure = "string_eq"; break;
  431. case Token_NotEq: runtime_procedure = "string_ne"; break;
  432. case Token_Lt: runtime_procedure = "string_lt"; break;
  433. case Token_Gt: runtime_procedure = "string_gt"; break;
  434. case Token_LtEq: runtime_procedure = "string_le"; break;
  435. case Token_GtEq: runtime_procedure = "string_gt"; break;
  436. }
  437. GB_ASSERT(runtime_procedure != nullptr);
  438. auto args = slice_make<cgValue>(permanent_allocator(), 2);
  439. args[0] = left;
  440. args[1] = right;
  441. return cg_emit_runtime_call(p, runtime_procedure, args);
  442. }
  443. if (is_type_complex(a)) {
  444. char const *runtime_procedure = "";
  445. i64 sz = 8*type_size_of(a);
  446. switch (sz) {
  447. case 32:
  448. switch (op_kind) {
  449. case Token_CmpEq: runtime_procedure = "complex32_eq"; break;
  450. case Token_NotEq: runtime_procedure = "complex32_ne"; break;
  451. }
  452. break;
  453. case 64:
  454. switch (op_kind) {
  455. case Token_CmpEq: runtime_procedure = "complex64_eq"; break;
  456. case Token_NotEq: runtime_procedure = "complex64_ne"; break;
  457. }
  458. break;
  459. case 128:
  460. switch (op_kind) {
  461. case Token_CmpEq: runtime_procedure = "complex128_eq"; break;
  462. case Token_NotEq: runtime_procedure = "complex128_ne"; break;
  463. }
  464. break;
  465. }
  466. GB_ASSERT(runtime_procedure != nullptr);
  467. GB_PANIC("TODO(bill): cg_emit_runtime_call");
  468. // auto args = array_make<lbValue>(permanent_allocator(), 2);
  469. // args[0] = left;
  470. // args[1] = right;
  471. // return lb_emit_runtime_call(p, runtime_procedure, args);
  472. }
  473. if (is_type_quaternion(a)) {
  474. char const *runtime_procedure = "";
  475. i64 sz = 8*type_size_of(a);
  476. switch (sz) {
  477. case 64:
  478. switch (op_kind) {
  479. case Token_CmpEq: runtime_procedure = "quaternion64_eq"; break;
  480. case Token_NotEq: runtime_procedure = "quaternion64_ne"; break;
  481. }
  482. break;
  483. case 128:
  484. switch (op_kind) {
  485. case Token_CmpEq: runtime_procedure = "quaternion128_eq"; break;
  486. case Token_NotEq: runtime_procedure = "quaternion128_ne"; break;
  487. }
  488. break;
  489. case 256:
  490. switch (op_kind) {
  491. case Token_CmpEq: runtime_procedure = "quaternion256_eq"; break;
  492. case Token_NotEq: runtime_procedure = "quaternion256_ne"; break;
  493. }
  494. break;
  495. }
  496. GB_ASSERT(runtime_procedure != nullptr);
  497. GB_PANIC("TODO(bill): cg_emit_runtime_call");
  498. // auto args = array_make<lbValue>(permanent_allocator(), 2);
  499. // args[0] = left;
  500. // args[1] = right;
  501. // return lb_emit_runtime_call(p, runtime_procedure, args);
  502. }
  503. if (is_type_bit_set(a)) {
  504. switch (op_kind) {
  505. case Token_Lt:
  506. case Token_LtEq:
  507. case Token_Gt:
  508. case Token_GtEq:
  509. {
  510. Type *it = bit_set_to_int(a);
  511. cgValue lhs = cg_emit_transmute(p, left, it);
  512. cgValue rhs = cg_emit_transmute(p, right, it);
  513. cgValue res = cg_emit_arith(p, Token_And, lhs, rhs, it);
  514. GB_ASSERT(lhs.kind == cgValue_Value);
  515. GB_ASSERT(rhs.kind == cgValue_Value);
  516. GB_ASSERT(res.kind == cgValue_Value);
  517. if (op_kind == Token_Lt || op_kind == Token_LtEq) {
  518. // (lhs & rhs) == lhs
  519. res = cg_value(tb_inst_cmp_eq(p->func, res.node, lhs.node), t_bool);
  520. } else if (op_kind == Token_Gt || op_kind == Token_GtEq) {
  521. // (lhs & rhs) == rhs
  522. res = cg_value(tb_inst_cmp_eq(p->func, res.node, rhs.node), t_bool);
  523. }
  524. // NOTE(bill): Strict subsets
  525. if (op_kind == Token_Lt || op_kind == Token_Gt) {
  526. // res &~ (lhs == rhs)
  527. cgValue eq = cg_value(tb_inst_cmp_eq(p->func, lhs.node, rhs.node), t_bool);
  528. res = cg_emit_arith(p, Token_AndNot, res, eq, t_bool);
  529. }
  530. return res;
  531. }
  532. case Token_CmpEq:
  533. GB_ASSERT(left.kind == cgValue_Value);
  534. GB_ASSERT(right.kind == cgValue_Value);
  535. return cg_value(tb_inst_cmp_eq(p->func, left.node, right.node), t_bool);
  536. case Token_NotEq:
  537. GB_ASSERT(left.kind == cgValue_Value);
  538. GB_ASSERT(right.kind == cgValue_Value);
  539. return cg_value(tb_inst_cmp_ne(p->func, left.node, right.node), t_bool);
  540. }
  541. }
  542. if (op_kind != Token_CmpEq && op_kind != Token_NotEq) {
  543. Type *t = left.type;
  544. if (is_type_integer(t) && is_type_different_to_arch_endianness(t)) {
  545. Type *platform_type = integer_endian_type_to_platform_type(t);
  546. cgValue x = cg_emit_byte_swap(p, left, platform_type);
  547. cgValue y = cg_emit_byte_swap(p, right, platform_type);
  548. left = x;
  549. right = y;
  550. } else if (is_type_float(t) && is_type_different_to_arch_endianness(t)) {
  551. Type *platform_type = integer_endian_type_to_platform_type(t);
  552. cgValue x = cg_emit_conv(p, left, platform_type);
  553. cgValue y = cg_emit_conv(p, right, platform_type);
  554. left = x;
  555. right = y;
  556. }
  557. }
  558. a = core_type(left.type);
  559. b = core_type(right.type);
  560. if (is_type_integer(a) ||
  561. is_type_boolean(a) ||
  562. is_type_pointer(a) ||
  563. is_type_multi_pointer(a) ||
  564. is_type_proc(a) ||
  565. is_type_enum(a) ||
  566. is_type_typeid(a)) {
  567. TB_Node *lhs = left.node;
  568. TB_Node *rhs = right.node;
  569. TB_Node *res = nullptr;
  570. bool is_signed = is_type_integer(left.type) && !is_type_unsigned(left.type);
  571. switch (op_kind) {
  572. case Token_CmpEq: res = tb_inst_cmp_eq(p->func, lhs, rhs); break;
  573. case Token_NotEq: res = tb_inst_cmp_ne(p->func, lhs, rhs); break;
  574. case Token_Gt: res = tb_inst_cmp_igt(p->func, lhs, rhs, is_signed); break;
  575. case Token_GtEq: res = tb_inst_cmp_ige(p->func, lhs, rhs, is_signed); break;
  576. case Token_Lt: res = tb_inst_cmp_ilt(p->func, lhs, rhs, is_signed); break;
  577. case Token_LtEq: res = tb_inst_cmp_ile(p->func, lhs, rhs, is_signed); break;
  578. }
  579. GB_ASSERT(res != nullptr);
  580. return cg_value(res, t_bool);
  581. } else if (is_type_float(a)) {
  582. TB_Node *lhs = left.node;
  583. TB_Node *rhs = right.node;
  584. TB_Node *res = nullptr;
  585. switch (op_kind) {
  586. case Token_CmpEq: res = tb_inst_cmp_eq(p->func, lhs, rhs); break;
  587. case Token_NotEq: res = tb_inst_cmp_ne(p->func, lhs, rhs); break;
  588. case Token_Gt: res = tb_inst_cmp_fgt(p->func, lhs, rhs); break;
  589. case Token_GtEq: res = tb_inst_cmp_fge(p->func, lhs, rhs); break;
  590. case Token_Lt: res = tb_inst_cmp_flt(p->func, lhs, rhs); break;
  591. case Token_LtEq: res = tb_inst_cmp_fle(p->func, lhs, rhs); break;
  592. }
  593. GB_ASSERT(res != nullptr);
  594. return cg_value(res, t_bool);
  595. } else if (is_type_simd_vector(a)) {
  596. GB_PANIC("TODO(bill): #simd vector");
  597. // LLVMValueRef mask = nullptr;
  598. // Type *elem = base_array_type(a);
  599. // if (is_type_float(elem)) {
  600. // LLVMRealPredicate pred = {};
  601. // switch (op_kind) {
  602. // case Token_CmpEq: pred = LLVMRealOEQ; break;
  603. // case Token_NotEq: pred = LLVMRealONE; break;
  604. // }
  605. // mask = LLVMBuildFCmp(p->builder, pred, left.value, right.value, "");
  606. // } else {
  607. // LLVMIntPredicate pred = {};
  608. // switch (op_kind) {
  609. // case Token_CmpEq: pred = LLVMIntEQ; break;
  610. // case Token_NotEq: pred = LLVMIntNE; break;
  611. // }
  612. // mask = LLVMBuildICmp(p->builder, pred, left.value, right.value, "");
  613. // }
  614. // GB_ASSERT_MSG(mask != nullptr, "Unhandled comparison kind %s (%s) %.*s %s (%s)", type_to_string(left.type), type_to_string(base_type(left.type)), LIT(token_strings[op_kind]), type_to_string(right.type), type_to_string(base_type(right.type)));
  615. // /* NOTE(bill, 2022-05-28):
  616. // Thanks to Per Vognsen, sign extending <N x i1> to
  617. // a vector of the same width as the input vector, bit casting to an integer,
  618. // and then comparing against zero is the better option
  619. // See: https://lists.llvm.org/pipermail/llvm-dev/2012-September/053046.html
  620. // // Example assuming 128-bit vector
  621. // %1 = <4 x float> ...
  622. // %2 = <4 x float> ...
  623. // %3 = fcmp oeq <4 x float> %1, %2
  624. // %4 = sext <4 x i1> %3 to <4 x i32>
  625. // %5 = bitcast <4 x i32> %4 to i128
  626. // %6 = icmp ne i128 %5, 0
  627. // br i1 %6, label %true1, label %false2
  628. // This will result in 1 cmpps + 1 ptest + 1 br
  629. // (even without SSE4.1, contrary to what the mail list states, because of pmovmskb)
  630. // */
  631. // unsigned count = cast(unsigned)get_array_type_count(a);
  632. // unsigned elem_sz = cast(unsigned)(type_size_of(elem)*8);
  633. // LLVMTypeRef mask_type = LLVMVectorType(LLVMIntTypeInContext(p->module->ctx, elem_sz), count);
  634. // mask = LLVMBuildSExtOrBitCast(p->builder, mask, mask_type, "");
  635. // LLVMTypeRef mask_int_type = LLVMIntTypeInContext(p->module->ctx, cast(unsigned)(8*type_size_of(a)));
  636. // LLVMValueRef mask_int = LLVMBuildBitCast(p->builder, mask, mask_int_type, "");
  637. // res.value = LLVMBuildICmp(p->builder, LLVMIntNE, mask_int, LLVMConstNull(LLVMTypeOf(mask_int)), "");
  638. // return res;
  639. }
  640. GB_PANIC("Unhandled comparison kind %s (%s) %.*s %s (%s)", type_to_string(left.type), type_to_string(base_type(left.type)), LIT(token_strings[op_kind]), type_to_string(right.type), type_to_string(base_type(right.type)));
  641. return {};
  642. }
  643. gb_internal cgValue cg_emit_comp_against_nil(cgProcedure *p, TokenKind op_kind, cgValue x) {
  644. GB_ASSERT(op_kind == Token_CmpEq || op_kind == Token_NotEq);
  645. x = cg_flatten_value(p, x);
  646. cgValue res = {};
  647. Type *t = x.type;
  648. TB_DataType dt = cg_data_type(t);
  649. Type *bt = base_type(t);
  650. TypeKind type_kind = bt->kind;
  651. switch (type_kind) {
  652. case Type_Basic:
  653. switch (bt->Basic.kind) {
  654. case Basic_rawptr:
  655. case Basic_cstring:
  656. GB_ASSERT(x.kind == cgValue_Value);
  657. if (op_kind == Token_CmpEq) {
  658. return cg_value(tb_inst_cmp_eq(p->func, x.node, tb_inst_uint(p->func, dt, 0)), t_bool);
  659. } else if (op_kind == Token_NotEq) {
  660. return cg_value(tb_inst_cmp_ne(p->func, x.node, tb_inst_uint(p->func, dt, 0)), t_bool);
  661. }
  662. break;
  663. case Basic_any:
  664. {
  665. GB_ASSERT(x.kind == cgValue_Addr);
  666. // // TODO(bill): is this correct behaviour for nil comparison for any?
  667. cgValue data = cg_emit_struct_ev(p, x, 0);
  668. cgValue id = cg_emit_struct_ev(p, x, 1);
  669. if (op_kind == Token_CmpEq) {
  670. TB_Node *a = tb_inst_cmp_eq(p->func, data.node, tb_inst_uint(p->func, data.node->dt, 0));
  671. TB_Node *b = tb_inst_cmp_eq(p->func, id.node, tb_inst_uint(p->func, id.node->dt, 0));
  672. TB_Node *c = tb_inst_or(p->func, a, b);
  673. return cg_value(c, t_bool);
  674. } else if (op_kind == Token_NotEq) {
  675. TB_Node *a = tb_inst_cmp_ne(p->func, data.node, tb_inst_uint(p->func, data.node->dt, 0));
  676. TB_Node *b = tb_inst_cmp_ne(p->func, id.node, tb_inst_uint(p->func, id.node->dt, 0));
  677. TB_Node *c = tb_inst_and(p->func, a, b);
  678. return cg_value(c, t_bool);
  679. }
  680. }
  681. break;
  682. case Basic_typeid:
  683. cgValue invalid_typeid = cg_const_value(p, t_typeid, exact_value_i64(0));
  684. return cg_emit_comp(p, op_kind, x, invalid_typeid);
  685. }
  686. break;
  687. case Type_Enum:
  688. case Type_Pointer:
  689. case Type_MultiPointer:
  690. case Type_Proc:
  691. case Type_BitSet:
  692. GB_ASSERT(x.kind == cgValue_Value);
  693. if (op_kind == Token_CmpEq) {
  694. return cg_value(tb_inst_cmp_eq(p->func, x.node, tb_inst_uint(p->func, dt, 0)), t_bool);
  695. } else if (op_kind == Token_NotEq) {
  696. return cg_value(tb_inst_cmp_ne(p->func, x.node, tb_inst_uint(p->func, dt, 0)), t_bool);
  697. }
  698. break;
  699. case Type_Slice:
  700. case Type_DynamicArray:
  701. case Type_Map:
  702. {
  703. // NOTE(bill): all of their data "pointer-like" fields are at the 0-index
  704. cgValue data = cg_emit_struct_ev(p, x, 0);
  705. if (op_kind == Token_CmpEq) {
  706. TB_Node *a = tb_inst_cmp_eq(p->func, data.node, tb_inst_uint(p->func, data.node->dt, 0));
  707. return cg_value(a, t_bool);
  708. } else if (op_kind == Token_NotEq) {
  709. TB_Node *a = tb_inst_cmp_ne(p->func, data.node, tb_inst_uint(p->func, data.node->dt, 0));
  710. return cg_value(a, t_bool);
  711. }
  712. }
  713. break;
  714. case Type_Union:
  715. {
  716. if (type_size_of(t) == 0) {
  717. return cg_const_bool(p, t_bool, op_kind == Token_CmpEq);
  718. } else if (is_type_union_maybe_pointer(t)) {
  719. cgValue tag = cg_emit_transmute(p, x, t_rawptr);
  720. return cg_emit_comp_against_nil(p, op_kind, tag);
  721. } else {
  722. GB_ASSERT("TODO(bill): cg_emit_union_tag_value");
  723. // cgValue tag = cg_emit_union_tag_value(p, x);
  724. // return cg_emit_comp(p, op_kind, tag, cg_zero(p->module, tag.type));
  725. }
  726. }
  727. break;
  728. case Type_Struct:
  729. GB_PANIC("TODO(bill): cg_emit_struct_ev");
  730. // if (is_type_soa_struct(t)) {
  731. // Type *bt = base_type(t);
  732. // if (bt->Struct.soa_kind == StructSoa_Slice) {
  733. // LLVMValueRef the_value = {};
  734. // if (bt->Struct.fields.count == 0) {
  735. // cgValue len = cg_soa_struct_len(p, x);
  736. // the_value = len.value;
  737. // } else {
  738. // cgValue first_field = cg_emit_struct_ev(p, x, 0);
  739. // the_value = first_field.value;
  740. // }
  741. // if (op_kind == Token_CmpEq) {
  742. // res.value = LLVMBuildIsNull(p->builder, the_value, "");
  743. // return res;
  744. // } else if (op_kind == Token_NotEq) {
  745. // res.value = LLVMBuildIsNotNull(p->builder, the_value, "");
  746. // return res;
  747. // }
  748. // } else if (bt->Struct.soa_kind == StructSoa_Dynamic) {
  749. // LLVMValueRef the_value = {};
  750. // if (bt->Struct.fields.count == 0) {
  751. // cgValue cap = cg_soa_struct_cap(p, x);
  752. // the_value = cap.value;
  753. // } else {
  754. // cgValue first_field = cg_emit_struct_ev(p, x, 0);
  755. // the_value = first_field.value;
  756. // }
  757. // if (op_kind == Token_CmpEq) {
  758. // res.value = LLVMBuildIsNull(p->builder, the_value, "");
  759. // return res;
  760. // } else if (op_kind == Token_NotEq) {
  761. // res.value = LLVMBuildIsNotNull(p->builder, the_value, "");
  762. // return res;
  763. // }
  764. // }
  765. // } else if (is_type_struct(t) && type_has_nil(t)) {
  766. // auto args = array_make<cgValue>(permanent_allocator(), 2);
  767. // cgValue lhs = cg_address_from_load_or_generate_local(p, x);
  768. // args[0] = cg_emit_conv(p, lhs, t_rawptr);
  769. // args[1] = cg_const_int(p->module, t_int, type_size_of(t));
  770. // cgValue val = cg_emit_runtime_call(p, "memory_compare_zero", args);
  771. // cgValue res = cg_emit_comp(p, op_kind, val, cg_const_int(p->module, t_int, 0));
  772. // return res;
  773. // }
  774. break;
  775. }
  776. GB_PANIC("Unknown handled type: %s -> %s", type_to_string(t), type_to_string(bt));
  777. return {};
  778. }
  779. gb_internal cgValue cg_emit_conv(cgProcedure *p, cgValue value, Type *t) {
  780. t = reduce_tuple_to_single_type(t);
  781. value = cg_flatten_value(p, value);
  782. Type *src_type = value.type;
  783. if (are_types_identical(t, src_type)) {
  784. return value;
  785. }
  786. if (is_type_untyped_uninit(src_type)) {
  787. // return cg_const_undef(m, t);
  788. return cg_const_nil(p, t);
  789. }
  790. if (is_type_untyped_nil(src_type)) {
  791. return cg_const_nil(p, t);
  792. }
  793. Type *src = core_type(src_type);
  794. Type *dst = core_type(t);
  795. GB_ASSERT(src != nullptr);
  796. GB_ASSERT(dst != nullptr);
  797. if (are_types_identical(src, dst)) {
  798. return cg_emit_transmute(p, value, t);
  799. }
  800. TB_DataType st = cg_data_type(src);
  801. if (value.kind == cgValue_Value && !TB_IS_VOID_TYPE(value.node->dt)) {
  802. st = value.node->dt;
  803. }
  804. TB_DataType dt = cg_data_type(t);
  805. if (is_type_integer(src) && is_type_integer(dst)) {
  806. GB_ASSERT(src->kind == Type_Basic &&
  807. dst->kind == Type_Basic);
  808. GB_ASSERT(value.kind == cgValue_Value);
  809. i64 sz = type_size_of(default_type(src));
  810. i64 dz = type_size_of(default_type(dst));
  811. if (sz == dz) {
  812. if (dz > 1 && !types_have_same_internal_endian(src, dst)) {
  813. return cg_emit_byte_swap(p, value, t);
  814. }
  815. value.type = t;
  816. return value;
  817. }
  818. if (sz > 1 && is_type_different_to_arch_endianness(src)) {
  819. Type *platform_src_type = integer_endian_type_to_platform_type(src);
  820. value = cg_emit_byte_swap(p, value, platform_src_type);
  821. }
  822. TB_Node* (*op)(TB_Function* f, TB_Node* src, TB_DataType dt) = tb_inst_trunc;
  823. if (dz < sz) {
  824. op = tb_inst_trunc;
  825. } else if (dz == sz) {
  826. op = tb_inst_bitcast;
  827. } else if (dz > sz) {
  828. op = is_type_unsigned(src) ? tb_inst_zxt : tb_inst_sxt; // zero extent
  829. }
  830. if (dz > 1 && is_type_different_to_arch_endianness(dst)) {
  831. Type *platform_dst_type = integer_endian_type_to_platform_type(dst);
  832. cgValue res = cg_value(op(p->func, value.node, cg_data_type(platform_dst_type)), platform_dst_type);
  833. return cg_emit_byte_swap(p, res, t);
  834. } else {
  835. return cg_value(op(p->func, value.node, dt), t);
  836. }
  837. }
  838. // boolean -> boolean/integer
  839. if (is_type_boolean(src) && (is_type_boolean(dst) || is_type_integer(dst))) {
  840. TB_Node *v = tb_inst_cmp_ne(p->func, value.node, tb_inst_uint(p->func, st, 0));
  841. return cg_value(tb_inst_zxt(p->func, v, dt), t);
  842. }
  843. // integer -> boolean
  844. if (is_type_integer(src) && is_type_boolean(dst)) {
  845. TB_Node *v = tb_inst_cmp_ne(p->func, value.node, tb_inst_uint(p->func, st, 0));
  846. return cg_value(tb_inst_zxt(p->func, v, dt), t);
  847. }
  848. if (is_type_cstring(src) && is_type_u8_ptr(dst)) {
  849. return cg_emit_transmute(p, value, dst);
  850. }
  851. if (is_type_u8_ptr(src) && is_type_cstring(dst)) {
  852. return cg_emit_transmute(p, value, dst);
  853. }
  854. if (is_type_cstring(src) && is_type_u8_multi_ptr(dst)) {
  855. return cg_emit_transmute(p, value, dst);
  856. }
  857. if (is_type_u8_multi_ptr(src) && is_type_cstring(dst)) {
  858. return cg_emit_transmute(p, value, dst);
  859. }
  860. if (is_type_cstring(src) && is_type_rawptr(dst)) {
  861. return cg_emit_transmute(p, value, dst);
  862. }
  863. if (is_type_rawptr(src) && is_type_cstring(dst)) {
  864. return cg_emit_transmute(p, value, dst);
  865. }
  866. if (are_types_identical(src, t_cstring) && are_types_identical(dst, t_string)) {
  867. TEMPORARY_ALLOCATOR_GUARD();
  868. cgValue c = cg_emit_conv(p, value, t_cstring);
  869. auto args = slice_make<cgValue>(temporary_allocator(), 1);
  870. args[0] = c;
  871. cgValue s = cg_emit_runtime_call(p, "cstring_to_string", args);
  872. return cg_emit_conv(p, s, dst);
  873. }
  874. // float -> float
  875. if (is_type_float(src) && is_type_float(dst)) {
  876. i64 sz = type_size_of(src);
  877. i64 dz = type_size_of(dst);
  878. if (sz == 2 || dz == 2) {
  879. GB_PANIC("TODO(bill): f16 conversions");
  880. }
  881. if (dz == sz) {
  882. if (types_have_same_internal_endian(src, dst)) {
  883. return cg_value(value.node, t);
  884. } else {
  885. return cg_emit_byte_swap(p, value, t);
  886. }
  887. }
  888. if (is_type_different_to_arch_endianness(src) || is_type_different_to_arch_endianness(dst)) {
  889. Type *platform_src_type = integer_endian_type_to_platform_type(src);
  890. Type *platform_dst_type = integer_endian_type_to_platform_type(dst);
  891. cgValue res = {};
  892. res = cg_emit_conv(p, value, platform_src_type);
  893. res = cg_emit_conv(p, res, platform_dst_type);
  894. if (is_type_different_to_arch_endianness(dst)) {
  895. res = cg_emit_byte_swap(p, res, t);
  896. }
  897. return cg_emit_conv(p, res, t);
  898. }
  899. if (dz >= sz) {
  900. return cg_value(tb_inst_fpxt(p->func, value.node, dt), t);
  901. }
  902. return cg_value(tb_inst_trunc(p->func, value.node, dt), t);
  903. }
  904. if (is_type_complex(src) && is_type_complex(dst)) {
  905. GB_PANIC("TODO(bill): complex -> complex");
  906. }
  907. if (is_type_quaternion(src) && is_type_quaternion(dst)) {
  908. // @QuaternionLayout
  909. GB_PANIC("TODO(bill): quaternion -> quaternion");
  910. }
  911. if (is_type_integer(src) && is_type_complex(dst)) {
  912. GB_PANIC("TODO(bill): int -> complex");
  913. }
  914. if (is_type_float(src) && is_type_complex(dst)) {
  915. GB_PANIC("TODO(bill): float -> complex");
  916. }
  917. if (is_type_integer(src) && is_type_quaternion(dst)) {
  918. GB_PANIC("TODO(bill): int -> quaternion");
  919. }
  920. if (is_type_float(src) && is_type_quaternion(dst)) {
  921. GB_PANIC("TODO(bill): float -> quaternion");
  922. }
  923. if (is_type_complex(src) && is_type_quaternion(dst)) {
  924. GB_PANIC("TODO(bill): complex -> quaternion");
  925. }
  926. // float <-> integer
  927. if (is_type_float(src) && is_type_integer(dst)) {
  928. if (is_type_different_to_arch_endianness(src) || is_type_different_to_arch_endianness(dst)) {
  929. Type *platform_src_type = integer_endian_type_to_platform_type(src);
  930. Type *platform_dst_type = integer_endian_type_to_platform_type(dst);
  931. cgValue res = {};
  932. res = cg_emit_conv(p, value, platform_src_type);
  933. res = cg_emit_conv(p, res, platform_dst_type);
  934. return cg_emit_conv(p, res, t);
  935. }
  936. // if (is_type_integer_128bit(dst)) {
  937. // TEMPORARY_ALLOCATOR_GUARD();
  938. // auto args = array_make<lbValue>(temporary_allocator(), 1);
  939. // args[0] = value;
  940. // char const *call = "fixunsdfdi";
  941. // if (is_type_unsigned(dst)) {
  942. // call = "fixunsdfti";
  943. // }
  944. // lbValue res_i128 = lb_emit_runtime_call(p, call, args);
  945. // return lb_emit_conv(p, res_i128, t);
  946. // }
  947. bool is_signed = !is_type_unsigned(dst);
  948. return cg_value(tb_inst_float2int(p->func, value.node, dt, is_signed), t);
  949. }
  950. if (is_type_integer(src) && is_type_float(dst)) {
  951. if (is_type_different_to_arch_endianness(src) || is_type_different_to_arch_endianness(dst)) {
  952. Type *platform_src_type = integer_endian_type_to_platform_type(src);
  953. Type *platform_dst_type = integer_endian_type_to_platform_type(dst);
  954. cgValue res = {};
  955. res = cg_emit_conv(p, value, platform_src_type);
  956. res = cg_emit_conv(p, res, platform_dst_type);
  957. if (is_type_different_to_arch_endianness(dst)) {
  958. res = cg_emit_byte_swap(p, res, t);
  959. }
  960. return cg_emit_conv(p, res, t);
  961. }
  962. // if (is_type_integer_128bit(src)) {
  963. // TEMPORARY_ALLOCATOR_GUARD();
  964. // auto args = array_make<lbValue>(temporary_allocator(), 1);
  965. // args[0] = value;
  966. // char const *call = "floattidf";
  967. // if (is_type_unsigned(src)) {
  968. // call = "floattidf_unsigned";
  969. // }
  970. // lbValue res_f64 = lb_emit_runtime_call(p, call, args);
  971. // return lb_emit_conv(p, res_f64, t);
  972. // }
  973. bool is_signed = !is_type_unsigned(dst);
  974. return cg_value(tb_inst_int2float(p->func, value.node, dt, is_signed), t);
  975. }
  976. if (is_type_simd_vector(dst)) {
  977. GB_PANIC("TODO(bill): ? -> #simd vector");
  978. }
  979. // Pointer <-> uintptr
  980. if (is_type_pointer(src) && is_type_uintptr(dst)) {
  981. return cg_value(tb_inst_ptr2int(p->func, value.node, dt), t);
  982. }
  983. if (is_type_uintptr(src) && is_type_pointer(dst)) {
  984. return cg_value(tb_inst_int2ptr(p->func, value.node), t);
  985. }
  986. if (is_type_multi_pointer(src) && is_type_uintptr(dst)) {
  987. return cg_value(tb_inst_ptr2int(p->func, value.node, dt), t);
  988. }
  989. if (is_type_uintptr(src) && is_type_multi_pointer(dst)) {
  990. return cg_value(tb_inst_int2ptr(p->func, value.node), t);
  991. }
  992. if (is_type_union(dst)) {
  993. GB_PANIC("TODO(bill): ? -> union");
  994. }
  995. // NOTE(bill): This has to be done before 'Pointer <-> Pointer' as it's
  996. // subtype polymorphism casting
  997. if (check_is_assignable_to_using_subtype(src_type, t)) {
  998. GB_PANIC("TODO(bill): ? -> subtyping");
  999. }
  1000. // Pointer <-> Pointer
  1001. if (is_type_pointer(src) && is_type_pointer(dst)) {
  1002. return cg_value(value.node, t);
  1003. }
  1004. if (is_type_multi_pointer(src) && is_type_pointer(dst)) {
  1005. return cg_value(value.node, t);
  1006. }
  1007. if (is_type_pointer(src) && is_type_multi_pointer(dst)) {
  1008. return cg_value(value.node, t);
  1009. }
  1010. if (is_type_multi_pointer(src) && is_type_multi_pointer(dst)) {
  1011. return cg_value(value.node, t);
  1012. }
  1013. // proc <-> proc
  1014. if (is_type_proc(src) && is_type_proc(dst)) {
  1015. return cg_value(value.node, t);
  1016. }
  1017. // pointer -> proc
  1018. if (is_type_pointer(src) && is_type_proc(dst)) {
  1019. return cg_value(value.node, t);
  1020. }
  1021. // proc -> pointer
  1022. if (is_type_proc(src) && is_type_pointer(dst)) {
  1023. return cg_value(value.node, t);
  1024. }
  1025. // []byte/[]u8 <-> string
  1026. if (is_type_u8_slice(src) && is_type_string(dst)) {
  1027. return cg_emit_transmute(p, value, t);
  1028. }
  1029. if (is_type_string(src) && is_type_u8_slice(dst)) {
  1030. return cg_emit_transmute(p, value, t);
  1031. }
  1032. if (is_type_matrix(dst) && !is_type_matrix(src)) {
  1033. GB_PANIC("TODO(bill): !matrix -> matrix");
  1034. }
  1035. if (is_type_matrix(dst) && is_type_matrix(src)) {
  1036. GB_PANIC("TODO(bill): matrix -> matrix");
  1037. }
  1038. if (is_type_any(dst)) {
  1039. if (is_type_untyped_nil(src) ||
  1040. is_type_untyped_uninit(src)) {
  1041. return cg_const_nil(p, t);
  1042. }
  1043. cgAddr result = cg_add_local(p, t, nullptr, false);
  1044. Type *st = default_type(src_type);
  1045. cgValue data = cg_address_from_load_or_generate_local(p, value);
  1046. GB_ASSERT(is_type_pointer(data.type));
  1047. GB_ASSERT(is_type_typed(st));
  1048. data = cg_emit_conv(p, data, t_rawptr);
  1049. cgValue id = cg_typeid(p, st);
  1050. cgValue data_ptr = cg_emit_struct_ep(p, result.addr, 0);
  1051. cgValue id_ptr = cg_emit_struct_ep(p, result.addr, 1);
  1052. cg_emit_store(p, data_ptr, data);
  1053. cg_emit_store(p, id_ptr, id);
  1054. return cg_addr_load(p, result);
  1055. }
  1056. i64 src_sz = type_size_of(src);
  1057. i64 dst_sz = type_size_of(dst);
  1058. if (src_sz == dst_sz) {
  1059. // bit_set <-> integer
  1060. if (is_type_integer(src) && is_type_bit_set(dst)) {
  1061. cgValue v = cg_emit_conv(p, value, bit_set_to_int(dst));
  1062. return cg_emit_transmute(p, v, t);
  1063. }
  1064. if (is_type_bit_set(src) && is_type_integer(dst)) {
  1065. cgValue bs = cg_emit_transmute(p, value, bit_set_to_int(src));
  1066. return cg_emit_conv(p, bs, dst);
  1067. }
  1068. // typeid <-> integer
  1069. if (is_type_integer(src) && is_type_typeid(dst)) {
  1070. return cg_emit_transmute(p, value, dst);
  1071. }
  1072. if (is_type_typeid(src) && is_type_integer(dst)) {
  1073. return cg_emit_transmute(p, value, dst);
  1074. }
  1075. }
  1076. if (is_type_untyped(src)) {
  1077. if (is_type_string(src) && is_type_string(dst)) {
  1078. cgAddr result = cg_add_local(p, t, nullptr, false);
  1079. cg_addr_store(p, result, value);
  1080. return cg_addr_load(p, result);
  1081. }
  1082. }
  1083. gb_printf_err("%.*s\n", LIT(p->name));
  1084. gb_printf_err("cg_emit_conv: src -> dst\n");
  1085. gb_printf_err("Not Identical %s != %s\n", type_to_string(src_type), type_to_string(t));
  1086. gb_printf_err("Not Identical %s != %s\n", type_to_string(src), type_to_string(dst));
  1087. gb_printf_err("Not Identical %p != %p\n", src_type, t);
  1088. gb_printf_err("Not Identical %p != %p\n", src, dst);
  1089. GB_PANIC("Invalid type conversion: '%s' to '%s' for procedure '%.*s'",
  1090. type_to_string(src_type), type_to_string(t),
  1091. LIT(p->name));
  1092. return {};
  1093. }
  1094. gb_internal cgValue cg_emit_arith(cgProcedure *p, TokenKind op, cgValue lhs, cgValue rhs, Type *type) {
  1095. if (is_type_array_like(lhs.type) || is_type_array_like(rhs.type)) {
  1096. GB_PANIC("TODO(bill): cg_emit_arith_array");
  1097. } else if (is_type_matrix(lhs.type) || is_type_matrix(rhs.type)) {
  1098. GB_PANIC("TODO(bill): cg_emit_arith_matrix");
  1099. } else if (is_type_complex(type)) {
  1100. GB_PANIC("TODO(bill): cg_emit_arith complex");
  1101. } else if (is_type_quaternion(type)) {
  1102. GB_PANIC("TODO(bill): cg_emit_arith quaternion");
  1103. }
  1104. lhs = cg_flatten_value(p, cg_emit_conv(p, lhs, type));
  1105. rhs = cg_flatten_value(p, cg_emit_conv(p, rhs, type));
  1106. GB_ASSERT(lhs.kind == cgValue_Value);
  1107. GB_ASSERT(rhs.kind == cgValue_Value);
  1108. if (is_type_integer(type) && is_type_different_to_arch_endianness(type)) {
  1109. switch (op) {
  1110. case Token_AndNot:
  1111. case Token_And:
  1112. case Token_Or:
  1113. case Token_Xor:
  1114. goto handle_op;
  1115. }
  1116. Type *platform_type = integer_endian_type_to_platform_type(type);
  1117. cgValue x = cg_emit_byte_swap(p, lhs, integer_endian_type_to_platform_type(lhs.type));
  1118. cgValue y = cg_emit_byte_swap(p, rhs, integer_endian_type_to_platform_type(rhs.type));
  1119. cgValue res = cg_emit_arith(p, op, x, y, platform_type);
  1120. return cg_emit_byte_swap(p, res, type);
  1121. }
  1122. if (is_type_float(type) && is_type_different_to_arch_endianness(type)) {
  1123. Type *platform_type = integer_endian_type_to_platform_type(type);
  1124. cgValue x = cg_emit_conv(p, lhs, integer_endian_type_to_platform_type(lhs.type));
  1125. cgValue y = cg_emit_conv(p, rhs, integer_endian_type_to_platform_type(rhs.type));
  1126. cgValue res = cg_emit_arith(p, op, x, y, platform_type);
  1127. return cg_emit_byte_swap(p, res, type);
  1128. }
  1129. handle_op:;
  1130. // NOTE(bill): Bit Set Aliases for + and -
  1131. if (is_type_bit_set(type)) {
  1132. switch (op) {
  1133. case Token_Add: op = Token_Or; break;
  1134. case Token_Sub: op = Token_AndNot; break;
  1135. }
  1136. }
  1137. TB_ArithmeticBehavior arith_behavior = cast(TB_ArithmeticBehavior)0;
  1138. Type *integral_type = type;
  1139. if (is_type_simd_vector(integral_type)) {
  1140. GB_PANIC("TODO(bill): cg_emit_arith #simd vector");
  1141. // integral_type = core_array_type(integral_type);
  1142. }
  1143. switch (op) {
  1144. case Token_Add:
  1145. if (is_type_float(integral_type)) {
  1146. return cg_value(tb_inst_fadd(p->func, lhs.node, rhs.node), type);
  1147. }
  1148. return cg_value(tb_inst_add(p->func, lhs.node, rhs.node, arith_behavior), type);
  1149. case Token_Sub:
  1150. if (is_type_float(integral_type)) {
  1151. return cg_value(tb_inst_fsub(p->func, lhs.node, rhs.node), type);
  1152. }
  1153. return cg_value(tb_inst_sub(p->func, lhs.node, rhs.node, arith_behavior), type);
  1154. case Token_Mul:
  1155. if (is_type_float(integral_type)) {
  1156. return cg_value(tb_inst_fmul(p->func, lhs.node, rhs.node), type);
  1157. }
  1158. return cg_value(tb_inst_mul(p->func, lhs.node, rhs.node, arith_behavior), type);
  1159. case Token_Quo:
  1160. if (is_type_float(integral_type)) {
  1161. return cg_value(tb_inst_fdiv(p->func, lhs.node, rhs.node), type);
  1162. }
  1163. return cg_value(tb_inst_div(p->func, lhs.node, rhs.node, !is_type_unsigned(integral_type)), type);
  1164. case Token_Mod:
  1165. if (is_type_float(integral_type)) {
  1166. GB_PANIC("TODO(bill): float %% float");
  1167. }
  1168. return cg_value(tb_inst_mod(p->func, lhs.node, rhs.node, !is_type_unsigned(integral_type)), type);
  1169. case Token_ModMod:
  1170. if (is_type_unsigned(integral_type)) {
  1171. return cg_value(tb_inst_mod(p->func, lhs.node, rhs.node, false), type);
  1172. } else {
  1173. TB_Node *a = tb_inst_mod(p->func, lhs.node, rhs.node, true);
  1174. TB_Node *b = tb_inst_add(p->func, a, rhs.node, arith_behavior);
  1175. TB_Node *c = tb_inst_mod(p->func, b, rhs.node, true);
  1176. return cg_value(c, type);
  1177. }
  1178. case Token_And:
  1179. return cg_value(tb_inst_and(p->func, lhs.node, rhs.node), type);
  1180. case Token_Or:
  1181. return cg_value(tb_inst_or(p->func, lhs.node, rhs.node), type);
  1182. case Token_Xor:
  1183. return cg_value(tb_inst_xor(p->func, lhs.node, rhs.node), type);
  1184. case Token_Shl:
  1185. {
  1186. rhs = cg_emit_conv(p, rhs, lhs.type);
  1187. TB_DataType dt = cg_data_type(lhs.type);
  1188. TB_Node *lhsval = lhs.node;
  1189. TB_Node *bits = rhs.node;
  1190. TB_Node *bit_size = tb_inst_uint(p->func, dt, 8*type_size_of(lhs.type));
  1191. TB_Node *zero = tb_inst_uint(p->func, dt, 0);
  1192. TB_Node *width_test = tb_inst_cmp_ilt(p->func, bits, bit_size, false);
  1193. TB_Node *res = tb_inst_shl(p->func, lhsval, bits, arith_behavior);
  1194. res = tb_inst_select(p->func, width_test, res, zero);
  1195. return cg_value(res, type);
  1196. }
  1197. case Token_Shr:
  1198. {
  1199. rhs = cg_emit_conv(p, rhs, lhs.type);
  1200. TB_DataType dt = cg_data_type(lhs.type);
  1201. TB_Node *lhsval = lhs.node;
  1202. TB_Node *bits = rhs.node;
  1203. TB_Node *bit_size = tb_inst_uint(p->func, dt, 8*type_size_of(lhs.type));
  1204. TB_Node *zero = tb_inst_uint(p->func, dt, 0);
  1205. TB_Node *width_test = tb_inst_cmp_ilt(p->func, bits, bit_size, false);
  1206. TB_Node *res = nullptr;
  1207. if (is_type_unsigned(integral_type)) {
  1208. res = tb_inst_shr(p->func, lhsval, bits);
  1209. } else {
  1210. res = tb_inst_sar(p->func, lhsval, bits);
  1211. }
  1212. res = tb_inst_select(p->func, width_test, res, zero);
  1213. return cg_value(res, type);
  1214. }
  1215. case Token_AndNot:
  1216. return cg_value(tb_inst_and(p->func, lhs.node, tb_inst_not(p->func, rhs.node)), type);
  1217. }
  1218. GB_PANIC("unhandled operator of cg_emit_arith");
  1219. return {};
  1220. }
  1221. gb_internal void cg_fill_slice(cgProcedure *p, cgAddr const &slice, cgValue data, cgValue len) {
  1222. cgValue slice_ptr = cg_addr_get_ptr(p, slice);
  1223. cgValue data_ptr = cg_emit_struct_ep(p, slice_ptr, 0);
  1224. cgValue len_ptr = cg_emit_struct_ep(p, slice_ptr, 1);
  1225. data = cg_emit_conv(p, data, type_deref(data_ptr.type));
  1226. len = cg_emit_conv(p, len, t_int);
  1227. cg_emit_store(p, data_ptr, data);
  1228. cg_emit_store(p, len_ptr, len);
  1229. }
gb_internal cgAddr cg_build_addr_slice_expr(cgProcedure *p, Ast *expr) {
	// Build the address of a slice expression `x[low:high]` (or `x[:]`),
	// materializing the result in a fresh local and returning its address.
	ast_node(se, SliceExpr, expr);
	cgValue low = cg_const_int(p, t_int, 0); // lower bound defaults to 0
	cgValue high = {};
	if (se->low != nullptr) {
		low = cg_correct_endianness(p, cg_build_expr(p, se->low));
	}
	if (se->high != nullptr) {
		high = cg_correct_endianness(p, cg_build_expr(p, se->high));
	}
	bool no_indices = se->low == nullptr && se->high == nullptr;
	gb_unused(no_indices);
	cgAddr addr = cg_build_addr(p, se->expr);
	cgValue base = cg_addr_load(p, addr);
	Type *type = base_type(base.type);
	if (is_type_pointer(type)) {
		// Implicitly dereference a pointer to a sliceable operand.
		type = base_type(type_deref(type));
		addr = cg_addr(base);
		base = cg_addr_load(p, addr);
	}
	switch (type->kind) {
	case Type_Basic: // strings only (asserted below)
	case Type_Slice: {
		if (type->kind == Type_Basic) {
			GB_ASSERT(type->Basic.kind == Basic_string);
		}
		Type *slice_type = type;
		if (high.node == nullptr) {
			// Missing high bound: slice up to the full length.
			cgValue len = cg_builtin_len(p, base);
			high = len;
		}
		if (!no_indices) {
			// NOTE(review): bounds checking is currently disabled here — TODO confirm intent.
			// cg_emit_slice_bounds_check(p, se->open, low, high, len, se->low != nullptr);
		}
		cgValue elem = cg_emit_ptr_offset(p, cg_builtin_raw_data(p, base), low);
		cgValue new_len = cg_emit_arith(p, Token_Sub, high, low, t_int);
		cgAddr slice = cg_add_local(p, slice_type, nullptr, true);
		cg_fill_slice(p, slice, elem, new_len);
		return slice;
	}
	case Type_RelativeMultiPointer:
		GB_PANIC("TODO(bill): Type_RelativeMultiPointer should be handled above already on the cg_addr_load");
		break;
	case Type_DynamicArray: {
		// Reference implementation from the LLVM backend, kept for porting:
		// Type *elem_type = type->DynamicArray.elem;
		// Type *slice_type = alloc_type_slice(elem_type);
		// lbValue len = lb_dynamic_array_len(p, base);
		// if (high.value == nullptr) high = len;
		// if (!no_indices) {
		// lb_emit_slice_bounds_check(p, se->open, low, high, len, se->low != nullptr);
		// }
		// lbValue elem = lb_emit_ptr_offset(p, lb_dynamic_array_elem(p, base), low);
		// lbValue new_len = lb_emit_arith(p, Token_Sub, high, low, t_int);
		// lbAddr slice = lb_add_local_generated(p, slice_type, false);
		// lb_fill_slice(p, slice, elem, new_len);
		// return slice;
		GB_PANIC("cg_build_addr_slice_expr Type_DynamicArray");
		break;
	}
	case Type_MultiPointer: {
		Type *res_type = type_of_expr(expr);
		if (se->high == nullptr) {
			// `mp[low:]` produces another multi-pointer, offset by `low`.
			cgAddr res = cg_add_local(p, res_type, nullptr, false);
			GB_ASSERT(base.kind == cgValue_Value);
			GB_ASSERT(low.kind == cgValue_Value);
			i64 stride = type_size_of(type->MultiPointer.elem);
			cgValue offset = cg_value(tb_inst_array_access(p->func, base.node, low.node, stride), base.type);
			cg_addr_store(p, res, offset);
			return res;
		} else {
			// `mp[low:high]` produces a slice: store {data, len} directly.
			cgAddr res = cg_add_local(p, res_type, nullptr, true);
			low = cg_emit_conv(p, low, t_int);
			high = cg_emit_conv(p, high, t_int);
			// NOTE(review): bounds checking is currently disabled here — TODO confirm intent.
			// cg_emit_multi_pointer_slice_bounds_check(p, se->open, low, high);
			i64 stride = type_size_of(type->MultiPointer.elem);
			TB_Node *offset = tb_inst_array_access(p->func, base.node, low.node, stride);
			TB_Node *len = tb_inst_sub(p->func, high.node, low.node, cast(TB_ArithmeticBehavior)0);
			TB_Node *data_ptr = tb_inst_member_access(p->func, res.addr.node, type_offset_of(res_type, 0));
			TB_Node *len_ptr = tb_inst_member_access(p->func, res.addr.node, type_offset_of(res_type, 1));
			tb_inst_store(p->func, TB_TYPE_PTR, data_ptr, offset, cast(TB_CharUnits)build_context.ptr_size, false);
			tb_inst_store(p->func, TB_TYPE_INT, len_ptr, len, cast(TB_CharUnits)build_context.int_size, false);
			return res;
		}
	}
	case Type_Array: {
		Type *slice_type = type_of_expr(expr);
		GB_ASSERT(is_type_slice(slice_type));
		cgValue len = cg_const_int(p, t_int, type->Array.count);
		if (high.node == nullptr) high = len;
		// bool low_const = type_and_value_of_expr(se->low).mode == Addressing_Constant;
		// bool high_const = type_and_value_of_expr(se->high).mode == Addressing_Constant;
		// if (!low_const || !high_const) {
		// if (!no_indices) {
		// lb_emit_slice_bounds_check(p, se->open, low, high, len, se->low != nullptr);
		// }
		// }
		cgValue elem = cg_emit_ptr_offset(p, cg_builtin_raw_data(p, cg_addr_get_ptr(p, addr)), low);
		cgValue new_len = cg_emit_arith(p, Token_Sub, high, low, t_int);
		cgAddr slice = cg_add_local(p, slice_type, nullptr, true);
		cg_fill_slice(p, slice, elem, new_len);
		return slice;
	}
	case Type_Struct:
		// SoA struct slicing is not yet ported; LLVM backend reference below.
		// if (is_type_soa_struct(type)) {
		// lbValue len = lb_soa_struct_len(p, lb_addr_get_ptr(p, addr));
		// if (high.value == nullptr) high = len;
		// if (!no_indices) {
		// lb_emit_slice_bounds_check(p, se->open, low, high, len, se->low != nullptr);
		// }
		// #if 1
		// lbAddr dst = lb_add_local_generated(p, type_of_expr(expr), true);
		// if (type->Struct.soa_kind == StructSoa_Fixed) {
		// i32 field_count = cast(i32)type->Struct.fields.count;
		// for (i32 i = 0; i < field_count; i++) {
		// lbValue field_dst = lb_emit_struct_ep(p, dst.addr, i);
		// lbValue field_src = lb_emit_struct_ep(p, lb_addr_get_ptr(p, addr), i);
		// field_src = lb_emit_array_ep(p, field_src, low);
		// lb_emit_store(p, field_dst, field_src);
		// }
		// lbValue len_dst = lb_emit_struct_ep(p, dst.addr, field_count);
		// lbValue new_len = lb_emit_arith(p, Token_Sub, high, low, t_int);
		// lb_emit_store(p, len_dst, new_len);
		// } else if (type->Struct.soa_kind == StructSoa_Slice) {
		// if (no_indices) {
		// lb_addr_store(p, dst, base);
		// } else {
		// i32 field_count = cast(i32)type->Struct.fields.count - 1;
		// for (i32 i = 0; i < field_count; i++) {
		// lbValue field_dst = lb_emit_struct_ep(p, dst.addr, i);
		// lbValue field_src = lb_emit_struct_ev(p, base, i);
		// field_src = lb_emit_ptr_offset(p, field_src, low);
		// lb_emit_store(p, field_dst, field_src);
		// }
		// lbValue len_dst = lb_emit_struct_ep(p, dst.addr, field_count);
		// lbValue new_len = lb_emit_arith(p, Token_Sub, high, low, t_int);
		// lb_emit_store(p, len_dst, new_len);
		// }
		// } else if (type->Struct.soa_kind == StructSoa_Dynamic) {
		// i32 field_count = cast(i32)type->Struct.fields.count - 3;
		// for (i32 i = 0; i < field_count; i++) {
		// lbValue field_dst = lb_emit_struct_ep(p, dst.addr, i);
		// lbValue field_src = lb_emit_struct_ev(p, base, i);
		// field_src = lb_emit_ptr_offset(p, field_src, low);
		// lb_emit_store(p, field_dst, field_src);
		// }
		// lbValue len_dst = lb_emit_struct_ep(p, dst.addr, field_count);
		// lbValue new_len = lb_emit_arith(p, Token_Sub, high, low, t_int);
		// lb_emit_store(p, len_dst, new_len);
		// }
		// return dst;
		// #endif
		// }
		GB_PANIC("cg_build_addr_slice_expr Type_Struct");
		break;
	}
	GB_PANIC("Unknown slicable type");
	return {};
}
  1388. gb_internal cgValue cg_emit_unary_arith(cgProcedure *p, TokenKind op, cgValue x, Type *type) {
  1389. switch (op) {
  1390. case Token_Add:
  1391. return x;
  1392. case Token_Not: // Boolean not
  1393. case Token_Xor: // Bitwise not
  1394. case Token_Sub: // Number negation
  1395. break;
  1396. case Token_Pointer:
  1397. GB_PANIC("This should be handled elsewhere");
  1398. break;
  1399. }
  1400. x = cg_flatten_value(p, x);
  1401. if (is_type_array_like(x.type)) {
  1402. GB_PANIC("TODO(bill): cg_emit_unary_arith is_type_array_like");
  1403. // // IMPORTANT TODO(bill): This is very wasteful with regards to stack memory
  1404. // Type *tl = base_type(x.type);
  1405. // cgValue val = cg_address_from_load_or_generate_local(p, x);
  1406. // GB_ASSERT(is_type_array_like(type));
  1407. // Type *elem_type = base_array_type(type);
  1408. // // NOTE(bill): Doesn't need to be zero because it will be initialized in the loops
  1409. // cgAddr res_addr = cg_add_local(p, type, nullptr, false);
  1410. // cgValue res = cg_addr_get_ptr(p, res_addr);
  1411. // bool inline_array_arith = cg_can_try_to_inline_array_arith(type);
  1412. // i32 count = cast(i32)get_array_type_count(tl);
  1413. // LLVMTypeRef vector_type = nullptr;
  1414. // if (op != Token_Not && cg_try_vector_cast(p->module, val, &vector_type)) {
  1415. // LLVMValueRef vp = LLVMBuildPointerCast(p->builder, val.value, LLVMPointerType(vector_type, 0), "");
  1416. // LLVMValueRef v = LLVMBuildLoad2(p->builder, vector_type, vp, "");
  1417. // LLVMValueRef opv = nullptr;
  1418. // switch (op) {
  1419. // case Token_Xor:
  1420. // opv = LLVMBuildNot(p->builder, v, "");
  1421. // break;
  1422. // case Token_Sub:
  1423. // if (is_type_float(elem_type)) {
  1424. // opv = LLVMBuildFNeg(p->builder, v, "");
  1425. // } else {
  1426. // opv = LLVMBuildNeg(p->builder, v, "");
  1427. // }
  1428. // break;
  1429. // }
  1430. // if (opv != nullptr) {
  1431. // LLVMSetAlignment(res.value, cast(unsigned)cg_alignof(vector_type));
  1432. // LLVMValueRef res_ptr = LLVMBuildPointerCast(p->builder, res.value, LLVMPointerType(vector_type, 0), "");
  1433. // LLVMBuildStore(p->builder, opv, res_ptr);
  1434. // return cg_emit_conv(p, cg_emit_load(p, res), type);
  1435. // }
  1436. // }
  1437. // if (inline_array_arith) {
  1438. // // inline
  1439. // for (i32 i = 0; i < count; i++) {
  1440. // cgValue e = cg_emit_load(p, cg_emit_array_epi(p, val, i));
  1441. // cgValue z = cg_emit_unary_arith(p, op, e, elem_type);
  1442. // cg_emit_store(p, cg_emit_array_epi(p, res, i), z);
  1443. // }
  1444. // } else {
  1445. // auto loop_data = cg_loop_start(p, count, t_i32);
  1446. // cgValue e = cg_emit_load(p, cg_emit_array_ep(p, val, loop_data.idx));
  1447. // cgValue z = cg_emit_unary_arith(p, op, e, elem_type);
  1448. // cg_emit_store(p, cg_emit_array_ep(p, res, loop_data.idx), z);
  1449. // cg_loop_end(p, loop_data);
  1450. // }
  1451. // return cg_emit_load(p, res);
  1452. }
  1453. if (op == Token_Xor) {
  1454. GB_ASSERT(x.kind == cgValue_Value);
  1455. cgValue cmp = cg_value(tb_inst_not(p->func, x.node), x.type);
  1456. return cg_emit_conv(p, cmp, type);
  1457. }
  1458. if (op == Token_Not) {
  1459. TB_Node *zero = cg_const_nil(p, x.type).node;
  1460. cgValue cmp = cg_value(tb_inst_cmp_ne(p->func, x.node, zero), x.type);
  1461. return cg_emit_conv(p, cmp, type);
  1462. }
  1463. if (op == Token_Sub && is_type_integer(type) && is_type_different_to_arch_endianness(type)) {
  1464. Type *platform_type = integer_endian_type_to_platform_type(type);
  1465. cgValue v = cg_emit_byte_swap(p, x, platform_type);
  1466. cgValue res = cg_value(tb_inst_neg(p->func, v.node), platform_type);
  1467. return cg_emit_byte_swap(p, res, type);
  1468. }
  1469. if (op == Token_Sub && is_type_float(type) && is_type_different_to_arch_endianness(type)) {
  1470. Type *platform_type = integer_endian_type_to_platform_type(type);
  1471. cgValue v = cg_emit_byte_swap(p, x, platform_type);
  1472. cgValue res = cg_value(tb_inst_neg(p->func, v.node), platform_type);
  1473. return cg_emit_byte_swap(p, res, type);
  1474. }
  1475. cgValue res = {};
  1476. if (op == Token_Sub) { // Number negation
  1477. if (is_type_integer(x.type)) {
  1478. res = cg_value(tb_inst_neg(p->func, x.node), x.type);
  1479. } else if (is_type_float(x.type)) {
  1480. res = cg_value(tb_inst_neg(p->func, x.node), x.type);
  1481. } else if (is_type_complex(x.type)) {
  1482. GB_PANIC("TODO(bill): neg complex");
  1483. // LLVMValueRef v0 = LLVMBuildFNeg(p->builder, LLVMBuildExtractValue(p->builder, x.value, 0, ""), "");
  1484. // LLVMValueRef v1 = LLVMBuildFNeg(p->builder, LLVMBuildExtractValue(p->builder, x.value, 1, ""), "");
  1485. // cgAddr addr = cg_add_local_generated(p, x.type, false);
  1486. // LLVMTypeRef type = llvm_addr_type(p->module, addr.addr);
  1487. // LLVMBuildStore(p->builder, v0, LLVMBuildStructGEP2(p->builder, type, addr.addr.value, 0, ""));
  1488. // LLVMBuildStore(p->builder, v1, LLVMBuildStructGEP2(p->builder, type, addr.addr.value, 1, ""));
  1489. // return cg_addr_load(p, addr);
  1490. } else if (is_type_quaternion(x.type)) {
  1491. GB_PANIC("TODO(bill): neg quaternion");
  1492. // LLVMValueRef v0 = LLVMBuildFNeg(p->builder, LLVMBuildExtractValue(p->builder, x.value, 0, ""), "");
  1493. // LLVMValueRef v1 = LLVMBuildFNeg(p->builder, LLVMBuildExtractValue(p->builder, x.value, 1, ""), "");
  1494. // LLVMValueRef v2 = LLVMBuildFNeg(p->builder, LLVMBuildExtractValue(p->builder, x.value, 2, ""), "");
  1495. // LLVMValueRef v3 = LLVMBuildFNeg(p->builder, LLVMBuildExtractValue(p->builder, x.value, 3, ""), "");
  1496. // cgAddr addr = cg_add_local_generated(p, x.type, false);
  1497. // LLVMTypeRef type = llvm_addr_type(p->module, addr.addr);
  1498. // LLVMBuildStore(p->builder, v0, LLVMBuildStructGEP2(p->builder, type, addr.addr.value, 0, ""));
  1499. // LLVMBuildStore(p->builder, v1, LLVMBuildStructGEP2(p->builder, type, addr.addr.value, 1, ""));
  1500. // LLVMBuildStore(p->builder, v2, LLVMBuildStructGEP2(p->builder, type, addr.addr.value, 2, ""));
  1501. // LLVMBuildStore(p->builder, v3, LLVMBuildStructGEP2(p->builder, type, addr.addr.value, 3, ""));
  1502. // return cg_addr_load(p, addr);
  1503. } else if (is_type_simd_vector(x.type)) {
  1504. GB_PANIC("TODO(bill): neg simd");
  1505. // Type *elem = base_array_type(x.type);
  1506. // if (is_type_float(elem)) {
  1507. // res.value = LLVMBuildFNeg(p->builder, x.value, "");
  1508. // } else {
  1509. // res.value = LLVMBuildNeg(p->builder, x.value, "");
  1510. // }
  1511. } else if (is_type_matrix(x.type)) {
  1512. GB_PANIC("TODO(bill): neg matrix");
  1513. // cgValue zero = {};
  1514. // zero.value = LLVMConstNull(cg_type(p->module, type));
  1515. // zero.type = type;
  1516. // return cg_emit_arith_matrix(p, Token_Sub, zero, x, type, true);
  1517. } else {
  1518. GB_PANIC("Unhandled type %s", type_to_string(x.type));
  1519. }
  1520. res.type = x.type;
  1521. return res;
  1522. }
  1523. return res;
  1524. }
  1525. gb_internal void cg_emit_if(cgProcedure *p, cgValue const &cond, TB_Node *true_region, TB_Node *false_region) {
  1526. GB_ASSERT(cond.kind == cgValue_Value);
  1527. tb_inst_if(p->func, cond.node, true_region, false_region);
  1528. }
// Bookkeeping for a counted loop emitted by cg_loop_start/cg_loop_end.
struct cgLoopData {
	cgAddr index_addr; // stack slot holding the loop counter
	cgValue index;     // counter value loaded in the loop header
	TB_Node *body;     // region executed on each iteration
	TB_Node *done;     // region reached once the counter hits the limit
	TB_Node *loop;     // loop header region: load counter, compare, branch
};
  1536. gb_internal cgLoopData cg_loop_start(cgProcedure *p, isize count, Type *index_type) {
  1537. cgLoopData data = {};
  1538. cgValue max = cg_const_int(p, index_type, count);
  1539. data.index_addr = cg_add_local(p, index_type, nullptr, true);
  1540. data.body = cg_control_region(p, "loop_body");
  1541. data.done = cg_control_region(p, "loop_done");
  1542. data.loop = cg_control_region(p, "loop_loop");
  1543. cg_emit_goto(p, data.loop);
  1544. tb_inst_set_control(p->func, data.loop);
  1545. data.index = cg_addr_load(p, data.index_addr);
  1546. cgValue cond = cg_emit_comp(p, Token_Lt, data.index, max);
  1547. cg_emit_if(p, cond, data.body, data.done);
  1548. tb_inst_set_control(p->func, data.body);
  1549. return data;
  1550. }
  1551. gb_internal void cg_loop_end(cgProcedure *p, cgLoopData const &data) {
  1552. if (data.index_addr.addr.node != nullptr) {
  1553. cg_emit_increment(p, data.index_addr.addr);
  1554. cg_emit_goto(p, data.loop);
  1555. tb_inst_set_control(p->func, data.done);
  1556. }
  1557. }
  1558. gb_internal void cg_build_try_lhs_rhs(cgProcedure *p, Ast *arg, Type *final_type, cgValue *lhs_, cgValue *rhs_) {
  1559. cgValue lhs = {};
  1560. cgValue rhs = {};
  1561. cgValue value = cg_build_expr(p, arg);
  1562. if (value.kind == cgValue_Multi) {
  1563. auto const &values = value.multi->values;
  1564. if (values.count == 2) {
  1565. lhs = values[0];
  1566. rhs = values[1];
  1567. } else {
  1568. rhs = values[values.count-1];
  1569. if (values.count > 1) {
  1570. lhs = cg_value_multi(slice(values, 0, values.count-1), final_type);
  1571. }
  1572. }
  1573. } else {
  1574. rhs = value;
  1575. }
  1576. GB_ASSERT(rhs.node != nullptr);
  1577. if (lhs_) *lhs_ = lhs;
  1578. if (rhs_) *rhs_ = rhs;
  1579. }
  1580. gb_internal cgValue cg_emit_try_has_value(cgProcedure *p, cgValue rhs) {
  1581. cgValue has_value = {};
  1582. if (is_type_boolean(rhs.type)) {
  1583. has_value = rhs;
  1584. } else {
  1585. GB_ASSERT_MSG(type_has_nil(rhs.type), "%s", type_to_string(rhs.type));
  1586. has_value = cg_emit_comp_against_nil(p, Token_CmpEq, rhs);
  1587. }
  1588. GB_ASSERT(has_value.node != nullptr);
  1589. return has_value;
  1590. }
gb_internal cgValue cg_build_or_return(cgProcedure *p, Ast *arg, Type *final_type) {
	// Build `arg or_return`: if the trailing operand of `arg` signals failure,
	// return it from the current procedure; otherwise continue with the leading
	// value(s) converted to `final_type`.
	cgValue lhs = {};
	cgValue rhs = {};
	cg_build_try_lhs_rhs(p, arg, final_type, &lhs, &rhs);
	TB_Node *return_region = cg_control_region(p, "or_return_return");
	TB_Node *continue_region = cg_control_region(p, "or_return_continue");
	cgValue cond = cg_emit_try_has_value(p, rhs);
	cg_emit_if(p, cond, continue_region, return_region);
	tb_inst_set_control(p->func, return_region);
	{
		// Early-return path: forward the error as the procedure's last result.
		Type *proc_type = base_type(p->type);
		Type *results = proc_type->Proc.results;
		GB_ASSERT(results != nullptr && results->kind == Type_Tuple);
		TypeTuple *tuple = &results->Tuple;
		GB_ASSERT(tuple->variables.count != 0);
		Entity *end_entity = tuple->variables[tuple->variables.count-1];
		rhs = cg_emit_conv(p, rhs, end_entity->type);
		if (p->type->Proc.has_named_results) {
			GB_ASSERT(end_entity->token.string.len != 0);
			// NOTE(bill): store the named values before returning
			cgAddr found = map_must_get(&p->variable_map, end_entity);
			cg_addr_store(p, found, rhs);
			cg_build_return_stmt(p, {});
		} else {
			// Unnamed single result: return the error value directly.
			GB_ASSERT(tuple->variables.count == 1);
			Slice<cgValue> results = {};
			results.data = &rhs;
			results.count = 1;;
			cg_build_return_stmt_internal(p, results);
		}
	}
	tb_inst_set_control(p->func, continue_region);
	// Success path: yield the value part (tuples are handled by the caller).
	if (final_type != nullptr && !is_type_tuple(final_type)) {
		return cg_emit_conv(p, lhs, final_type);
	}
	return {};
}
gb_internal cgValue cg_build_or_else(cgProcedure *p, Ast *arg, Ast *else_expr, Type *final_type) {
	// Build `arg or_else else_expr`: use the value of `arg` when its trailing
	// operand signals success, otherwise evaluate and use `else_expr`.
	if (arg->state_flags & StateFlag_DirectiveWasFalse) {
		// The `arg` side was compiled out; only the else-branch remains.
		return cg_build_expr(p, else_expr);
	}
	cgValue lhs = {};
	cgValue rhs = {};
	cg_build_try_lhs_rhs(p, arg, final_type, &lhs, &rhs);
	GB_ASSERT(else_expr != nullptr);
	if (is_diverging_expr(else_expr)) {
		// The else-expression never returns (e.g. panics), so no phi merge is needed.
		TB_Node *then = cg_control_region(p, "or_else_then");
		TB_Node *else_ = cg_control_region(p, "or_else_else");
		cg_emit_if(p, cg_emit_try_has_value(p, rhs), then, else_);
		// NOTE(bill): else block needs to be straight afterwards to make sure that the actual value is used
		// from the then block
		tb_inst_set_control(p->func, else_);
		cg_build_expr(p, else_expr);
		tb_inst_set_control(p->func, then);
		return cg_emit_conv(p, lhs, final_type);
	} else {
		// Two-way branch merged through a phi node in `done`.
		TB_Node *incoming_values[2] = {};
		TB_Node *incoming_regions[2] = {};
		TB_Node *then = cg_control_region(p, "or_else_then");
		TB_Node *done = cg_control_region(p, "or_else_done"); // NOTE(bill): Append later
		TB_Node *else_ = cg_control_region(p, "or_else_else");
		cg_emit_if(p, cg_emit_try_has_value(p, rhs), then, else_);
		tb_inst_set_control(p->func, then);
		cgValue x = cg_emit_conv(p, lhs, final_type);
		incoming_values[0] = x.node;
		incoming_regions[0] = tb_inst_get_control(p->func);
		tb_inst_goto(p->func, done);
		tb_inst_set_control(p->func, else_);
		cgValue y = cg_emit_conv(p, cg_build_expr(p, else_expr), final_type);
		incoming_values[1] = y.node;
		incoming_regions[1] = tb_inst_get_control(p->func);
		tb_inst_goto(p->func, done);
		tb_inst_set_control(p->func, done);
		// Both arms must agree in kind and data type for the phi to be valid.
		GB_ASSERT(x.kind == y.kind);
		GB_ASSERT(incoming_values[0]->dt.raw == incoming_values[1]->dt.raw);
		cgValue res = {};
		res.kind = x.kind;
		res.type = final_type;
		res.node = tb_inst_incomplete_phi(p->func, incoming_values[0]->dt, done, 2);
		tb_inst_add_phi_operand(p->func, res.node, incoming_regions[0], incoming_values[0]);
		tb_inst_add_phi_operand(p->func, res.node, incoming_regions[1], incoming_values[1]);
		return res;
	}
}
  1675. gb_internal isize cg_control_region_pred_count(TB_Node *region) {
  1676. GB_ASSERT(region->type == TB_REGION);
  1677. GB_ASSERT(region->input_count > 0);
  1678. return region->input_count;
  1679. }
gb_internal cgValue cg_build_logical_binary_expr(cgProcedure *p, TokenKind op, Ast *left, Ast *right, Type *final_type) {
	// Build short-circuiting `left && right` / `left || right` as control flow,
	// merging the short-circuit constant and the RHS value through a phi in `done`.
	TB_Node *rhs = cg_control_region(p, "logical_cmp_rhs");
	TB_Node *done = cg_control_region(p, "logical_cmp_done");
	cgValue short_circuit = {};
	if (op == Token_CmpAnd) {
		cg_build_cond(p, left, rhs, done);
		short_circuit = cg_const_bool(p, t_bool, false); // `false && _` is false
	} else if (op == Token_CmpOr) {
		cg_build_cond(p, left, done, rhs);
		short_circuit = cg_const_bool(p, t_bool, true); // `true || _` is true
	}
	if (rhs->input_count == 0) {
		// The RHS region is unreachable: the LHS alone decides the result.
		tb_inst_set_control(p->func, done);
		return cg_emit_conv(p, short_circuit, final_type);
	}
	if (done->input_count == 0) {
		// No short-circuit edge: the result is just the RHS.
		tb_inst_set_control(p->func, rhs);
		return cg_build_expr(p, right);
	}
	tb_inst_set_control(p->func, rhs);
	cgValue edge = cg_build_expr(p, right);
	TB_Node *edge_region = tb_inst_get_control(p->func);
	tb_inst_goto(p->func, done);
	tb_inst_set_control(p->func, done);
	TB_DataType dt = edge.node->dt;
	// Phi merge: the edge arriving from the RHS carries its computed value,
	// every other (short-circuit) edge carries the constant.
	TB_Node *phi = tb_inst_incomplete_phi(p->func, dt, done, done->input_count);
	for (size_t i = 0; i < done->input_count; i++) {
		TB_Node *val = short_circuit.node;
		TB_Node *region = done->inputs[i];
		if (region == edge_region) {
			val = edge.node;
		}
		tb_inst_add_phi_operand(p->func, phi, region, val);
	}
	return cg_emit_conv(p, cg_value(phi, t_bool), final_type);
}
gb_internal cgValue cg_build_binary_expr(cgProcedure *p, Ast *expr) {
	// Dispatch a binary expression to the appropriate emitter:
	// arithmetic/bitwise ops, shifts, comparisons, logical &&/||, and in/not_in.
	ast_node(be, BinaryExpr, expr);
	TypeAndValue tv = type_and_value_of_expr(expr);
	if (is_type_matrix(be->left->tav.type) || is_type_matrix(be->right->tav.type)) {
		// Matrix arithmetic is not yet ported to this backend.
		cgValue left = cg_build_expr(p, be->left);
		cgValue right = cg_build_expr(p, be->right);
		GB_PANIC("TODO(bill): cg_emit_arith_matrix");
		// return cg_emit_arith_matrix(p, be->op.kind, left, right, default_type(tv.type), false);
	}
	switch (be->op.kind) {
	case Token_Add:
	case Token_Sub:
	case Token_Mul:
	case Token_Quo:
	case Token_Mod:
	case Token_ModMod:
	case Token_And:
	case Token_Or:
	case Token_Xor:
	case Token_AndNot: {
		// Plain arithmetic/bitwise: evaluate both sides, then emit the op.
		Type *type = default_type(tv.type);
		cgValue left = cg_build_expr(p, be->left);
		cgValue right = cg_build_expr(p, be->right);
		return cg_emit_arith(p, be->op.kind, left, right, type);
	}
	case Token_Shl:
	case Token_Shr: {
		cgValue left, right;
		Type *type = default_type(tv.type);
		left = cg_build_expr(p, be->left);
		if (cg_is_expr_untyped_const(be->right)) {
			// NOTE(bill): RHS shift operands can still be untyped
			// Just bypass the standard cg_build_expr
			right = cg_expr_untyped_const_to_typed(p, be->right, type);
		} else {
			right = cg_build_expr(p, be->right);
		}
		return cg_emit_arith(p, be->op.kind, left, right, type);
	}
	case Token_CmpEq:
	case Token_NotEq:
		if (is_type_untyped_nil(be->right->tav.type)) {
			// `x == nil` or `x != nil`
			cgValue left = cg_build_expr(p, be->left);
			cgValue cmp = cg_emit_comp_against_nil(p, be->op.kind, left);
			Type *type = default_type(tv.type);
			return cg_emit_conv(p, cmp, type);
		} else if (is_type_untyped_nil(be->left->tav.type)) {
			// `nil == x` or `nil != x`
			cgValue right = cg_build_expr(p, be->right);
			cgValue cmp = cg_emit_comp_against_nil(p, be->op.kind, right);
			Type *type = default_type(tv.type);
			return cg_emit_conv(p, cmp, type);
		}/* else if (cg_is_empty_string_constant(be->right)) {
			// `x == ""` or `x != ""`
			cgValue s = cg_build_expr(p, be->left);
			s = cg_emit_conv(p, s, t_string);
			cgValue len = cg_string_len(p, s);
			cgValue cmp = cg_emit_comp(p, be->op.kind, len, cg_const_int(p->module, t_int, 0));
			Type *type = default_type(tv.type);
			return cg_emit_conv(p, cmp, type);
		} else if (cg_is_empty_string_constant(be->left)) {
			// `"" == x` or `"" != x`
			cgValue s = cg_build_expr(p, be->right);
			s = cg_emit_conv(p, s, t_string);
			cgValue len = cg_string_len(p, s);
			cgValue cmp = cg_emit_comp(p, be->op.kind, len, cg_const_int(p->module, t_int, 0));
			Type *type = default_type(tv.type);
			return cg_emit_conv(p, cmp, type);
		}*/
		/*fallthrough*/
	case Token_Lt:
	case Token_LtEq:
	case Token_Gt:
	case Token_GtEq:
	{
		// General comparison; type operands compare by their typeids.
		cgValue left = {};
		cgValue right = {};
		if (be->left->tav.mode == Addressing_Type) {
			left = cg_typeid(p, be->left->tav.type);
		}
		if (be->right->tav.mode == Addressing_Type) {
			right = cg_typeid(p, be->right->tav.type);
		}
		if (left.node == nullptr) left = cg_build_expr(p, be->left);
		if (right.node == nullptr) right = cg_build_expr(p, be->right);
		cgValue cmp = cg_emit_comp(p, be->op.kind, left, right);
		Type *type = default_type(tv.type);
		return cg_emit_conv(p, cmp, type);
	}
	case Token_CmpAnd:
	case Token_CmpOr:
		return cg_build_logical_binary_expr(p, be->op.kind, be->left, be->right, tv.type);
	case Token_in:
	case Token_not_in:
	{
		// Membership test against a map or a bit_set.
		cgValue left = cg_build_expr(p, be->left);
		cgValue right = cg_build_expr(p, be->right);
		Type *rt = base_type(right.type);
		if (is_type_pointer(rt)) {
			// Auto-dereference a pointer to the container.
			right = cg_emit_load(p, right);
			rt = base_type(type_deref(rt));
		}
		switch (rt->kind) {
		case Type_Map:
			{
				// Key is present iff the internal lookup yields a non-nil pointer.
				cgValue map_ptr = cg_address_from_load_or_generate_local(p, right);
				cgValue key = left;
				cgValue ptr = cg_internal_dynamic_map_get_ptr(p, map_ptr, key);
				if (be->op.kind == Token_in) {
					return cg_emit_conv(p, cg_emit_comp_against_nil(p, Token_NotEq, ptr), t_bool);
				} else {
					return cg_emit_conv(p, cg_emit_comp_against_nil(p, Token_CmpEq, ptr), t_bool);
				}
			}
			break;
		case Type_BitSet:
			{
				// Test the element's bit: ((1 << (elem - lower)) & set) != 0.
				Type *key_type = rt->BitSet.elem;
				GB_ASSERT(are_types_identical(left.type, key_type));
				Type *it = bit_set_to_int(rt);
				left = cg_emit_conv(p, left, it);
				if (is_type_different_to_arch_endianness(it)) {
					left = cg_emit_byte_swap(p, left, integer_endian_type_to_platform_type(it));
				}
				cgValue lower = cg_const_value(p, left.type, exact_value_i64(rt->BitSet.lower));
				cgValue key = cg_emit_arith(p, Token_Sub, left, lower, left.type);
				cgValue bit = cg_emit_arith(p, Token_Shl, cg_const_int(p, left.type, 1), key, left.type);
				bit = cg_emit_conv(p, bit, it);
				cgValue old_value = cg_emit_transmute(p, right, it);
				cgValue new_value = cg_emit_arith(p, Token_And, old_value, bit, it);
				// TODO: final comparison against zero is not yet ported.
				GB_PANIC("TODO(bill): cg_emit_comp");
				// TokenKind op = (be->op.kind == Token_in) ? Token_NotEq : Token_CmpEq;
				// return cg_emit_conv(p, cg_emit_comp(p, op, new_value, cg_const_int(p, new_value.type, 0)), t_bool);
			}
			break;
		default:
			GB_PANIC("Invalid 'in' type");
		}
		break;
	}
	break;
	default:
		GB_PANIC("Invalid binary expression");
		break;
	}
	return {};
}
  1864. gb_internal cgValue cg_build_cond(cgProcedure *p, Ast *cond, TB_Node *true_block, TB_Node *false_block) {
  1865. cond = unparen_expr(cond);
  1866. GB_ASSERT(cond != nullptr);
  1867. GB_ASSERT(true_block != nullptr);
  1868. GB_ASSERT(false_block != nullptr);
  1869. // Use to signal not to do compile time short circuit for consts
  1870. cgValue no_comptime_short_circuit = {};
  1871. switch (cond->kind) {
  1872. case_ast_node(ue, UnaryExpr, cond);
  1873. if (ue->op.kind == Token_Not) {
  1874. cgValue cond_val = cg_build_cond(p, ue->expr, false_block, true_block);
  1875. return cond_val;
  1876. // if (cond_val.value && LLVMIsConstant(cond_val.value)) {
  1877. // return cg_const_bool(p->module, cond_val.type, LLVMConstIntGetZExtValue(cond_val.value) == 0);
  1878. // }
  1879. // return no_comptime_short_circuit;
  1880. }
  1881. case_end;
  1882. case_ast_node(be, BinaryExpr, cond);
  1883. if (be->op.kind == Token_CmpAnd) {
  1884. TB_Node *block = cg_control_region(p, "cmp_and");
  1885. cg_build_cond(p, be->left, block, false_block);
  1886. tb_inst_set_control(p->func, block);
  1887. cg_build_cond(p, be->right, true_block, false_block);
  1888. return no_comptime_short_circuit;
  1889. } else if (be->op.kind == Token_CmpOr) {
  1890. TB_Node *block = cg_control_region(p, "cmp_or");
  1891. cg_build_cond(p, be->left, true_block, block);
  1892. tb_inst_set_control(p->func, block);
  1893. cg_build_cond(p, be->right, true_block, false_block);
  1894. return no_comptime_short_circuit;
  1895. }
  1896. case_end;
  1897. }
  1898. cgValue v = {};
  1899. if (cg_is_expr_untyped_const(cond)) {
  1900. v = cg_expr_untyped_const_to_typed(p, cond, t_bool);
  1901. } else {
  1902. v = cg_build_expr(p, cond);
  1903. }
  1904. cg_emit_if(p, v, true_block, false_block);
  1905. return v;
  1906. }
  1907. gb_internal cgValue cg_build_expr_internal(cgProcedure *p, Ast *expr);
  1908. gb_internal cgValue cg_build_expr(cgProcedure *p, Ast *expr) {
  1909. cg_set_debug_pos_from_node(p, expr);
  1910. u16 prev_state_flags = p->state_flags;
  1911. defer (p->state_flags = prev_state_flags);
  1912. if (expr->state_flags != 0) {
  1913. u16 in = expr->state_flags;
  1914. u16 out = p->state_flags;
  1915. if (in & StateFlag_bounds_check) {
  1916. out |= StateFlag_bounds_check;
  1917. out &= ~StateFlag_no_bounds_check;
  1918. } else if (in & StateFlag_no_bounds_check) {
  1919. out |= StateFlag_no_bounds_check;
  1920. out &= ~StateFlag_bounds_check;
  1921. }
  1922. if (in & StateFlag_type_assert) {
  1923. out |= StateFlag_type_assert;
  1924. out &= ~StateFlag_no_type_assert;
  1925. } else if (in & StateFlag_no_type_assert) {
  1926. out |= StateFlag_no_type_assert;
  1927. out &= ~StateFlag_type_assert;
  1928. }
  1929. p->state_flags = out;
  1930. }
  1931. // IMPORTANT NOTE(bill):
  1932. // Selector Call Expressions (foo->bar(...))
  1933. // must only evaluate `foo` once as it gets transformed into
  1934. // `foo.bar(foo, ...)`
  1935. // And if `foo` is a procedure call or something more complex, storing the value
  1936. // once is a very good idea
  1937. // If a stored value is found, it must be removed from the cache
  1938. if (expr->state_flags & StateFlag_SelectorCallExpr) {
  1939. // cgValue *pp = map_get(&p->selector_values, expr);
  1940. // if (pp != nullptr) {
  1941. // cgValue res = *pp;
  1942. // map_remove(&p->selector_values, expr);
  1943. // return res;
  1944. // }
  1945. // cgAddr *pa = map_get(&p->selector_addr, expr);
  1946. // if (pa != nullptr) {
  1947. // cgAddr res = *pa;
  1948. // map_remove(&p->selector_addr, expr);
  1949. // return cg_addr_load(p, res);
  1950. // }
  1951. }
  1952. cgValue res = cg_build_expr_internal(p, expr);
  1953. if (res.kind == cgValue_Symbol) {
  1954. GB_ASSERT(is_type_internally_pointer_like(res.type));
  1955. res = cg_value(tb_inst_get_symbol_address(p->func, res.symbol), res.type);
  1956. }
  1957. if (expr->state_flags & StateFlag_SelectorCallExpr) {
  1958. // map_set(&p->selector_values, expr, res);
  1959. }
  1960. return res;
  1961. }
  1962. gb_internal cgValue cg_find_ident(cgProcedure *p, Entity *e, Ast *expr) {
  1963. cgAddr *found_addr = map_get(&p->variable_map, e);
  1964. if (found_addr) {
  1965. return cg_addr_load(p, *found_addr);
  1966. }
  1967. cgValue *found = nullptr;
  1968. rw_mutex_shared_lock(&p->module->values_mutex);
  1969. found = map_get(&p->module->values, e);
  1970. rw_mutex_shared_unlock(&p->module->values_mutex);
  1971. if (found) {
  1972. auto v = *found;
  1973. // NOTE(bill): This is because pointers are already pointers in LLVM
  1974. if (is_type_proc(v.type)) {
  1975. return v;
  1976. }
  1977. return cg_emit_load(p, v);
  1978. } else if (e != nullptr && e->kind == Entity_Variable) {
  1979. return cg_addr_load(p, cg_build_addr(p, expr));
  1980. }
  1981. if (e->kind == Entity_Procedure) {
  1982. return cg_find_procedure_value_from_entity(p->module, e);
  1983. }
  1984. String pkg = {};
  1985. if (e->pkg) {
  1986. pkg = e->pkg->name;
  1987. }
  1988. gb_printf_err("Error in: %s\n", token_pos_to_string(ast_token(expr).pos));
  1989. GB_PANIC("nullptr value for expression from identifier: %.*s.%.*s (%p) : %s @ %p", LIT(pkg), LIT(e->token.string), e, type_to_string(e->type), expr);
  1990. return {};
  1991. }
// Lowers a compound literal (e.g. `[3]int{1, 2, 3}`, `Foo{x = 1}`) into a
// freshly allocated local — or global storage when emitted during module
// startup — and returns the address of that slot.
cgAddr cg_build_addr_compound_lit(cgProcedure *p, Ast *expr) {
	// Scratch record for one element of an array-like literal: the evaluated
	// value, the destination index (plus a length when a `lo..hi` range
	// field-value expands to many slots), and the element pointer (`gep`)
	// computed in a later pass.
	struct cgCompoundLitElemTempData {
		Ast *   expr;
		cgValue value;
		i64     elem_index;
		i64     elem_length;
		cgValue gep;
	};

	// Evaluates each literal element into `temp_data`, recording where it must
	// be stored. Handles positional elements, `index = value` field-values,
	// and `lo..hi = value` / `lo..<hi = value` range field-values.
	auto const &populate = [](cgProcedure *p, Slice<Ast *> const &elems, Array<cgCompoundLitElemTempData> *temp_data, Type *compound_type) {
		Type *bt = base_type(compound_type);
		Type *et = nullptr;
		switch (bt->kind) {
		case Type_Array:           et = bt->Array.elem;           break;
		case Type_EnumeratedArray: et = bt->EnumeratedArray.elem; break;
		case Type_Slice:           et = bt->Slice.elem;           break;
		case Type_BitSet:          et = bt->BitSet.elem;          break;
		case Type_DynamicArray:    et = bt->DynamicArray.elem;    break;
		case Type_SimdVector:      et = bt->SimdVector.elem;      break;
		case Type_Matrix:          et = bt->Matrix.elem;          break;
		}
		GB_ASSERT(et != nullptr);

		// NOTE(bill): Separate value, gep, store into their own chunks
		for_array(i, elems) {
			Ast *elem = elems[i];
			if (elem->kind == Ast_FieldValue) {
				ast_node(fv, FieldValue, elem);
				if (is_ast_range(fv->field)) {
					// `lo .. hi = value` — both bounds must be constants.
					ast_node(ie, BinaryExpr, fv->field);
					TypeAndValue lo_tav = ie->left->tav;
					TypeAndValue hi_tav = ie->right->tav;
					GB_ASSERT(lo_tav.mode == Addressing_Constant);
					GB_ASSERT(hi_tav.mode == Addressing_Constant);

					TokenKind op = ie->op.kind;
					i64 lo = exact_value_to_i64(lo_tav.value);
					i64 hi = exact_value_to_i64(hi_tav.value);
					if (op != Token_RangeHalf) {
						// full range `..` includes the upper bound
						hi += 1;
					}
					// The value is evaluated once and reused for every index
					// in the range.
					cgValue value = cg_emit_conv(p, cg_build_expr(p, fv->value), et);
					GB_ASSERT((hi-lo) > 0);

					if (bt->kind == Type_Matrix) {
						GB_PANIC("TODO(bill): Type_Matrix");
						// for (i64 k = lo; k < hi; k++) {
						// 	cgCompoundLitElemTempData data = {};
						// 	data.value = value;
						// 	data.elem_index = matrix_row_major_index_to_offset(bt, k);
						// 	array_add(temp_data, data);
						// }
					} else {
						// Small ranges are expanded into one record per slot;
						// larger ranges are kept as a single [index, length)
						// record and filled with a loop at store time.
						enum {MAX_ELEMENT_AMOUNT = 32};
						if ((hi-lo) <= MAX_ELEMENT_AMOUNT) {
							for (i64 k = lo; k < hi; k++) {
								cgCompoundLitElemTempData data = {};
								data.value = value;
								data.elem_index = k;
								array_add(temp_data, data);
							}
						} else {
							cgCompoundLitElemTempData data = {};
							data.value = value;
							data.elem_index = lo;
							data.elem_length = hi-lo;
							array_add(temp_data, data);
						}
					}
				} else {
					// `index = value` with a single constant index.
					auto tav = fv->field->tav;
					GB_ASSERT(tav.mode == Addressing_Constant);
					i64 index = exact_value_to_i64(tav.value);

					cgValue value = cg_emit_conv(p, cg_build_expr(p, fv->value), et);
					GB_ASSERT(!is_type_tuple(value.type));

					cgCompoundLitElemTempData data = {};
					data.value = value;
					data.expr = fv->value;
					if (bt->kind == Type_Matrix) {
						GB_PANIC("TODO(bill): Type_Matrix");
						// data.elem_index = matrix_row_major_index_to_offset(bt, index);
					} else {
						data.elem_index = index;
					}
					array_add(temp_data, data);
				}
			} else {
				// Positional element: destination index is the element's
				// position in the literal.
				// if (bt->kind != Type_DynamicArray && lb_is_elem_const(elem, et)) {
				// 	continue;
				// }
				cgValue field_expr = cg_build_expr(p, elem);
				GB_ASSERT(!is_type_tuple(field_expr.type));

				cgValue ev = cg_emit_conv(p, field_expr, et);

				cgCompoundLitElemTempData data = {};
				data.value = ev;
				if (bt->kind == Type_Matrix) {
					GB_PANIC("TODO(bill): Type_Matrix");
					// data.elem_index = matrix_row_major_index_to_offset(bt, i);
				} else {
					data.elem_index = i;
				}
				array_add(temp_data, data);
			}
		}
	};

	// Second pass: stores each recorded value through its precomputed element
	// pointer. Records with elem_length > 0 (large ranges) would need a
	// runtime fill loop, which is not ported yet.
	auto const &assign_array = [](cgProcedure *p, Array<cgCompoundLitElemTempData> const &temp_data) {
		for (auto const &td : temp_data) if (td.value.node != nullptr) {
			if (td.elem_length > 0) {
				GB_PANIC("TODO(bill): range");
				// auto loop_data = cg_loop_start(p, cast(isize)td.elem_length, t_i32);
				// {
				// 	cgValue dst = td.gep;
				// 	dst = cg_emit_ptr_offset(p, dst, loop_data.idx);
				// 	cg_emit_store(p, dst, td.value);
				// }
				// cg_loop_end(p, loop_data);
			} else {
				cg_emit_store(p, td.gep, td.value);
			}
		}
	};

	ast_node(cl, CompoundLit, expr);

	Type *type = type_of_expr(expr);
	Type *bt = base_type(type);

	cgAddr v = {};
	if (p->is_startup) {
		// Literals built while emitting startup code live in global storage.
		v = cg_add_global(p, type, nullptr);
	} else {
		v = cg_add_local(p, type, nullptr, true);
	}

	if (cl->elems.count == 0) {
		// No need to create it
		return v;
	}

	TEMPORARY_ALLOCATOR_GUARD();

	// NOTE(review): this `et` mirrors the lookup inside `populate` but appears
	// unused in the cases below — kept as-is.
	Type *et = nullptr;
	switch (bt->kind) {
	case Type_Array:           et = bt->Array.elem;           break;
	case Type_EnumeratedArray: et = bt->EnumeratedArray.elem; break;
	case Type_Slice:           et = bt->Slice.elem;           break;
	case Type_BitSet:          et = bt->BitSet.elem;          break;
	case Type_SimdVector:      et = bt->SimdVector.elem;      break;
	case Type_Matrix:          et = bt->Matrix.elem;          break;
	}

	// Procedure name and position, used for runtime-call source locations.
	String proc_name = {};
	if (p->entity) {
		proc_name = p->entity->token.string;
	}
	TokenPos pos = ast_token(expr).pos;

	switch (bt->kind) {
	default: GB_PANIC("Unknown CompoundLit type: %s", type_to_string(type)); break;

	case Type_Struct: {
		// Each element is stored through a field pointer into the struct slot.
		TypeStruct *st = &bt->Struct;
		cgValue comp_lit_ptr = cg_addr_get_ptr(p, v);

		for_array(field_index, cl->elems) {
			Ast *elem = cl->elems[field_index];

			cgValue field_expr = {};
			Entity *field = nullptr;
			isize index = field_index;

			if (elem->kind == Ast_FieldValue) {
				// `name = value`: resolve the named field; nested selections
				// (through `using` fields) are stored via a deep GEP.
				ast_node(fv, FieldValue, elem);
				String name = fv->field->Ident.token.string;
				Selection sel = lookup_field(bt, name, false);
				GB_ASSERT(!sel.indirect);

				elem = fv->value;
				if (sel.index.count > 1) {
					cgValue dst = cg_emit_deep_field_gep(p, comp_lit_ptr, sel);
					field_expr = cg_build_expr(p, elem);
					field_expr = cg_emit_conv(p, field_expr, sel.entity->type);
					cg_emit_store(p, dst, field_expr);
					continue;
				}

				index = sel.index[0];
			} else {
				// Positional element: map the source order to the field index.
				Selection sel = lookup_field_from_index(bt, st->fields[field_index]->Variable.field_index);
				GB_ASSERT(sel.index.count == 1);
				GB_ASSERT(!sel.indirect);
				index = sel.index[0];
			}

			field = st->fields[index];
			Type *ft = field->type;

			field_expr = cg_build_expr(p, elem);

			cgValue gep = {};
			if (st->is_raw_union) {
				// raw_union: all fields alias offset 0 — just reinterpret the
				// struct pointer as a pointer to the field type.
				gep = cg_emit_conv(p, comp_lit_ptr, alloc_type_pointer(ft));
			} else {
				gep = cg_emit_struct_ep(p, comp_lit_ptr, cast(i32)index);
			}

			Type *fet = field_expr.type;
			GB_ASSERT(fet->kind != Type_Tuple);

			// HACK TODO(bill): THIS IS A MASSIVE HACK!!!!
			if (is_type_union(ft) && !are_types_identical(fet, ft) && !is_type_untyped(fet)) {
				GB_ASSERT_MSG(union_variant_index(ft, fet) >= 0, "%s", type_to_string(fet));
				GB_PANIC("TODO(bill): cg_emit_store_union_variant");
				// cg_emit_store_union_variant(p, gep, field_expr, fet);
			} else {
				cgValue fv = cg_emit_conv(p, field_expr, ft);
				cg_emit_store(p, gep, fv);
			}
		}
		return v;
	}

	case Type_Map: {
		// Map literals need the dynamic-literal runtime; not ported yet.
		GB_ASSERT(!build_context.no_dynamic_literals);
		GB_PANIC("TODO(bill): map literals");
		// cgValue err = cg_dynamic_map_reserve(p, v.addr, 2*cl->elems.count, pos);
		// gb_unused(err);
		// for (Ast *elem : cl->elems) {
		// 	ast_node(fv, FieldValue, elem);
		// 	cgValue key   = cg_build_expr(p, fv->field);
		// 	cgValue value = cg_build_expr(p, fv->value);
		// 	cg_internal_dynamic_map_set(p, v.addr, type, key, value, elem);
		// }
		break;
	}

	case Type_Array: {
		// Evaluate elements, compute each element pointer, then store.
		auto temp_data = array_make<cgCompoundLitElemTempData>(temporary_allocator(), 0, cl->elems.count);
		populate(p, cl->elems, &temp_data, type);

		cgValue dst_ptr = cg_addr_get_ptr(p, v);
		for_array(i, temp_data) {
			i32 index = cast(i32)(temp_data[i].elem_index);
			temp_data[i].gep = cg_emit_array_epi(p, dst_ptr, index);
		}
		assign_array(p, temp_data);
		break;
	}

	case Type_EnumeratedArray: {
		// Same as Type_Array, but indices are rebased by the enum's minimum
		// value so that storage starts at slot 0.
		auto temp_data = array_make<cgCompoundLitElemTempData>(temporary_allocator(), 0, cl->elems.count);
		populate(p, cl->elems, &temp_data, type);

		cgValue dst_ptr = cg_addr_get_ptr(p, v);
		i64 index_offset = exact_value_to_i64(*bt->EnumeratedArray.min_value);
		for_array(i, temp_data) {
			i32 index = cast(i32)(temp_data[i].elem_index - index_offset);
			temp_data[i].gep = cg_emit_array_epi(p, dst_ptr, index);
		}
		assign_array(p, temp_data);
		break;
	}

	case Type_Slice: {
		// Allocate backing storage for the slice elements (global during
		// startup, stack local otherwise), fill it, then point the slice
		// header at it.
		isize count = gb_max(cl->elems.count, cl->max_count);
		TB_CharUnits backing_size = cast(TB_CharUnits)(type_size_of(bt->Slice.elem) * count);
		TB_CharUnits align = cast(TB_CharUnits)type_align_of(bt->Slice.elem);

		TB_Node *backing = nullptr;
		if (p->is_startup) {
			TB_Global *global = tb_global_create(p->module->mod, 0, "", nullptr, TB_LINKAGE_PRIVATE);
			tb_global_set_storage(p->module->mod, tb_module_get_data(p->module->mod), global, backing_size, align, 0);
			backing = tb_inst_get_symbol_address(p->func, cast(TB_Symbol *)global);
		} else {
			backing = tb_inst_local(p->func, backing_size, align);
		}

		cgValue data = cg_value(backing, alloc_type_multi_pointer(bt->Slice.elem));

		auto temp_data = array_make<cgCompoundLitElemTempData>(temporary_allocator(), 0, cl->elems.count);
		populate(p, cl->elems, &temp_data, type);

		for_array(i, temp_data) {
			temp_data[i].gep = cg_emit_ptr_offset(p, data, cg_const_int(p, t_int, temp_data[i].elem_index));
		}
		assign_array(p, temp_data);

		cg_fill_slice(p, v, data, cg_const_int(p, t_int, count));
		return v;
	}

	case Type_DynamicArray: {
		// Reserve capacity, build the elements in a temporary fixed array,
		// then append them through the runtime.
		GB_ASSERT(!build_context.no_dynamic_literals);

		Type *et = bt->DynamicArray.elem;
		cgValue size  = cg_const_int(p, t_int, type_size_of(et));
		cgValue align = cg_const_int(p, t_int, type_align_of(et));

		i64 item_count = gb_max(cl->max_count, cl->elems.count);
		{
			auto args = slice_make<cgValue>(temporary_allocator(), 5);
			args[0] = cg_emit_conv(p, cg_addr_get_ptr(p, v), t_rawptr);
			args[1] = size;
			args[2] = align;
			args[3] = cg_const_int(p, t_int, item_count);
			args[4] = cg_emit_source_code_location_as_global(p, proc_name, pos);
			cg_emit_runtime_call(p, "__dynamic_array_reserve", args);
		}

		Type *array_type = alloc_type_array(et, item_count);
		cgAddr items_addr = cg_add_local(p, array_type, nullptr, true);
		cgValue items = cg_addr_get_ptr(p, items_addr);

		auto temp_data = array_make<cgCompoundLitElemTempData>(temporary_allocator(), 0, cl->elems.count);
		populate(p, cl->elems, &temp_data, type);

		for_array(i, temp_data) {
			temp_data[i].gep = cg_emit_array_epi(p, items, temp_data[i].elem_index);
		}
		assign_array(p, temp_data);

		{
			auto args = slice_make<cgValue>(temporary_allocator(), 6);
			args[0] = cg_emit_conv(p, v.addr, t_rawptr);
			args[1] = size;
			args[2] = align;
			args[3] = cg_emit_conv(p, items, t_rawptr);
			args[4] = cg_const_int(p, t_int, item_count);
			args[5] = cg_emit_source_code_location_as_global(p, proc_name, pos);
			cg_emit_runtime_call(p, "__dynamic_array_append", args);
		}
		break;
	}

	case Type_Basic: {
		// Only `any` literals reach here: two fields, `data` and `id`.
		GB_ASSERT(is_type_any(bt));
		String field_names[2] = {
			str_lit("data"),
			str_lit("id"),
		};
		Type *field_types[2] = {
			t_rawptr,
			t_typeid,
		};

		for_array(field_index, cl->elems) {
			Ast *elem = cl->elems[field_index];

			cgValue field_expr = {};
			isize index = field_index;

			if (elem->kind == Ast_FieldValue) {
				ast_node(fv, FieldValue, elem);
				Selection sel = lookup_field(bt, fv->field->Ident.token.string, false);
				index = sel.index[0];
				elem = fv->value;
			} else {
				// NOTE(review): `tav` is computed but unused here — kept as-is.
				TypeAndValue tav = type_and_value_of_expr(elem);
				Selection sel = lookup_field(bt, field_names[field_index], false);
				index = sel.index[0];
			}

			field_expr = cg_build_expr(p, elem);

			GB_ASSERT(field_expr.type->kind != Type_Tuple);

			Type *ft = field_types[index];
			cgValue fv = cg_emit_conv(p, field_expr, ft);
			cgValue gep = cg_emit_struct_ep(p, cg_addr_get_ptr(p, v), index);
			cg_emit_store(p, gep, fv);
		}
		break;
	}

	case Type_BitSet: {
		// OR each element's bit into the (zero-initialized) destination via
		// the bit set's integer representation.
		i64 sz = type_size_of(type);
		if (sz == 0) {
			return v;
		}
		cgValue lower = cg_const_value(p, t_int, exact_value_i64(bt->BitSet.lower));
		Type *it = bit_set_to_int(bt);
		cgValue one = cg_const_value(p, it, exact_value_i64(1));
		for (Ast *elem : cl->elems) {
			GB_ASSERT(elem->kind != Ast_FieldValue);

			cgValue expr = cg_build_expr(p, elem);
			GB_ASSERT(expr.type->kind != Type_Tuple);

			// bit = 1 << (elem - lower)
			cgValue e = cg_emit_conv(p, expr, it);
			e = cg_emit_arith(p, Token_Sub, e, lower, it);
			e = cg_emit_arith(p, Token_Shl, one, e, it);

			cgValue old_value = cg_emit_transmute(p, cg_addr_load(p, v), it);
			cgValue new_value = cg_emit_arith(p, Token_Or, old_value, e, it);
			new_value = cg_emit_transmute(p, new_value, type);
			cg_addr_store(p, v, new_value);
		}
		return v;
	}

	case Type_Matrix: {
		// NOTE(review): `populate` currently panics for Type_Matrix elements,
		// so this path is only partially implemented.
		auto temp_data = array_make<cgCompoundLitElemTempData>(temporary_allocator(), 0, cl->elems.count);
		populate(p, cl->elems, &temp_data, type);

		cgValue dst_ptr = cg_addr_get_ptr(p, v);
		for_array(i, temp_data) {
			temp_data[i].gep = cg_emit_array_epi(p, dst_ptr, temp_data[i].elem_index);
		}
		assign_array(p, temp_data);
		break;
	}

	case Type_SimdVector: {
		// TODO: not ported from the LLVM backend yet; the literal stays
		// zero-initialized.
		// auto temp_data = array_make<cgCompoundLitElemTempData>(temporary_allocator(), 0, cl->elems.count);
		// populate(p, cl->elems, &temp_data, type);
		// // TODO(bill): reduce the need for individual `insertelement` if a `shufflevector`
		// // might be a better option
		// for (auto const &td : temp_data) if (td.value.node != nullptr) {
		// 	if (td.elem_length > 0) {
		// 		for (i64 k = 0; k < td.elem_length; k++) {
		// 			LLVMValueRef index = cg_const_int(p->module, t_u32, td.elem_index + k).value;
		// 			vector_value.value = LLVMBuildInsertElement(p->builder, vector_value.value, td.value.value, index, "");
		// 		}
		// 	} else {
		// 		LLVMValueRef index = cg_const_int(p->module, t_u32, td.elem_index).value;
		// 		vector_value.value = LLVMBuildInsertElement(p->builder, vector_value.value, td.value.value, index, "");
		// 	}
		// }
		break;
	}
	}

	return v;
}
// Lowers a unary address-of expression `&expr`. Most operands simply get
// their address built; special forms are handled (or stubbed) first:
//   - `&m[k]`   where the result is an optional-ok pointer tuple   (TODO)
//   - `&soa[i]` producing an #soa pointer                          (TODO)
//   - `&T{...}` compound literal: build the literal, return its slot address
//   - `&v.(T)`  address of a type assertion                        (TODO)
// The commented-out bodies are the LLVM backend (`lb_*`) implementations,
// kept as a porting reference for the TODO paths.
gb_internal cgValue cg_build_unary_and(cgProcedure *p, Ast *expr) {
	ast_node(ue, UnaryExpr, expr);
	auto tv = type_and_value_of_expr(expr);

	Ast *ue_expr = unparen_expr(ue->expr);
	if (ue_expr->kind == Ast_IndexExpr && tv.mode == Addressing_OptionalOkPtr && is_type_tuple(tv.type)) {
		// `&m[k]` on a map: yields `(^Value, bool)` — not ported yet.
		GB_PANIC("TODO(bill): &m[k]");
		// Type *tuple = tv.type;
		// Type *map_type = type_of_expr(ue_expr->IndexExpr.expr);
		// Type *ot = base_type(map_type);
		// Type *t = base_type(type_deref(ot));
		// bool deref = t != ot;
		// GB_ASSERT(t->kind == Type_Map);
		// ast_node(ie, IndexExpr, ue_expr);
		// cgValue map_val = cg_build_addr_ptr(p, ie->expr);
		// if (deref) {
		// 	map_val = cg_emit_load(p, map_val);
		// }
		// cgValue key = lb_build_expr(p, ie->index);
		// key = lb_emit_conv(p, key, t->Map.key);
		// lbAddr addr = lb_addr_map(map_val, key, t, alloc_type_pointer(t->Map.value));
		// lbValue ptr = lb_addr_get_ptr(p, addr);
		// lbValue ok = lb_emit_comp_against_nil(p, Token_NotEq, ptr);
		// ok = lb_emit_conv(p, ok, tuple->Tuple.variables[1]->type);
		// lbAddr res = lb_add_local_generated(p, tuple, false);
		// lbValue gep0 = lb_emit_struct_ep(p, res.addr, 0);
		// lbValue gep1 = lb_emit_struct_ep(p, res.addr, 1);
		// lb_emit_store(p, gep0, ptr);
		// lb_emit_store(p, gep1, ok);
		// return lb_addr_load(p, res);
	} else if (is_type_soa_pointer(tv.type)) {
		// `&soa[i]`: an #soa pointer is (array ptr, index) — not ported yet.
		GB_PANIC("TODO(bill): &soa[i]");
		// ast_node(ie, IndexExpr, ue_expr);
		// lbValue addr = lb_build_addr_ptr(p, ie->expr);
		// lbValue index = lb_build_expr(p, ie->index);
		// if (!build_context.no_bounds_check) {
		// 	// TODO(bill): soa bounds checking
		// }
		// return lb_make_soa_pointer(p, tv.type, addr, index);
	} else if (ue_expr->kind == Ast_CompoundLit) {
		// `&T{...}`: materialize the literal in a slot and return the slot.
		cgAddr addr = cg_build_addr_compound_lit(p, expr);
		return addr.addr;
	} else if (ue_expr->kind == Ast_TypeAssertion) {
		// `&v.(T)` / `&v.?`: not ported yet.
		GB_PANIC("TODO(bill): &v.(T)");
		// if (is_type_tuple(tv.type)) {
		// 	Type *tuple = tv.type;
		// 	Type *ptr_type = tuple->Tuple.variables[0]->type;
		// 	Type *ok_type = tuple->Tuple.variables[1]->type;
		// 	ast_node(ta, TypeAssertion, ue_expr);
		// 	TokenPos pos = ast_token(expr).pos;
		// 	Type *type = type_of_expr(ue_expr);
		// 	GB_ASSERT(!is_type_tuple(type));
		// 	lbValue e = lb_build_expr(p, ta->expr);
		// 	Type *t = type_deref(e.type);
		// 	if (is_type_union(t)) {
		// 		lbValue v = e;
		// 		if (!is_type_pointer(v.type)) {
		// 			v = lb_address_from_load_or_generate_local(p, v);
		// 		}
		// 		Type *src_type = type_deref(v.type);
		// 		Type *dst_type = type;
		// 		lbValue src_tag = {};
		// 		lbValue dst_tag = {};
		// 		if (is_type_union_maybe_pointer(src_type)) {
		// 			src_tag = lb_emit_comp_against_nil(p, Token_NotEq, v);
		// 			dst_tag = lb_const_bool(p->module, t_bool, true);
		// 		} else {
		// 			src_tag = lb_emit_load(p, lb_emit_union_tag_ptr(p, v));
		// 			dst_tag = lb_const_union_tag(p->module, src_type, dst_type);
		// 		}
		// 		lbValue ok = lb_emit_comp(p, Token_CmpEq, src_tag, dst_tag);
		// 		lbValue data_ptr = lb_emit_conv(p, v, ptr_type);
		// 		lbAddr res = lb_add_local_generated(p, tuple, true);
		// 		lbValue gep0 = lb_emit_struct_ep(p, res.addr, 0);
		// 		lbValue gep1 = lb_emit_struct_ep(p, res.addr, 1);
		// 		lb_emit_store(p, gep0, lb_emit_select(p, ok, data_ptr, lb_const_nil(p->module, ptr_type)));
		// 		lb_emit_store(p, gep1, lb_emit_conv(p, ok, ok_type));
		// 		return lb_addr_load(p, res);
		// 	} else if (is_type_any(t)) {
		// 		lbValue v = e;
		// 		if (is_type_pointer(v.type)) {
		// 			v = lb_emit_load(p, v);
		// 		}
		// 		lbValue data_ptr = lb_emit_conv(p, lb_emit_struct_ev(p, v, 0), ptr_type);
		// 		lbValue any_id = lb_emit_struct_ev(p, v, 1);
		// 		lbValue id = lb_typeid(p->module, type);
		// 		lbValue ok = lb_emit_comp(p, Token_CmpEq, any_id, id);
		// 		lbAddr res = lb_add_local_generated(p, tuple, false);
		// 		lbValue gep0 = lb_emit_struct_ep(p, res.addr, 0);
		// 		lbValue gep1 = lb_emit_struct_ep(p, res.addr, 1);
		// 		lb_emit_store(p, gep0, lb_emit_select(p, ok, data_ptr, lb_const_nil(p->module, ptr_type)));
		// 		lb_emit_store(p, gep1, lb_emit_conv(p, ok, ok_type));
		// 		return lb_addr_load(p, res);
		// 	} else {
		// 		GB_PANIC("TODO(bill): type assertion %s", type_to_string(type));
		// 	}
		// } else {
		// 	GB_ASSERT(is_type_pointer(tv.type));
		// 	ast_node(ta, TypeAssertion, ue_expr);
		// 	TokenPos pos = ast_token(expr).pos;
		// 	Type *type = type_of_expr(ue_expr);
		// 	GB_ASSERT(!is_type_tuple(type));
		// 	lbValue e = lb_build_expr(p, ta->expr);
		// 	Type *t = type_deref(e.type);
		// 	if (is_type_union(t)) {
		// 		lbValue v = e;
		// 		if (!is_type_pointer(v.type)) {
		// 			v = lb_address_from_load_or_generate_local(p, v);
		// 		}
		// 		Type *src_type = type_deref(v.type);
		// 		Type *dst_type = type;
		// 		if ((p->state_flags & StateFlag_no_type_assert) == 0) {
		// 			lbValue src_tag = {};
		// 			lbValue dst_tag = {};
		// 			if (is_type_union_maybe_pointer(src_type)) {
		// 				src_tag = lb_emit_comp_against_nil(p, Token_NotEq, v);
		// 				dst_tag = lb_const_bool(p->module, t_bool, true);
		// 			} else {
		// 				src_tag = lb_emit_load(p, lb_emit_union_tag_ptr(p, v));
		// 				dst_tag = lb_const_union_tag(p->module, src_type, dst_type);
		// 			}
		// 			isize arg_count = 6;
		// 			if (build_context.no_rtti) {
		// 				arg_count = 4;
		// 			}
		// 			lbValue ok = lb_emit_comp(p, Token_CmpEq, src_tag, dst_tag);
		// 			auto args = array_make<lbValue>(permanent_allocator(), arg_count);
		// 			args[0] = ok;
		// 			args[1] = lb_find_or_add_entity_string(p->module, get_file_path_string(pos.file_id));
		// 			args[2] = lb_const_int(p->module, t_i32, pos.line);
		// 			args[3] = lb_const_int(p->module, t_i32, pos.column);
		// 			if (!build_context.no_rtti) {
		// 				args[4] = lb_typeid(p->module, src_type);
		// 				args[5] = lb_typeid(p->module, dst_type);
		// 			}
		// 			lb_emit_runtime_call(p, "type_assertion_check", args);
		// 		}
		// 		lbValue data_ptr = v;
		// 		return lb_emit_conv(p, data_ptr, tv.type);
		// 	} else if (is_type_any(t)) {
		// 		lbValue v = e;
		// 		if (is_type_pointer(v.type)) {
		// 			v = lb_emit_load(p, v);
		// 		}
		// 		lbValue data_ptr = lb_emit_struct_ev(p, v, 0);
		// 		if ((p->state_flags & StateFlag_no_type_assert) == 0) {
		// 			GB_ASSERT(!build_context.no_rtti);
		// 			lbValue any_id = lb_emit_struct_ev(p, v, 1);
		// 			lbValue id = lb_typeid(p->module, type);
		// 			lbValue ok = lb_emit_comp(p, Token_CmpEq, any_id, id);
		// 			auto args = array_make<lbValue>(permanent_allocator(), 6);
		// 			args[0] = ok;
		// 			args[1] = lb_find_or_add_entity_string(p->module, get_file_path_string(pos.file_id));
		// 			args[2] = lb_const_int(p->module, t_i32, pos.line);
		// 			args[3] = lb_const_int(p->module, t_i32, pos.column);
		// 			args[4] = any_id;
		// 			args[5] = id;
		// 			lb_emit_runtime_call(p, "type_assertion_check", args);
		// 		}
		// 		return lb_emit_conv(p, data_ptr, tv.type);
		// 	} else {
		// 		GB_PANIC("TODO(bill): type assertion %s", type_to_string(type));
		// 	}
		// }
	}

	// Default: build the address of the operand expression.
	return cg_build_addr_ptr(p, ue->expr);
}
// Lowers a union type assertion (`v.(T)` / `v.?`): extracts variant `type`
// from the union `value` (possibly given behind a pointer). When `type` is a
// tuple, this is the `(value, ok)` optional-ok form; otherwise it is the
// single-value form, which calls the runtime assertion check (panicking on
// mismatch) unless type asserts are disabled via state flags.
gb_internal cgValue cg_emit_cast_union(cgProcedure *p, cgValue value, Type *type, TokenPos pos) {
	Type *src_type = value.type;
	bool is_ptr = is_type_pointer(src_type);

	// Normalize the destination into the `(value, ok)` tuple shape so both
	// forms can share the machinery below.
	bool is_tuple = true;
	Type *tuple = type;
	if (type->kind != Type_Tuple) {
		is_tuple = false;
		tuple = make_optional_ok_type(type);
	}

	if (is_ptr) {
		value = cg_emit_load(p, value);
	}
	Type *src = base_type(type_deref(src_type));
	GB_ASSERT_MSG(is_type_union(src), "%s", type_to_string(src_type));

	Type *dst = tuple->Tuple.variables[0]->type;

	// Need an addressable copy of the union so its tag/data can be inspected.
	cgValue value_ = cg_address_from_load_or_generate_local(p, value);

	if ((p->state_flags & StateFlag_no_type_assert) != 0 && !is_tuple) {
		// just do a bit cast of the data at the front
		cgValue ptr = cg_emit_conv(p, value_, alloc_type_pointer(type));
		return cg_emit_load(p, ptr);
	}

	cgValue tag = {};
	cgValue dst_tag = {};
	cgValue cond = {};
	cgValue data = {};

	// Locals holding the resulting `(value, ok)` pair; presumably
	// zero-initialized by cg_add_local(..., true), so the failure path yields
	// (zero value, false) — TODO confirm.
	cgValue gep0 = cg_add_local(p, tuple->Tuple.variables[0]->type, nullptr, true).addr;
	cgValue gep1 = cg_add_local(p, tuple->Tuple.variables[1]->type, nullptr, true).addr;

	if (is_type_union_maybe_pointer(src)) {
		// Pointer-like unions carry no tag; a non-nil payload means "matches".
		data = cg_emit_load(p, cg_emit_conv(p, value_, gep0.type));
	} else {
		tag = cg_emit_load(p, cg_emit_union_tag_ptr(p, value_));
		dst_tag = cg_const_union_tag(p, src, dst);
	}

	TB_Node *ok_block = cg_control_region(p, "union_cast_ok");
	TB_Node *end_block = cg_control_region(p, "union_cast_end");

	// Condition: payload non-nil (maybe-pointer union) or tag equality.
	if (data.node != nullptr) {
		GB_ASSERT(is_type_union_maybe_pointer(src));
		cond = cg_emit_comp_against_nil(p, Token_NotEq, data);
	} else {
		cond = cg_emit_comp(p, Token_CmpEq, tag, dst_tag);
	}

	cg_emit_if(p, cond, ok_block, end_block);
	tb_inst_set_control(p->func, ok_block);

	// Match: store the payload (loaded via a reinterpreted pointer when not
	// already loaded above) and ok = true.
	if (data.node == nullptr) {
		data = cg_emit_load(p, cg_emit_conv(p, value_, gep0.type));
	}
	cg_emit_store(p, gep0, data);
	cg_emit_store(p, gep1, cg_const_bool(p, t_bool, true));

	cg_emit_goto(p, end_block);
	tb_inst_set_control(p->func, end_block);

	if (!is_tuple) {
		GB_ASSERT((p->state_flags & StateFlag_no_type_assert) == 0);
		// NOTE(bill): Panic on invalid conversion
		Type *dst_type = tuple->Tuple.variables[0]->type;

		// Without RTTI only (ok, file, line, column) are passed to the check.
		isize arg_count = 7;
		if (build_context.no_rtti) {
			arg_count = 4;
		}

		cgValue ok = cg_emit_load(p, gep1);
		auto args = slice_make<cgValue>(permanent_allocator(), arg_count);
		args[0] = ok;
		args[1] = cg_const_string(p, t_string, get_file_path_string(pos.file_id));
		args[2] = cg_const_int(p, t_i32, pos.line);
		args[3] = cg_const_int(p, t_i32, pos.column);
		if (!build_context.no_rtti) {
			args[4] = cg_typeid(p, src_type);
			args[5] = cg_typeid(p, dst_type);
			args[6] = cg_emit_conv(p, value_, t_rawptr);
		}
		cg_emit_runtime_call(p, "type_assertion_check2", args);

		return cg_emit_load(p, gep0);
	}
	return cg_value_multi2(cg_emit_load(p, gep0), cg_emit_load(p, gep1), tuple);
}
// Implements the type assertion `v.(T)` (and the comma-ok form `v.(T), ok`)
// when the operand is an `any`.
//
// The any's typeid field is compared against the destination type's typeid;
// on a match the data pointer is reinterpreted as ^T and loaded, otherwise the
// result locals keep their zero initialization so the comma-ok form yields
// (zero-value, false).  For the single-value form a call to the runtime's
// `type_assertion_check2` is emitted, which panics on mismatch — unless
// StateFlag_no_type_assert is set, in which case the data is just bit-cast.
//
// `type` is either the bare destination type or a (T, bool) tuple for the
// comma-ok form; it is normalized to a tuple so both paths share code.
gb_internal cgValue cg_emit_cast_any(cgProcedure *p, cgValue value, Type *type, TokenPos pos) {
	Type *src_type = value.type;
	// A pointer-to-any operand is dereferenced first.
	if (is_type_pointer(src_type)) {
		value = cg_emit_load(p, value);
	}
	bool is_tuple = true;
	Type *tuple = type;
	if (type->kind != Type_Tuple) {
		is_tuple = false;
		tuple = make_optional_ok_type(type);
	}
	Type *dst_type = tuple->Tuple.variables[0]->type;
	if ((p->state_flags & StateFlag_no_type_assert) != 0 && !is_tuple) {
		// just do a bit cast of the data at the front
		cgValue ptr = cg_emit_struct_ev(p, value, 0);
		ptr = cg_emit_conv(p, ptr, alloc_type_pointer(type));
		return cg_emit_load(p, ptr);
	}
	// any layout: field 0 = data (rawptr), field 1 = typeid.
	cgValue dst_typeid = cg_typeid(p, dst_type);
	cgValue any_typeid = cg_emit_struct_ev(p, value, 1);
	TB_Node *ok_block = cg_control_region(p, "any_cast_ok");
	TB_Node *end_block = cg_control_region(p, "any_cast_end");
	cgValue cond = cg_emit_comp(p, Token_CmpEq, any_typeid, dst_typeid);
	cg_emit_if(p, cond, ok_block, end_block);
	tb_inst_set_control(p->func, ok_block);
	// gep0 holds the extracted value, gep1 the ok flag; both are zeroed locals
	// (presumably hoisted to the entry by cg_add_local — TODO confirm), so the
	// mismatch path that skips the stores below produces (zero, false).
	cgValue gep0 = cg_add_local(p, tuple->Tuple.variables[0]->type, nullptr, true).addr;
	cgValue gep1 = cg_add_local(p, tuple->Tuple.variables[1]->type, nullptr, true).addr;
	cgValue any_data = cg_emit_struct_ev(p, value, 0);
	cgValue ptr = cg_emit_conv(p, any_data, alloc_type_pointer(dst_type));
	cg_emit_store(p, gep0, cg_emit_load(p, ptr));
	cg_emit_store(p, gep1, cg_const_bool(p, t_bool, true));
	cg_emit_goto(p, end_block);
	tb_inst_set_control(p->func, end_block);
	if (!is_tuple) {
		// NOTE(bill): Panic on invalid conversion
		cgValue ok = cg_emit_load(p, gep1);
		// Without RTTI only (ok, file, line, column) are passed; with RTTI the
		// source/destination typeids and the data pointer are appended.
		isize arg_count = 7;
		if (build_context.no_rtti) {
			arg_count = 4;
		}
		auto args = slice_make<cgValue>(permanent_allocator(), arg_count);
		args[0] = ok;
		args[1] = cg_const_string(p, t_string, get_file_path_string(pos.file_id));
		args[2] = cg_const_int(p, t_i32, pos.line);
		args[3] = cg_const_int(p, t_i32, pos.column);
		if (!build_context.no_rtti) {
			args[4] = any_typeid;
			args[5] = dst_typeid;
			args[6] = cg_emit_struct_ev(p, value, 0);
		}
		cg_emit_runtime_call(p, "type_assertion_check2", args);
		return cg_emit_load(p, gep0);
	}
	// Comma-ok form: return the (value, ok) pair.
	return cg_value_multi2(cg_emit_load(p, gep0), cg_emit_load(p, gep1), tuple);
}
  2665. gb_internal cgValue cg_build_type_assertion(cgProcedure *p, Ast *expr, Type *type) {
  2666. ast_node(ta, TypeAssertion, expr);
  2667. TokenPos pos = ast_token(expr).pos;
  2668. cgValue e = cg_build_expr(p, ta->expr);
  2669. Type *t = type_deref(e.type);
  2670. if (is_type_union(t)) {
  2671. return cg_emit_cast_union(p, e, type, pos);
  2672. } else if (is_type_any(t)) {
  2673. return cg_emit_cast_any(p, e, type, pos);
  2674. }
  2675. GB_PANIC("TODO(bill): type assertion %s", type_to_string(e.type));
  2676. return {};
  2677. }
// Builds a cgValue for an arbitrary expression.
//
// Constant expressions (anything with a valid ExactValue, except compound
// literals) short-circuit to cg_const_value; type expressions yield their
// typeid.  Everything else dispatches on the AST node kind.  Addressable
// expressions (idents, selectors, index/deref expressions, …) are generally
// built by computing their address via cg_build_addr and loading it.
gb_internal cgValue cg_build_expr_internal(cgProcedure *p, Ast *expr) {
	expr = unparen_expr(expr);
	TokenPos expr_pos = ast_token(expr).pos;
	TypeAndValue tv = type_and_value_of_expr(expr);
	Type *type = type_of_expr(expr);
	GB_ASSERT_MSG(tv.mode != Addressing_Invalid, "invalid expression '%s' (tv.mode = %d, tv.type = %s) @ %s\n Current Proc: %.*s : %s", expr_to_string(expr), tv.mode, type_to_string(tv.type), token_pos_to_string(expr_pos), LIT(p->name), type_to_string(p->type));
	if (tv.value.kind != ExactValue_Invalid &&
	    expr->kind != Ast_CompoundLit) {
		// NOTE(bill): The commented out code below is just for debug purposes only
		// if (is_type_untyped(type)) {
		// 	gb_printf_err("%s %s : %s @ %p\n", token_pos_to_string(expr_pos), expr_to_string(expr), type_to_string(expr->tav.type), expr);
		// 	GB_PANIC("%s\n", type_to_string(tv.type));
		// }
		// NOTE(bill): Short on constant values
		return cg_const_value(p, type, tv.value);
	} else if (tv.mode == Addressing_Type) {
		// NOTE(bill, 2023-01-16): is this correct? I hope so at least
		return cg_typeid(p, tv.type);
	}
	switch (expr->kind) {
	case_ast_node(bl, BasicLit, expr);
		// Non-constant basic literals should have been folded above.
		TokenPos pos = bl->token.pos;
		GB_PANIC("Non-constant basic literal %s - %.*s", token_pos_to_string(pos), LIT(token_strings[bl->token.kind]));
	case_end;
	case_ast_node(bd, BasicDirective, expr);
		TokenPos pos = bd->token.pos;
		GB_PANIC("Non-constant basic literal %s - %.*s", token_pos_to_string(pos), LIT(bd->name.string));
	case_end;
	case_ast_node(i, Ident, expr);
		Entity *e = entity_from_expr(expr);
		e = strip_entity_wrapping(e);
		GB_ASSERT_MSG(e != nullptr, "%s in %.*s %p", expr_to_string(expr), LIT(p->name), expr);
		if (e->kind == Entity_Builtin) {
			Token token = ast_token(expr);
			GB_PANIC("TODO(bill): lb_build_expr Entity_Builtin '%.*s'\n"
			         "\t at %s", LIT(builtin_procs[e->Builtin.id].name),
			         token_pos_to_string(token.pos));
			return {};
		} else if (e->kind == Entity_Nil) {
			// TODO(bill): is this correct?
			return cg_value(cast(TB_Node *)nullptr, e->type);
		}
		GB_ASSERT(e->kind != Entity_ProcGroup);
		// Locals known to this procedure load from their stack slot;
		// everything else (globals, procedures, …) goes through cg_find_ident.
		cgAddr *addr = map_get(&p->variable_map, e);
		if (addr) {
			return cg_addr_load(p, *addr);
		}
		return cg_find_ident(p, e, expr);
	case_end;
	case_ast_node(i, Implicit, expr);
		return cg_addr_load(p, cg_build_addr(p, expr));
	case_end;
	case_ast_node(u, Uninit, expr);
		// `---`: an explicitly uninitialized value; lowered to a poison node.
		if (is_type_untyped(type)) {
			return cg_value(cast(TB_Node *)nullptr, t_untyped_uninit);
		}
		return cg_value(tb_inst_poison(p->func), type);
	case_end;
	case_ast_node(de, DerefExpr, expr);
		return cg_addr_load(p, cg_build_addr(p, expr));
	case_end;
	case_ast_node(se, SelectorExpr, expr);
		TypeAndValue tav = type_and_value_of_expr(expr);
		GB_ASSERT(tav.mode != Addressing_Invalid);
		return cg_addr_load(p, cg_build_addr(p, expr));
	case_end;
	case_ast_node(ise, ImplicitSelectorExpr, expr);
		// e.g. `.Foo` — always a constant enum value by this point.
		TypeAndValue tav = type_and_value_of_expr(expr);
		GB_ASSERT(tav.mode == Addressing_Constant);
		return cg_const_value(p, type, tv.value);
	case_end;
	case_ast_node(se, SelectorCallExpr, expr);
		GB_ASSERT(se->modified_call);
		return cg_build_call_expr(p, se->call);
	case_end;
	case_ast_node(i, CallExpr, expr);
		return cg_build_call_expr(p, expr);
	case_end;
	case_ast_node(cl, CompoundLit, expr);
		cgAddr addr = cg_build_addr_compound_lit(p, expr);
		return cg_addr_load(p, addr);
	case_end;
	case_ast_node(te, TernaryIfExpr, expr);
		// `x if cond else y`: build both arms in their own regions and merge
		// the results with a two-operand phi at the join point.
		cgValue incoming_values[2] = {};
		TB_Node *incoming_regions[2] = {};
		TB_Node *then  = cg_control_region(p, "if_then");
		TB_Node *done  = cg_control_region(p, "if_done");
		TB_Node *else_ = cg_control_region(p, "if_else");
		cg_build_cond(p, te->cond, then, else_);
		tb_inst_set_control(p->func, then);
		Type *type = default_type(type_of_expr(expr));
		// Record the control region *after* building each arm, since the arm
		// itself may introduce new regions.
		incoming_values [0] = cg_emit_conv(p, cg_build_expr(p, te->x), type);
		incoming_regions[0] = tb_inst_get_control(p->func);
		cg_emit_goto(p, done);
		tb_inst_set_control(p->func, else_);
		incoming_values [1] = cg_emit_conv(p, cg_build_expr(p, te->y), type);
		incoming_regions[1] = tb_inst_get_control(p->func);
		cg_emit_goto(p, done);
		tb_inst_set_control(p->func, done);
		GB_ASSERT(incoming_values[0].kind == cgValue_Value ||
		          incoming_values[0].kind == cgValue_Addr);
		GB_ASSERT(incoming_values[0].kind == incoming_values[1].kind);
		cgValue res = {};
		res.kind = incoming_values[0].kind;
		res.type = type;
		TB_DataType dt = cg_data_type(type);
		if (res.kind == cgValue_Addr) {
			// Addr-kind values merge as pointers, not as the value type.
			dt = TB_TYPE_PTR;
		}
		res.node = tb_inst_incomplete_phi(p->func, dt, done, 2);
		tb_inst_add_phi_operand(p->func, res.node, incoming_regions[0], incoming_values[0].node);
		tb_inst_add_phi_operand(p->func, res.node, incoming_regions[1], incoming_values[1].node);
		return res;
	case_end;
	case_ast_node(te, TernaryWhenExpr, expr);
		// `x when COND else y`: the condition is a compile-time constant,
		// so only the selected arm is ever built.
		TypeAndValue tav = type_and_value_of_expr(te->cond);
		GB_ASSERT(tav.mode == Addressing_Constant);
		GB_ASSERT(tav.value.kind == ExactValue_Bool);
		if (tav.value.value_bool) {
			return cg_build_expr(p, te->x);
		} else {
			return cg_build_expr(p, te->y);
		}
	case_end;
	case_ast_node(tc, TypeCast, expr);
		cgValue e = cg_build_expr(p, tc->expr);
		switch (tc->token.kind) {
		case Token_cast:
			return cg_emit_conv(p, e, type);
		case Token_transmute:
			return cg_emit_transmute(p, e, type);
		}
		GB_PANIC("Invalid AST TypeCast");
	case_end;
	case_ast_node(ac, AutoCast, expr);
		cgValue value = cg_build_expr(p, ac->expr);
		return cg_emit_conv(p, value, type);
	case_end;
	case_ast_node(se, SliceExpr, expr);
		if (is_type_slice(type_of_expr(se->expr))) {
			// NOTE(bill): Quick optimization
			// `s[:]` or `s[0:]` of a slice is the slice itself.
			if (se->high == nullptr &&
			    (se->low == nullptr || cg_is_expr_constant_zero(se->low))) {
				return cg_build_expr(p, se->expr);
			}
		}
		return cg_addr_load(p, cg_build_addr(p, expr));
	case_end;
	case_ast_node(ie, IndexExpr, expr);
		return cg_addr_load(p, cg_build_addr(p, expr));
	case_end;
	case_ast_node(ie, MatrixIndexExpr, expr);
		return cg_addr_load(p, cg_build_addr(p, expr));
	case_end;
	case_ast_node(ue, UnaryExpr, expr);
		if (ue->op.kind == Token_And) {
			// Address-of is handled separately since it needs an lvalue.
			return cg_build_unary_and(p, expr);
		}
		cgValue v = cg_build_expr(p, ue->expr);
		return cg_emit_unary_arith(p, ue->op.kind, v, type);
	case_end;
	case_ast_node(be, BinaryExpr, expr);
		return cg_build_binary_expr(p, expr);
	case_end;
	case_ast_node(oe, OrReturnExpr, expr);
		return cg_build_or_return(p, oe->expr, tv.type);
	case_end;
	case_ast_node(oe, OrElseExpr, expr);
		return cg_build_or_else(p, oe->x, oe->y, tv.type);
	case_end;
	case_ast_node(ta, TypeAssertion, expr);
		return cg_build_type_assertion(p, expr, tv.type);
	case_end;
	case_ast_node(pl, ProcLit, expr);
		// Anonymous procedure literal: generate the procedure and take the
		// address of its symbol.
		cgProcedure *anon = cg_procedure_generate_anonymous(p->module, expr, p);
		GB_ASSERT(anon != nullptr);
		GB_ASSERT(anon->symbol != nullptr);
		return cg_value(tb_inst_get_symbol_address(p->func, anon->symbol), type);
	case_end;
	}
	TokenPos token_pos = ast_token(expr).pos;
	GB_PANIC("Unexpected expression\n"
	         "\tAst: %.*s @ "
	         "%s\n",
	         LIT(ast_strings[expr->kind]),
	         token_pos_to_string(token_pos));
	return {};
}
  2866. gb_internal cgValue cg_map_data_uintptr(cgProcedure *p, cgValue value) {
  2867. GB_ASSERT(is_type_map(value.type) || are_types_identical(value.type, t_raw_map));
  2868. cgValue data = cg_emit_struct_ev(p, value, 0);
  2869. u64 mask_value = 0;
  2870. if (build_context.ptr_size == 4) {
  2871. mask_value = 0xfffffffful & ~(MAP_CACHE_LINE_SIZE-1);
  2872. } else {
  2873. mask_value = 0xffffffffffffffffull & ~(MAP_CACHE_LINE_SIZE-1);
  2874. }
  2875. cgValue mask = cg_const_int(p, t_uintptr, mask_value);
  2876. return cg_emit_arith(p, Token_And, data, mask, t_uintptr);
  2877. }
  2878. gb_internal cgValue cg_gen_map_key_hash(cgProcedure *p, cgValue const &map_ptr, cgValue key, cgValue *key_ptr_) {
  2879. TEMPORARY_ALLOCATOR_GUARD();
  2880. cgValue key_ptr = cg_address_from_load_or_generate_local(p, key);
  2881. key_ptr = cg_emit_conv(p, key_ptr, t_rawptr);
  2882. if (key_ptr_) *key_ptr_ = key_ptr;
  2883. Type* key_type = base_type(type_deref(map_ptr.type))->Map.key;
  2884. cgValue hasher = cg_hasher_proc_value_for_type(p, key_type);
  2885. Slice<cgValue> args = {};
  2886. args = slice_make<cgValue>(temporary_allocator(), 1);
  2887. args[0] = cg_map_data_uintptr(p, cg_emit_load(p, map_ptr));
  2888. cgValue seed = cg_emit_runtime_call(p, "map_seed_from_map_data", args);
  2889. args = slice_make<cgValue>(temporary_allocator(), 2);
  2890. args[0] = key_ptr;
  2891. args[1] = seed;
  2892. return cg_emit_call(p, hasher, args);
  2893. }
  2894. gb_internal cgValue cg_internal_dynamic_map_get_ptr(cgProcedure *p, cgValue const &map_ptr, cgValue const &key) {
  2895. TEMPORARY_ALLOCATOR_GUARD();
  2896. Type *map_type = base_type(type_deref(map_ptr.type));
  2897. GB_ASSERT(map_type->kind == Type_Map);
  2898. cgValue ptr = {};
  2899. cgValue key_ptr = {};
  2900. cgValue hash = cg_gen_map_key_hash(p, map_ptr, key, &key_ptr);
  2901. auto args = slice_make<cgValue>(temporary_allocator(), 4);
  2902. args[0] = cg_emit_transmute(p, map_ptr, t_raw_map_ptr);
  2903. args[1] = cg_builtin_map_info(p, map_type);
  2904. args[2] = hash;
  2905. args[3] = key_ptr;
  2906. ptr = cg_emit_runtime_call(p, "__dynamic_map_get", args);
  2907. return cg_emit_conv(p, ptr, alloc_type_pointer(map_type->Map.value));
  2908. }
  2909. gb_internal void cg_internal_dynamic_map_set(cgProcedure *p, cgValue const &map_ptr, Type *map_type,
  2910. cgValue const &map_key, cgValue const &map_value, Ast *node) {
  2911. TEMPORARY_ALLOCATOR_GUARD();
  2912. map_type = base_type(map_type);
  2913. GB_ASSERT(map_type->kind == Type_Map);
  2914. cgValue key_ptr = {};
  2915. cgValue hash = cg_gen_map_key_hash(p, map_ptr, map_key, &key_ptr);
  2916. cgValue v = cg_emit_conv(p, map_value, map_type->Map.value);
  2917. cgValue value_ptr = cg_address_from_load_or_generate_local(p, v);
  2918. auto args = slice_make<cgValue>(temporary_allocator(), 6);
  2919. args[0] = cg_emit_conv(p, map_ptr, t_raw_map_ptr);
  2920. args[1] = cg_builtin_map_info(p, map_type);
  2921. args[2] = hash;
  2922. args[3] = cg_emit_conv(p, key_ptr, t_rawptr);
  2923. args[4] = cg_emit_conv(p, value_ptr, t_rawptr);
  2924. args[5] = cg_emit_source_code_location_as_global(p, node);
  2925. cg_emit_runtime_call(p, "__dynamic_map_set", args);
  2926. }
  2927. gb_internal cgValue cg_build_addr_ptr(cgProcedure *p, Ast *expr) {
  2928. cgAddr addr = cg_build_addr(p, expr);
  2929. return cg_addr_get_ptr(p, addr);
  2930. }
  2931. gb_internal cgAddr cg_build_addr_internal(cgProcedure *p, Ast *expr);
  2932. gb_internal cgAddr cg_build_addr(cgProcedure *p, Ast *expr) {
  2933. expr = unparen_expr(expr);
  2934. // IMPORTANT NOTE(bill):
  2935. // Selector Call Expressions (foo->bar(...))
  2936. // must only evaluate `foo` once as it gets transformed into
  2937. // `foo.bar(foo, ...)`
  2938. // And if `foo` is a procedure call or something more complex, storing the value
  2939. // once is a very good idea
  2940. // If a stored value is found, it must be removed from the cache
  2941. if (expr->state_flags & StateFlag_SelectorCallExpr) {
  2942. // lbAddr *pp = map_get(&p->selector_addr, expr);
  2943. // if (pp != nullptr) {
  2944. // lbAddr res = *pp;
  2945. // map_remove(&p->selector_addr, expr);
  2946. // return res;
  2947. // }
  2948. }
  2949. cgAddr addr = cg_build_addr_internal(p, expr);
  2950. if (expr->state_flags & StateFlag_SelectorCallExpr) {
  2951. // map_set(&p->selector_addr, expr, addr);
  2952. }
  2953. return addr;
  2954. }
// Computes the address of an index expression `x[i]`.
//
// Handles every indexable base type: SOA structs, maps, fixed/enumerated
// arrays, slices, (relative) multi-pointers, dynamic arrays, matrices (TODO),
// and strings.  A pointer base is transparently dereferenced (`deref`).
// NOTE(review): the bounds checks are currently commented out throughout —
// presumably pending emit support in this backend.
gb_internal cgAddr cg_build_addr_index_expr(cgProcedure *p, Ast *expr) {
	ast_node(ie, IndexExpr, expr);
	Type *t = base_type(type_of_expr(ie->expr));
	bool deref = is_type_pointer(t);
	t = base_type(type_deref(t));
	if (is_type_soa_struct(t)) {
		// SOA structs index "across" all field arrays; represent as a
		// special SOA-variable address (ptr + index).
		cgValue val = cg_build_addr_ptr(p, ie->expr);
		if (deref) {
			val = cg_emit_load(p, val);
		}
		cgValue index = cg_build_expr(p, ie->index);
		return cg_addr_soa_variable(val, index, ie->index);
	}
	if (ie->expr->tav.mode == Addressing_SoaVariable) {
		GB_PANIC("TODO(bill): #soa");
		// // SOA Structures for slices/dynamic arrays
		// GB_ASSERT(is_type_pointer(type_of_expr(ie->expr)));
		// lbValue field = lb_build_expr(p, ie->expr);
		// lbValue index = lb_build_expr(p, ie->index);
		// if (!build_context.no_bounds_check) {
		// 	// TODO HACK(bill): Clean up this hack to get the length for bounds checking
		// 	// GB_ASSERT(LLVMIsALoadInst(field.value));
		// 	// lbValue a = {};
		// 	// a.value = LLVMGetOperand(field.value, 0);
		// 	// a.type = alloc_type_pointer(field.type);
		// 	// irInstr *b = &a->Instr;
		// 	// GB_ASSERT(b->kind == irInstr_StructElementPtr);
		// 	// lbValue base_struct = b->StructElementPtr.address;
		// 	// GB_ASSERT(is_type_soa_struct(type_deref(ir_type(base_struct))));
		// 	// lbValue len = ir_soa_struct_len(p, base_struct);
		// 	// lb_emit_bounds_check(p, ast_token(ie->index), index, len);
		// }
		// lbValue val = lb_emit_ptr_offset(p, field, index);
		// return lb_addr(val);
	}
	GB_ASSERT_MSG(is_type_indexable(t), "%s %s", type_to_string(t), expr_to_string(expr));
	if (is_type_map(t)) {
		// Map indexing yields a deferred map address (actual get/set is
		// emitted when the address is loaded or stored).
		cgAddr map_addr = cg_build_addr(p, ie->expr);
		cgValue key = cg_build_expr(p, ie->index);
		key = cg_emit_conv(p, key, t->Map.key);
		Type *result_type = type_of_expr(expr);
		cgValue map_ptr = cg_addr_get_ptr(p, map_addr);
		if (is_type_pointer(type_deref(map_ptr.type))) {
			map_ptr = cg_emit_load(p, map_ptr);
		}
		return cg_addr_map(map_ptr, key, t, result_type);
	}
	switch (t->kind) {
	case Type_Array: {
		// Fixed array: element pointer via array GEP.
		cgValue array = {};
		array = cg_build_addr_ptr(p, ie->expr);
		if (deref) {
			array = cg_emit_load(p, array);
		}
		cgValue index = cg_build_expr(p, ie->index);
		index = cg_emit_conv(p, index, t_int);
		cgValue elem = cg_emit_array_ep(p, array, index);
		auto index_tv = type_and_value_of_expr(ie->index);
		if (index_tv.mode != Addressing_Constant) {
			// cgValue len = cg_const_int(p->module, t_int, t->Array.count);
			// cg_emit_bounds_check(p, ast_token(ie->index), index, len);
		}
		return cg_addr(elem);
	}
	case Type_EnumeratedArray: {
		cgValue array = {};
		array = cg_build_addr_ptr(p, ie->expr);
		if (deref) {
			array = cg_emit_load(p, array);
		}
		Type *index_type = t->EnumeratedArray.index;
		auto index_tv = type_and_value_of_expr(ie->index);
		cgValue index = {};
		// Enumerated arrays may start at a non-zero enum value; rebase the
		// index by subtracting the minimum (folded at compile time when the
		// index is constant).
		if (compare_exact_values(Token_NotEq, *t->EnumeratedArray.min_value, exact_value_i64(0))) {
			if (index_tv.mode == Addressing_Constant) {
				ExactValue idx = exact_value_sub(index_tv.value, *t->EnumeratedArray.min_value);
				index = cg_const_value(p, index_type, idx);
			} else {
				index = cg_emit_arith(p, Token_Sub,
				                      cg_build_expr(p, ie->index),
				                      cg_const_value(p, index_type, *t->EnumeratedArray.min_value),
				                      index_type);
				index = cg_emit_conv(p, index, t_int);
			}
		} else {
			index = cg_emit_conv(p, cg_build_expr(p, ie->index), t_int);
		}
		cgValue elem = cg_emit_array_ep(p, array, index);
		if (index_tv.mode != Addressing_Constant) {
			// cgValue len = cg_const_int(p->module, t_int, t->EnumeratedArray.count);
			// cg_emit_bounds_check(p, ast_token(ie->index), index, len);
		}
		return cg_addr(elem);
	}
	case Type_Slice: {
		// Slice: offset from the raw data pointer.
		cgValue slice = {};
		slice = cg_build_expr(p, ie->expr);
		if (deref) {
			slice = cg_emit_load(p, slice);
		}
		cgValue elem = cg_builtin_raw_data(p, slice);
		cgValue index = cg_emit_conv(p, cg_build_expr(p, ie->index), t_int);
		// cgValue len = cg_builtin_len(p, slice);
		// cg_emit_bounds_check(p, ast_token(ie->index), index, len);
		cgValue v = cg_emit_ptr_offset(p, elem, index);
		v.type = alloc_type_pointer(type_deref(v.type, true));
		return cg_addr(v);
	}
	case Type_MultiPointer: {
		cgValue multi_ptr = {};
		multi_ptr = cg_build_expr(p, ie->expr);
		if (deref) {
			multi_ptr = cg_emit_load(p, multi_ptr);
		}
		cgValue index = cg_build_expr(p, ie->index);
		index = cg_emit_conv(p, index, t_int);
		cgValue v = cg_emit_ptr_offset(p, multi_ptr, index);
		v.type = alloc_type_pointer(type_deref(v.type, true));
		return cg_addr(v);
	}
	case Type_RelativeMultiPointer: {
		// NOTE(review): identical to the Type_MultiPointer case — no
		// relative-pointer resolution appears to happen here; confirm this is
		// intentional (the operand may already have been resolved upstream).
		cgValue multi_ptr = {};
		multi_ptr = cg_build_expr(p, ie->expr);
		if (deref) {
			multi_ptr = cg_emit_load(p, multi_ptr);
		}
		cgValue index = cg_build_expr(p, ie->index);
		index = cg_emit_conv(p, index, t_int);
		cgValue v = cg_emit_ptr_offset(p, multi_ptr, index);
		v.type = alloc_type_pointer(type_deref(v.type, true));
		return cg_addr(v);
	}
	case Type_DynamicArray: {
		cgValue dynamic_array = {};
		dynamic_array = cg_build_expr(p, ie->expr);
		if (deref) {
			dynamic_array = cg_emit_load(p, dynamic_array);
		}
		cgValue elem = cg_builtin_raw_data(p, dynamic_array);
		cgValue index = cg_emit_conv(p, cg_build_expr(p, ie->index), t_int);
		// cgValue len = cg_dynamic_array_len(p, dynamic_array);
		// cg_emit_bounds_check(p, ast_token(ie->index), index, len);
		cgValue v = cg_emit_ptr_offset(p, elem, index);
		v.type = alloc_type_pointer(type_deref(v.type, true));
		return cg_addr(v);
	}
	case Type_Matrix: {
		GB_PANIC("TODO(bill): matrix");
		// lbValue matrix = {};
		// matrix = lb_build_addr_ptr(p, ie->expr);
		// if (deref) {
		// 	matrix = lb_emit_load(p, matrix);
		// }
		// lbValue index = lb_build_expr(p, ie->index);
		// index = lb_emit_conv(p, index, t_int);
		// lbValue elem = lb_emit_matrix_ep(p, matrix, lb_const_int(p->module, t_int, 0), index);
		// elem = lb_emit_conv(p, elem, alloc_type_pointer(type_of_expr(expr)));
		// auto index_tv = type_and_value_of_expr(ie->index);
		// if (index_tv.mode != Addressing_Constant) {
		// 	lbValue len = lb_const_int(p->module, t_int, t->Matrix.column_count);
		// 	lb_emit_bounds_check(p, ast_token(ie->index), index, len);
		// }
		// return lb_addr(elem);
	}
	case Type_Basic: { // Basic_string
		cgValue str;
		cgValue elem;
		cgValue len;
		cgValue index;
		str = cg_build_expr(p, ie->expr);
		if (deref) {
			str = cg_emit_load(p, str);
		}
		elem = cg_builtin_raw_data(p, str);
		len = cg_builtin_len(p, str);
		index = cg_emit_conv(p, cg_build_expr(p, ie->index), t_int);
		// cg_emit_bounds_check(p, ast_token(ie->index), index, len);
		cgValue v = cg_emit_ptr_offset(p, elem, index);
		v.type = alloc_type_pointer(type_deref(v.type, true));
		return cg_addr(v);
	}
	}
	return {};
}
// Core of cg_build_addr: computes the address of `expr` by AST node kind.
// Covers implicit `context`, identifiers, dereferences (including relative
// and SOA pointers), index/slice expressions, selector expressions (fields,
// swizzles, SOA members, context fields), calls, and compound literals.
gb_internal cgAddr cg_build_addr_internal(cgProcedure *p, Ast *expr) {
	switch (expr->kind) {
	case_ast_node(i, Implicit, expr);
		// Only the implicit `context` has an address.
		cgAddr v = {};
		switch (i->kind) {
		case Token_context:
			v = cg_find_or_generate_context_ptr(p);
			break;
		}
		GB_ASSERT(v.addr.node != nullptr);
		return v;
	case_end;
	case_ast_node(i, Ident, expr);
		// `_` has no address; return an empty cgAddr.
		if (is_blank_ident(expr)) {
			cgAddr val = {};
			return val;
		}
		String name = i->token.string; // NOTE(review): currently unused — kept for parity/debugging?
		Entity *e = entity_of_node(expr);
		return cg_build_addr_from_entity(p, e, expr);
	case_end;
	case_ast_node(de, DerefExpr, expr);
		Type *t = type_of_expr(de->expr);
		if (is_type_relative_pointer(t)) {
			// Defer the relative-pointer resolution: mark the address so the
			// deref happens on load/store.
			cgAddr addr = cg_build_addr(p, de->expr);
			addr.relative.deref = true;
			return addr;
		} else if (is_type_soa_pointer(t)) {
			// SOA pointer = (base ptr, index) pair.
			cgValue value = cg_build_expr(p, de->expr);
			cgValue ptr = cg_emit_struct_ev(p, value, 0);
			cgValue idx = cg_emit_struct_ev(p, value, 1);
			return cg_addr_soa_variable(ptr, idx, nullptr);
		}
		cgValue addr = cg_build_expr(p, de->expr);
		return cg_addr(addr);
	case_end;
	case_ast_node(ie, IndexExpr, expr);
		return cg_build_addr_index_expr(p, expr);
	case_end;
	case_ast_node(se, SliceExpr, expr);
		return cg_build_addr_slice_expr(p, expr);
	case_end;
	case_ast_node(se, SelectorExpr, expr);
		Ast *sel_node = unparen_expr(se->selector);
		if (sel_node->kind != Ast_Ident) {
			GB_PANIC("Unsupported selector expression");
		}
		String selector = sel_node->Ident.token.string;
		TypeAndValue tav = type_and_value_of_expr(se->expr);
		if (tav.mode == Addressing_Invalid) {
			// NOTE(bill): Imports
			// `pkg.entity` — resolve through the selector directly.
			Entity *imp = entity_of_node(se->expr);
			if (imp != nullptr) {
				GB_ASSERT(imp->kind == Entity_ImportName);
			}
			return cg_build_addr(p, unparen_expr(se->selector));
		}
		Type *type = base_type(tav.type);
		if (tav.mode == Addressing_Type) { // Addressing_Type
			// `Type.proc` pseudo-field: address of the procedure value.
			Selection sel = lookup_field(tav.type, selector, true);
			if (sel.pseudo_field) {
				GB_ASSERT(sel.entity->kind == Entity_Procedure);
				return cg_addr(cg_find_value_from_entity(p->module, sel.entity));
			}
			GB_PANIC("Unreachable %.*s", LIT(selector));
		}
		if (se->swizzle_count > 0) {
			// Array swizzle like `v.xyz`: decode 2-bit packed indices.
			Type *array_type = base_type(type_deref(tav.type));
			GB_ASSERT(array_type->kind == Type_Array);
			u8 swizzle_count = se->swizzle_count;
			u8 swizzle_indices_raw = se->swizzle_indices;
			u8 swizzle_indices[4] = {};
			for (u8 i = 0; i < swizzle_count; i++) {
				u8 index = swizzle_indices_raw>>(i*2) & 3;
				swizzle_indices[i] = index;
			}
			cgValue a = {};
			if (is_type_pointer(tav.type)) {
				a = cg_build_expr(p, se->expr);
			} else {
				cgAddr addr = cg_build_addr(p, se->expr);
				a = cg_addr_get_ptr(p, addr);
			}
			GB_ASSERT(is_type_array(expr->tav.type));
			GB_PANIC("TODO(bill): cg_addr_swizzle");
			// return cg_addr_swizzle(a, expr->tav.type, swizzle_count, swizzle_indices);
		}
		Selection sel = lookup_field(type, selector, false);
		GB_ASSERT(sel.entity != nullptr);
		if (sel.pseudo_field) {
			GB_ASSERT(sel.entity->kind == Entity_Procedure);
			Entity *e = entity_of_node(sel_node);
			return cg_addr(cg_find_value_from_entity(p->module, e));
		}
		{
			cgAddr addr = cg_build_addr(p, se->expr);
			if (addr.kind == cgAddr_Map) {
				// Map element: materialize the loaded value, then GEP into it.
				cgValue v = cg_addr_load(p, addr);
				cgValue a = cg_address_from_load_or_generate_local(p, v);
				a = cg_emit_deep_field_gep(p, a, sel);
				return cg_addr(a);
			} else if (addr.kind == cgAddr_Context) {
				GB_ASSERT(sel.index.count > 0);
				// NOTE(review): `>= 0` is always true for a count; presumably
				// `> 0` was intended (combine only when a prior selection
				// exists) — confirm against the llvm backend's lb_build_addr.
				if (addr.ctx.sel.index.count >= 0) {
					sel = selection_combine(addr.ctx.sel, sel);
				}
				addr.ctx.sel = sel;
				addr.kind = cgAddr_Context;
				return addr;
			} else if (addr.kind == cgAddr_SoaVariable) {
				// Field of an SOA element: index into the field's own array.
				cgValue index = addr.soa.index;
				i64 first_index = sel.index[0];
				Selection sub_sel = sel;
				sub_sel.index.data += 1;
				sub_sel.index.count -= 1;
				cgValue arr = cg_emit_struct_ep(p, addr.addr, first_index);
				Type *t = base_type(type_deref(addr.addr.type));
				GB_ASSERT(is_type_soa_struct(t));
				// TODO(bill): bounds checking for soa variable
				// if (addr.soa.index_expr != nullptr && (!cg_is_const(addr.soa.index) || t->Struct.soa_kind != StructSoa_Fixed)) {
				// 	cgValue len = cg_soa_struct_len(p, addr.addr);
				// 	cg_emit_bounds_check(p, ast_token(addr.soa.index_expr), addr.soa.index, len);
				// }
				cgValue item = {};
				if (t->Struct.soa_kind == StructSoa_Fixed) {
					item = cg_emit_array_ep(p, arr, index);
				} else {
					item = cg_emit_ptr_offset(p, cg_emit_load(p, arr), index);
				}
				if (sub_sel.index.count > 0) {
					item = cg_emit_deep_field_gep(p, item, sub_sel);
				}
				item.type = alloc_type_pointer(type_deref(item.type, true));
				return cg_addr(item);
			} else if (addr.kind == cgAddr_Swizzle) {
				GB_ASSERT(sel.index.count > 0);
				// NOTE(bill): just patch the index in place
				sel.index[0] = addr.swizzle.indices[sel.index[0]];
			} else if (addr.kind == cgAddr_SwizzleLarge) {
				GB_ASSERT(sel.index.count > 0);
				// NOTE(bill): just patch the index in place
				// NOTE(review): reads `addr.swizzle.indices` exactly like the
				// cgAddr_Swizzle branch — verify the large-swizzle variant
				// shouldn't use its own indices field instead.
				sel.index[0] = addr.swizzle.indices[sel.index[0]];
			}
			cgValue a = cg_addr_get_ptr(p, addr);
			a = cg_emit_deep_field_gep(p, a, sel);
			return cg_addr(a);
		}
	case_end;
	case_ast_node(ce, CallExpr, expr);
		// Address of a call result: spill a plain value to a local.
		cgValue res = cg_build_expr(p, expr);
		switch (res.kind) {
		case cgValue_Value:
			return cg_addr(cg_address_from_load_or_generate_local(p, res));
		case cgValue_Addr:
			return cg_addr(res);
		case cgValue_Multi:
			GB_PANIC("cannot address a multi-valued expression");
			break;
		}
	case_end;
	case_ast_node(cl, CompoundLit, expr);
		return cg_build_addr_compound_lit(p, expr);
	case_end;
	}
	TokenPos token_pos = ast_token(expr).pos;
	GB_PANIC("Unexpected address expression\n"
	         "\tAst: %.*s @ "
	         "%s\n",
	         LIT(ast_strings[expr->kind]),
	         token_pos_to_string(token_pos));
	return {};
}