tilde_expr.cpp 76 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045104610471048104910501051105210531054105510561057105810591060106110621063106410651066106710681069107010711072107310741075107610771078107910801081108210831084108510861087108810891090109110921093109410951096109710981099110011011102110311041105110611071108110911101111111211131114111511161117111811191120112111221123112411251126112711281129113011311132113311341135113611371138113911401141114211431144114511461147114811491150115111521153115411551156115711581159116011611162116311641165116611671168116911701171117211731174117511761177117811791180118111821183118411851186118711881189119011911192119311941195119611971198119912001201120212031204120512061207120812091210121112121213121412151216121712181219122012211222122312241225122612271228122912301231123212331234123512361237123812391240124112421243124412451246124712481249125012511252125312541255125612571258125912601261126212631264126512661267126812691270127112721273127412751276127
71278127912801281128212831284128512861287128812891290129112921293129412951296129712981299130013011302130313041305130613071308130913101311131213131314131513161317131813191320132113221323132413251326132713281329133013311332133313341335133613371338133913401341134213431344134513461347134813491350135113521353135413551356135713581359136013611362136313641365136613671368136913701371137213731374137513761377137813791380138113821383138413851386138713881389139013911392139313941395139613971398139914001401140214031404140514061407140814091410141114121413141414151416141714181419142014211422142314241425142614271428142914301431143214331434143514361437143814391440144114421443144414451446144714481449145014511452145314541455145614571458145914601461146214631464146514661467146814691470147114721473147414751476147714781479148014811482148314841485148614871488148914901491149214931494149514961497149814991500150115021503150415051506150715081509151015111512151315141515151615171518151915201521152215231524152515261527152815291530153115321533153415351536153715381539154015411542154315441545154615471548154915501551155215531554155515561557155815591560156115621563156415651566156715681569157015711572157315741575157615771578157915801581158215831584158515861587158815891590159115921593159415951596159715981599160016011602160316041605160616071608160916101611161216131614161516161617161816191620162116221623162416251626162716281629163016311632163316341635163616371638163916401641164216431644164516461647164816491650165116521653165416551656165716581659166016611662166316641665166616671668166916701671167216731674167516761677167816791680168116821683168416851686168716881689169016911692169316941695169616971698169917001701170217031704170517061707170817091710171117121713171417151716171717181719172017211722172317241725172617271728172917301731173217331734173517361737173817391740174117421743174417451746174717481749175017511752175317541755175617571758175917601761176217631764176517661767176817691770177117721773177417751776177
71778177917801781178217831784178517861787178817891790179117921793179417951796179717981799180018011802180318041805180618071808180918101811181218131814181518161817181818191820182118221823182418251826182718281829183018311832183318341835183618371838183918401841184218431844184518461847184818491850185118521853185418551856185718581859186018611862186318641865186618671868186918701871187218731874187518761877187818791880188118821883188418851886188718881889189018911892189318941895189618971898189919001901190219031904190519061907190819091910191119121913191419151916191719181919192019211922192319241925192619271928192919301931193219331934193519361937193819391940194119421943194419451946194719481949195019511952195319541955195619571958195919601961196219631964196519661967196819691970197119721973197419751976197719781979198019811982198319841985198619871988198919901991199219931994199519961997199819992000200120022003200420052006200720082009201020112012201320142015201620172018201920202021202220232024202520262027202820292030203120322033203420352036203720382039204020412042204320442045204620472048204920502051205220532054205520562057205820592060206120622063206420652066206720682069207020712072207320742075207620772078207920802081208220832084208520862087208820892090209120922093209420952096209720982099210021012102210321042105210621072108210921102111211221132114211521162117211821192120212121222123212421252126212721282129213021312132213321342135213621372138213921402141214221432144214521462147214821492150215121522153215421552156215721582159216021612162216321642165216621672168216921702171217221732174217521762177217821792180218121822183218421852186218721882189219021912192219321942195219621972198219922002201220222032204220522062207220822092210221122122213221422152216221722182219222022212222222322242225222622272228222922302231223222332234223522362237223822392240224122422243224422452246224722482249225022512252225322542255225622572258225922602261226222632264226522662267226822692270227122722273227422752276227
72278227922802281228222832284228522862287228822892290229122922293229422952296229722982299230023012302230323042305230623072308230923102311231223132314231523162317231823192320232123222323232423252326232723282329233023312332233323342335233623372338
  1. gb_internal cgValue cg_flatten_value(cgProcedure *p, cgValue value) {
  2. GB_ASSERT(value.kind != cgValue_Multi);
  3. if (value.kind == cgValue_Symbol) {
  4. GB_ASSERT(is_type_internally_pointer_like(value.type));
  5. return cg_value(tb_inst_get_symbol_address(p->func, value.symbol), value.type);
  6. } else if (value.kind == cgValue_Addr) {
  7. // TODO(bill): Is this a good idea?
  8. // this converts an lvalue to an rvalue if trivially possible
  9. TB_DataType dt = cg_data_type(value.type);
  10. if (!TB_IS_VOID_TYPE(dt)) {
  11. TB_CharUnits align = cast(TB_CharUnits)type_align_of(value.type);
  12. return cg_value(tb_inst_load(p->func, dt, value.node, align, false), value.type);
  13. }
  14. }
  15. return value;
  16. }
  17. gb_internal bool cg_is_expr_untyped_const(Ast *expr) {
  18. auto const &tv = type_and_value_of_expr(expr);
  19. if (is_type_untyped(tv.type)) {
  20. return tv.value.kind != ExactValue_Invalid;
  21. }
  22. return false;
  23. }
  24. gb_internal cgValue cg_expr_untyped_const_to_typed(cgProcedure *p, Ast *expr, Type *t) {
  25. GB_ASSERT(is_type_typed(t));
  26. auto const &tv = type_and_value_of_expr(expr);
  27. return cg_const_value(p, t, tv.value);
  28. }
  29. gb_internal cgContextData *cg_push_context_onto_stack(cgProcedure *p, cgAddr ctx) {
  30. ctx.kind = cgAddr_Context;
  31. cgContextData *cd = array_add_and_get(&p->context_stack);
  32. cd->ctx = ctx;
  33. cd->scope_index = p->scope_index;
  34. return cd;
  35. }
// Returns the innermost `context` for this procedure, creating and pushing a
// fresh local one if none exists yet.
gb_internal cgAddr cg_find_or_generate_context_ptr(cgProcedure *p) {
	// Reuse the innermost context already pushed for this procedure, if any.
	if (p->context_stack.count > 0) {
		return p->context_stack[p->context_stack.count-1].ctx;
	}

	// No context on the stack: this path is only valid for non-Odin calling
	// conventions (an Odin-CC procedure is expected to already have one).
	Type *pt = base_type(p->type);
	GB_ASSERT(pt->kind == Type_Proc);
	GB_ASSERT(pt->Proc.calling_convention != ProcCC_Odin);

	// Synthesize a local `context` variable and attach debug info for it.
	// NOTE(review): the trailing `true` flag presumably requests
	// zero-initialization — confirm against cg_add_local.
	cgAddr c = cg_add_local(p, t_context, nullptr, true);
	tb_node_append_attrib(c.addr.node, tb_function_attrib_variable(p->func, -1, "context", cg_debug_type(p->module, t_context)));
	c.kind = cgAddr_Context;
	// lb_emit_init_context(p, c);
	cg_push_context_onto_stack(p, c);
	// lb_add_debug_context_variable(p, c);
	return c;
}
  51. gb_internal cgValue cg_find_value_from_entity(cgModule *m, Entity *e) {
  52. e = strip_entity_wrapping(e);
  53. GB_ASSERT(e != nullptr);
  54. GB_ASSERT(e->token.string != "_");
  55. if (e->kind == Entity_Procedure) {
  56. return cg_find_procedure_value_from_entity(m, e);
  57. }
  58. cgValue *found = nullptr;
  59. rw_mutex_shared_lock(&m->values_mutex);
  60. found = map_get(&m->values, e);
  61. rw_mutex_shared_unlock(&m->values_mutex);
  62. if (found) {
  63. return *found;
  64. }
  65. GB_PANIC("\n\tError in: %s, missing value '%.*s'\n", token_pos_to_string(e->token.pos), LIT(e->token.string));
  66. return {};
  67. }
  68. gb_internal cgAddr cg_build_addr_from_entity(cgProcedure *p, Entity *e, Ast *expr) {
  69. GB_ASSERT(e != nullptr);
  70. if (e->kind == Entity_Constant) {
  71. Type *t = default_type(type_of_expr(expr));
  72. cgValue v = cg_const_value(p, t, e->Constant.value);
  73. GB_PANIC("TODO(bill): cg_add_global_generated");
  74. // return cg_add_global_generated(p->module, t, v);
  75. return {};
  76. }
  77. cgAddr *local_found = map_get(&p->variable_map, e);
  78. if (local_found) {
  79. return *local_found;
  80. }
  81. cgValue v = {};
  82. cgModule *m = p->module;
  83. rw_mutex_lock(&m->values_mutex);
  84. cgValue *found = map_get(&m->values, e);
  85. rw_mutex_unlock(&m->values_mutex);
  86. if (found) {
  87. v = *found;
  88. } else if (e->kind == Entity_Variable && e->flags & EntityFlag_Using) {
  89. GB_PANIC("TODO(bill): cg_get_using_variable");
  90. // NOTE(bill): Calculate the using variable every time
  91. // v = cg_get_using_variable(p, e);
  92. } else if (e->flags & EntityFlag_SoaPtrField) {
  93. GB_PANIC("TODO(bill): cg_get_soa_variable_addr");
  94. // return cg_get_soa_variable_addr(p, e);
  95. }
  96. if (v.node == nullptr) {
  97. cgValue v = cg_find_value_from_entity(m, e);
  98. v = cg_flatten_value(p, v);
  99. return cg_addr(v);
  100. }
  101. return cg_addr(v);
  102. }
  103. gb_internal cgValue cg_typeid(cgProcedure *p, Type *t) {
  104. GB_ASSERT("TODO(bill): cg_typeid");
  105. return {};
  106. }
  107. gb_internal cgValue cg_correct_endianness(cgProcedure *p, cgValue value) {
  108. Type *src = core_type(value.type);
  109. GB_ASSERT(is_type_integer(src) || is_type_float(src));
  110. if (is_type_different_to_arch_endianness(src)) {
  111. GB_PANIC("TODO(bill): cg_correct_endianness");
  112. // Type *platform_src_type = integer_endian_type_to_platform_type(src);
  113. // value = cg_emit_byte_swap(p, value, platform_src_type);
  114. }
  115. return value;
  116. }
// Reinterprets the bits of `value` as `type` without any value conversion.
// The two types must have identical sizes.
gb_internal cgValue cg_emit_transmute(cgProcedure *p, cgValue value, Type *type) {
	GB_ASSERT(type_size_of(value.type) == type_size_of(type));

	value = cg_flatten_value(p, value);

	// Identical (or core-identical) types need no codegen: just relabel.
	if (are_types_identical(value.type, type)) {
		return value;
	}
	if (are_types_identical(core_type(value.type), core_type(type))) {
		value.type = type;
		return value;
	}

	i64 src_align = type_align_of(value.type);
	i64 dst_align = type_align_of(type);

	if (dst_align > src_align) {
		// Destination is more strictly aligned than the source: round-trip
		// through a stack slot allocated with the destination's alignment.
		// Store through the slot's pointer retyped to the source type, then
		// load the slot back as the destination type.
		cgAddr local = cg_add_local(p, type, nullptr, false);
		cgValue dst = local.addr;
		dst.type = alloc_type_pointer(value.type);
		cg_emit_store(p, dst, value);
		return cg_addr_load(p, local);
	}

	TB_DataType dt = cg_data_type(type);
	switch (value.kind) {
	case cgValue_Value:
		// Direct SSA value: a bitcast preserves the bit pattern.
		GB_ASSERT(!TB_IS_VOID_TYPE(dt));
		value.type = type;
		value.node = tb_inst_bitcast(p->func, value.node, dt);
		return value;
	case cgValue_Addr:
		// Lvalue: memory already holds the bits; only the type label changes.
		value.type = type;
		return value;
	case cgValue_Symbol:
		// Symbols were flattened to values by cg_flatten_value above.
		GB_PANIC("should be handled above");
		break;
	case cgValue_Multi:
		GB_PANIC("cannot transmute multiple values at once");
		break;
	}
	return value;
}
  155. gb_internal cgValue cg_emit_byte_swap(cgProcedure *p, cgValue value, Type *end_type) {
  156. GB_ASSERT(type_size_of(value.type) == type_size_of(end_type));
  157. if (type_size_of(value.type) < 2) {
  158. return value;
  159. }
  160. if (is_type_float(value.type)) {
  161. i64 sz = type_size_of(value.type);
  162. Type *integer_type = nullptr;
  163. switch (sz) {
  164. case 2: integer_type = t_u16; break;
  165. case 4: integer_type = t_u32; break;
  166. case 8: integer_type = t_u64; break;
  167. }
  168. GB_ASSERT(integer_type != nullptr);
  169. value = cg_emit_transmute(p, value, integer_type);
  170. }
  171. GB_ASSERT(value.kind == cgValue_Value);
  172. value.node = tb_inst_bswap(p->func, value.node);
  173. return cg_emit_transmute(p, value, end_type);
  174. }
  175. gb_internal cgValue cg_emit_comp(cgProcedure *p, TokenKind op_kind, cgValue left, cgValue right) {
  176. GB_ASSERT(gb_is_between(op_kind, Token__ComparisonBegin+1, Token__ComparisonEnd-1));
  177. Type *a = core_type(left.type);
  178. Type *b = core_type(right.type);
  179. cgValue nil_check = {};
  180. if (is_type_array_like(left.type) || is_type_array_like(right.type)) {
  181. // don't do `nil` check if it is array-like
  182. } else if (is_type_untyped_nil(left.type)) {
  183. nil_check = cg_emit_comp_against_nil(p, op_kind, right);
  184. } else if (is_type_untyped_nil(right.type)) {
  185. nil_check = cg_emit_comp_against_nil(p, op_kind, left);
  186. }
  187. if (nil_check.node != nullptr) {
  188. return nil_check;
  189. }
  190. if (are_types_identical(a, b)) {
  191. // NOTE(bill): No need for a conversion
  192. } /*else if (cg_is_const(left) || cg_is_const_nil(left)) {
  193. left = cg_emit_conv(p, left, right.type);
  194. } else if (cg_is_const(right) || cg_is_const_nil(right)) {
  195. right = cg_emit_conv(p, right, left.type);
  196. }*/ else {
  197. Type *lt = left.type;
  198. Type *rt = right.type;
  199. lt = left.type;
  200. rt = right.type;
  201. i64 ls = type_size_of(lt);
  202. i64 rs = type_size_of(rt);
  203. // NOTE(bill): Quick heuristic, larger types are usually the target type
  204. if (ls < rs) {
  205. left = cg_emit_conv(p, left, rt);
  206. } else if (ls > rs) {
  207. right = cg_emit_conv(p, right, lt);
  208. } else {
  209. if (is_type_union(rt)) {
  210. left = cg_emit_conv(p, left, rt);
  211. } else {
  212. right = cg_emit_conv(p, right, lt);
  213. }
  214. }
  215. }
  216. a = core_type(left.type);
  217. b = core_type(right.type);
  218. left = cg_flatten_value(p, left);
  219. right = cg_flatten_value(p, right);
  220. if (is_type_matrix(a) && (op_kind == Token_CmpEq || op_kind == Token_NotEq)) {
  221. GB_PANIC("TODO(bill): cg_emit_comp matrix");
  222. // Type *tl = base_type(a);
  223. // lbValue lhs = lb_address_from_load_or_generate_local(p, left);
  224. // lbValue rhs = lb_address_from_load_or_generate_local(p, right);
  225. // // TODO(bill): Test to see if this is actually faster!!!!
  226. // auto args = array_make<lbValue>(permanent_allocator(), 3);
  227. // args[0] = lb_emit_conv(p, lhs, t_rawptr);
  228. // args[1] = lb_emit_conv(p, rhs, t_rawptr);
  229. // args[2] = lb_const_int(p->module, t_int, type_size_of(tl));
  230. // lbValue val = lb_emit_runtime_call(p, "memory_compare", args);
  231. // lbValue res = lb_emit_comp(p, op_kind, val, lb_const_nil(p->module, val.type));
  232. // return lb_emit_conv(p, res, t_bool);
  233. }
  234. if (is_type_array_like(a)) {
  235. GB_PANIC("TODO(bill): cg_emit_comp is_type_array_like");
  236. // Type *tl = base_type(a);
  237. // lbValue lhs = lb_address_from_load_or_generate_local(p, left);
  238. // lbValue rhs = lb_address_from_load_or_generate_local(p, right);
  239. // TokenKind cmp_op = Token_And;
  240. // lbValue res = lb_const_bool(p->module, t_bool, true);
  241. // if (op_kind == Token_NotEq) {
  242. // res = lb_const_bool(p->module, t_bool, false);
  243. // cmp_op = Token_Or;
  244. // } else if (op_kind == Token_CmpEq) {
  245. // res = lb_const_bool(p->module, t_bool, true);
  246. // cmp_op = Token_And;
  247. // }
  248. // bool inline_array_arith = lb_can_try_to_inline_array_arith(tl);
  249. // i32 count = 0;
  250. // switch (tl->kind) {
  251. // case Type_Array: count = cast(i32)tl->Array.count; break;
  252. // case Type_EnumeratedArray: count = cast(i32)tl->EnumeratedArray.count; break;
  253. // }
  254. // if (inline_array_arith) {
  255. // // inline
  256. // lbAddr val = lb_add_local_generated(p, t_bool, false);
  257. // lb_addr_store(p, val, res);
  258. // for (i32 i = 0; i < count; i++) {
  259. // lbValue x = lb_emit_load(p, lb_emit_array_epi(p, lhs, i));
  260. // lbValue y = lb_emit_load(p, lb_emit_array_epi(p, rhs, i));
  261. // lbValue cmp = lb_emit_comp(p, op_kind, x, y);
  262. // lbValue new_res = lb_emit_arith(p, cmp_op, lb_addr_load(p, val), cmp, t_bool);
  263. // lb_addr_store(p, val, lb_emit_conv(p, new_res, t_bool));
  264. // }
  265. // return lb_addr_load(p, val);
  266. // } else {
  267. // if (is_type_simple_compare(tl) && (op_kind == Token_CmpEq || op_kind == Token_NotEq)) {
  268. // // TODO(bill): Test to see if this is actually faster!!!!
  269. // auto args = array_make<lbValue>(permanent_allocator(), 3);
  270. // args[0] = lb_emit_conv(p, lhs, t_rawptr);
  271. // args[1] = lb_emit_conv(p, rhs, t_rawptr);
  272. // args[2] = lb_const_int(p->module, t_int, type_size_of(tl));
  273. // lbValue val = lb_emit_runtime_call(p, "memory_compare", args);
  274. // lbValue res = lb_emit_comp(p, op_kind, val, lb_const_nil(p->module, val.type));
  275. // return lb_emit_conv(p, res, t_bool);
  276. // } else {
  277. // lbAddr val = lb_add_local_generated(p, t_bool, false);
  278. // lb_addr_store(p, val, res);
  279. // auto loop_data = lb_loop_start(p, count, t_i32);
  280. // {
  281. // lbValue i = loop_data.idx;
  282. // lbValue x = lb_emit_load(p, lb_emit_array_ep(p, lhs, i));
  283. // lbValue y = lb_emit_load(p, lb_emit_array_ep(p, rhs, i));
  284. // lbValue cmp = lb_emit_comp(p, op_kind, x, y);
  285. // lbValue new_res = lb_emit_arith(p, cmp_op, lb_addr_load(p, val), cmp, t_bool);
  286. // lb_addr_store(p, val, lb_emit_conv(p, new_res, t_bool));
  287. // }
  288. // lb_loop_end(p, loop_data);
  289. // return lb_addr_load(p, val);
  290. // }
  291. // }
  292. }
  293. if ((is_type_struct(a) || is_type_union(a)) && is_type_comparable(a)) {
  294. GB_PANIC("TODO(bill): cg_compare_records");
  295. // return cg_compare_records(p, op_kind, left, right, a);
  296. }
  297. if ((is_type_struct(b) || is_type_union(b)) && is_type_comparable(b)) {
  298. GB_PANIC("TODO(bill): cg_compare_records");
  299. // return cg_compare_records(p, op_kind, left, right, b);
  300. }
  301. if (is_type_string(a)) {
  302. if (is_type_cstring(a)) {
  303. left = cg_emit_conv(p, left, t_string);
  304. right = cg_emit_conv(p, right, t_string);
  305. }
  306. char const *runtime_procedure = nullptr;
  307. switch (op_kind) {
  308. case Token_CmpEq: runtime_procedure = "string_eq"; break;
  309. case Token_NotEq: runtime_procedure = "string_ne"; break;
  310. case Token_Lt: runtime_procedure = "string_lt"; break;
  311. case Token_Gt: runtime_procedure = "string_gt"; break;
  312. case Token_LtEq: runtime_procedure = "string_le"; break;
  313. case Token_GtEq: runtime_procedure = "string_gt"; break;
  314. }
  315. GB_ASSERT(runtime_procedure != nullptr);
  316. GB_PANIC("TODO(bill): cg_emit_runtime_call");
  317. // auto args = array_make<lbValue>(permanent_allocator(), 2);
  318. // args[0] = left;
  319. // args[1] = right;
  320. // return cg_emit_runtime_call(p, runtime_procedure, args);
  321. }
  322. if (is_type_complex(a)) {
  323. char const *runtime_procedure = "";
  324. i64 sz = 8*type_size_of(a);
  325. switch (sz) {
  326. case 32:
  327. switch (op_kind) {
  328. case Token_CmpEq: runtime_procedure = "complex32_eq"; break;
  329. case Token_NotEq: runtime_procedure = "complex32_ne"; break;
  330. }
  331. break;
  332. case 64:
  333. switch (op_kind) {
  334. case Token_CmpEq: runtime_procedure = "complex64_eq"; break;
  335. case Token_NotEq: runtime_procedure = "complex64_ne"; break;
  336. }
  337. break;
  338. case 128:
  339. switch (op_kind) {
  340. case Token_CmpEq: runtime_procedure = "complex128_eq"; break;
  341. case Token_NotEq: runtime_procedure = "complex128_ne"; break;
  342. }
  343. break;
  344. }
  345. GB_ASSERT(runtime_procedure != nullptr);
  346. GB_PANIC("TODO(bill): cg_emit_runtime_call");
  347. // auto args = array_make<lbValue>(permanent_allocator(), 2);
  348. // args[0] = left;
  349. // args[1] = right;
  350. // return lb_emit_runtime_call(p, runtime_procedure, args);
  351. }
  352. if (is_type_quaternion(a)) {
  353. char const *runtime_procedure = "";
  354. i64 sz = 8*type_size_of(a);
  355. switch (sz) {
  356. case 64:
  357. switch (op_kind) {
  358. case Token_CmpEq: runtime_procedure = "quaternion64_eq"; break;
  359. case Token_NotEq: runtime_procedure = "quaternion64_ne"; break;
  360. }
  361. break;
  362. case 128:
  363. switch (op_kind) {
  364. case Token_CmpEq: runtime_procedure = "quaternion128_eq"; break;
  365. case Token_NotEq: runtime_procedure = "quaternion128_ne"; break;
  366. }
  367. break;
  368. case 256:
  369. switch (op_kind) {
  370. case Token_CmpEq: runtime_procedure = "quaternion256_eq"; break;
  371. case Token_NotEq: runtime_procedure = "quaternion256_ne"; break;
  372. }
  373. break;
  374. }
  375. GB_ASSERT(runtime_procedure != nullptr);
  376. GB_PANIC("TODO(bill): cg_emit_runtime_call");
  377. // auto args = array_make<lbValue>(permanent_allocator(), 2);
  378. // args[0] = left;
  379. // args[1] = right;
  380. // return lb_emit_runtime_call(p, runtime_procedure, args);
  381. }
  382. if (is_type_bit_set(a)) {
  383. switch (op_kind) {
  384. case Token_Lt:
  385. case Token_LtEq:
  386. case Token_Gt:
  387. case Token_GtEq:
  388. {
  389. Type *it = bit_set_to_int(a);
  390. cgValue lhs = cg_emit_transmute(p, left, it);
  391. cgValue rhs = cg_emit_transmute(p, right, it);
  392. cgValue res = cg_emit_arith(p, Token_And, lhs, rhs, it);
  393. GB_ASSERT(lhs.kind == cgValue_Value);
  394. GB_ASSERT(rhs.kind == cgValue_Value);
  395. GB_ASSERT(res.kind == cgValue_Value);
  396. if (op_kind == Token_Lt || op_kind == Token_LtEq) {
  397. // (lhs & rhs) == lhs
  398. res = cg_value(tb_inst_cmp_eq(p->func, res.node, lhs.node), t_bool);
  399. } else if (op_kind == Token_Gt || op_kind == Token_GtEq) {
  400. // (lhs & rhs) == rhs
  401. res = cg_value(tb_inst_cmp_eq(p->func, res.node, rhs.node), t_bool);
  402. }
  403. // NOTE(bill): Strict subsets
  404. if (op_kind == Token_Lt || op_kind == Token_Gt) {
  405. // res &~ (lhs == rhs)
  406. cgValue eq = cg_value(tb_inst_cmp_eq(p->func, lhs.node, rhs.node), t_bool);
  407. res = cg_emit_arith(p, Token_AndNot, res, eq, t_bool);
  408. }
  409. return res;
  410. }
  411. case Token_CmpEq:
  412. GB_ASSERT(left.kind == cgValue_Value);
  413. GB_ASSERT(right.kind == cgValue_Value);
  414. return cg_value(tb_inst_cmp_eq(p->func, left.node, right.node), t_bool);
  415. case Token_NotEq:
  416. GB_ASSERT(left.kind == cgValue_Value);
  417. GB_ASSERT(right.kind == cgValue_Value);
  418. return cg_value(tb_inst_cmp_ne(p->func, left.node, right.node), t_bool);
  419. }
  420. }
  421. if (op_kind != Token_CmpEq && op_kind != Token_NotEq) {
  422. Type *t = left.type;
  423. if (is_type_integer(t) && is_type_different_to_arch_endianness(t)) {
  424. Type *platform_type = integer_endian_type_to_platform_type(t);
  425. cgValue x = cg_emit_byte_swap(p, left, platform_type);
  426. cgValue y = cg_emit_byte_swap(p, right, platform_type);
  427. left = x;
  428. right = y;
  429. } else if (is_type_float(t) && is_type_different_to_arch_endianness(t)) {
  430. Type *platform_type = integer_endian_type_to_platform_type(t);
  431. cgValue x = cg_emit_conv(p, left, platform_type);
  432. cgValue y = cg_emit_conv(p, right, platform_type);
  433. left = x;
  434. right = y;
  435. }
  436. }
  437. a = core_type(left.type);
  438. b = core_type(right.type);
  439. if (is_type_integer(a) ||
  440. is_type_boolean(a) ||
  441. is_type_pointer(a) ||
  442. is_type_multi_pointer(a) ||
  443. is_type_proc(a) ||
  444. is_type_enum(a) ||
  445. is_type_typeid(a)) {
  446. TB_Node *lhs = left.node;
  447. TB_Node *rhs = right.node;
  448. TB_Node *res = nullptr;
  449. bool is_signed = is_type_integer(left.type) && !is_type_unsigned(left.type);
  450. switch (op_kind) {
  451. case Token_CmpEq: res = tb_inst_cmp_eq(p->func, lhs, rhs); break;
  452. case Token_NotEq: res = tb_inst_cmp_ne(p->func, lhs, rhs); break;
  453. case Token_Gt: res = tb_inst_cmp_igt(p->func, lhs, rhs, is_signed); break;
  454. case Token_GtEq: res = tb_inst_cmp_ige(p->func, lhs, rhs, is_signed); break;
  455. case Token_Lt: res = tb_inst_cmp_ilt(p->func, lhs, rhs, is_signed); break;
  456. case Token_LtEq: res = tb_inst_cmp_ige(p->func, lhs, rhs, is_signed); break;
  457. }
  458. GB_ASSERT(res != nullptr);
  459. return cg_value(res, t_bool);
  460. } else if (is_type_float(a)) {
  461. TB_Node *lhs = left.node;
  462. TB_Node *rhs = right.node;
  463. TB_Node *res = nullptr;
  464. switch (op_kind) {
  465. case Token_CmpEq: res = tb_inst_cmp_eq(p->func, lhs, rhs); break;
  466. case Token_NotEq: res = tb_inst_cmp_ne(p->func, lhs, rhs); break;
  467. case Token_Gt: res = tb_inst_cmp_fgt(p->func, lhs, rhs); break;
  468. case Token_GtEq: res = tb_inst_cmp_fge(p->func, lhs, rhs); break;
  469. case Token_Lt: res = tb_inst_cmp_flt(p->func, lhs, rhs); break;
  470. case Token_LtEq: res = tb_inst_cmp_fge(p->func, lhs, rhs); break;
  471. }
  472. GB_ASSERT(res != nullptr);
  473. return cg_value(res, t_bool);
  474. } else if (is_type_simd_vector(a)) {
  475. GB_PANIC("TODO(bill): #simd vector");
  476. // LLVMValueRef mask = nullptr;
  477. // Type *elem = base_array_type(a);
  478. // if (is_type_float(elem)) {
  479. // LLVMRealPredicate pred = {};
  480. // switch (op_kind) {
  481. // case Token_CmpEq: pred = LLVMRealOEQ; break;
  482. // case Token_NotEq: pred = LLVMRealONE; break;
  483. // }
  484. // mask = LLVMBuildFCmp(p->builder, pred, left.value, right.value, "");
  485. // } else {
  486. // LLVMIntPredicate pred = {};
  487. // switch (op_kind) {
  488. // case Token_CmpEq: pred = LLVMIntEQ; break;
  489. // case Token_NotEq: pred = LLVMIntNE; break;
  490. // }
  491. // mask = LLVMBuildICmp(p->builder, pred, left.value, right.value, "");
  492. // }
  493. // GB_ASSERT_MSG(mask != nullptr, "Unhandled comparison kind %s (%s) %.*s %s (%s)", type_to_string(left.type), type_to_string(base_type(left.type)), LIT(token_strings[op_kind]), type_to_string(right.type), type_to_string(base_type(right.type)));
  494. // /* NOTE(bill, 2022-05-28):
  495. // Thanks to Per Vognsen, sign extending <N x i1> to
  496. // a vector of the same width as the input vector, bit casting to an integer,
  497. // and then comparing against zero is the better option
  498. // See: https://lists.llvm.org/pipermail/llvm-dev/2012-September/053046.html
  499. // // Example assuming 128-bit vector
  500. // %1 = <4 x float> ...
  501. // %2 = <4 x float> ...
  502. // %3 = fcmp oeq <4 x float> %1, %2
  503. // %4 = sext <4 x i1> %3 to <4 x i32>
  504. // %5 = bitcast <4 x i32> %4 to i128
  505. // %6 = icmp ne i128 %5, 0
  506. // br i1 %6, label %true1, label %false2
  507. // This will result in 1 cmpps + 1 ptest + 1 br
  508. // (even without SSE4.1, contrary to what the mail list states, because of pmovmskb)
  509. // */
  510. // unsigned count = cast(unsigned)get_array_type_count(a);
  511. // unsigned elem_sz = cast(unsigned)(type_size_of(elem)*8);
  512. // LLVMTypeRef mask_type = LLVMVectorType(LLVMIntTypeInContext(p->module->ctx, elem_sz), count);
  513. // mask = LLVMBuildSExtOrBitCast(p->builder, mask, mask_type, "");
  514. // LLVMTypeRef mask_int_type = LLVMIntTypeInContext(p->module->ctx, cast(unsigned)(8*type_size_of(a)));
  515. // LLVMValueRef mask_int = LLVMBuildBitCast(p->builder, mask, mask_int_type, "");
  516. // res.value = LLVMBuildICmp(p->builder, LLVMIntNE, mask_int, LLVMConstNull(LLVMTypeOf(mask_int)), "");
  517. // return res;
  518. }
  519. GB_PANIC("Unhandled comparison kind %s (%s) %.*s %s (%s)", type_to_string(left.type), type_to_string(base_type(left.type)), LIT(token_strings[op_kind]), type_to_string(right.type), type_to_string(base_type(right.type)));
  520. return {};
  521. }
// Emits a comparison of `x` against `nil` for the given operator
// (`op_kind` must be == or !=) and returns the resulting boolean value.
//
// Dispatches on the base type of `x`: pointer-like types compare the raw
// value against 0; aggregate types (any/slice/map/union/struct/...) are
// still unimplemented in this backend and panic, with the corresponding
// LLVM-backend implementation kept below in comments as a porting reference.
gb_internal cgValue cg_emit_comp_against_nil(cgProcedure *p, TokenKind op_kind, cgValue x) {
	GB_ASSERT(op_kind == Token_CmpEq || op_kind == Token_NotEq);
	x = cg_flatten_value(p, x);
	// NOTE: `res` is only used by the commented-out reference code below;
	// the live paths construct and return their results directly.
	cgValue res = {};
	Type *t = x.type;
	TB_DataType dt = cg_data_type(t);
	Type *bt = base_type(t);
	TypeKind type_kind = bt->kind;
	switch (type_kind) {
	case Type_Basic:
		switch (bt->Basic.kind) {
		case Basic_rawptr:
		case Basic_cstring:
			// Pointer-like scalars: nil is the zero value.
			GB_ASSERT(x.kind == cgValue_Value);
			if (op_kind == Token_CmpEq) {
				return cg_value(tb_inst_cmp_eq(p->func, x.node, tb_inst_uint(p->func, dt, 0)), t_bool);
			} else if (op_kind == Token_NotEq) {
				return cg_value(tb_inst_cmp_ne(p->func, x.node, tb_inst_uint(p->func, dt, 0)), t_bool);
			}
			break;
		case Basic_any:
			{
				GB_PANIC("TODO(bill): cg_emit_struct_ev");
				// // TODO(bill): is this correct behaviour for nil comparison for any?
				// cgValue data = cg_emit_struct_ev(p, x, 0);
				// cgValue ti = cg_emit_struct_ev(p, x, 1);
				// if (op_kind == Token_CmpEq) {
				// LLVMValueRef a = LLVMBuildIsNull(p->builder, data.value, "");
				// LLVMValueRef b = LLVMBuildIsNull(p->builder, ti.value, "");
				// res.value = LLVMBuildOr(p->builder, a, b, "");
				// return res;
				// } else if (op_kind == Token_NotEq) {
				// LLVMValueRef a = LLVMBuildIsNotNull(p->builder, data.value, "");
				// LLVMValueRef b = LLVMBuildIsNotNull(p->builder, ti.value, "");
				// res.value = LLVMBuildAnd(p->builder, a, b, "");
				// return res;
				// }
			}
			break;
		case Basic_typeid:
			// A nil typeid is the invalid typeid with value 0; defer to the
			// general comparison emitter.
			cgValue invalid_typeid = cg_const_value(p, t_typeid, exact_value_i64(0));
			return cg_emit_comp(p, op_kind, x, invalid_typeid);
		}
		break;
	case Type_Enum:
	case Type_Pointer:
	case Type_MultiPointer:
	case Type_Proc:
	case Type_BitSet:
		// All of these are represented as a single scalar; nil is 0.
		GB_ASSERT(x.kind == cgValue_Value);
		if (op_kind == Token_CmpEq) {
			return cg_value(tb_inst_cmp_eq(p->func, x.node, tb_inst_uint(p->func, dt, 0)), t_bool);
		} else if (op_kind == Token_NotEq) {
			return cg_value(tb_inst_cmp_ne(p->func, x.node, tb_inst_uint(p->func, dt, 0)), t_bool);
		}
		break;
	case Type_Slice:
		{
			GB_PANIC("TODO(bill): cg_emit_struct_ev");
			// cgValue data = cg_emit_struct_ev(p, x, 0);
			// if (op_kind == Token_CmpEq) {
			// res.value = LLVMBuildIsNull(p->builder, data.value, "");
			// return res;
			// } else if (op_kind == Token_NotEq) {
			// res.value = LLVMBuildIsNotNull(p->builder, data.value, "");
			// return res;
			// }
		}
		break;
	case Type_DynamicArray:
		{
			GB_PANIC("TODO(bill): cg_emit_struct_ev");
			// cgValue data = cg_emit_struct_ev(p, x, 0);
			// if (op_kind == Token_CmpEq) {
			// res.value = LLVMBuildIsNull(p->builder, data.value, "");
			// return res;
			// } else if (op_kind == Token_NotEq) {
			// res.value = LLVMBuildIsNotNull(p->builder, data.value, "");
			// return res;
			// }
		}
		break;
	case Type_Map:
		{
			GB_PANIC("TODO(bill): cg_emit_struct_ev");
			// cgValue data_ptr = cg_emit_struct_ev(p, x, 0);
			// if (op_kind == Token_CmpEq) {
			// res.value = LLVMBuildIsNull(p->builder, data_ptr.value, "");
			// return res;
			// } else {
			// res.value = LLVMBuildIsNotNull(p->builder, data_ptr.value, "");
			// return res;
			// }
		}
		break;
	case Type_Union:
		{
			GB_PANIC("TODO(bill): cg_emit_struct_ev");
			// if (type_size_of(t) == 0) {
			// if (op_kind == Token_CmpEq) {
			// return cg_const_bool(p->module, t_bool, true);
			// } else if (op_kind == Token_NotEq) {
			// return cg_const_bool(p->module, t_bool, false);
			// }
			// } else if (is_type_union_maybe_pointer(t)) {
			// cgValue tag = cg_emit_transmute(p, x, t_rawptr);
			// return cg_emit_comp_against_nil(p, op_kind, tag);
			// } else {
			// cgValue tag = cg_emit_union_tag_value(p, x);
			// return cg_emit_comp(p, op_kind, tag, cg_zero(p->module, tag.type));
			// }
		}
		break;
	case Type_Struct:
		GB_PANIC("TODO(bill): cg_emit_struct_ev");
		// if (is_type_soa_struct(t)) {
		// Type *bt = base_type(t);
		// if (bt->Struct.soa_kind == StructSoa_Slice) {
		// LLVMValueRef the_value = {};
		// if (bt->Struct.fields.count == 0) {
		// cgValue len = cg_soa_struct_len(p, x);
		// the_value = len.value;
		// } else {
		// cgValue first_field = cg_emit_struct_ev(p, x, 0);
		// the_value = first_field.value;
		// }
		// if (op_kind == Token_CmpEq) {
		// res.value = LLVMBuildIsNull(p->builder, the_value, "");
		// return res;
		// } else if (op_kind == Token_NotEq) {
		// res.value = LLVMBuildIsNotNull(p->builder, the_value, "");
		// return res;
		// }
		// } else if (bt->Struct.soa_kind == StructSoa_Dynamic) {
		// LLVMValueRef the_value = {};
		// if (bt->Struct.fields.count == 0) {
		// cgValue cap = cg_soa_struct_cap(p, x);
		// the_value = cap.value;
		// } else {
		// cgValue first_field = cg_emit_struct_ev(p, x, 0);
		// the_value = first_field.value;
		// }
		// if (op_kind == Token_CmpEq) {
		// res.value = LLVMBuildIsNull(p->builder, the_value, "");
		// return res;
		// } else if (op_kind == Token_NotEq) {
		// res.value = LLVMBuildIsNotNull(p->builder, the_value, "");
		// return res;
		// }
		// }
		// } else if (is_type_struct(t) && type_has_nil(t)) {
		// auto args = array_make<cgValue>(permanent_allocator(), 2);
		// cgValue lhs = cg_address_from_load_or_generate_local(p, x);
		// args[0] = cg_emit_conv(p, lhs, t_rawptr);
		// args[1] = cg_const_int(p->module, t_int, type_size_of(t));
		// cgValue val = cg_emit_runtime_call(p, "memory_compare_zero", args);
		// cgValue res = cg_emit_comp(p, op_kind, val, cg_const_int(p->module, t_int, 0));
		// return res;
		// }
		break;
	}
	GB_PANIC("Unknown handled type: %s -> %s", type_to_string(t), type_to_string(bt));
	return {};
}
// Emits a value conversion from `value.type` to `t`, returning the converted value.
//
// Handles (in order): identity, untyped nil/uninit, core-type-identical
// transmutes, integer<->integer (with endianness correction), bool<->int,
// cstring/pointer punning, float<->float, float<->integer, pointer<->uintptr,
// pointer/proc bitcasts, string<->[]u8, bit_set<->int, typeid<->int, and
// untyped string materialization. Unimplemented conversions panic with a TODO.
//
// NOTE: the branch order is load-bearing — e.g. the subtype-polymorphism check
// must run before the generic Pointer <-> Pointer bitcast below it.
gb_internal cgValue cg_emit_conv(cgProcedure *p, cgValue value, Type *t) {
	t = reduce_tuple_to_single_type(t);
	value = cg_flatten_value(p, value);
	Type *src_type = value.type;
	if (are_types_identical(t, src_type)) {
		return value;
	}
	if (is_type_untyped_uninit(src_type)) {
		// return cg_const_undef(m, t);
		return cg_const_nil(p, t);
	}
	if (is_type_untyped_nil(src_type)) {
		return cg_const_nil(p, t);
	}
	Type *src = core_type(src_type);
	Type *dst = core_type(t);
	GB_ASSERT(src != nullptr);
	GB_ASSERT(dst != nullptr);
	if (are_types_identical(src, dst)) {
		// Same core representation: a transmute suffices (distinct named types).
		return cg_emit_transmute(p, value, t);
	}
	TB_DataType st = cg_data_type(src);
	if (value.kind == cgValue_Value && !TB_IS_VOID_TYPE(value.node->dt)) {
		// Prefer the data type the node actually carries over the nominal one.
		st = value.node->dt;
	}
	TB_DataType dt = cg_data_type(t);
	// integer -> integer
	if (is_type_integer(src) && is_type_integer(dst)) {
		GB_ASSERT(src->kind == Type_Basic &&
		          dst->kind == Type_Basic);
		GB_ASSERT(value.kind == cgValue_Value);
		i64 sz = type_size_of(default_type(src));
		i64 dz = type_size_of(default_type(dst));
		if (sz == dz) {
			// Same width: only a byte swap may be needed for endian-typed ints.
			if (dz > 1 && !types_have_same_internal_endian(src, dst)) {
				return cg_emit_byte_swap(p, value, t);
			}
			value.type = t;
			return value;
		}
		if (sz > 1 && is_type_different_to_arch_endianness(src)) {
			// Normalize the source to platform endianness before resizing.
			Type *platform_src_type = integer_endian_type_to_platform_type(src);
			value = cg_emit_byte_swap(p, value, platform_src_type);
		}
		// Pick the resize op: truncate, bitcast (same size), or extend.
		TB_Node* (*op)(TB_Function* f, TB_Node* src, TB_DataType dt) = tb_inst_trunc;
		if (dz < sz) {
			op = tb_inst_trunc;
		} else if (dz == sz) {
			op = tb_inst_bitcast;
		} else if (dz > sz) {
			op = is_type_unsigned(src) ? tb_inst_zxt : tb_inst_sxt; // zero extend if unsigned, sign extend otherwise
		}
		if (dz > 1 && is_type_different_to_arch_endianness(dst)) {
			// Resize in platform endianness, then swap into the destination's.
			Type *platform_dst_type = integer_endian_type_to_platform_type(dst);
			cgValue res = cg_value(op(p->func, value.node, cg_data_type(platform_dst_type)), platform_dst_type);
			return cg_emit_byte_swap(p, res, t);
		} else {
			return cg_value(op(p->func, value.node, dt), t);
		}
	}
	// boolean -> boolean/integer
	if (is_type_boolean(src) && (is_type_boolean(dst) || is_type_integer(dst))) {
		// Normalize to 0/1 via (value != 0), then zero-extend.
		TB_Node *v = tb_inst_cmp_ne(p->func, value.node, tb_inst_uint(p->func, st, 0));
		return cg_value(tb_inst_zxt(p->func, v, dt), t);
	}
	// integer -> boolean
	if (is_type_integer(src) && is_type_boolean(dst)) {
		TB_Node *v = tb_inst_cmp_ne(p->func, value.node, tb_inst_uint(p->func, st, 0));
		return cg_value(tb_inst_zxt(p->func, v, dt), t);
	}
	// cstring <-> ^u8 / [^]u8 / rawptr: all pointer representations, so transmute.
	if (is_type_cstring(src) && is_type_u8_ptr(dst)) {
		return cg_emit_transmute(p, value, dst);
	}
	if (is_type_u8_ptr(src) && is_type_cstring(dst)) {
		return cg_emit_transmute(p, value, dst);
	}
	if (is_type_cstring(src) && is_type_u8_multi_ptr(dst)) {
		return cg_emit_transmute(p, value, dst);
	}
	if (is_type_u8_multi_ptr(src) && is_type_cstring(dst)) {
		return cg_emit_transmute(p, value, dst);
	}
	if (is_type_cstring(src) && is_type_rawptr(dst)) {
		return cg_emit_transmute(p, value, dst);
	}
	if (is_type_rawptr(src) && is_type_cstring(dst)) {
		return cg_emit_transmute(p, value, dst);
	}
	if (are_types_identical(src, t_cstring) && are_types_identical(dst, t_string)) {
		GB_PANIC("TODO(bill): cstring_to_string call");
		// TEMPORARY_ALLOCATOR_GUARD();
		// lbValue c = lb_emit_conv(p, value, t_cstring);
		// auto args = array_make<lbValue>(temporary_allocator(), 1);
		// args[0] = c;
		// lbValue s = lb_emit_runtime_call(p, "cstring_to_string", args);
		// return lb_emit_conv(p, s, dst);
	}
	// float -> float
	if (is_type_float(src) && is_type_float(dst)) {
		i64 sz = type_size_of(src);
		i64 dz = type_size_of(dst);
		if (sz == 2 || dz == 2) {
			GB_PANIC("TODO(bill): f16 conversions");
		}
		if (dz == sz) {
			// Same width: identity or byte swap for endian-typed floats.
			if (types_have_same_internal_endian(src, dst)) {
				return cg_value(value.node, t);
			} else {
				return cg_emit_byte_swap(p, value, t);
			}
		}
		if (is_type_different_to_arch_endianness(src) || is_type_different_to_arch_endianness(dst)) {
			// Convert via platform-endian intermediates, swapping at the end
			// if the destination is non-platform endian.
			Type *platform_src_type = integer_endian_type_to_platform_type(src);
			Type *platform_dst_type = integer_endian_type_to_platform_type(dst);
			cgValue res = {};
			res = cg_emit_conv(p, value, platform_src_type);
			res = cg_emit_conv(p, res, platform_dst_type);
			if (is_type_different_to_arch_endianness(dst)) {
				res = cg_emit_byte_swap(p, res, t);
			}
			return cg_emit_conv(p, res, t);
		}
		if (dz >= sz) {
			return cg_value(tb_inst_fpxt(p->func, value.node, dt), t);
		}
		return cg_value(tb_inst_trunc(p->func, value.node, dt), t);
	}
	if (is_type_complex(src) && is_type_complex(dst)) {
		GB_PANIC("TODO(bill): complex -> complex");
	}
	if (is_type_quaternion(src) && is_type_quaternion(dst)) {
		// @QuaternionLayout
		GB_PANIC("TODO(bill): quaternion -> quaternion");
	}
	if (is_type_integer(src) && is_type_complex(dst)) {
		GB_PANIC("TODO(bill): int -> complex");
	}
	if (is_type_float(src) && is_type_complex(dst)) {
		GB_PANIC("TODO(bill): float -> complex");
	}
	if (is_type_integer(src) && is_type_quaternion(dst)) {
		GB_PANIC("TODO(bill): int -> quaternion");
	}
	if (is_type_float(src) && is_type_quaternion(dst)) {
		GB_PANIC("TODO(bill): float -> quaternion");
	}
	if (is_type_complex(src) && is_type_quaternion(dst)) {
		GB_PANIC("TODO(bill): complex -> quaternion");
	}
	// float <-> integer
	if (is_type_float(src) && is_type_integer(dst)) {
		if (is_type_different_to_arch_endianness(src) || is_type_different_to_arch_endianness(dst)) {
			// Route through platform-endian types first.
			Type *platform_src_type = integer_endian_type_to_platform_type(src);
			Type *platform_dst_type = integer_endian_type_to_platform_type(dst);
			cgValue res = {};
			res = cg_emit_conv(p, value, platform_src_type);
			res = cg_emit_conv(p, res, platform_dst_type);
			return cg_emit_conv(p, res, t);
		}
		// 128-bit destinations would need a runtime call (LLVM-backend
		// reference kept below); not yet ported to this backend.
		// if (is_type_integer_128bit(dst)) {
		// TEMPORARY_ALLOCATOR_GUARD();
		// auto args = array_make<lbValue>(temporary_allocator(), 1);
		// args[0] = value;
		// char const *call = "fixunsdfdi";
		// if (is_type_unsigned(dst)) {
		// call = "fixunsdfti";
		// }
		// lbValue res_i128 = lb_emit_runtime_call(p, call, args);
		// return lb_emit_conv(p, res_i128, t);
		// }
		bool is_signed = !is_type_unsigned(dst);
		return cg_value(tb_inst_float2int(p->func, value.node, dt, is_signed), t);
	}
	if (is_type_integer(src) && is_type_float(dst)) {
		if (is_type_different_to_arch_endianness(src) || is_type_different_to_arch_endianness(dst)) {
			Type *platform_src_type = integer_endian_type_to_platform_type(src);
			Type *platform_dst_type = integer_endian_type_to_platform_type(dst);
			cgValue res = {};
			res = cg_emit_conv(p, value, platform_src_type);
			res = cg_emit_conv(p, res, platform_dst_type);
			if (is_type_different_to_arch_endianness(dst)) {
				res = cg_emit_byte_swap(p, res, t);
			}
			return cg_emit_conv(p, res, t);
		}
		// 128-bit sources would need a runtime call (reference below).
		// if (is_type_integer_128bit(src)) {
		// TEMPORARY_ALLOCATOR_GUARD();
		// auto args = array_make<lbValue>(temporary_allocator(), 1);
		// args[0] = value;
		// char const *call = "floattidf";
		// if (is_type_unsigned(src)) {
		// call = "floattidf_unsigned";
		// }
		// lbValue res_f64 = lb_emit_runtime_call(p, call, args);
		// return lb_emit_conv(p, res_f64, t);
		// }
		bool is_signed = !is_type_unsigned(dst);
		return cg_value(tb_inst_int2float(p->func, value.node, dt, is_signed), t);
	}
	if (is_type_simd_vector(dst)) {
		GB_PANIC("TODO(bill): ? -> #simd vector");
	}
	// Pointer <-> uintptr
	if (is_type_pointer(src) && is_type_uintptr(dst)) {
		return cg_value(tb_inst_ptr2int(p->func, value.node, dt), t);
	}
	if (is_type_uintptr(src) && is_type_pointer(dst)) {
		return cg_value(tb_inst_int2ptr(p->func, value.node), t);
	}
	if (is_type_multi_pointer(src) && is_type_uintptr(dst)) {
		return cg_value(tb_inst_ptr2int(p->func, value.node, dt), t);
	}
	if (is_type_uintptr(src) && is_type_multi_pointer(dst)) {
		return cg_value(tb_inst_int2ptr(p->func, value.node), t);
	}
	if (is_type_union(dst)) {
		GB_PANIC("TODO(bill): ? -> union");
	}
	// NOTE(bill): This has to be done before 'Pointer <-> Pointer' as it's
	// subtype polymorphism casting
	if (check_is_assignable_to_using_subtype(src_type, t)) {
		GB_PANIC("TODO(bill): ? -> subtyping");
	}
	// Pointer <-> Pointer
	if (is_type_pointer(src) && is_type_pointer(dst)) {
		return cg_value(tb_inst_bitcast(p->func, value.node, dt), t);
	}
	if (is_type_multi_pointer(src) && is_type_pointer(dst)) {
		return cg_value(tb_inst_bitcast(p->func, value.node, dt), t);
	}
	if (is_type_pointer(src) && is_type_multi_pointer(dst)) {
		return cg_value(tb_inst_bitcast(p->func, value.node, dt), t);
	}
	if (is_type_multi_pointer(src) && is_type_multi_pointer(dst)) {
		return cg_value(tb_inst_bitcast(p->func, value.node, dt), t);
	}
	// proc <-> proc
	if (is_type_proc(src) && is_type_proc(dst)) {
		return cg_value(tb_inst_bitcast(p->func, value.node, dt), t);
	}
	// pointer -> proc
	if (is_type_pointer(src) && is_type_proc(dst)) {
		return cg_value(tb_inst_bitcast(p->func, value.node, dt), t);
	}
	// proc -> pointer
	if (is_type_proc(src) && is_type_pointer(dst)) {
		return cg_value(tb_inst_bitcast(p->func, value.node, dt), t);
	}
	// []byte/[]u8 <-> string
	if (is_type_u8_slice(src) && is_type_string(dst)) {
		return cg_emit_transmute(p, value, t);
	}
	if (is_type_string(src) && is_type_u8_slice(dst)) {
		return cg_emit_transmute(p, value, t);
	}
	if (is_type_matrix(dst) && !is_type_matrix(src)) {
		GB_PANIC("TODO(bill): !matrix -> matrix");
	}
	if (is_type_matrix(dst) && is_type_matrix(src)) {
		GB_PANIC("TODO(bill): matrix -> matrix");
	}
	if (is_type_any(dst)) {
		GB_PANIC("TODO(bill): ? -> any");
	}
	i64 src_sz = type_size_of(src);
	i64 dst_sz = type_size_of(dst);
	if (src_sz == dst_sz) {
		// bit_set <-> integer
		if (is_type_integer(src) && is_type_bit_set(dst)) {
			// Convert to the bit_set's backing integer first, then transmute.
			cgValue v = cg_emit_conv(p, value, bit_set_to_int(dst));
			return cg_emit_transmute(p, v, t);
		}
		if (is_type_bit_set(src) && is_type_integer(dst)) {
			cgValue bs = cg_emit_transmute(p, value, bit_set_to_int(src));
			return cg_emit_conv(p, bs, dst);
		}
		// typeid <-> integer
		if (is_type_integer(src) && is_type_typeid(dst)) {
			return cg_emit_transmute(p, value, dst);
		}
		if (is_type_typeid(src) && is_type_integer(dst)) {
			return cg_emit_transmute(p, value, dst);
		}
	}
	if (is_type_untyped(src)) {
		if (is_type_string(src) && is_type_string(dst)) {
			// Materialize an untyped string constant through a stack slot.
			cgAddr result = cg_add_local(p, t, nullptr, false);
			cg_addr_store(p, result, value);
			return cg_addr_load(p, result);
		}
	}
	// No rule matched: dump diagnostics and abort.
	gb_printf_err("%.*s\n", LIT(p->name));
	gb_printf_err("cg_emit_conv: src -> dst\n");
	gb_printf_err("Not Identical %s != %s\n", type_to_string(src_type), type_to_string(t));
	gb_printf_err("Not Identical %s != %s\n", type_to_string(src), type_to_string(dst));
	gb_printf_err("Not Identical %p != %p\n", src_type, t);
	gb_printf_err("Not Identical %p != %p\n", src, dst);
	GB_PANIC("Invalid type conversion: '%s' to '%s' for procedure '%.*s'",
	         type_to_string(src_type), type_to_string(t),
	         LIT(p->name));
	return {};
}
// Emits a binary arithmetic/bitwise operation `lhs op rhs` with result type `type`.
//
// Both operands are first converted to `type`. Non-platform-endian operands are
// byte-swapped to platform endianness, computed, and swapped back — except for
// the pure bitwise ops (&, |, ~, &~), which are endian-agnostic and jump
// straight to `handle_op`. Array/matrix/complex/quaternion arithmetic is not
// yet implemented in this backend and panics.
gb_internal cgValue cg_emit_arith(cgProcedure *p, TokenKind op, cgValue lhs, cgValue rhs, Type *type) {
	if (is_type_array_like(lhs.type) || is_type_array_like(rhs.type)) {
		GB_PANIC("TODO(bill): cg_emit_arith_array");
	} else if (is_type_matrix(lhs.type) || is_type_matrix(rhs.type)) {
		GB_PANIC("TODO(bill): cg_emit_arith_matrix");
	} else if (is_type_complex(type)) {
		GB_PANIC("TODO(bill): cg_emit_arith complex");
	} else if (is_type_quaternion(type)) {
		GB_PANIC("TODO(bill): cg_emit_arith quaternion");
	}
	lhs = cg_flatten_value(p, cg_emit_conv(p, lhs, type));
	rhs = cg_flatten_value(p, cg_emit_conv(p, rhs, type));
	GB_ASSERT(lhs.kind == cgValue_Value);
	GB_ASSERT(rhs.kind == cgValue_Value);
	if (is_type_integer(type) && is_type_different_to_arch_endianness(type)) {
		switch (op) {
		case Token_AndNot:
		case Token_And:
		case Token_Or:
		case Token_Xor:
			// Bitwise ops work identically regardless of byte order;
			// skip the swap round-trip.
			goto handle_op;
		}
		// Swap to platform endianness, compute, swap the result back.
		Type *platform_type = integer_endian_type_to_platform_type(type);
		cgValue x = cg_emit_byte_swap(p, lhs, integer_endian_type_to_platform_type(lhs.type));
		cgValue y = cg_emit_byte_swap(p, rhs, integer_endian_type_to_platform_type(rhs.type));
		cgValue res = cg_emit_arith(p, op, x, y, platform_type);
		return cg_emit_byte_swap(p, res, type);
	}
	if (is_type_float(type) && is_type_different_to_arch_endianness(type)) {
		// Floats use cg_emit_conv (not a raw byte swap) for the round-trip.
		Type *platform_type = integer_endian_type_to_platform_type(type);
		cgValue x = cg_emit_conv(p, lhs, integer_endian_type_to_platform_type(lhs.type));
		cgValue y = cg_emit_conv(p, rhs, integer_endian_type_to_platform_type(rhs.type));
		cgValue res = cg_emit_arith(p, op, x, y, platform_type);
		return cg_emit_byte_swap(p, res, type);
	}
handle_op:;
	// NOTE(bill): Bit Set Aliases for + and -
	if (is_type_bit_set(type)) {
		switch (op) {
		case Token_Add: op = Token_Or;     break; // set union
		case Token_Sub: op = Token_AndNot; break; // set difference
		}
	}
	TB_ArithmeticBehavior arith_behavior = cast(TB_ArithmeticBehavior)0;
	Type *integral_type = type;
	if (is_type_simd_vector(integral_type)) {
		GB_PANIC("TODO(bill): cg_emit_arith #simd vector");
		// integral_type = core_array_type(integral_type);
	}
	switch (op) {
	case Token_Add:
		if (is_type_float(integral_type)) {
			return cg_value(tb_inst_fadd(p->func, lhs.node, rhs.node), type);
		}
		return cg_value(tb_inst_add(p->func, lhs.node, rhs.node, arith_behavior), type);
	case Token_Sub:
		if (is_type_float(integral_type)) {
			return cg_value(tb_inst_fsub(p->func, lhs.node, rhs.node), type);
		}
		return cg_value(tb_inst_sub(p->func, lhs.node, rhs.node, arith_behavior), type);
	case Token_Mul:
		if (is_type_float(integral_type)) {
			return cg_value(tb_inst_fmul(p->func, lhs.node, rhs.node), type);
		}
		return cg_value(tb_inst_mul(p->func, lhs.node, rhs.node, arith_behavior), type);
	case Token_Quo:
		if (is_type_float(integral_type)) {
			return cg_value(tb_inst_fdiv(p->func, lhs.node, rhs.node), type);
		}
		return cg_value(tb_inst_div(p->func, lhs.node, rhs.node, !is_type_unsigned(integral_type)), type);
	case Token_Mod:
		// Truncated remainder (sign follows the dividend).
		if (is_type_float(integral_type)) {
			GB_PANIC("TODO(bill): float %% float");
		}
		return cg_value(tb_inst_mod(p->func, lhs.node, rhs.node, !is_type_unsigned(integral_type)), type);
	case Token_ModMod:
		// Floored modulo (%%): for signed types computed as ((a % b) + b) % b
		// so the result takes the sign of the divisor; for unsigned types it
		// is identical to %.
		if (is_type_unsigned(integral_type)) {
			return cg_value(tb_inst_mod(p->func, lhs.node, rhs.node, false), type);
		} else {
			TB_Node *a = tb_inst_mod(p->func, lhs.node, rhs.node, true);
			TB_Node *b = tb_inst_add(p->func, a, rhs.node, arith_behavior);
			TB_Node *c = tb_inst_mod(p->func, b, rhs.node, true);
			return cg_value(c, type);
		}
	case Token_And:
		return cg_value(tb_inst_and(p->func, lhs.node, rhs.node), type);
	case Token_Or:
		return cg_value(tb_inst_or(p->func, lhs.node, rhs.node), type);
	case Token_Xor:
		return cg_value(tb_inst_xor(p->func, lhs.node, rhs.node), type);
	case Token_Shl:
		{
			// Odin-defined shift: shifting by >= bit width yields 0 rather
			// than being undefined, hence the width test + select.
			rhs = cg_emit_conv(p, rhs, lhs.type);
			TB_DataType dt = cg_data_type(lhs.type);
			TB_Node *lhsval = lhs.node;
			TB_Node *bits = rhs.node;
			TB_Node *bit_size = tb_inst_uint(p->func, dt, 8*type_size_of(lhs.type));
			TB_Node *zero = tb_inst_uint(p->func, dt, 0);
			TB_Node *width_test = tb_inst_cmp_ilt(p->func, bits, bit_size, false);
			TB_Node *res = tb_inst_shl(p->func, lhsval, bits, arith_behavior);
			res = tb_inst_select(p->func, width_test, res, zero);
			return cg_value(res, type);
		}
	case Token_Shr:
		{
			// Same over-shift guard; logical shift for unsigned,
			// arithmetic shift for signed.
			rhs = cg_emit_conv(p, rhs, lhs.type);
			TB_DataType dt = cg_data_type(lhs.type);
			TB_Node *lhsval = lhs.node;
			TB_Node *bits = rhs.node;
			TB_Node *bit_size = tb_inst_uint(p->func, dt, 8*type_size_of(lhs.type));
			TB_Node *zero = tb_inst_uint(p->func, dt, 0);
			TB_Node *width_test = tb_inst_cmp_ilt(p->func, bits, bit_size, false);
			TB_Node *res = nullptr;
			if (is_type_unsigned(integral_type)) {
				res = tb_inst_shr(p->func, lhsval, bits);
			} else {
				res = tb_inst_sar(p->func, lhsval, bits);
			}
			res = tb_inst_select(p->func, width_test, res, zero);
			return cg_value(res, type);
		}
	case Token_AndNot:
		// a &~ b  ==  a & ~b
		return cg_value(tb_inst_and(p->func, lhs.node, tb_inst_not(p->func, rhs.node)), type);
	}
	GB_PANIC("unhandled operator of cg_emit_arith");
	return {};
}
// Builds the address of a slice expression `expr[low:high]`.
//
// Evaluates the optional bounds (low defaults to 0), loads the sliced base
// (auto-dereferencing one pointer level), and dispatches on its type.
// Only the Type_MultiPointer case is implemented in this backend so far;
// the other cases panic, with the LLVM-backend implementation retained in
// comments as a porting reference.
gb_internal cgAddr cg_build_addr_slice_expr(cgProcedure *p, Ast *expr) {
	ast_node(se, SliceExpr, expr);
	cgValue low = cg_const_int(p, t_int, 0);
	cgValue high = {};
	if (se->low != nullptr) {
		low = cg_correct_endianness(p, cg_build_expr(p, se->low));
	}
	if (se->high != nullptr) {
		high = cg_correct_endianness(p, cg_build_expr(p, se->high));
	}
	// With no explicit indices, x[:] slices the whole value and the
	// reference implementations skip the bounds check.
	bool no_indices = se->low == nullptr && se->high == nullptr;
	gb_unused(no_indices);
	cgAddr addr = cg_build_addr(p, se->expr);
	cgValue base = cg_addr_load(p, addr);
	Type *type = base_type(base.type);
	if (is_type_pointer(type)) {
		// Slicing through a pointer: dereference one level first.
		type = base_type(type_deref(type));
		addr = cg_addr(base);
		base = cg_addr_load(p, addr);
	}
	switch (type->kind) {
	case Type_Slice: {
		// Type *slice_type = type;
		// cgValue len = cg_slice_len(p, base);
		// if (high.value == nullptr) high = len;
		// if (!no_indices) {
		// cg_emit_slice_bounds_check(p, se->open, low, high, len, se->low != nullptr);
		// }
		// cgValue elem = cg_emit_ptr_offset(p, cg_slice_elem(p, base), low);
		// cgValue new_len = cg_emit_arith(p, Token_Sub, high, low, t_int);
		// cgAddr slice = cg_add_local_generated(p, slice_type, false);
		// cg_fill_slice(p, slice, elem, new_len);
		// return slice;
		GB_PANIC("cg_build_addr_slice_expr Type_Slice");
		break;
	}
	case Type_RelativeSlice:
		GB_PANIC("TODO(bill): Type_RelativeSlice should be handled above already on the cg_addr_load");
		break;
	case Type_DynamicArray: {
		// Type *elem_type = type->DynamicArray.elem;
		// Type *slice_type = alloc_type_slice(elem_type);
		// lbValue len = lb_dynamic_array_len(p, base);
		// if (high.value == nullptr) high = len;
		// if (!no_indices) {
		// lb_emit_slice_bounds_check(p, se->open, low, high, len, se->low != nullptr);
		// }
		// lbValue elem = lb_emit_ptr_offset(p, lb_dynamic_array_elem(p, base), low);
		// lbValue new_len = lb_emit_arith(p, Token_Sub, high, low, t_int);
		// lbAddr slice = lb_add_local_generated(p, slice_type, false);
		// lb_fill_slice(p, slice, elem, new_len);
		// return slice;
		GB_PANIC("cg_build_addr_slice_expr Type_DynamicArray");
		break;
	}
	case Type_MultiPointer: {
		Type *res_type = type_of_expr(expr);
		if (se->high == nullptr) {
			// ptr[low:] — result is another multi-pointer: just offset the base.
			cgAddr res = cg_add_local(p, res_type, nullptr, false);
			GB_ASSERT(base.kind == cgValue_Value);
			GB_ASSERT(low.kind == cgValue_Value);
			i64 stride = type_size_of(type->MultiPointer.elem);
			cgValue offset = cg_value(tb_inst_array_access(p->func, base.node, low.node, stride), base.type);
			cg_addr_store(p, res, offset);
			return res;
		} else {
			// ptr[low:high] — result is a slice {data, len}; fill both fields
			// of a zero-initialized local by raw member offset.
			cgAddr res = cg_add_local(p, res_type, nullptr, true);
			low = cg_emit_conv(p, low, t_int);
			high = cg_emit_conv(p, high, t_int);
			// cg_emit_multi_pointer_slice_bounds_check(p, se->open, low, high);
			i64 stride = type_size_of(type->MultiPointer.elem);
			TB_Node *offset = tb_inst_array_access(p->func, base.node, low.node, stride);
			TB_Node *len = tb_inst_sub(p->func, high.node, low.node, cast(TB_ArithmeticBehavior)0);
			TB_Node *data_ptr = tb_inst_member_access(p->func, res.addr.node, type_offset_of(res_type, 0));
			TB_Node *len_ptr = tb_inst_member_access(p->func, res.addr.node, type_offset_of(res_type, 1));
			tb_inst_store(p->func, TB_TYPE_PTR, data_ptr, offset, cast(TB_CharUnits)build_context.ptr_size, false);
			tb_inst_store(p->func, TB_TYPE_INT, len_ptr, len, cast(TB_CharUnits)build_context.int_size, false);
			return res;
		}
	}
	case Type_Array: {
		// Type *slice_type = alloc_type_slice(type->Array.elem);
		// lbValue len = lb_const_int(p->module, t_int, type->Array.count);
		// if (high.value == nullptr) high = len;
		// bool low_const = type_and_value_of_expr(se->low).mode == Addressing_Constant;
		// bool high_const = type_and_value_of_expr(se->high).mode == Addressing_Constant;
		// if (!low_const || !high_const) {
		// if (!no_indices) {
		// lb_emit_slice_bounds_check(p, se->open, low, high, len, se->low != nullptr);
		// }
		// }
		// lbValue elem = lb_emit_ptr_offset(p, lb_array_elem(p, lb_addr_get_ptr(p, addr)), low);
		// lbValue new_len = lb_emit_arith(p, Token_Sub, high, low, t_int);
		// lbAddr slice = lb_add_local_generated(p, slice_type, false);
		// lb_fill_slice(p, slice, elem, new_len);
		// return slice;
		GB_PANIC("cg_build_addr_slice_expr Type_Array");
		break;
	}
	case Type_Basic: {
		// string slicing (the only sliceable basic type).
		// GB_ASSERT(type == t_string);
		// lbValue len = lb_string_len(p, base);
		// if (high.value == nullptr) high = len;
		// if (!no_indices) {
		// lb_emit_slice_bounds_check(p, se->open, low, high, len, se->low != nullptr);
		// }
		// lbValue elem = lb_emit_ptr_offset(p, lb_string_elem(p, base), low);
		// lbValue new_len = lb_emit_arith(p, Token_Sub, high, low, t_int);
		// lbAddr str = lb_add_local_generated(p, t_string, false);
		// lb_fill_string(p, str, elem, new_len);
		// return str;
		GB_PANIC("cg_build_addr_slice_expr Type_Basic");
		break;
	}
	case Type_Struct:
		// #soa struct slicing (reference implementation from the LLVM backend).
		// if (is_type_soa_struct(type)) {
		// lbValue len = lb_soa_struct_len(p, lb_addr_get_ptr(p, addr));
		// if (high.value == nullptr) high = len;
		// if (!no_indices) {
		// lb_emit_slice_bounds_check(p, se->open, low, high, len, se->low != nullptr);
		// }
		// #if 1
		// lbAddr dst = lb_add_local_generated(p, type_of_expr(expr), true);
		// if (type->Struct.soa_kind == StructSoa_Fixed) {
		// i32 field_count = cast(i32)type->Struct.fields.count;
		// for (i32 i = 0; i < field_count; i++) {
		// lbValue field_dst = lb_emit_struct_ep(p, dst.addr, i);
		// lbValue field_src = lb_emit_struct_ep(p, lb_addr_get_ptr(p, addr), i);
		// field_src = lb_emit_array_ep(p, field_src, low);
		// lb_emit_store(p, field_dst, field_src);
		// }
		// lbValue len_dst = lb_emit_struct_ep(p, dst.addr, field_count);
		// lbValue new_len = lb_emit_arith(p, Token_Sub, high, low, t_int);
		// lb_emit_store(p, len_dst, new_len);
		// } else if (type->Struct.soa_kind == StructSoa_Slice) {
		// if (no_indices) {
		// lb_addr_store(p, dst, base);
		// } else {
		// i32 field_count = cast(i32)type->Struct.fields.count - 1;
		// for (i32 i = 0; i < field_count; i++) {
		// lbValue field_dst = lb_emit_struct_ep(p, dst.addr, i);
		// lbValue field_src = lb_emit_struct_ev(p, base, i);
		// field_src = lb_emit_ptr_offset(p, field_src, low);
		// lb_emit_store(p, field_dst, field_src);
		// }
		// lbValue len_dst = lb_emit_struct_ep(p, dst.addr, field_count);
		// lbValue new_len = lb_emit_arith(p, Token_Sub, high, low, t_int);
		// lb_emit_store(p, len_dst, new_len);
		// }
		// } else if (type->Struct.soa_kind == StructSoa_Dynamic) {
		// i32 field_count = cast(i32)type->Struct.fields.count - 3;
		// for (i32 i = 0; i < field_count; i++) {
		// lbValue field_dst = lb_emit_struct_ep(p, dst.addr, i);
		// lbValue field_src = lb_emit_struct_ev(p, base, i);
		// field_src = lb_emit_ptr_offset(p, field_src, low);
		// lb_emit_store(p, field_dst, field_src);
		// }
		// lbValue len_dst = lb_emit_struct_ep(p, dst.addr, field_count);
		// lbValue new_len = lb_emit_arith(p, Token_Sub, high, low, t_int);
		// lb_emit_store(p, len_dst, new_len);
		// }
		// return dst;
		// #endif
		// }
		GB_PANIC("cg_build_addr_slice_expr Type_Struct");
		break;
	}
	GB_PANIC("Unknown slicable type");
	return {};
}
  1284. gb_internal cgValue cg_emit_unary_arith(cgProcedure *p, TokenKind op, cgValue x, Type *type) {
  1285. switch (op) {
  1286. case Token_Add:
  1287. return x;
  1288. case Token_Not: // Boolean not
  1289. case Token_Xor: // Bitwise not
  1290. case Token_Sub: // Number negation
  1291. break;
  1292. case Token_Pointer:
  1293. GB_PANIC("This should be handled elsewhere");
  1294. break;
  1295. }
  1296. x = cg_flatten_value(p, x);
  1297. if (is_type_array_like(x.type)) {
  1298. GB_PANIC("TODO(bill): cg_emit_unary_arith is_type_array_like");
  1299. // // IMPORTANT TODO(bill): This is very wasteful with regards to stack memory
  1300. // Type *tl = base_type(x.type);
  1301. // cgValue val = cg_address_from_load_or_generate_local(p, x);
  1302. // GB_ASSERT(is_type_array_like(type));
  1303. // Type *elem_type = base_array_type(type);
  1304. // // NOTE(bill): Doesn't need to be zero because it will be initialized in the loops
  1305. // cgAddr res_addr = cg_add_local(p, type, nullptr, false);
  1306. // cgValue res = cg_addr_get_ptr(p, res_addr);
  1307. // bool inline_array_arith = cg_can_try_to_inline_array_arith(type);
  1308. // i32 count = cast(i32)get_array_type_count(tl);
  1309. // LLVMTypeRef vector_type = nullptr;
  1310. // if (op != Token_Not && cg_try_vector_cast(p->module, val, &vector_type)) {
  1311. // LLVMValueRef vp = LLVMBuildPointerCast(p->builder, val.value, LLVMPointerType(vector_type, 0), "");
  1312. // LLVMValueRef v = LLVMBuildLoad2(p->builder, vector_type, vp, "");
  1313. // LLVMValueRef opv = nullptr;
  1314. // switch (op) {
  1315. // case Token_Xor:
  1316. // opv = LLVMBuildNot(p->builder, v, "");
  1317. // break;
  1318. // case Token_Sub:
  1319. // if (is_type_float(elem_type)) {
  1320. // opv = LLVMBuildFNeg(p->builder, v, "");
  1321. // } else {
  1322. // opv = LLVMBuildNeg(p->builder, v, "");
  1323. // }
  1324. // break;
  1325. // }
  1326. // if (opv != nullptr) {
  1327. // LLVMSetAlignment(res.value, cast(unsigned)cg_alignof(vector_type));
  1328. // LLVMValueRef res_ptr = LLVMBuildPointerCast(p->builder, res.value, LLVMPointerType(vector_type, 0), "");
  1329. // LLVMBuildStore(p->builder, opv, res_ptr);
  1330. // return cg_emit_conv(p, cg_emit_load(p, res), type);
  1331. // }
  1332. // }
  1333. // if (inline_array_arith) {
  1334. // // inline
  1335. // for (i32 i = 0; i < count; i++) {
  1336. // cgValue e = cg_emit_load(p, cg_emit_array_epi(p, val, i));
  1337. // cgValue z = cg_emit_unary_arith(p, op, e, elem_type);
  1338. // cg_emit_store(p, cg_emit_array_epi(p, res, i), z);
  1339. // }
  1340. // } else {
  1341. // auto loop_data = cg_loop_start(p, count, t_i32);
  1342. // cgValue e = cg_emit_load(p, cg_emit_array_ep(p, val, loop_data.idx));
  1343. // cgValue z = cg_emit_unary_arith(p, op, e, elem_type);
  1344. // cg_emit_store(p, cg_emit_array_ep(p, res, loop_data.idx), z);
  1345. // cg_loop_end(p, loop_data);
  1346. // }
  1347. // return cg_emit_load(p, res);
  1348. }
  1349. if (op == Token_Xor) {
  1350. GB_ASSERT(x.kind == cgValue_Value);
  1351. cgValue cmp = cg_value(tb_inst_not(p->func, x.node), x.type);
  1352. return cg_emit_conv(p, cmp, type);
  1353. }
  1354. if (op == Token_Not) {
  1355. TB_Node *zero = cg_const_nil(p, x.type).node;
  1356. cgValue cmp = cg_value(tb_inst_cmp_ne(p->func, x.node, zero), x.type);
  1357. return cg_emit_conv(p, cmp, type);
  1358. }
  1359. if (op == Token_Sub && is_type_integer(type) && is_type_different_to_arch_endianness(type)) {
  1360. Type *platform_type = integer_endian_type_to_platform_type(type);
  1361. cgValue v = cg_emit_byte_swap(p, x, platform_type);
  1362. cgValue res = cg_value(tb_inst_neg(p->func, v.node), platform_type);
  1363. return cg_emit_byte_swap(p, res, type);
  1364. }
  1365. if (op == Token_Sub && is_type_float(type) && is_type_different_to_arch_endianness(type)) {
  1366. Type *platform_type = integer_endian_type_to_platform_type(type);
  1367. cgValue v = cg_emit_byte_swap(p, x, platform_type);
  1368. cgValue res = cg_value(tb_inst_neg(p->func, v.node), platform_type);
  1369. return cg_emit_byte_swap(p, res, type);
  1370. }
  1371. cgValue res = {};
  1372. if (op == Token_Sub) { // Number negation
  1373. if (is_type_integer(x.type)) {
  1374. res = cg_value(tb_inst_neg(p->func, x.node), x.type);
  1375. } else if (is_type_float(x.type)) {
  1376. res = cg_value(tb_inst_neg(p->func, x.node), x.type);
  1377. } else if (is_type_complex(x.type)) {
  1378. GB_PANIC("TODO(bill): neg complex");
  1379. // LLVMValueRef v0 = LLVMBuildFNeg(p->builder, LLVMBuildExtractValue(p->builder, x.value, 0, ""), "");
  1380. // LLVMValueRef v1 = LLVMBuildFNeg(p->builder, LLVMBuildExtractValue(p->builder, x.value, 1, ""), "");
  1381. // cgAddr addr = cg_add_local_generated(p, x.type, false);
  1382. // LLVMTypeRef type = llvm_addr_type(p->module, addr.addr);
  1383. // LLVMBuildStore(p->builder, v0, LLVMBuildStructGEP2(p->builder, type, addr.addr.value, 0, ""));
  1384. // LLVMBuildStore(p->builder, v1, LLVMBuildStructGEP2(p->builder, type, addr.addr.value, 1, ""));
  1385. // return cg_addr_load(p, addr);
  1386. } else if (is_type_quaternion(x.type)) {
  1387. GB_PANIC("TODO(bill): neg quaternion");
  1388. // LLVMValueRef v0 = LLVMBuildFNeg(p->builder, LLVMBuildExtractValue(p->builder, x.value, 0, ""), "");
  1389. // LLVMValueRef v1 = LLVMBuildFNeg(p->builder, LLVMBuildExtractValue(p->builder, x.value, 1, ""), "");
  1390. // LLVMValueRef v2 = LLVMBuildFNeg(p->builder, LLVMBuildExtractValue(p->builder, x.value, 2, ""), "");
  1391. // LLVMValueRef v3 = LLVMBuildFNeg(p->builder, LLVMBuildExtractValue(p->builder, x.value, 3, ""), "");
  1392. // cgAddr addr = cg_add_local_generated(p, x.type, false);
  1393. // LLVMTypeRef type = llvm_addr_type(p->module, addr.addr);
  1394. // LLVMBuildStore(p->builder, v0, LLVMBuildStructGEP2(p->builder, type, addr.addr.value, 0, ""));
  1395. // LLVMBuildStore(p->builder, v1, LLVMBuildStructGEP2(p->builder, type, addr.addr.value, 1, ""));
  1396. // LLVMBuildStore(p->builder, v2, LLVMBuildStructGEP2(p->builder, type, addr.addr.value, 2, ""));
  1397. // LLVMBuildStore(p->builder, v3, LLVMBuildStructGEP2(p->builder, type, addr.addr.value, 3, ""));
  1398. // return cg_addr_load(p, addr);
  1399. } else if (is_type_simd_vector(x.type)) {
  1400. GB_PANIC("TODO(bill): neg simd");
  1401. // Type *elem = base_array_type(x.type);
  1402. // if (is_type_float(elem)) {
  1403. // res.value = LLVMBuildFNeg(p->builder, x.value, "");
  1404. // } else {
  1405. // res.value = LLVMBuildNeg(p->builder, x.value, "");
  1406. // }
  1407. } else if (is_type_matrix(x.type)) {
  1408. GB_PANIC("TODO(bill): neg matrix");
  1409. // cgValue zero = {};
  1410. // zero.value = LLVMConstNull(cg_type(p->module, type));
  1411. // zero.type = type;
  1412. // return cg_emit_arith_matrix(p, Token_Sub, zero, x, type, true);
  1413. } else {
  1414. GB_PANIC("Unhandled type %s", type_to_string(x.type));
  1415. }
  1416. res.type = x.type;
  1417. return res;
  1418. }
  1419. return res;
  1420. }
// Lowers a binary expression AST node into Tilde IR.
// Arithmetic and shifts defer to cg_emit_arith and comparisons to
// cg_emit_comp; matrix arithmetic, nil/empty-string comparisons, logical
// `&&`/`||`, and the map form of `in`/`not_in` are not yet ported to this
// backend and panic (LLVM-backend reference code kept in comments).
gb_internal cgValue cg_build_binary_expr(cgProcedure *p, Ast *expr) {
	ast_node(be, BinaryExpr, expr);

	TypeAndValue tv = type_and_value_of_expr(expr);

	// Matrix arithmetic takes a dedicated lowering path (not yet ported).
	if (is_type_matrix(be->left->tav.type) || is_type_matrix(be->right->tav.type)) {
		cgValue left = cg_build_expr(p, be->left);
		cgValue right = cg_build_expr(p, be->right);
		GB_PANIC("TODO(bill): cg_emit_arith_matrix");
		// return cg_emit_arith_matrix(p, be->op.kind, left, right, default_type(tv.type), false);
	}

	switch (be->op.kind) {
	case Token_Add:
	case Token_Sub:
	case Token_Mul:
	case Token_Quo:
	case Token_Mod:
	case Token_ModMod:
	case Token_And:
	case Token_Or:
	case Token_Xor:
	case Token_AndNot: {
		// Plain arithmetic/bitwise operators: build both operands and emit.
		Type *type = default_type(tv.type);
		cgValue left = cg_build_expr(p, be->left);
		cgValue right = cg_build_expr(p, be->right);
		return cg_emit_arith(p, be->op.kind, left, right, type);
	}

	case Token_Shl:
	case Token_Shr: {
		cgValue left, right;
		Type *type = default_type(tv.type);
		left = cg_build_expr(p, be->left);

		if (cg_is_expr_untyped_const(be->right)) {
			// NOTE(bill): RHS shift operands can still be untyped
			// Just bypass the standard cg_build_expr
			right = cg_expr_untyped_const_to_typed(p, be->right, type);
		} else {
			right = cg_build_expr(p, be->right);
		}
		return cg_emit_arith(p, be->op.kind, left, right, type);
	}

	case Token_CmpEq:
	case Token_NotEq:
		// Special-cases comparisons against `nil` and `""` (not yet ported);
		// once ported, the remaining cases fall through to the generic
		// comparison handling below.
		GB_PANIC("TODO(bill): comparisons");
		// if (is_type_untyped_nil(be->right->tav.type)) {
		// 	// `x == nil` or `x != nil`
		// 	cgValue left = cg_build_expr(p, be->left);
		// 	cgValue cmp = cg_emit_comp_against_nil(p, be->op.kind, left);
		// 	Type *type = default_type(tv.type);
		// 	return cg_emit_conv(p, cmp, type);
		// } else if (is_type_untyped_nil(be->left->tav.type)) {
		// 	// `nil == x` or `nil != x`
		// 	cgValue right = cg_build_expr(p, be->right);
		// 	cgValue cmp = cg_emit_comp_against_nil(p, be->op.kind, right);
		// 	Type *type = default_type(tv.type);
		// 	return cg_emit_conv(p, cmp, type);
		// } else if (cg_is_empty_string_constant(be->right)) {
		// 	// `x == ""` or `x != ""` -> compare the length against 0
		// 	cgValue s = cg_build_expr(p, be->left);
		// 	s = cg_emit_conv(p, s, t_string);
		// 	cgValue len = cg_string_len(p, s);
		// 	cgValue cmp = cg_emit_comp(p, be->op.kind, len, cg_const_int(p->module, t_int, 0));
		// 	Type *type = default_type(tv.type);
		// 	return cg_emit_conv(p, cmp, type);
		// } else if (cg_is_empty_string_constant(be->left)) {
		// 	// `"" == x` or `"" != x`
		// 	cgValue s = cg_build_expr(p, be->right);
		// 	s = cg_emit_conv(p, s, t_string);
		// 	cgValue len = cg_string_len(p, s);
		// 	cgValue cmp = cg_emit_comp(p, be->op.kind, len, cg_const_int(p->module, t_int, 0));
		// 	Type *type = default_type(tv.type);
		// 	return cg_emit_conv(p, cmp, type);
		// }
		/*fallthrough*/
	case Token_Lt:
	case Token_LtEq:
	case Token_Gt:
	case Token_GtEq:
		{
			cgValue left = {};
			cgValue right = {};

			// Comparing types compares their typeids.
			if (be->left->tav.mode == Addressing_Type) {
				left = cg_typeid(p, be->left->tav.type);
			}
			if (be->right->tav.mode == Addressing_Type) {
				right = cg_typeid(p, be->right->tav.type);
			}
			if (left.node == nullptr)  left  = cg_build_expr(p, be->left);
			if (right.node == nullptr) right = cg_build_expr(p, be->right);
			cgValue cmp = cg_emit_comp(p, be->op.kind, left, right);
			Type *type = default_type(tv.type);
			return cg_emit_conv(p, cmp, type);
		}

	case Token_CmpAnd:
	case Token_CmpOr:
		// Short-circuit logical operators (not yet ported).
		GB_PANIC("TODO(bill): cg_emit_logical_binary_expr");
		// return cg_emit_logical_binary_expr(p, be->op.kind, be->left, be->right, tv.type);

	case Token_in:
	case Token_not_in:
		{
			cgValue left = cg_build_expr(p, be->left);
			cgValue right = cg_build_expr(p, be->right);
			Type *rt = base_type(right.type);
			if (is_type_pointer(rt)) {
				// `x in ptr` auto-dereferences the container.
				right = cg_emit_load(p, right);
				rt = base_type(type_deref(rt));
			}

			switch (rt->kind) {
			case Type_Map:
				{
					GB_PANIC("TODO(bill): in/not_in for maps");
					// cgValue map_ptr = cg_address_from_load_or_generate_local(p, right);
					// cgValue key = left;
					// cgValue ptr = cg_internal_dynamic_map_get_ptr(p, map_ptr, key);
					// if (be->op.kind == Token_in) {
					// 	return cg_emit_conv(p, cg_emit_comp_against_nil(p, Token_NotEq, ptr), t_bool);
					// } else {
					// 	return cg_emit_conv(p, cg_emit_comp_against_nil(p, Token_CmpEq, ptr), t_bool);
					// }
				}
				break;
			case Type_BitSet:
				{
					// Bit-set membership: mask out the element's bit in the
					// set's underlying integer and compare against zero.
					Type *key_type = rt->BitSet.elem;
					GB_ASSERT(are_types_identical(left.type, key_type));

					Type *it = bit_set_to_int(rt);
					left = cg_emit_conv(p, left, it);
					if (is_type_different_to_arch_endianness(it)) {
						left = cg_emit_byte_swap(p, left, integer_endian_type_to_platform_type(it));
					}

					// The bit index is relative to the set's lower bound.
					cgValue lower = cg_const_value(p, left.type, exact_value_i64(rt->BitSet.lower));
					cgValue key = cg_emit_arith(p, Token_Sub, left, lower, left.type);
					cgValue bit = cg_emit_arith(p, Token_Shl, cg_const_int(p, left.type, 1), key, left.type);
					bit = cg_emit_conv(p, bit, it);

					cgValue old_value = cg_emit_transmute(p, right, it);
					cgValue new_value = cg_emit_arith(p, Token_And, old_value, bit, it);

					// Final compare-with-zero is not yet ported.
					GB_PANIC("TODO(bill): cg_emit_comp");
					// TokenKind op = (be->op.kind == Token_in) ? Token_NotEq : Token_CmpEq;
					// return cg_emit_conv(p, cg_emit_comp(p, op, new_value, cg_const_int(p, new_value.type, 0)), t_bool);
				}
				break;
			default:
				GB_PANIC("Invalid 'in' type");
			}
			break;
		}
		break;
	default:
		GB_PANIC("Invalid binary expression");
		break;
	}
	return {};
}
  1572. gb_internal cgValue cg_build_cond(cgProcedure *p, Ast *cond, TB_Node *true_block, TB_Node *false_block) {
  1573. cond = unparen_expr(cond);
  1574. GB_ASSERT(cond != nullptr);
  1575. GB_ASSERT(true_block != nullptr);
  1576. GB_ASSERT(false_block != nullptr);
  1577. // Use to signal not to do compile time short circuit for consts
  1578. cgValue no_comptime_short_circuit = {};
  1579. switch (cond->kind) {
  1580. case_ast_node(ue, UnaryExpr, cond);
  1581. if (ue->op.kind == Token_Not) {
  1582. cgValue cond_val = cg_build_cond(p, ue->expr, false_block, true_block);
  1583. return cond_val;
  1584. // if (cond_val.value && LLVMIsConstant(cond_val.value)) {
  1585. // return cg_const_bool(p->module, cond_val.type, LLVMConstIntGetZExtValue(cond_val.value) == 0);
  1586. // }
  1587. // return no_comptime_short_circuit;
  1588. }
  1589. case_end;
  1590. case_ast_node(be, BinaryExpr, cond);
  1591. if (be->op.kind == Token_CmpAnd) {
  1592. TB_Node *block = cg_control_region(p, "cmp_and");
  1593. cg_build_cond(p, be->left, block, false_block);
  1594. tb_inst_set_control(p->func, block);
  1595. cg_build_cond(p, be->right, true_block, false_block);
  1596. return no_comptime_short_circuit;
  1597. } else if (be->op.kind == Token_CmpOr) {
  1598. TB_Node *block = cg_control_region(p, "cmp_or");
  1599. cg_build_cond(p, be->left, true_block, block);
  1600. tb_inst_set_control(p->func, block);
  1601. cg_build_cond(p, be->right, true_block, false_block);
  1602. return no_comptime_short_circuit;
  1603. }
  1604. case_end;
  1605. }
  1606. cgValue v = {};
  1607. if (cg_is_expr_untyped_const(cond)) {
  1608. v = cg_expr_untyped_const_to_typed(p, cond, t_bool);
  1609. } else {
  1610. v = cg_build_expr(p, cond);
  1611. }
  1612. GB_ASSERT(v.kind == cgValue_Value);
  1613. tb_inst_if(p->func, v.node, true_block, false_block);
  1614. return v;
  1615. }
  1616. gb_internal cgValue cg_build_expr_internal(cgProcedure *p, Ast *expr);
  1617. gb_internal cgValue cg_build_expr(cgProcedure *p, Ast *expr) {
  1618. u16 prev_state_flags = p->state_flags;
  1619. defer (p->state_flags = prev_state_flags);
  1620. if (expr->state_flags != 0) {
  1621. u16 in = expr->state_flags;
  1622. u16 out = p->state_flags;
  1623. if (in & StateFlag_bounds_check) {
  1624. out |= StateFlag_bounds_check;
  1625. out &= ~StateFlag_no_bounds_check;
  1626. } else if (in & StateFlag_no_bounds_check) {
  1627. out |= StateFlag_no_bounds_check;
  1628. out &= ~StateFlag_bounds_check;
  1629. }
  1630. if (in & StateFlag_type_assert) {
  1631. out |= StateFlag_type_assert;
  1632. out &= ~StateFlag_no_type_assert;
  1633. } else if (in & StateFlag_no_type_assert) {
  1634. out |= StateFlag_no_type_assert;
  1635. out &= ~StateFlag_type_assert;
  1636. }
  1637. p->state_flags = out;
  1638. }
  1639. // IMPORTANT NOTE(bill):
  1640. // Selector Call Expressions (foo->bar(...))
  1641. // must only evaluate `foo` once as it gets transformed into
  1642. // `foo.bar(foo, ...)`
  1643. // And if `foo` is a procedure call or something more complex, storing the value
  1644. // once is a very good idea
  1645. // If a stored value is found, it must be removed from the cache
  1646. if (expr->state_flags & StateFlag_SelectorCallExpr) {
  1647. // cgValue *pp = map_get(&p->selector_values, expr);
  1648. // if (pp != nullptr) {
  1649. // cgValue res = *pp;
  1650. // map_remove(&p->selector_values, expr);
  1651. // return res;
  1652. // }
  1653. // cgAddr *pa = map_get(&p->selector_addr, expr);
  1654. // if (pa != nullptr) {
  1655. // cgAddr res = *pa;
  1656. // map_remove(&p->selector_addr, expr);
  1657. // return cg_addr_load(p, res);
  1658. // }
  1659. }
  1660. cgValue res = cg_build_expr_internal(p, expr);
  1661. if (res.kind == cgValue_Symbol) {
  1662. GB_ASSERT(is_type_internally_pointer_like(res.type));
  1663. res = cg_value(tb_inst_get_symbol_address(p->func, res.symbol), res.type);
  1664. }
  1665. if (expr->state_flags & StateFlag_SelectorCallExpr) {
  1666. // map_set(&p->selector_values, expr, res);
  1667. }
  1668. return res;
  1669. }
// Core expression lowering: dispatches on the AST node kind.
// Constant expressions and type expressions are short-circuited before the
// switch; address-producing expressions (deref, index, selector, slice) are
// lowered via cg_build_addr + load. Several node kinds are still TODO for
// this backend and panic.
gb_internal cgValue cg_build_expr_internal(cgProcedure *p, Ast *expr) {
	expr = unparen_expr(expr);

	TokenPos expr_pos = ast_token(expr).pos;
	TypeAndValue tv = type_and_value_of_expr(expr);
	Type *type = type_of_expr(expr);
	GB_ASSERT_MSG(tv.mode != Addressing_Invalid, "invalid expression '%s' (tv.mode = %d, tv.type = %s) @ %s\n Current Proc: %.*s : %s", expr_to_string(expr), tv.mode, type_to_string(tv.type), token_pos_to_string(expr_pos), LIT(p->name), type_to_string(p->type));

	if (tv.value.kind != ExactValue_Invalid) {
		// NOTE(bill): The commented out code below is just for debug purposes only
		// if (is_type_untyped(type)) {
		// 	gb_printf_err("%s %s : %s @ %p\n", token_pos_to_string(expr_pos), expr_to_string(expr), type_to_string(expr->tav.type), expr);
		// 	GB_PANIC("%s\n", type_to_string(tv.type));
		// }

		// NOTE(bill): Short on constant values
		return cg_const_value(p, type, tv.value);
	} else if (tv.mode == Addressing_Type) {
		// A bare type expression evaluates to its typeid.
		// NOTE(bill, 2023-01-16): is this correct? I hope so at least
		return cg_typeid(p, tv.type);
	}

	switch (expr->kind) {
	case_ast_node(bl, BasicLit, expr);
		// All basic literals should have been folded to constants above.
		TokenPos pos = bl->token.pos;
		GB_PANIC("Non-constant basic literal %s - %.*s", token_pos_to_string(pos), LIT(token_strings[bl->token.kind]));
	case_end;

	case_ast_node(bd, BasicDirective, expr);
		TokenPos pos = bd->token.pos;
		GB_PANIC("Non-constant basic literal %s - %.*s", token_pos_to_string(pos), LIT(bd->name.string));
	case_end;

	case_ast_node(i, Ident, expr);
		Entity *e = entity_from_expr(expr);
		e = strip_entity_wrapping(e);

		GB_ASSERT_MSG(e != nullptr, "%s in %.*s %p", expr_to_string(expr), LIT(p->name), expr);

		if (e->kind == Entity_Builtin) {
			Token token = ast_token(expr);
			GB_PANIC("TODO(bill): lb_build_expr Entity_Builtin '%.*s'\n"
			         "\t at %s", LIT(builtin_procs[e->Builtin.id].name),
			         token_pos_to_string(token.pos));
			return {};
		} else if (e->kind == Entity_Nil) {
			GB_PANIC("TODO: cg_find_ident nil");
			// TODO(bill): is this correct?
			return cg_value(cast(TB_Node *)nullptr, e->type);
		}
		GB_ASSERT(e->kind != Entity_ProcGroup);

		// Local variables live in the procedure's variable map.
		cgAddr *addr = map_get(&p->variable_map, e);
		if (addr) {
			return cg_addr_load(p, *addr);
		}

		// Globals/procedures are not handled yet in this backend.
		// return cg_find_ident(p, m, e, expr);
		GB_PANIC("TODO: cg_find_ident");
		return {};
	case_end;

	case_ast_node(i, Implicit, expr);
		return cg_addr_load(p, cg_build_addr(p, expr));
	case_end;

	case_ast_node(u, Uninit, expr);
		// `---`: explicit uninitialized value.
		if (is_type_untyped(type)) {
			return cg_value(cast(TB_Node *)nullptr, t_untyped_uninit);
		}
		return cg_value(tb_inst_poison(p->func), type);
	case_end;

	case_ast_node(de, DerefExpr, expr);
		return cg_addr_load(p, cg_build_addr(p, expr));
	case_end;

	case_ast_node(se, SelectorExpr, expr);
		TypeAndValue tav = type_and_value_of_expr(expr);
		GB_ASSERT(tav.mode != Addressing_Invalid);
		return cg_addr_load(p, cg_build_addr(p, expr));
	case_end;

	case_ast_node(ise, ImplicitSelectorExpr, expr);
		// `.Variant` style selectors are always constant.
		TypeAndValue tav = type_and_value_of_expr(expr);
		GB_ASSERT(tav.mode == Addressing_Constant);
		return cg_const_value(p, type, tv.value);
	case_end;

	case_ast_node(se, SelectorCallExpr, expr);
		GB_ASSERT(se->modified_call);
		return cg_build_call_expr(p, se->call);
	case_end;

	case_ast_node(i, CallExpr, expr);
		return cg_build_call_expr(p, expr);
	case_end;

	case_ast_node(te, TernaryIfExpr, expr);
		// `x if cond else y`: lowered as a diamond with a two-input phi.
		cgValue incoming_values[2] = {};
		TB_Node *incoming_regions[2] = {};
		TB_Node *then  = cg_control_region(p, "if_then");
		TB_Node *done  = cg_control_region(p, "if_done");
		TB_Node *else_ = cg_control_region(p, "if_else");

		cg_build_cond(p, te->cond, then, else_);
		tb_inst_set_control(p->func, then);

		Type *type = default_type(type_of_expr(expr));

		incoming_values [0] = cg_emit_conv(p, cg_build_expr(p, te->x), type);
		incoming_regions[0] = tb_inst_get_control(p->func);

		cg_emit_goto(p, done);
		tb_inst_set_control(p->func, else_);

		incoming_values [1] = cg_emit_conv(p, cg_build_expr(p, te->y), type);
		incoming_regions[1] = tb_inst_get_control(p->func);

		cg_emit_goto(p, done);
		tb_inst_set_control(p->func, done);

		GB_ASSERT(incoming_values[0].kind == cgValue_Value ||
		          incoming_values[0].kind == cgValue_Addr);
		GB_ASSERT(incoming_values[0].kind == incoming_values[1].kind);

		cgValue res = {};
		res.kind = incoming_values[0].kind;
		res.type = type;
		TB_DataType dt = cg_data_type(type);
		if (res.kind == cgValue_Addr) {
			// Addresses phi as pointers regardless of the value type.
			dt = TB_TYPE_PTR;
		}
		res.node = tb_inst_incomplete_phi(p->func, dt, done, 2);
		tb_inst_add_phi_operand(p->func, res.node, incoming_regions[0], incoming_values[0].node);
		tb_inst_add_phi_operand(p->func, res.node, incoming_regions[1], incoming_values[1].node);
		return res;
	case_end;

	case_ast_node(te, TernaryWhenExpr, expr);
		// `when` ternary: the condition is a compile-time constant, so only
		// the selected branch is built.
		TypeAndValue tav = type_and_value_of_expr(te->cond);
		GB_ASSERT(tav.mode == Addressing_Constant);
		GB_ASSERT(tav.value.kind == ExactValue_Bool);
		if (tav.value.value_bool) {
			return cg_build_expr(p, te->x);
		} else {
			return cg_build_expr(p, te->y);
		}
	case_end;

	case_ast_node(tc, TypeCast, expr);
		cgValue e = cg_build_expr(p, tc->expr);
		switch (tc->token.kind) {
		case Token_cast:
			return cg_emit_conv(p, e, type);
		case Token_transmute:
			return cg_emit_transmute(p, e, type);
		}
		GB_PANIC("Invalid AST TypeCast");
	case_end;

	case_ast_node(ac, AutoCast, expr);
		cgValue value = cg_build_expr(p, ac->expr);
		return cg_emit_conv(p, value, type);
	case_end;

	case_ast_node(se, SliceExpr, expr);
		if (is_type_slice(type_of_expr(se->expr))) {
			// NOTE(bill): Quick optimization — slicing a slice with no high
			// bound and a zero (or absent) low bound is the identity.
			if (se->high == nullptr &&
			    (se->low == nullptr || cg_is_expr_constant_zero(se->low))) {
				return cg_build_expr(p, se->expr);
			}
		}
		return cg_addr_load(p, cg_build_addr(p, expr));
	case_end;

	case_ast_node(ie, IndexExpr, expr);
		return cg_addr_load(p, cg_build_addr(p, expr));
	case_end;

	case_ast_node(ie, MatrixIndexExpr, expr);
		return cg_addr_load(p, cg_build_addr(p, expr));
	case_end;

	case_ast_node(ue, UnaryExpr, expr);
		if (ue->op.kind == Token_And) {
			// Address-of has its own path (not yet ported).
			GB_PANIC("TODO(bill): cg_build_unary_and");
			// return cg_build_unary_and(p, expr);
		}
		cgValue v = cg_build_expr(p, ue->expr);
		return cg_emit_unary_arith(p, ue->op.kind, v, type);
	case_end;

	case_ast_node(be, BinaryExpr, expr);
		return cg_build_binary_expr(p, expr);
	case_end;
	}
	GB_PANIC("TODO(bill): cg_build_expr_internal %.*s", LIT(ast_strings[expr->kind]));
	return {};
}
  1837. gb_internal cgAddr cg_build_addr_internal(cgProcedure *p, Ast *expr);
  1838. gb_internal cgAddr cg_build_addr(cgProcedure *p, Ast *expr) {
  1839. expr = unparen_expr(expr);
  1840. // IMPORTANT NOTE(bill):
  1841. // Selector Call Expressions (foo->bar(...))
  1842. // must only evaluate `foo` once as it gets transformed into
  1843. // `foo.bar(foo, ...)`
  1844. // And if `foo` is a procedure call or something more complex, storing the value
  1845. // once is a very good idea
  1846. // If a stored value is found, it must be removed from the cache
  1847. if (expr->state_flags & StateFlag_SelectorCallExpr) {
  1848. // lbAddr *pp = map_get(&p->selector_addr, expr);
  1849. // if (pp != nullptr) {
  1850. // lbAddr res = *pp;
  1851. // map_remove(&p->selector_addr, expr);
  1852. // return res;
  1853. // }
  1854. }
  1855. cgAddr addr = cg_build_addr_internal(p, expr);
  1856. if (expr->state_flags & StateFlag_SelectorCallExpr) {
  1857. // map_set(&p->selector_addr, expr, addr);
  1858. }
  1859. return addr;
  1860. }
// Core address lowering: dispatches on the AST node kind.
// Currently handles implicit `context`, identifiers, slice expressions, and
// selector expressions (including swizzles and SoA variables); every other
// node kind panics.
gb_internal cgAddr cg_build_addr_internal(cgProcedure *p, Ast *expr) {
	switch (expr->kind) {
	case_ast_node(i, Implicit, expr);
		cgAddr v = {};
		switch (i->kind) {
		case Token_context:
			v = cg_find_or_generate_context_ptr(p);
			break;
		}
		GB_ASSERT(v.addr.node != nullptr);
		return v;
	case_end;

	case_ast_node(i, Ident, expr);
		if (is_blank_ident(expr)) {
			// `_` has no address; callers treat a zero cgAddr as a sink.
			cgAddr val = {};
			return val;
		}
		String name = i->token.string;
		Entity *e = entity_of_node(expr);
		return cg_build_addr_from_entity(p, e, expr);
	case_end;

	case_ast_node(se, SliceExpr, expr);
		return cg_build_addr_slice_expr(p, expr);
	case_end;

	case_ast_node(se, SelectorExpr, expr);
		Ast *sel_node = unparen_expr(se->selector);
		if (sel_node->kind != Ast_Ident) {
			GB_PANIC("Unsupported selector expression");
		}
		String selector = sel_node->Ident.token.string;
		TypeAndValue tav = type_and_value_of_expr(se->expr);

		if (tav.mode == Addressing_Invalid) {
			// NOTE(bill): Imports — `pkg.name` resolves to the name itself.
			Entity *imp = entity_of_node(se->expr);
			if (imp != nullptr) {
				GB_ASSERT(imp->kind == Entity_ImportName);
			}
			return cg_build_addr(p, unparen_expr(se->selector));
		}

		Type *type = base_type(tav.type);
		if (tav.mode == Addressing_Type) { // Addressing_Type
			// `Type.proc` pseudo-field: address of the procedure value.
			Selection sel = lookup_field(tav.type, selector, true);
			if (sel.pseudo_field) {
				GB_ASSERT(sel.entity->kind == Entity_Procedure);
				return cg_addr(cg_find_value_from_entity(p->module, sel.entity));
			}
			GB_PANIC("Unreachable %.*s", LIT(selector));
		}

		if (se->swizzle_count > 0) {
			// Array swizzle selector (e.g. `.xyz`): decode the packed
			// 2-bit-per-component indices.
			Type *array_type = base_type(type_deref(tav.type));
			GB_ASSERT(array_type->kind == Type_Array);
			u8 swizzle_count = se->swizzle_count;
			u8 swizzle_indices_raw = se->swizzle_indices;
			u8 swizzle_indices[4] = {};
			for (u8 i = 0; i < swizzle_count; i++) {
				u8 index = swizzle_indices_raw>>(i*2) & 3;
				swizzle_indices[i] = index;
			}

			cgValue a = {};
			if (is_type_pointer(tav.type)) {
				a = cg_build_expr(p, se->expr);
			} else {
				cgAddr addr = cg_build_addr(p, se->expr);
				a = cg_addr_get_ptr(p, addr);
			}

			GB_ASSERT(is_type_array(expr->tav.type));
			GB_PANIC("TODO(bill): cg_addr_swizzle");
			// return cg_addr_swizzle(a, expr->tav.type, swizzle_count, swizzle_indices);
		}

		Selection sel = lookup_field(type, selector, false);
		GB_ASSERT(sel.entity != nullptr);
		if (sel.pseudo_field) {
			// `value.proc` pseudo-field: address of the procedure value.
			GB_ASSERT(sel.entity->kind == Entity_Procedure);
			Entity *e = entity_of_node(sel_node);
			return cg_addr(cg_find_value_from_entity(p->module, e));
		}

		{
			cgAddr addr = cg_build_addr(p, se->expr);
			if (addr.kind == cgAddr_Map) {
				// Map element: load the value, spill to a local, then GEP.
				cgValue v = cg_addr_load(p, addr);
				cgValue a = cg_address_from_load_or_generate_local(p, v);
				a = cg_emit_deep_field_gep(p, a, sel);
				return cg_addr(a);
			} else if (addr.kind == cgAddr_Context) {
				// Context field: accumulate the selection onto the address.
				GB_ASSERT(sel.index.count > 0);
				// NOTE(review): `>= 0` is trivially true for a count;
				// presumably `> 0` was intended — combining with an empty
				// selection is a no-op either way. Confirm against the LLVM
				// backend before changing.
				if (addr.ctx.sel.index.count >= 0) {
					sel = selection_combine(addr.ctx.sel, sel);
				}
				addr.ctx.sel = sel;
				addr.kind = cgAddr_Context;
				return addr;
			} else if (addr.kind == cgAddr_SoaVariable) {
				// SoA field access: index field 0 of the selection, then
				// apply the remaining selection path to the element.
				cgValue index = addr.soa.index;
				i64 first_index = sel.index[0];
				Selection sub_sel = sel;
				sub_sel.index.data += 1;
				sub_sel.index.count -= 1;

				cgValue arr = cg_emit_struct_ep(p, addr.addr, first_index);

				Type *t = base_type(type_deref(addr.addr.type));
				GB_ASSERT(is_type_soa_struct(t));

				// TODO(bill): bounds checking for soa variable
				// if (addr.soa.index_expr != nullptr && (!cg_is_const(addr.soa.index) || t->Struct.soa_kind != StructSoa_Fixed)) {
				// 	cgValue len = cg_soa_struct_len(p, addr.addr);
				// 	cg_emit_bounds_check(p, ast_token(addr.soa.index_expr), addr.soa.index, len);
				// }

				cgValue item = {};

				if (t->Struct.soa_kind == StructSoa_Fixed) {
					// Fixed SoA stores arrays inline; index directly.
					item = cg_emit_array_ep(p, arr, index);
				} else {
					// Slice/dynamic SoA store pointers; load then offset.
					item = cg_emit_ptr_offset(p, cg_emit_load(p, arr), index);
				}
				if (sub_sel.index.count > 0) {
					item = cg_emit_deep_field_gep(p, item, sub_sel);
				}
				return cg_addr(item);
			} else if (addr.kind == cgAddr_Swizzle) {
				GB_ASSERT(sel.index.count > 0);
				// NOTE(bill): just patch the index in place
				sel.index[0] = addr.swizzle.indices[sel.index[0]];
			} else if (addr.kind == cgAddr_SwizzleLarge) {
				GB_ASSERT(sel.index.count > 0);
				// NOTE(bill): just patch the index in place
				sel.index[0] = addr.swizzle.indices[sel.index[0]];
			}

			cgValue a = cg_addr_get_ptr(p, addr);
			a = cg_emit_deep_field_gep(p, a, sel);
			return cg_addr(a);
		}
	case_end;
	}

	TokenPos token_pos = ast_token(expr).pos;
	GB_PANIC("Unexpected address expression\n"
	         "\tAst: %.*s @ "
	         "%s\n",
	         LIT(ast_strings[expr->kind]),
	         token_pos_to_string(token_pos));
	return {};
}
  1998. }