// tilde_expr.cpp — Tilde (TB) backend: expression code generation.
// NOTE(review): the original paste carried scraper residue (file size and a
// concatenated line-number gutter) here; replaced with this header comment.
  1. gb_internal cgValue cg_flatten_value(cgProcedure *p, cgValue value) {
  2. if (value.kind == cgValue_Symbol) {
  3. GB_ASSERT(is_type_internally_pointer_like(value.type));
  4. value = cg_value(tb_inst_get_symbol_address(p->func, value.symbol), value.type);
  5. }
  6. return value;
  7. }
  8. gb_internal cgContextData *cg_push_context_onto_stack(cgProcedure *p, cgAddr ctx) {
  9. ctx.kind = cgAddr_Context;
  10. cgContextData *cd = array_add_and_get(&p->context_stack);
  11. cd->ctx = ctx;
  12. cd->scope_index = p->scope_index;
  13. return cd;
  14. }
  15. gb_internal cgAddr cg_find_or_generate_context_ptr(cgProcedure *p) {
  16. if (p->context_stack.count > 0) {
  17. return p->context_stack[p->context_stack.count-1].ctx;
  18. }
  19. Type *pt = base_type(p->type);
  20. GB_ASSERT(pt->kind == Type_Proc);
  21. GB_ASSERT(pt->Proc.calling_convention != ProcCC_Odin);
  22. cgAddr c = cg_add_local(p, t_context, nullptr, true);
  23. tb_node_append_attrib(c.addr.node, tb_function_attrib_variable(p->func, -1, "context", cg_debug_type(p->module, t_context)));
  24. c.kind = cgAddr_Context;
  25. // lb_emit_init_context(p, c);
  26. cg_push_context_onto_stack(p, c);
  27. // lb_add_debug_context_variable(p, c);
  28. return c;
  29. }
  30. gb_internal cgValue cg_find_value_from_entity(cgModule *m, Entity *e) {
  31. e = strip_entity_wrapping(e);
  32. GB_ASSERT(e != nullptr);
  33. GB_ASSERT(e->token.string != "_");
  34. if (e->kind == Entity_Procedure) {
  35. return cg_find_procedure_value_from_entity(m, e);
  36. }
  37. cgValue *found = nullptr;
  38. rw_mutex_shared_lock(&m->values_mutex);
  39. found = map_get(&m->values, e);
  40. rw_mutex_shared_unlock(&m->values_mutex);
  41. if (found) {
  42. return *found;
  43. }
  44. GB_PANIC("\n\tError in: %s, missing value '%.*s'\n", token_pos_to_string(e->token.pos), LIT(e->token.string));
  45. return {};
  46. }
  47. gb_internal cgAddr cg_build_addr_from_entity(cgProcedure *p, Entity *e, Ast *expr) {
  48. GB_ASSERT(e != nullptr);
  49. if (e->kind == Entity_Constant) {
  50. Type *t = default_type(type_of_expr(expr));
  51. cgValue v = cg_const_value(p, t, e->Constant.value);
  52. GB_PANIC("TODO(bill): cg_add_global_generated");
  53. // return cg_add_global_generated(p->module, t, v);
  54. return {};
  55. }
  56. cgAddr *local_found = map_get(&p->variable_map, e);
  57. if (local_found) {
  58. return *local_found;
  59. }
  60. cgValue v = {};
  61. cgModule *m = p->module;
  62. rw_mutex_lock(&m->values_mutex);
  63. cgValue *found = map_get(&m->values, e);
  64. rw_mutex_unlock(&m->values_mutex);
  65. if (found) {
  66. v = *found;
  67. } else if (e->kind == Entity_Variable && e->flags & EntityFlag_Using) {
  68. GB_PANIC("TODO(bill): cg_get_using_variable");
  69. // NOTE(bill): Calculate the using variable every time
  70. // v = cg_get_using_variable(p, e);
  71. } else if (e->flags & EntityFlag_SoaPtrField) {
  72. GB_PANIC("TODO(bill): cg_get_soa_variable_addr");
  73. // return cg_get_soa_variable_addr(p, e);
  74. }
  75. if (v.node == nullptr) {
  76. cgValue v = cg_find_value_from_entity(m, e);
  77. v = cg_flatten_value(p, v);
  78. return cg_addr(v);
  79. }
  80. return cg_addr(v);
  81. }
  82. gb_internal cgValue cg_typeid(cgModule *m, Type *t) {
  83. GB_ASSERT("TODO(bill): cg_typeid");
  84. return {};
  85. }
  86. gb_internal cgValue cg_correct_endianness(cgProcedure *p, cgValue value) {
  87. Type *src = core_type(value.type);
  88. GB_ASSERT(is_type_integer(src) || is_type_float(src));
  89. if (is_type_different_to_arch_endianness(src)) {
  90. GB_PANIC("TODO(bill): cg_correct_endianness");
  91. // Type *platform_src_type = integer_endian_type_to_platform_type(src);
  92. // value = cg_emit_byte_swap(p, value, platform_src_type);
  93. }
  94. return value;
  95. }
// Reinterpret the bits of `value` as `type` without any value conversion.
// Sizes must match exactly. When the destination demands stricter alignment
// than the source, the value is spilled to a correctly aligned local and
// reloaded instead of being bit-cast in place.
gb_internal cgValue cg_emit_transmute(cgProcedure *p, cgValue value, Type *type) {
	GB_ASSERT(type_size_of(value.type) == type_size_of(type));

	value = cg_flatten_value(p, value);

	if (are_types_identical(value.type, type)) {
		return value;
	}
	if (are_types_identical(core_type(value.type), core_type(type))) {
		// Same underlying representation: retagging the type suffices.
		value.type = type;
		return value;
	}

	i64 src_align = type_align_of(value.type);
	i64 dst_align = type_align_of(type);

	if (dst_align > src_align) {
		// Destination is more strictly aligned: round-trip through an
		// aligned stack slot of the destination type.
		cgAddr local = cg_add_local(p, type, nullptr, false);
		cgValue dst = local.addr;
		// Store through a pointer-to-source-type so the store matches
		// `value`'s representation; the reload reads it as `type`.
		dst.type = alloc_type_pointer(value.type);
		cg_emit_store(p, dst, value);
		return cg_addr_load(p, local);
	}

	TB_DataType dt = cg_data_type(type);
	switch (value.kind) {
	case cgValue_Value:
		GB_ASSERT(!TB_IS_VOID_TYPE(dt));
		value.type = type;
		value.node = tb_inst_bitcast(p->func, value.node, dt);
		return value;
	case cgValue_Addr:
		// Address-kind values load lazily; only the type tag changes.
		value.type = type;
		return value;
	case cgValue_Symbol:
		// Symbols were flattened at the top of this function.
		GB_PANIC("should be handled above");
		break;
	}
	return value;
}
  131. gb_internal cgValue cg_emit_byte_swap(cgProcedure *p, cgValue value, Type *end_type) {
  132. GB_ASSERT(type_size_of(value.type) == type_size_of(end_type));
  133. if (type_size_of(value.type) < 2) {
  134. return value;
  135. }
  136. if (is_type_float(value.type)) {
  137. i64 sz = type_size_of(value.type);
  138. Type *integer_type = nullptr;
  139. switch (sz) {
  140. case 2: integer_type = t_u16; break;
  141. case 4: integer_type = t_u32; break;
  142. case 8: integer_type = t_u64; break;
  143. }
  144. GB_ASSERT(integer_type != nullptr);
  145. value = cg_emit_transmute(p, value, integer_type);
  146. }
  147. GB_ASSERT(value.kind == cgValue_Value);
  148. value.node = tb_inst_bswap(p->func, value.node);
  149. return cg_emit_transmute(p, value, end_type);
  150. }
// Convert `value` to type `t`, emitting whatever instructions the
// conversion requires: integer resize, int<->float, pointer casts,
// byte swaps for explicit-endian types, bit_set/typeid reinterpretation,
// and so on. Conversions not yet ported to the Tilde backend panic with a
// TODO; a conversion with no matching rule panics with full diagnostics.
gb_internal cgValue cg_emit_conv(cgProcedure *p, cgValue value, Type *t) {
	t = reduce_tuple_to_single_type(t);

	value = cg_flatten_value(p, value);

	Type *src_type = value.type;
	if (are_types_identical(t, src_type)) {
		return value;
	}

	if (is_type_untyped_uninit(src_type)) {
		// return cg_const_undef(m, t);
		return cg_const_nil(p, t);
	}
	if (is_type_untyped_nil(src_type)) {
		return cg_const_nil(p, t);
	}

	Type *src = core_type(src_type);
	Type *dst = core_type(t);
	GB_ASSERT(src != nullptr);
	GB_ASSERT(dst != nullptr);

	if (are_types_identical(src, dst)) {
		// Identical core representation: pure bit reinterpretation.
		return cg_emit_transmute(p, value, t);
	}

	TB_DataType st = cg_data_type(src);
	TB_DataType dt = cg_data_type(t);

	// integer -> integer
	if (is_type_integer(src) && is_type_integer(dst)) {
		GB_ASSERT(src->kind == Type_Basic &&
		          dst->kind == Type_Basic);
		GB_ASSERT(value.kind == cgValue_Value);
		i64 sz = type_size_of(default_type(src));
		i64 dz = type_size_of(default_type(dst));

		if (sz == dz) {
			// Same width: at most the declared endianness differs.
			if (dz > 1 && !types_have_same_internal_endian(src, dst)) {
				return cg_emit_byte_swap(p, value, t);
			}
			value.type = t;
			return value;
		}

		if (sz > 1 && is_type_different_to_arch_endianness(src)) {
			// Normalize the source to platform endianness before resizing.
			Type *platform_src_type = integer_endian_type_to_platform_type(src);
			value = cg_emit_byte_swap(p, value, platform_src_type);
		}

		// Pick the resize instruction: truncate, bitcast, or extend
		// (zero-extend for unsigned sources, sign-extend otherwise).
		TB_Node* (*op)(TB_Function* f, TB_Node* src, TB_DataType dt) = tb_inst_trunc;
		if (dz < sz) {
			op = tb_inst_trunc;
		} else if (dz == sz) {
			op = tb_inst_bitcast;
		} else if (dz > sz) {
			op = is_type_unsigned(src) ? tb_inst_zxt : tb_inst_sxt; // zero/sign extend
		}

		if (dz > 1 && is_type_different_to_arch_endianness(dst)) {
			// Resize in platform endianness, then swap into the
			// destination's declared endianness.
			Type *platform_dst_type = integer_endian_type_to_platform_type(dst);
			cgValue res = cg_value(op(p->func, value.node, cg_data_type(platform_dst_type)), platform_dst_type);
			return cg_emit_byte_swap(p, res, t);
		} else {
			return cg_value(op(p->func, value.node, dt), t);
		}
	}

	// boolean -> boolean/integer
	if (is_type_boolean(src) && (is_type_boolean(dst) || is_type_integer(dst))) {
		// Normalize to 0/1 via != 0, then widen.
		TB_Node *v = tb_inst_cmp_ne(p->func, value.node, tb_inst_uint(p->func, st, 0));
		return cg_value(tb_inst_zxt(p->func, v, dt), t);
	}

	// integer -> boolean
	if (is_type_integer(src) && is_type_boolean(dst)) {
		TB_Node *v = tb_inst_cmp_ne(p->func, value.node, tb_inst_uint(p->func, st, 0));
		return cg_value(tb_inst_zxt(p->func, v, dt), t);
	}

	// cstring <-> ^u8 / [^]u8 / rawptr — all pointer-shaped, so transmute.
	if (is_type_cstring(src) && is_type_u8_ptr(dst)) {
		return cg_emit_transmute(p, value, dst);
	}
	if (is_type_u8_ptr(src) && is_type_cstring(dst)) {
		return cg_emit_transmute(p, value, dst);
	}
	if (is_type_cstring(src) && is_type_u8_multi_ptr(dst)) {
		return cg_emit_transmute(p, value, dst);
	}
	if (is_type_u8_multi_ptr(src) && is_type_cstring(dst)) {
		return cg_emit_transmute(p, value, dst);
	}
	if (is_type_cstring(src) && is_type_rawptr(dst)) {
		return cg_emit_transmute(p, value, dst);
	}
	if (is_type_rawptr(src) && is_type_cstring(dst)) {
		return cg_emit_transmute(p, value, dst);
	}

	if (are_types_identical(src, t_cstring) && are_types_identical(dst, t_string)) {
		GB_PANIC("TODO(bill): cstring_to_string call");
		// TEMPORARY_ALLOCATOR_GUARD();
		// lbValue c = lb_emit_conv(p, value, t_cstring);
		// auto args = array_make<lbValue>(temporary_allocator(), 1);
		// args[0] = c;
		// lbValue s = lb_emit_runtime_call(p, "cstring_to_string", args);
		// return lb_emit_conv(p, s, dst);
	}

	// float -> float
	if (is_type_float(src) && is_type_float(dst)) {
		i64 sz = type_size_of(src);
		i64 dz = type_size_of(dst);

		if (sz == 2 || dz == 2) {
			GB_PANIC("TODO(bill): f16 conversions");
		}

		if (dz == sz) {
			// Same width: only the declared endianness can differ.
			if (types_have_same_internal_endian(src, dst)) {
				return cg_value(value.node, t);
			} else {
				return cg_emit_byte_swap(p, value, t);
			}
		}

		if (is_type_different_to_arch_endianness(src) || is_type_different_to_arch_endianness(dst)) {
			// Convert through the platform-endian equivalents, then swap
			// back if the destination is non-platform-endian.
			Type *platform_src_type = integer_endian_type_to_platform_type(src);
			Type *platform_dst_type = integer_endian_type_to_platform_type(dst);
			cgValue res = {};
			res = cg_emit_conv(p, value, platform_src_type);
			res = cg_emit_conv(p, res, platform_dst_type);
			if (is_type_different_to_arch_endianness(dst)) {
				res = cg_emit_byte_swap(p, res, t);
			}
			return cg_emit_conv(p, res, t);
		}

		if (dz >= sz) {
			return cg_value(tb_inst_fpxt(p->func, value.node, dt), t);
		}
		return cg_value(tb_inst_trunc(p->func, value.node, dt), t);
	}

	if (is_type_complex(src) && is_type_complex(dst)) {
		GB_PANIC("TODO(bill): complex -> complex");
	}

	if (is_type_quaternion(src) && is_type_quaternion(dst)) {
		// @QuaternionLayout
		GB_PANIC("TODO(bill): quaternion -> quaternion");
	}
	if (is_type_integer(src) && is_type_complex(dst)) {
		GB_PANIC("TODO(bill): int -> complex");
	}
	if (is_type_float(src) && is_type_complex(dst)) {
		GB_PANIC("TODO(bill): float -> complex");
	}
	if (is_type_integer(src) && is_type_quaternion(dst)) {
		GB_PANIC("TODO(bill): int -> quaternion");
	}
	if (is_type_float(src) && is_type_quaternion(dst)) {
		GB_PANIC("TODO(bill): float -> quaternion");
	}
	if (is_type_complex(src) && is_type_quaternion(dst)) {
		GB_PANIC("TODO(bill): complex -> quaternion");
	}

	// float <-> integer
	if (is_type_float(src) && is_type_integer(dst)) {
		if (is_type_different_to_arch_endianness(src) || is_type_different_to_arch_endianness(dst)) {
			// Normalize both sides to platform endianness first.
			Type *platform_src_type = integer_endian_type_to_platform_type(src);
			Type *platform_dst_type = integer_endian_type_to_platform_type(dst);
			cgValue res = {};
			res = cg_emit_conv(p, value, platform_src_type);
			res = cg_emit_conv(p, res, platform_dst_type);
			return cg_emit_conv(p, res, t);
		}
		// if (is_type_integer_128bit(dst)) {
		// 	TEMPORARY_ALLOCATOR_GUARD();
		// 	auto args = array_make<lbValue>(temporary_allocator(), 1);
		// 	args[0] = value;
		// 	char const *call = "fixunsdfdi";
		// 	if (is_type_unsigned(dst)) {
		// 		call = "fixunsdfti";
		// 	}
		// 	lbValue res_i128 = lb_emit_runtime_call(p, call, args);
		// 	return lb_emit_conv(p, res_i128, t);
		// }
		bool is_signed = !is_type_unsigned(dst);
		return cg_value(tb_inst_float2int(p->func, value.node, dt, is_signed), t);
	}
	if (is_type_integer(src) && is_type_float(dst)) {
		if (is_type_different_to_arch_endianness(src) || is_type_different_to_arch_endianness(dst)) {
			// Normalize both sides to platform endianness, swap back at
			// the end if the destination is non-platform-endian.
			Type *platform_src_type = integer_endian_type_to_platform_type(src);
			Type *platform_dst_type = integer_endian_type_to_platform_type(dst);
			cgValue res = {};
			res = cg_emit_conv(p, value, platform_src_type);
			res = cg_emit_conv(p, res, platform_dst_type);
			if (is_type_different_to_arch_endianness(dst)) {
				res = cg_emit_byte_swap(p, res, t);
			}
			return cg_emit_conv(p, res, t);
		}
		// if (is_type_integer_128bit(src)) {
		// 	TEMPORARY_ALLOCATOR_GUARD();
		// 	auto args = array_make<lbValue>(temporary_allocator(), 1);
		// 	args[0] = value;
		// 	char const *call = "floattidf";
		// 	if (is_type_unsigned(src)) {
		// 		call = "floattidf_unsigned";
		// 	}
		// 	lbValue res_f64 = lb_emit_runtime_call(p, call, args);
		// 	return lb_emit_conv(p, res_f64, t);
		// }
		// NOTE(review): signedness here is taken from `dst` (a float);
		// presumably the source's signedness is intended — confirm against
		// the LLVM backend's equivalent path.
		bool is_signed = !is_type_unsigned(dst);
		return cg_value(tb_inst_int2float(p->func, value.node, dt, is_signed), t);
	}

	if (is_type_simd_vector(dst)) {
		GB_PANIC("TODO(bill): ? -> #simd vector");
	}

	// Pointer <-> uintptr
	if (is_type_pointer(src) && is_type_uintptr(dst)) {
		return cg_value(tb_inst_ptr2int(p->func, value.node, dt), t);
	}
	if (is_type_uintptr(src) && is_type_pointer(dst)) {
		return cg_value(tb_inst_int2ptr(p->func, value.node), t);
	}
	if (is_type_multi_pointer(src) && is_type_uintptr(dst)) {
		return cg_value(tb_inst_ptr2int(p->func, value.node, dt), t);
	}
	if (is_type_uintptr(src) && is_type_multi_pointer(dst)) {
		return cg_value(tb_inst_int2ptr(p->func, value.node), t);
	}

	if (is_type_union(dst)) {
		GB_PANIC("TODO(bill): ? -> union");
	}

	// NOTE(bill): This has to be done before 'Pointer <-> Pointer' as it's
	// subtype polymorphism casting
	if (check_is_assignable_to_using_subtype(src_type, t)) {
		GB_PANIC("TODO(bill): ? -> subtyping");
	}

	// Pointer <-> Pointer
	if (is_type_pointer(src) && is_type_pointer(dst)) {
		return cg_value(tb_inst_bitcast(p->func, value.node, dt), t);
	}
	if (is_type_multi_pointer(src) && is_type_pointer(dst)) {
		return cg_value(tb_inst_bitcast(p->func, value.node, dt), t);
	}
	if (is_type_pointer(src) && is_type_multi_pointer(dst)) {
		return cg_value(tb_inst_bitcast(p->func, value.node, dt), t);
	}
	if (is_type_multi_pointer(src) && is_type_multi_pointer(dst)) {
		return cg_value(tb_inst_bitcast(p->func, value.node, dt), t);
	}

	// proc <-> proc
	if (is_type_proc(src) && is_type_proc(dst)) {
		return cg_value(tb_inst_bitcast(p->func, value.node, dt), t);
	}
	// pointer -> proc
	if (is_type_pointer(src) && is_type_proc(dst)) {
		return cg_value(tb_inst_bitcast(p->func, value.node, dt), t);
	}
	// proc -> pointer
	if (is_type_proc(src) && is_type_pointer(dst)) {
		return cg_value(tb_inst_bitcast(p->func, value.node, dt), t);
	}

	// []byte/[]u8 <-> string
	if (is_type_u8_slice(src) && is_type_string(dst)) {
		return cg_emit_transmute(p, value, t);
	}
	if (is_type_string(src) && is_type_u8_slice(dst)) {
		return cg_emit_transmute(p, value, t);
	}

	if (is_type_matrix(dst) && !is_type_matrix(src)) {
		GB_PANIC("TODO(bill): !matrix -> matrix");
	}
	if (is_type_matrix(dst) && is_type_matrix(src)) {
		GB_PANIC("TODO(bill): matrix -> matrix");
	}

	if (is_type_any(dst)) {
		GB_PANIC("TODO(bill): ? -> any");
	}

	i64 src_sz = type_size_of(src);
	i64 dst_sz = type_size_of(dst);

	if (src_sz == dst_sz) {
		// bit_set <-> integer
		if (is_type_integer(src) && is_type_bit_set(dst)) {
			cgValue v = cg_emit_conv(p, value, bit_set_to_int(dst));
			return cg_emit_transmute(p, v, t);
		}
		if (is_type_bit_set(src) && is_type_integer(dst)) {
			cgValue bs = cg_emit_transmute(p, value, bit_set_to_int(src));
			return cg_emit_conv(p, bs, dst);
		}

		// typeid <-> integer
		if (is_type_integer(src) && is_type_typeid(dst)) {
			return cg_emit_transmute(p, value, dst);
		}
		if (is_type_typeid(src) && is_type_integer(dst)) {
			return cg_emit_transmute(p, value, dst);
		}
	}

	if (is_type_untyped(src)) {
		if (is_type_string(src) && is_type_string(dst)) {
			// Untyped string constant: round-trip through a local of the
			// destination type.
			cgAddr result = cg_add_local(p, t, nullptr, false);
			cg_addr_store(p, result, value);
			return cg_addr_load(p, result);
		}
	}

	// No rule matched: dump diagnostics and abort.
	gb_printf_err("%.*s\n", LIT(p->name));
	gb_printf_err("cg_emit_conv: src -> dst\n");
	gb_printf_err("Not Identical %s != %s\n", type_to_string(src_type), type_to_string(t));
	gb_printf_err("Not Identical %s != %s\n", type_to_string(src), type_to_string(dst));
	gb_printf_err("Not Identical %p != %p\n", src_type, t);
	gb_printf_err("Not Identical %p != %p\n", src, dst);

	GB_PANIC("Invalid type conversion: '%s' to '%s' for procedure '%.*s'",
	         type_to_string(src_type), type_to_string(t),
	         LIT(p->name));

	return {};
}
// Emit the binary arithmetic operation `op` on `lhs` and `rhs`, after
// converting both operands to `type`. Explicit-endian integer/float types
// are byte-swapped to platform endianness, operated on, and swapped back
// (bitwise ops skip the swap since they are endianness-agnostic).
// Array/matrix/complex/quaternion/SIMD arithmetic is not yet ported to
// this backend and panics.
gb_internal cgValue cg_emit_arith(cgProcedure *p, TokenKind op, cgValue lhs, cgValue rhs, Type *type) {
	if (is_type_array_like(lhs.type) || is_type_array_like(rhs.type)) {
		GB_PANIC("TODO(bill): cg_emit_arith_array");
	} else if (is_type_matrix(lhs.type) || is_type_matrix(rhs.type)) {
		GB_PANIC("TODO(bill): cg_emit_arith_matrix");
	} else if (is_type_complex(type)) {
		GB_PANIC("TODO(bill): cg_emit_arith complex");
	} else if (is_type_quaternion(type)) {
		GB_PANIC("TODO(bill): cg_emit_arith quaternion");
	}

	lhs = cg_flatten_value(p, cg_emit_conv(p, lhs, type));
	rhs = cg_flatten_value(p, cg_emit_conv(p, rhs, type));
	GB_ASSERT(lhs.kind == cgValue_Value);
	GB_ASSERT(rhs.kind == cgValue_Value);

	if (is_type_integer(type) && is_type_different_to_arch_endianness(type)) {
		switch (op) {
		case Token_AndNot:
		case Token_And:
		case Token_Or:
		case Token_Xor:
			// Bitwise ops do not care about byte order: skip the swap.
			goto handle_op;
		}

		// Swap to platform endianness, recurse, swap the result back.
		Type *platform_type = integer_endian_type_to_platform_type(type);
		cgValue x = cg_emit_byte_swap(p, lhs, integer_endian_type_to_platform_type(lhs.type));
		cgValue y = cg_emit_byte_swap(p, rhs, integer_endian_type_to_platform_type(rhs.type));

		cgValue res = cg_emit_arith(p, op, x, y, platform_type);

		return cg_emit_byte_swap(p, res, type);
	}

	if (is_type_float(type) && is_type_different_to_arch_endianness(type)) {
		Type *platform_type = integer_endian_type_to_platform_type(type);
		cgValue x = cg_emit_conv(p, lhs, integer_endian_type_to_platform_type(lhs.type));
		cgValue y = cg_emit_conv(p, rhs, integer_endian_type_to_platform_type(rhs.type));

		cgValue res = cg_emit_arith(p, op, x, y, platform_type);

		return cg_emit_byte_swap(p, res, type);
	}

handle_op:;

	// NOTE(bill): Bit Set Aliases for + and -
	if (is_type_bit_set(type)) {
		switch (op) {
		case Token_Add: op = Token_Or;     break;
		case Token_Sub: op = Token_AndNot; break;
		}
	}

	// NOTE(review): the literal 50 as a TB_ArithmeticBehavior flag set looks
	// like a placeholder — confirm the intended overflow-behavior flags
	// against the TB headers (0 would mean "no special behavior").
	TB_ArithmeticBehavior arith_behavior = cast(TB_ArithmeticBehavior)50;

	Type *integral_type = type;
	if (is_type_simd_vector(integral_type)) {
		GB_PANIC("TODO(bill): cg_emit_arith #simd vector");
		// integral_type = core_array_type(integral_type);
	}

	switch (op) {
	case Token_Add:
		if (is_type_float(integral_type)) {
			return cg_value(tb_inst_fadd(p->func, lhs.node, rhs.node), type);
		}
		return cg_value(tb_inst_add(p->func, lhs.node, rhs.node, arith_behavior), type);
	case Token_Sub:
		if (is_type_float(integral_type)) {
			return cg_value(tb_inst_fsub(p->func, lhs.node, rhs.node), type);
		}
		return cg_value(tb_inst_sub(p->func, lhs.node, rhs.node, arith_behavior), type);
	case Token_Mul:
		if (is_type_float(integral_type)) {
			return cg_value(tb_inst_fmul(p->func, lhs.node, rhs.node), type);
		}
		return cg_value(tb_inst_mul(p->func, lhs.node, rhs.node, arith_behavior), type);
	case Token_Quo:
		if (is_type_float(integral_type)) {
			return cg_value(tb_inst_fdiv(p->func, lhs.node, rhs.node), type);
		}
		// Signed division iff the integral type is signed.
		return cg_value(tb_inst_div(p->func, lhs.node, rhs.node, !is_type_unsigned(integral_type)), type);
	case Token_Mod:
		if (is_type_float(integral_type)) {
			GB_PANIC("TODO(bill): float %% float");
		}
		return cg_value(tb_inst_mod(p->func, lhs.node, rhs.node, !is_type_unsigned(integral_type)), type);
	case Token_ModMod:
		// %% — Euclidean-style modulo: result takes the sign of the divisor.
		if (is_type_unsigned(integral_type)) {
			// For unsigned types %% is identical to %.
			return cg_value(tb_inst_mod(p->func, lhs.node, rhs.node, false), type);
		} else {
			// ((a % b) + b) % b
			TB_Node *a = tb_inst_mod(p->func, lhs.node, rhs.node, true);
			TB_Node *b = tb_inst_add(p->func, a, rhs.node, arith_behavior);
			TB_Node *c = tb_inst_mod(p->func, b, rhs.node, true);
			return cg_value(c, type);
		}
	case Token_And:
		return cg_value(tb_inst_and(p->func, lhs.node, rhs.node), type);
	case Token_Or:
		return cg_value(tb_inst_or(p->func, lhs.node, rhs.node), type);
	case Token_Xor:
		return cg_value(tb_inst_xor(p->func, lhs.node, rhs.node), type);
	case Token_Shl:
		{
			rhs = cg_emit_conv(p, rhs, lhs.type);
			TB_DataType dt = cg_data_type(lhs.type);
			TB_Node *lhsval = lhs.node;
			TB_Node *bits = rhs.node;

			// Odin defines shifts by >= bit-width to produce 0 rather than
			// being undefined: select 0 when the shift amount is too large.
			TB_Node *bit_size = tb_inst_uint(p->func, dt, 8*type_size_of(lhs.type));
			TB_Node *zero = tb_inst_uint(p->func, dt, 0);

			TB_Node *width_test = tb_inst_cmp_ilt(p->func, bits, bit_size, false);

			TB_Node *res = tb_inst_shl(p->func, lhsval, bits, arith_behavior);
			res = tb_inst_select(p->func, width_test, res, zero);
			return cg_value(res, type);
		}
	case Token_Shr:
		{
			rhs = cg_emit_conv(p, rhs, lhs.type);
			TB_DataType dt = cg_data_type(lhs.type);
			TB_Node *lhsval = lhs.node;
			TB_Node *bits = rhs.node;

			// Same over-wide-shift guard as Token_Shl; arithmetic shift for
			// signed types, logical for unsigned.
			TB_Node *bit_size = tb_inst_uint(p->func, dt, 8*type_size_of(lhs.type));
			TB_Node *zero = tb_inst_uint(p->func, dt, 0);

			TB_Node *width_test = tb_inst_cmp_ilt(p->func, bits, bit_size, false);

			TB_Node *res = nullptr;
			if (is_type_unsigned(integral_type)) {
				res = tb_inst_shr(p->func, lhsval, bits);
			} else {
				res = tb_inst_sar(p->func, lhsval, bits);
			}
			res = tb_inst_select(p->func, width_test, res, zero);
			return cg_value(res, type);
		}
	case Token_AndNot:
		// a &~ b  ==  a & ~b
		return cg_value(tb_inst_and(p->func, lhs.node, tb_inst_not(p->func, rhs.node)), type);
	}

	GB_PANIC("unhandled operator of cg_emit_arith");
	return {};
}
  576. gb_internal cgAddr cg_build_addr_slice_expr(cgProcedure *p, Ast *expr) {
  577. ast_node(se, SliceExpr, expr);
  578. cgValue low = cg_const_int(p, t_int, 0);
  579. cgValue high = {};
  580. if (se->low != nullptr) {
  581. low = cg_correct_endianness(p, cg_build_expr(p, se->low));
  582. }
  583. if (se->high != nullptr) {
  584. high = cg_correct_endianness(p, cg_build_expr(p, se->high));
  585. }
  586. bool no_indices = se->low == nullptr && se->high == nullptr;
  587. gb_unused(no_indices);
  588. cgAddr addr = cg_build_addr(p, se->expr);
  589. cgValue base = cg_addr_load(p, addr);
  590. Type *type = base_type(base.type);
  591. if (is_type_pointer(type)) {
  592. type = base_type(type_deref(type));
  593. addr = cg_addr(base);
  594. base = cg_addr_load(p, addr);
  595. }
  596. switch (type->kind) {
  597. case Type_Slice: {
  598. // Type *slice_type = type;
  599. // cgValue len = cg_slice_len(p, base);
  600. // if (high.value == nullptr) high = len;
  601. // if (!no_indices) {
  602. // cg_emit_slice_bounds_check(p, se->open, low, high, len, se->low != nullptr);
  603. // }
  604. // cgValue elem = cg_emit_ptr_offset(p, cg_slice_elem(p, base), low);
  605. // cgValue new_len = cg_emit_arith(p, Token_Sub, high, low, t_int);
  606. // cgAddr slice = cg_add_local_generated(p, slice_type, false);
  607. // cg_fill_slice(p, slice, elem, new_len);
  608. // return slice;
  609. GB_PANIC("cg_build_addr_slice_expr Type_Slice");
  610. break;
  611. }
  612. case Type_RelativeSlice:
  613. GB_PANIC("TODO(bill): Type_RelativeSlice should be handled above already on the cg_addr_load");
  614. break;
  615. case Type_DynamicArray: {
  616. // Type *elem_type = type->DynamicArray.elem;
  617. // Type *slice_type = alloc_type_slice(elem_type);
  618. // lbValue len = lb_dynamic_array_len(p, base);
  619. // if (high.value == nullptr) high = len;
  620. // if (!no_indices) {
  621. // lb_emit_slice_bounds_check(p, se->open, low, high, len, se->low != nullptr);
  622. // }
  623. // lbValue elem = lb_emit_ptr_offset(p, lb_dynamic_array_elem(p, base), low);
  624. // lbValue new_len = lb_emit_arith(p, Token_Sub, high, low, t_int);
  625. // lbAddr slice = lb_add_local_generated(p, slice_type, false);
  626. // lb_fill_slice(p, slice, elem, new_len);
  627. // return slice;
  628. GB_PANIC("cg_build_addr_slice_expr Type_DynamicArray");
  629. break;
  630. }
  631. case Type_MultiPointer: {
  632. Type *res_type = type_of_expr(expr);
  633. if (se->high == nullptr) {
  634. cgAddr res = cg_add_local(p, res_type, nullptr, false);
  635. GB_ASSERT(base.kind == cgValue_Value);
  636. GB_ASSERT(low.kind == cgValue_Value);
  637. i64 stride = type_size_of(type->MultiPointer.elem);
  638. cgValue offset = cg_value(tb_inst_array_access(p->func, base.node, low.node, stride), base.type);
  639. cg_addr_store(p, res, offset);
  640. return res;
  641. } else {
  642. cgAddr res = cg_add_local(p, res_type, nullptr, true);
  643. low = cg_emit_conv(p, low, t_int);
  644. high = cg_emit_conv(p, high, t_int);
  645. // cg_emit_multi_pointer_slice_bounds_check(p, se->open, low, high);
  646. i64 stride = type_size_of(type->MultiPointer.elem);
  647. TB_Node *offset = tb_inst_array_access(p->func, base.node, low.node, stride);
  648. TB_Node *len = tb_inst_sub(p->func, high.node, low.node, cast(TB_ArithmeticBehavior)0);
  649. TB_Node *data_ptr = tb_inst_member_access(p->func, res.addr.node, type_offset_of(res_type, 0));
  650. TB_Node *len_ptr = tb_inst_member_access(p->func, res.addr.node, type_offset_of(res_type, 1));
  651. tb_inst_store(p->func, TB_TYPE_PTR, data_ptr, offset, cast(TB_CharUnits)build_context.ptr_size, false);
  652. tb_inst_store(p->func, TB_TYPE_INT, len_ptr, len, cast(TB_CharUnits)build_context.int_size, false);
  653. return res;
  654. }
  655. }
  656. case Type_Array: {
  657. // Type *slice_type = alloc_type_slice(type->Array.elem);
  658. // lbValue len = lb_const_int(p->module, t_int, type->Array.count);
  659. // if (high.value == nullptr) high = len;
  660. // bool low_const = type_and_value_of_expr(se->low).mode == Addressing_Constant;
  661. // bool high_const = type_and_value_of_expr(se->high).mode == Addressing_Constant;
  662. // if (!low_const || !high_const) {
  663. // if (!no_indices) {
  664. // lb_emit_slice_bounds_check(p, se->open, low, high, len, se->low != nullptr);
  665. // }
  666. // }
  667. // lbValue elem = lb_emit_ptr_offset(p, lb_array_elem(p, lb_addr_get_ptr(p, addr)), low);
  668. // lbValue new_len = lb_emit_arith(p, Token_Sub, high, low, t_int);
  669. // lbAddr slice = lb_add_local_generated(p, slice_type, false);
  670. // lb_fill_slice(p, slice, elem, new_len);
  671. // return slice;
  672. GB_PANIC("cg_build_addr_slice_expr Type_Array");
  673. break;
  674. }
  675. case Type_Basic: {
  676. // GB_ASSERT(type == t_string);
  677. // lbValue len = lb_string_len(p, base);
  678. // if (high.value == nullptr) high = len;
  679. // if (!no_indices) {
  680. // lb_emit_slice_bounds_check(p, se->open, low, high, len, se->low != nullptr);
  681. // }
  682. // lbValue elem = lb_emit_ptr_offset(p, lb_string_elem(p, base), low);
  683. // lbValue new_len = lb_emit_arith(p, Token_Sub, high, low, t_int);
  684. // lbAddr str = lb_add_local_generated(p, t_string, false);
  685. // lb_fill_string(p, str, elem, new_len);
  686. // return str;
  687. GB_PANIC("cg_build_addr_slice_expr Type_Basic");
  688. break;
  689. }
  690. case Type_Struct:
  691. // if (is_type_soa_struct(type)) {
  692. // lbValue len = lb_soa_struct_len(p, lb_addr_get_ptr(p, addr));
  693. // if (high.value == nullptr) high = len;
  694. // if (!no_indices) {
  695. // lb_emit_slice_bounds_check(p, se->open, low, high, len, se->low != nullptr);
  696. // }
  697. // #if 1
  698. // lbAddr dst = lb_add_local_generated(p, type_of_expr(expr), true);
  699. // if (type->Struct.soa_kind == StructSoa_Fixed) {
  700. // i32 field_count = cast(i32)type->Struct.fields.count;
  701. // for (i32 i = 0; i < field_count; i++) {
  702. // lbValue field_dst = lb_emit_struct_ep(p, dst.addr, i);
  703. // lbValue field_src = lb_emit_struct_ep(p, lb_addr_get_ptr(p, addr), i);
  704. // field_src = lb_emit_array_ep(p, field_src, low);
  705. // lb_emit_store(p, field_dst, field_src);
  706. // }
  707. // lbValue len_dst = lb_emit_struct_ep(p, dst.addr, field_count);
  708. // lbValue new_len = lb_emit_arith(p, Token_Sub, high, low, t_int);
  709. // lb_emit_store(p, len_dst, new_len);
  710. // } else if (type->Struct.soa_kind == StructSoa_Slice) {
  711. // if (no_indices) {
  712. // lb_addr_store(p, dst, base);
  713. // } else {
  714. // i32 field_count = cast(i32)type->Struct.fields.count - 1;
  715. // for (i32 i = 0; i < field_count; i++) {
  716. // lbValue field_dst = lb_emit_struct_ep(p, dst.addr, i);
  717. // lbValue field_src = lb_emit_struct_ev(p, base, i);
  718. // field_src = lb_emit_ptr_offset(p, field_src, low);
  719. // lb_emit_store(p, field_dst, field_src);
  720. // }
  721. // lbValue len_dst = lb_emit_struct_ep(p, dst.addr, field_count);
  722. // lbValue new_len = lb_emit_arith(p, Token_Sub, high, low, t_int);
  723. // lb_emit_store(p, len_dst, new_len);
  724. // }
  725. // } else if (type->Struct.soa_kind == StructSoa_Dynamic) {
  726. // i32 field_count = cast(i32)type->Struct.fields.count - 3;
  727. // for (i32 i = 0; i < field_count; i++) {
  728. // lbValue field_dst = lb_emit_struct_ep(p, dst.addr, i);
  729. // lbValue field_src = lb_emit_struct_ev(p, base, i);
  730. // field_src = lb_emit_ptr_offset(p, field_src, low);
  731. // lb_emit_store(p, field_dst, field_src);
  732. // }
  733. // lbValue len_dst = lb_emit_struct_ep(p, dst.addr, field_count);
  734. // lbValue new_len = lb_emit_arith(p, Token_Sub, high, low, t_int);
  735. // lb_emit_store(p, len_dst, new_len);
  736. // }
  737. // return dst;
  738. // #endif
  739. // }
  740. GB_PANIC("cg_build_addr_slice_expr Type_Struct");
  741. break;
  742. }
  743. GB_PANIC("Unknown slicable type");
  744. return {};
  745. }
  746. gb_internal cgValue cg_build_expr_internal(cgProcedure *p, Ast *expr);
  747. gb_internal cgValue cg_build_expr(cgProcedure *p, Ast *expr) {
  748. u16 prev_state_flags = p->state_flags;
  749. defer (p->state_flags = prev_state_flags);
  750. if (expr->state_flags != 0) {
  751. u16 in = expr->state_flags;
  752. u16 out = p->state_flags;
  753. if (in & StateFlag_bounds_check) {
  754. out |= StateFlag_bounds_check;
  755. out &= ~StateFlag_no_bounds_check;
  756. } else if (in & StateFlag_no_bounds_check) {
  757. out |= StateFlag_no_bounds_check;
  758. out &= ~StateFlag_bounds_check;
  759. }
  760. if (in & StateFlag_type_assert) {
  761. out |= StateFlag_type_assert;
  762. out &= ~StateFlag_no_type_assert;
  763. } else if (in & StateFlag_no_type_assert) {
  764. out |= StateFlag_no_type_assert;
  765. out &= ~StateFlag_type_assert;
  766. }
  767. p->state_flags = out;
  768. }
  769. // IMPORTANT NOTE(bill):
  770. // Selector Call Expressions (foo->bar(...))
  771. // must only evaluate `foo` once as it gets transformed into
  772. // `foo.bar(foo, ...)`
  773. // And if `foo` is a procedure call or something more complex, storing the value
  774. // once is a very good idea
  775. // If a stored value is found, it must be removed from the cache
  776. if (expr->state_flags & StateFlag_SelectorCallExpr) {
  777. // cgValue *pp = map_get(&p->selector_values, expr);
  778. // if (pp != nullptr) {
  779. // cgValue res = *pp;
  780. // map_remove(&p->selector_values, expr);
  781. // return res;
  782. // }
  783. // cgAddr *pa = map_get(&p->selector_addr, expr);
  784. // if (pa != nullptr) {
  785. // cgAddr res = *pa;
  786. // map_remove(&p->selector_addr, expr);
  787. // return cg_addr_load(p, res);
  788. // }
  789. }
  790. cgValue res = cg_build_expr_internal(p, expr);
  791. if (res.kind == cgValue_Symbol) {
  792. GB_ASSERT(is_type_internally_pointer_like(res.type));
  793. res = cg_value(tb_inst_get_symbol_address(p->func, res.symbol), res.type);
  794. }
  795. if (expr->state_flags & StateFlag_SelectorCallExpr) {
  796. // map_set(&p->selector_values, expr, res);
  797. }
  798. return res;
  799. }
// Lowers a single expression AST node to Tilde (TB) IR and returns the
// resulting value. This is the dispatch core; callers should go through
// cg_build_expr, which adds state-flag scoping and symbol materialization.
gb_internal cgValue cg_build_expr_internal(cgProcedure *p, Ast *expr) {
	cgModule *m = p->module;

	// Parentheses carry no semantics of their own.
	expr = unparen_expr(expr);

	TokenPos expr_pos = ast_token(expr).pos;
	TypeAndValue tv = type_and_value_of_expr(expr);
	Type *type = type_of_expr(expr);
	GB_ASSERT_MSG(tv.mode != Addressing_Invalid, "invalid expression '%s' (tv.mode = %d, tv.type = %s) @ %s\n Current Proc: %.*s : %s", expr_to_string(expr), tv.mode, type_to_string(tv.type), token_pos_to_string(expr_pos), LIT(p->name), type_to_string(p->type));

	if (tv.value.kind != ExactValue_Invalid) {
		// NOTE(bill): The commented out code below is just for debug purposes only
		// if (is_type_untyped(type)) {
		// 	gb_printf_err("%s %s : %s @ %p\n", token_pos_to_string(expr_pos), expr_to_string(expr), type_to_string(expr->tav.type), expr);
		// 	GB_PANIC("%s\n", type_to_string(tv.type));
		// }

		// NOTE(bill): Short on constant values
		// (any expression the checker already folded to a constant is
		// emitted directly, bypassing the dispatch below)
		return cg_const_value(p, type, tv.value);
	} else if (tv.mode == Addressing_Type) {
		// A type used as an expression evaluates to its typeid.
		// NOTE(bill, 2023-01-16): is this correct? I hope so at least
		return cg_typeid(m, tv.type);
	}

	switch (expr->kind) {
	case_ast_node(bl, BasicLit, expr);
		// Constants were short-circuited above; a basic literal reaching
		// here indicates a checker/codegen inconsistency.
		TokenPos pos = bl->token.pos;
		GB_PANIC("Non-constant basic literal %s - %.*s", token_pos_to_string(pos), LIT(token_strings[bl->token.kind]));
	case_end;

	case_ast_node(bd, BasicDirective, expr);
		TokenPos pos = bd->token.pos;
		GB_PANIC("Non-constant basic literal %s - %.*s", token_pos_to_string(pos), LIT(bd->name.string));
	case_end;

	case_ast_node(i, Ident, expr);
		Entity *e = entity_from_expr(expr);
		e = strip_entity_wrapping(e);

		GB_ASSERT_MSG(e != nullptr, "%s in %.*s %p", expr_to_string(expr), LIT(p->name), expr);
		if (e->kind == Entity_Builtin) {
			Token token = ast_token(expr);
			GB_PANIC("TODO(bill): lb_build_expr Entity_Builtin '%.*s'\n"
			         "\t at %s", LIT(builtin_procs[e->Builtin.id].name),
			         token_pos_to_string(token.pos));
			return {};
		} else if (e->kind == Entity_Nil) {
			GB_PANIC("TODO: cg_find_ident nil");
			// TODO(bill): is this correct?
			return cg_value(cast(TB_Node *)nullptr, e->type);
		}
		GB_ASSERT(e->kind != Entity_ProcGroup);

		// Local variables resolve through the procedure's variable map.
		cgAddr *addr = map_get(&p->variable_map, e);
		if (addr) {
			return cg_addr_load(p, *addr);
		}

		// Non-local entities (globals, procedures, ...) are not handled yet.
		// return cg_find_ident(p, m, e, expr);
		GB_PANIC("TODO: cg_find_ident");
		return {};
	case_end;

	case_ast_node(i, Implicit, expr);
		// e.g. `context` — compute its address, then load.
		return cg_addr_load(p, cg_build_addr(p, expr));
	case_end;

	case_ast_node(u, Uninit, expr);
		// `---`: an untyped uninit has no runtime representation;
		// otherwise emit a poison value of the expression's type.
		if (is_type_untyped(type)) {
			return cg_value(cast(TB_Node *)nullptr, t_untyped_uninit);
		}
		return cg_value(tb_inst_poison(p->func), type);
	case_end;

	case_ast_node(de, DerefExpr, expr);
		return cg_addr_load(p, cg_build_addr(p, expr));
	case_end;

	case_ast_node(se, SelectorExpr, expr);
		TypeAndValue tav = type_and_value_of_expr(expr);
		GB_ASSERT(tav.mode != Addressing_Invalid);
		return cg_addr_load(p, cg_build_addr(p, expr));
	case_end;

	case_ast_node(ise, ImplicitSelectorExpr, expr);
		// `.Foo` — always a checker-resolved constant (e.g. enum value).
		TypeAndValue tav = type_and_value_of_expr(expr);
		GB_ASSERT(tav.mode == Addressing_Constant);
		return cg_const_value(p, type, tv.value);
	case_end;

	case_ast_node(se, SelectorCallExpr, expr);
		// `foo->bar(...)` — the checker has already rewritten this into a
		// normal call stored in se->call.
		GB_ASSERT(se->modified_call);
		return cg_build_call_expr(p, se->call);
	case_end;

	case_ast_node(i, CallExpr, expr);
		return cg_build_call_expr(p, expr);
	case_end;

	case_ast_node(te, TernaryIfExpr, expr);
		GB_PANIC("TODO(bill): TernaryIfExpr");
	case_end;

	case_ast_node(te, TernaryWhenExpr, expr);
		// `x when cond else y` — cond is a compile-time boolean, so only
		// the selected branch is ever emitted.
		TypeAndValue tav = type_and_value_of_expr(te->cond);
		GB_ASSERT(tav.mode == Addressing_Constant);
		GB_ASSERT(tav.value.kind == ExactValue_Bool);
		if (tav.value.value_bool) {
			return cg_build_expr(p, te->x);
		} else {
			return cg_build_expr(p, te->y);
		}
	case_end;

	case_ast_node(tc, TypeCast, expr);
		cgValue e = cg_build_expr(p, tc->expr);
		switch (tc->token.kind) {
		case Token_cast:
			// Value-converting cast.
			return cg_emit_conv(p, e, type);
		case Token_transmute:
			// Bit-reinterpreting cast.
			return cg_emit_transmute(p, e, type);
		}
		GB_PANIC("Invalid AST TypeCast");
	case_end;

	case_ast_node(ac, AutoCast, expr);
		cgValue value = cg_build_expr(p, ac->expr);
		return cg_emit_conv(p, value, type);
	case_end;

	case_ast_node(se, SliceExpr, expr);
		if (is_type_slice(type_of_expr(se->expr))) {
			// NOTE(bill): Quick optimization
			// Slicing a slice as s[:] or s[0:] is the identity.
			if (se->high == nullptr &&
			    (se->low == nullptr || cg_is_expr_constant_zero(se->low))) {
				return cg_build_expr(p, se->expr);
			}
		}
		return cg_addr_load(p, cg_build_addr(p, expr));
	case_end;

	case_ast_node(ie, IndexExpr, expr);
		return cg_addr_load(p, cg_build_addr(p, expr));
	case_end;

	case_ast_node(ie, MatrixIndexExpr, expr);
		return cg_addr_load(p, cg_build_addr(p, expr));
	case_end;
	}

	// Any AST kind not listed above is not yet implemented in this backend.
	GB_PANIC("TODO(bill): cg_build_expr_internal %.*s", LIT(ast_strings[expr->kind]));
	return {};
}
  928. gb_internal cgAddr cg_build_addr_internal(cgProcedure *p, Ast *expr);
  929. gb_internal cgAddr cg_build_addr(cgProcedure *p, Ast *expr) {
  930. expr = unparen_expr(expr);
  931. // IMPORTANT NOTE(bill):
  932. // Selector Call Expressions (foo->bar(...))
  933. // must only evaluate `foo` once as it gets transformed into
  934. // `foo.bar(foo, ...)`
  935. // And if `foo` is a procedure call or something more complex, storing the value
  936. // once is a very good idea
  937. // If a stored value is found, it must be removed from the cache
  938. if (expr->state_flags & StateFlag_SelectorCallExpr) {
  939. // lbAddr *pp = map_get(&p->selector_addr, expr);
  940. // if (pp != nullptr) {
  941. // lbAddr res = *pp;
  942. // map_remove(&p->selector_addr, expr);
  943. // return res;
  944. // }
  945. }
  946. cgAddr addr = cg_build_addr_internal(p, expr);
  947. if (expr->state_flags & StateFlag_SelectorCallExpr) {
  948. // map_set(&p->selector_addr, expr, addr);
  949. }
  950. return addr;
  951. }
// Computes the address (as a cgAddr) of an addressable expression.
// This is the dispatch core; callers should go through cg_build_addr,
// which handles selector-call caching concerns.
gb_internal cgAddr cg_build_addr_internal(cgProcedure *p, Ast *expr) {
	switch (expr->kind) {
	case_ast_node(i, Implicit, expr);
		cgAddr v = {};
		switch (i->kind) {
		case Token_context:
			// `context` lives at a well-known per-procedure location.
			v = cg_find_or_generate_context_ptr(p);
			break;
		}
		GB_ASSERT(v.addr.node != nullptr);
		return v;
	case_end;

	case_ast_node(i, Ident, expr);
		// `_` has no storage; return a zero address.
		if (is_blank_ident(expr)) {
			cgAddr val = {};
			return val;
		}
		// NOTE(review): `name` is currently unused here — kept for parity
		// with the LLVM backend; confirm before removing.
		String name = i->token.string;
		Entity *e = entity_of_node(expr);
		return cg_build_addr_from_entity(p, e, expr);
	case_end;

	case_ast_node(se, SliceExpr, expr);
		return cg_build_addr_slice_expr(p, expr);
	case_end;

	case_ast_node(se, SelectorExpr, expr);
		Ast *sel_node = unparen_expr(se->selector);
		if (sel_node->kind != Ast_Ident) {
			GB_PANIC("Unsupported selector expression");
		}
		String selector = sel_node->Ident.token.string;
		TypeAndValue tav = type_and_value_of_expr(se->expr);

		if (tav.mode == Addressing_Invalid) {
			// NOTE(bill): Imports
			// `pkg.name` — the base is an import, so resolve the selector
			// identifier directly.
			Entity *imp = entity_of_node(se->expr);
			if (imp != nullptr) {
				GB_ASSERT(imp->kind == Entity_ImportName);
			}
			return cg_build_addr(p, unparen_expr(se->selector));
		}

		Type *type = base_type(tav.type);
		if (tav.mode == Addressing_Type) { // Addressing_Type
			// `Type.proc` pseudo-fields resolve to the procedure's value.
			Selection sel = lookup_field(tav.type, selector, true);
			if (sel.pseudo_field) {
				GB_ASSERT(sel.entity->kind == Entity_Procedure);
				return cg_addr(cg_find_value_from_entity(p->module, sel.entity));
			}
			GB_PANIC("Unreachable %.*s", LIT(selector));
		}

		if (se->swizzle_count > 0) {
			// Array swizzle selector (e.g. `v.xyz`): indices are packed
			// two bits each into se->swizzle_indices.
			Type *array_type = base_type(type_deref(tav.type));
			GB_ASSERT(array_type->kind == Type_Array);
			u8 swizzle_count = se->swizzle_count;
			u8 swizzle_indices_raw = se->swizzle_indices;
			u8 swizzle_indices[4] = {};
			for (u8 i = 0; i < swizzle_count; i++) {
				u8 index = swizzle_indices_raw>>(i*2) & 3;
				swizzle_indices[i] = index;
			}
			cgValue a = {};
			if (is_type_pointer(tav.type)) {
				a = cg_build_expr(p, se->expr);
			} else {
				cgAddr addr = cg_build_addr(p, se->expr);
				a = cg_addr_get_ptr(p, addr);
			}

			GB_ASSERT(is_type_array(expr->tav.type));
			GB_PANIC("TODO(bill): cg_addr_swizzle");
			// return cg_addr_swizzle(a, expr->tav.type, swizzle_count, swizzle_indices);
		}

		Selection sel = lookup_field(type, selector, false);
		GB_ASSERT(sel.entity != nullptr);
		if (sel.pseudo_field) {
			GB_ASSERT(sel.entity->kind == Entity_Procedure);
			Entity *e = entity_of_node(sel_node);
			return cg_addr(cg_find_value_from_entity(p->module, e));
		}

		{
			cgAddr addr = cg_build_addr(p, se->expr);
			if (addr.kind == cgAddr_Map) {
				// A map access yields a value, not a stable pointer; spill
				// it to a local so the field GEP has something to address.
				cgValue v = cg_addr_load(p, addr);
				cgValue a = cg_address_from_load_or_generate_local(p, v);
				a = cg_emit_deep_field_gep(p, a, sel);
				return cg_addr(a);
			} else if (addr.kind == cgAddr_Context) {
				GB_ASSERT(sel.index.count > 0);
				// NOTE(review): `count >= 0` looks always true for a
				// non-negative length — presumably `> 0` was intended so an
				// empty prior selection is not combined; confirm against the
				// LLVM backend before changing.
				if (addr.ctx.sel.index.count >= 0) {
					sel = selection_combine(addr.ctx.sel, sel);
				}
				addr.ctx.sel = sel;
				addr.kind = cgAddr_Context; // already Context; kept for clarity
				return addr;
			} else if (addr.kind == cgAddr_SoaVariable) {
				// Field access on an #soa variable: select the field's
				// column array first, then index into it.
				cgValue index = addr.soa.index;
				i64 first_index = sel.index[0];
				Selection sub_sel = sel;
				sub_sel.index.data += 1;
				sub_sel.index.count -= 1;

				cgValue arr = cg_emit_struct_ep(p, addr.addr, first_index);

				Type *t = base_type(type_deref(addr.addr.type));
				GB_ASSERT(is_type_soa_struct(t));

				// TODO(bill): bounds checking for soa variable
				// if (addr.soa.index_expr != nullptr && (!cg_is_const(addr.soa.index) || t->Struct.soa_kind != StructSoa_Fixed)) {
				// 	cgValue len = cg_soa_struct_len(p, addr.addr);
				// 	cg_emit_bounds_check(p, ast_token(addr.soa.index_expr), addr.soa.index, len);
				// }

				cgValue item = {};
				if (t->Struct.soa_kind == StructSoa_Fixed) {
					// Fixed #soa: the column is an in-place array.
					item = cg_emit_array_ep(p, arr, index);
				} else {
					// Slice/dynamic #soa: the column is a pointer; load then offset.
					item = cg_emit_ptr_offset(p, cg_emit_load(p, arr), index);
				}
				if (sub_sel.index.count > 0) {
					item = cg_emit_deep_field_gep(p, item, sub_sel);
				}
				return cg_addr(item);
			} else if (addr.kind == cgAddr_Swizzle) {
				GB_ASSERT(sel.index.count > 0);
				// NOTE(bill): just patch the index in place
				sel.index[0] = addr.swizzle.indices[sel.index[0]];
			} else if (addr.kind == cgAddr_SwizzleLarge) {
				GB_ASSERT(sel.index.count > 0);
				// NOTE(bill): just patch the index in place
				sel.index[0] = addr.swizzle.indices[sel.index[0]];
			}

			// Default path: take the base pointer and GEP down to the field.
			cgValue a = cg_addr_get_ptr(p, addr);
			a = cg_emit_deep_field_gep(p, a, sel);
			return cg_addr(a);
		}
	case_end;
	}

	TokenPos token_pos = ast_token(expr).pos;
	GB_PANIC("Unexpected address expression\n"
	         "\tAst: %.*s @ "
	         "%s\n",
	         LIT(ast_strings[expr->kind]),
	         token_pos_to_string(token_pos));
	return {};
}