// llvm_backend_const.cpp
  1. gb_internal bool lb_is_const(lbValue value) {
  2. LLVMValueRef v = value.value;
  3. if (is_type_untyped_nil(value.type)) {
  4. // TODO(bill): Is this correct behaviour?
  5. return true;
  6. }
  7. if (LLVMIsConstant(v)) {
  8. return true;
  9. }
  10. return false;
  11. }
  12. gb_internal bool lb_is_const_or_global(lbValue value) {
  13. if (lb_is_const(value)) {
  14. return true;
  15. }
  16. return false;
  17. }
  18. gb_internal bool lb_is_elem_const(Ast *elem, Type *elem_type) {
  19. if (!elem_type_can_be_constant(elem_type)) {
  20. return false;
  21. }
  22. if (elem->kind == Ast_FieldValue) {
  23. elem = elem->FieldValue.value;
  24. }
  25. TypeAndValue tav = type_and_value_of_expr(elem);
  26. GB_ASSERT_MSG(tav.mode != Addressing_Invalid, "%s %s", expr_to_string(elem), type_to_string(tav.type));
  27. return tav.value.kind != ExactValue_Invalid;
  28. }
  29. gb_internal bool lb_is_const_nil(lbValue value) {
  30. LLVMValueRef v = value.value;
  31. if (v != nullptr && LLVMIsConstant(v)) {
  32. if (LLVMIsAConstantAggregateZero(v)) {
  33. return true;
  34. } else if (LLVMIsAConstantPointerNull(v)) {
  35. return true;
  36. }
  37. }
  38. return false;
  39. }
  40. gb_internal bool lb_is_expr_constant_zero(Ast *expr) {
  41. GB_ASSERT(expr != nullptr);
  42. auto v = exact_value_to_integer(expr->tav.value);
  43. if (v.kind == ExactValue_Integer) {
  44. return big_int_cmp_zero(&v.value_integer) == 0;
  45. }
  46. return false;
  47. }
// Recover the compile-time byte contents of a constant `string` value.
// The constant is a {ptr, len} struct whose data pointer must be a constant
// `getelementptr` into a global variable holding the actual bytes.
gb_internal String lb_get_const_string(lbModule *m, lbValue value) {
	GB_ASSERT(lb_is_const(value));
	GB_ASSERT(LLVMIsConstant(value.value));

	Type *t = base_type(value.type);
	GB_ASSERT(are_types_identical(t, t_string));

	// Field 0 is the data pointer, field 1 is the length.
	unsigned ptr_indices[1] = {0};
	unsigned len_indices[1] = {1};
	LLVMValueRef underlying_ptr = llvm_const_extract_value(m, value.value, ptr_indices, gb_count_of(ptr_indices));
	LLVMValueRef underlying_len = llvm_const_extract_value(m, value.value, len_indices, gb_count_of(len_indices));

	// Walk from the constant GEP to the global's initializer, which holds the bytes.
	GB_ASSERT(LLVMGetConstOpcode(underlying_ptr) == LLVMGetElementPtr);
	underlying_ptr = LLVMGetOperand(underlying_ptr, 0);
	GB_ASSERT(LLVMIsAGlobalVariable(underlying_ptr));
	underlying_ptr = LLVMGetInitializer(underlying_ptr);

	size_t length = 0;
	char const *text = LLVMGetAsString(underlying_ptr, &length);

	// Trust the `len` field rather than the initializer length; the backing
	// global may be NUL-terminated or padded.
	isize real_length = cast(isize)LLVMConstIntGetSExtValue(underlying_len);
	return make_string(cast(u8 const *)text, real_length);
}
  66. gb_internal LLVMValueRef llvm_const_cast(LLVMValueRef val, LLVMTypeRef dst, bool *failure_) {
  67. LLVMTypeRef src = LLVMTypeOf(val);
  68. if (src == dst) {
  69. return val;
  70. }
  71. if (LLVMIsNull(val)) {
  72. return LLVMConstNull(dst);
  73. }
  74. GB_ASSERT_MSG(lb_sizeof(dst) == lb_sizeof(src), "%s vs %s", LLVMPrintTypeToString(dst), LLVMPrintTypeToString(src));
  75. LLVMTypeKind kind = LLVMGetTypeKind(dst);
  76. switch (kind) {
  77. case LLVMPointerTypeKind:
  78. return LLVMConstPointerCast(val, dst);
  79. case LLVMStructTypeKind:
  80. return val;
  81. }
  82. if (failure_) *failure_ = true;
  83. return val;
  84. }
  85. gb_internal lbValue lb_const_ptr_cast(lbModule *m, lbValue value, Type *t) {
  86. GB_ASSERT(is_type_internally_pointer_like(value.type));
  87. GB_ASSERT(is_type_internally_pointer_like(t));
  88. GB_ASSERT(lb_is_const(value));
  89. lbValue res = {};
  90. res.value = LLVMConstPointerCast(value.value, lb_type(m, t));
  91. res.type = t;
  92. return res;
  93. }
  94. gb_internal LLVMValueRef llvm_const_string_internal(lbModule *m, Type *t, LLVMValueRef data, LLVMValueRef len) {
  95. GB_ASSERT(!is_type_string16(t));
  96. if (build_context.metrics.ptr_size < build_context.metrics.int_size) {
  97. LLVMValueRef values[3] = {
  98. data,
  99. LLVMConstNull(lb_type(m, t_i32)),
  100. len,
  101. };
  102. return llvm_const_named_struct_internal(m, lb_type(m, t), values, 3);
  103. } else {
  104. LLVMValueRef values[2] = {
  105. data,
  106. len,
  107. };
  108. return llvm_const_named_struct_internal(m, lb_type(m, t), values, 2);
  109. }
  110. }
  111. gb_internal LLVMValueRef llvm_const_string16_internal(lbModule *m, Type *t, LLVMValueRef data, LLVMValueRef len) {
  112. GB_ASSERT(is_type_string16(t));
  113. if (build_context.metrics.ptr_size < build_context.metrics.int_size) {
  114. LLVMValueRef values[3] = {
  115. data,
  116. LLVMConstNull(lb_type(m, t_i32)),
  117. len,
  118. };
  119. return llvm_const_named_struct_internal(m, lb_type(m, t), values, 3);
  120. } else {
  121. LLVMValueRef values[2] = {
  122. data,
  123. len,
  124. };
  125. return llvm_const_named_struct_internal(m, lb_type(m, t), values, 2);
  126. }
  127. }
// Build a named-struct constant for `t` from the *logical* field values.
// If the lowered LLVM struct type has extra (padding) elements, the values
// are remapped to their LLVM element indices and padding slots zero-filled.
gb_internal LLVMValueRef llvm_const_named_struct(lbModule *m, Type *t, LLVMValueRef *values, isize value_count_) {
	LLVMTypeRef struct_type = lb_type(m, t);
	GB_ASSERT(LLVMGetTypeKind(struct_type) == LLVMStructTypeKind);

	unsigned value_count = cast(unsigned)value_count_;
	unsigned elem_count = LLVMCountStructElementTypes(struct_type);
	if (elem_count == value_count) {
		// No padding: fields map 1:1 onto the LLVM struct elements.
		return llvm_const_named_struct_internal(m, struct_type, values, value_count_);
	}
	Type *bt = base_type(t);
	GB_ASSERT(bt->kind == Type_Struct || bt->kind == Type_Union);
	GB_ASSERT(value_count_ == bt->Struct.fields.count);

	// Map each source field index to its LLVM element index.
	auto field_remapping = lb_get_struct_remapping(m, t);
	unsigned values_with_padding_count = elem_count;

	LLVMValueRef *values_with_padding = gb_alloc_array(permanent_allocator(), LLVMValueRef, values_with_padding_count);
	for (unsigned i = 0; i < value_count; i++) {
		values_with_padding[field_remapping[i]] = values[i];
	}
	// Any slot not claimed by a field is padding; fill it with zero of its type.
	for (unsigned i = 0; i < values_with_padding_count; i++) {
		if (values_with_padding[i] == nullptr) {
			values_with_padding[i] = LLVMConstNull(LLVMStructGetTypeAtIndex(struct_type, i));
		}
	}

	return llvm_const_named_struct_internal(m, struct_type, values_with_padding, values_with_padding_count);
}
  152. gb_internal LLVMValueRef llvm_const_named_struct_internal(lbModule *m, LLVMTypeRef t, LLVMValueRef *values, isize value_count_) {
  153. unsigned value_count = cast(unsigned)value_count_;
  154. unsigned elem_count = LLVMCountStructElementTypes(t);
  155. GB_ASSERT_MSG(value_count == elem_count, "%s %u %u", LLVMPrintTypeToString(t), value_count, elem_count);
  156. bool failure = false;
  157. for (unsigned i = 0; i < elem_count; i++) {
  158. LLVMTypeRef elem_type = LLVMStructGetTypeAtIndex(t, i);
  159. values[i] = llvm_const_cast(values[i], elem_type, &failure);
  160. }
  161. if (failure) {
  162. return LLVMConstStructInContext(m->ctx, values, value_count, true);
  163. }
  164. return LLVMConstNamedStruct(t, values, value_count);
  165. }
  166. gb_internal LLVMValueRef llvm_const_array(lbModule *m, LLVMTypeRef elem_type, LLVMValueRef *values, isize value_count_) {
  167. unsigned value_count = cast(unsigned)value_count_;
  168. bool failure = false;
  169. for (unsigned i = 0; i < value_count; i++) {
  170. values[i] = llvm_const_cast(values[i], elem_type, &failure);
  171. }
  172. if (failure) {
  173. return LLVMConstStructInContext(m->ctx, values, value_count, false);
  174. }
  175. for (unsigned i = 0; i < value_count; i++) {
  176. if (elem_type != LLVMTypeOf(values[i])) {
  177. return LLVMConstStructInContext(m->ctx, values, value_count, false);
  178. }
  179. }
  180. return LLVMConstArray(elem_type, values, value_count);
  181. }
  182. gb_internal LLVMValueRef llvm_const_slice_internal(lbModule *m, LLVMValueRef data, LLVMValueRef len) {
  183. if (build_context.metrics.ptr_size < build_context.metrics.int_size) {
  184. GB_ASSERT(build_context.metrics.ptr_size == 4);
  185. GB_ASSERT(build_context.metrics.int_size == 8);
  186. LLVMValueRef vals[3] = {
  187. data,
  188. LLVMConstNull(lb_type(m, t_u32)),
  189. len,
  190. };
  191. return LLVMConstStructInContext(m->ctx, vals, gb_count_of(vals), false);
  192. } else {
  193. LLVMValueRef vals[2] = {
  194. data,
  195. len,
  196. };
  197. return LLVMConstStructInContext(m->ctx, vals, gb_count_of(vals), false);
  198. }
  199. }
  200. gb_internal LLVMValueRef llvm_const_slice(lbModule *m, lbValue data, lbValue len) {
  201. GB_ASSERT(is_type_pointer(data.type) || is_type_multi_pointer(data.type));
  202. GB_ASSERT(are_types_identical(len.type, t_int));
  203. return llvm_const_slice_internal(m, data.value, len.value);
  204. }
  205. gb_internal lbValue lb_const_nil(lbModule *m, Type *type) {
  206. LLVMValueRef v = LLVMConstNull(lb_type(m, type));
  207. return lbValue{v, type};
  208. }
  209. gb_internal lbValue lb_const_undef(lbModule *m, Type *type) {
  210. LLVMValueRef v = LLVMGetUndef(lb_type(m, type));
  211. return lbValue{v, type};
  212. }
  213. gb_internal lbValue lb_const_int(lbModule *m, Type *type, u64 value) {
  214. lbValue res = {};
  215. res.value = LLVMConstInt(lb_type(m, type), cast(unsigned long long)value, !is_type_unsigned(type));
  216. res.type = type;
  217. return res;
  218. }
  219. gb_internal lbValue lb_const_string(lbModule *m, String const &value) {
  220. return lb_const_value(m, t_string, exact_value_string(value));
  221. }
  222. gb_internal lbValue lb_const_string(lbModule *m, String16 const &value) {
  223. return lb_const_value(m, t_string16, exact_value_string16(value));
  224. }
  225. gb_internal lbValue lb_const_bool(lbModule *m, Type *type, bool value) {
  226. lbValue res = {};
  227. res.value = LLVMConstInt(lb_type(m, type), value, false);
  228. res.type = type;
  229. return res;
  230. }
  231. gb_internal LLVMValueRef lb_const_f16(lbModule *m, f32 f, Type *type=t_f16) {
  232. GB_ASSERT(type_size_of(type) == 2);
  233. u16 u = f32_to_f16(f);
  234. if (is_type_different_to_arch_endianness(type)) {
  235. u = gb_endian_swap16(u);
  236. }
  237. LLVMValueRef i = LLVMConstInt(LLVMInt16TypeInContext(m->ctx), u, false);
  238. return LLVMConstBitCast(i, lb_type(m, type));
  239. }
  240. gb_internal LLVMValueRef lb_const_f32(lbModule *m, f32 f, Type *type=t_f32) {
  241. GB_ASSERT(type_size_of(type) == 4);
  242. u32 u = bit_cast<u32>(f);
  243. if (is_type_different_to_arch_endianness(type)) {
  244. u = gb_endian_swap32(u);
  245. }
  246. LLVMValueRef i = LLVMConstInt(LLVMInt32TypeInContext(m->ctx), u, false);
  247. return LLVMConstBitCast(i, lb_type(m, type));
  248. }
  249. gb_internal bool lb_is_expr_untyped_const(Ast *expr) {
  250. auto const &tv = type_and_value_of_expr(expr);
  251. if (is_type_untyped(tv.type)) {
  252. return tv.value.kind != ExactValue_Invalid;
  253. }
  254. return false;
  255. }
  256. gb_internal lbValue lb_expr_untyped_const_to_typed(lbModule *m, Ast *expr, Type *t) {
  257. GB_ASSERT(is_type_typed(t));
  258. auto const &tv = type_and_value_of_expr(expr);
  259. return lb_const_value(m, t, tv.value);
  260. }
  261. gb_internal lbValue lb_const_source_code_location_const(lbModule *m, String const &procedure_, TokenPos const &pos) {
  262. String file = get_file_path_string(pos.file_id);
  263. String procedure = procedure_;
  264. i32 line = pos.line;
  265. i32 column = pos.column;
  266. switch (build_context.source_code_location_info) {
  267. case SourceCodeLocationInfo_Normal:
  268. break;
  269. case SourceCodeLocationInfo_Obfuscated:
  270. file = obfuscate_string(file, "F");
  271. procedure = obfuscate_string(procedure, "P");
  272. line = obfuscate_i32(line);
  273. column = obfuscate_i32(column);
  274. break;
  275. case SourceCodeLocationInfo_Filename:
  276. file = last_path_element(file);
  277. break;
  278. case SourceCodeLocationInfo_None:
  279. file = str_lit("");
  280. procedure = str_lit("");
  281. line = 0;
  282. column = 0;
  283. break;
  284. }
  285. LLVMValueRef fields[4] = {};
  286. fields[0]/*file*/ = lb_find_or_add_entity_string(m, file, false).value;
  287. fields[1]/*line*/ = lb_const_int(m, t_i32, line).value;
  288. fields[2]/*column*/ = lb_const_int(m, t_i32, column).value;
  289. fields[3]/*procedure*/ = lb_find_or_add_entity_string(m, procedure, false).value;
  290. lbValue res = {};
  291. res.value = llvm_const_named_struct(m, t_source_code_location, fields, gb_count_of(fields));
  292. res.type = t_source_code_location;
  293. return res;
  294. }
  295. gb_internal lbValue lb_emit_source_code_location_const(lbProcedure *p, String const &procedure, TokenPos const &pos) {
  296. lbModule *m = p->module;
  297. return lb_const_source_code_location_const(m, procedure, pos);
  298. }
  299. gb_internal lbValue lb_emit_source_code_location_const(lbProcedure *p, Ast *node) {
  300. String proc_name = {};
  301. if (p->entity) {
  302. proc_name = p->entity->token.string;
  303. }
  304. TokenPos pos = {};
  305. if (node) {
  306. pos = ast_token(node).pos;
  307. }
  308. return lb_emit_source_code_location_const(p, proc_name, pos);
  309. }
  310. gb_internal String lb_source_code_location_gen_name(String const &procedure, TokenPos const &pos) {
  311. gbString s = gb_string_make(permanent_allocator(), "scl$[");
  312. s = gb_string_append_length(s, procedure.text, procedure.len);
  313. if (pos.offset != 0) {
  314. s = gb_string_append_fmt(s, "%d", pos.offset);
  315. } else {
  316. s = gb_string_append_fmt(s, "%d_%d", pos.line, pos.column);
  317. }
  318. s = gb_string_appendc(s, "]");
  319. return make_string(cast(u8 const *)s, gb_string_length(s));
  320. }
  321. gb_internal String lb_source_code_location_gen_name(lbProcedure *p, Ast *node) {
  322. String proc_name = {};
  323. if (p->entity) {
  324. proc_name = p->entity->token.string;
  325. }
  326. TokenPos pos = {};
  327. if (node) {
  328. pos = ast_token(node).pos;
  329. }
  330. return lb_source_code_location_gen_name(proc_name, pos);
  331. }
  332. gb_internal lbValue lb_emit_source_code_location_as_global_ptr(lbProcedure *p, String const &procedure, TokenPos const &pos) {
  333. lbValue loc = lb_emit_source_code_location_const(p, procedure, pos);
  334. lbAddr addr = lb_add_global_generated_with_name(p->module, loc.type, loc, lb_source_code_location_gen_name(procedure, pos));
  335. lb_make_global_private_const(addr);
  336. return addr.addr;
  337. }
  338. gb_internal lbValue lb_const_source_code_location_as_global_ptr(lbModule *m, String const &procedure, TokenPos const &pos) {
  339. lbValue loc = lb_const_source_code_location_const(m, procedure, pos);
  340. lbAddr addr = lb_add_global_generated_with_name(m, loc.type, loc, lb_source_code_location_gen_name(procedure, pos));
  341. lb_make_global_private_const(addr);
  342. return addr.addr;
  343. }
  344. gb_internal lbValue lb_emit_source_code_location_as_global_ptr(lbProcedure *p, Ast *node) {
  345. lbValue loc = lb_emit_source_code_location_const(p, node);
  346. lbAddr addr = lb_add_global_generated_with_name(p->module, loc.type, loc, lb_source_code_location_gen_name(p, node));
  347. lb_make_global_private_const(addr);
  348. return addr.addr;
  349. }
  350. gb_internal lbValue lb_emit_source_code_location_as_global(lbProcedure *p, String const &procedure, TokenPos const &pos) {
  351. return lb_emit_load(p, lb_emit_source_code_location_as_global_ptr(p, procedure, pos));
  352. }
  353. gb_internal lbValue lb_emit_source_code_location_as_global(lbProcedure *p, Ast *node) {
  354. return lb_emit_load(p, lb_emit_source_code_location_as_global_ptr(p, node));
  355. }
// Build an LLVM value for an array from already-lowered element values.
// If locals are allowed, we are inside a procedure, and any element is
// non-constant, the array is materialized through a generated stack local
// with per-element stores; otherwise a true constant array is produced.
// NOTE: `values` may be mutated in place (procedure pointers are re-cast).
gb_internal LLVMValueRef lb_build_constant_array_values(lbModule *m, Type *type, Type *elem_type, isize count, LLVMValueRef *values, lbConstContext cc) {
	if (cc.allow_local) {
		// A potentially mutable local build can never be read-only data.
		cc.is_rodata = false;
	}
	bool is_local = cc.allow_local && m->curr_procedure != nullptr;
	bool is_const = true;
	if (is_local) {
		for (isize i = 0; i < count; i++) {
			GB_ASSERT(values[i] != nullptr);
			if (!LLVMIsConstant(values[i])) {
				is_const = false;
				break;
			}
		}
	}
	if (!is_const) {
		// Non-constant path: emit a generated local and store each element.
		LLVMTypeRef llvm_elem_type = lb_type(m, elem_type);
		lbProcedure *p = m->curr_procedure;
		GB_ASSERT(p != nullptr);
		lbAddr v = lb_add_local_generated(p, type, false);
		lbValue ptr = lb_addr_get_ptr(p, v);
		for (isize i = 0; i < count; i++) {
			lbValue elem = lb_emit_array_epi(p, ptr, i);
			if (is_type_proc(elem_type)) {
				// Procedure values may carry a differing pointer type; normalize.
				values[i] = LLVMConstPointerCast(values[i], llvm_elem_type);
			}
			LLVMBuildStore(p->builder, values[i], elem.value);
		}
		return lb_addr_load(p, v).value;
	}
	return llvm_const_array(m, lb_type(m, elem_type), values, cast(unsigned int)count);
}
  388. gb_internal LLVMValueRef lb_big_int_to_llvm(lbModule *m, Type *original_type, BigInt const *a) {
  389. if (big_int_is_zero(a)) {
  390. return LLVMConstNull(lb_type(m, original_type));
  391. }
  392. size_t sz = cast(size_t)type_size_of(original_type);
  393. u64 rop64[4] = {}; // 2 u64 is the maximum we will ever need, so doubling it will be fine :P
  394. u8 *rop = cast(u8 *)rop64;
  395. size_t max_count = 0;
  396. size_t written = 0;
  397. size_t size = 1;
  398. size_t nails = 0;
  399. mp_endian endian = MP_LITTLE_ENDIAN;
  400. max_count = mp_pack_count(a, nails, size);
  401. if (sz < max_count) {
  402. debug_print_big_int(a);
  403. gb_printf_err("%s -> %tu\n", type_to_string(original_type), sz);;
  404. }
  405. GB_ASSERT_MSG(sz >= max_count, "max_count: %tu, sz: %tu, written: %tu, type %s", max_count, sz, written, type_to_string(original_type));
  406. GB_ASSERT(gb_size_of(rop64) >= sz);
  407. mp_err err = mp_pack(rop, sz, &written,
  408. MP_LSB_FIRST,
  409. size, endian, nails,
  410. a);
  411. GB_ASSERT(err == MP_OKAY);
  412. if (!is_type_endian_little(original_type)) {
  413. for (size_t i = 0; i < sz/2; i++) {
  414. u8 tmp = rop[i];
  415. rop[i] = rop[sz-1-i];
  416. rop[sz-1-i] = tmp;
  417. }
  418. }
  419. GB_ASSERT(!is_type_array(original_type));
  420. LLVMValueRef value = LLVMConstIntOfArbitraryPrecision(lb_type(m, original_type), cast(unsigned)((sz+7)/8), cast(u64 *)rop);
  421. if (big_int_is_neg(a)) {
  422. value = LLVMConstNeg(value);
  423. }
  424. return value;
  425. }
  426. gb_internal bool lb_is_nested_possibly_constant(Type *ft, Selection const &sel, Ast *elem) {
  427. GB_ASSERT(!sel.indirect);
  428. for (i32 index : sel.index) {
  429. Type *bt = base_type(ft);
  430. switch (bt->kind) {
  431. case Type_Struct:
  432. if (bt->Struct.is_raw_union) {
  433. return false;
  434. }
  435. ft = bt->Struct.fields[index]->type;
  436. break;
  437. case Type_Array:
  438. ft = bt->Array.elem;
  439. break;
  440. default:
  441. return false;
  442. }
  443. }
  444. if (is_type_raw_union(ft) || is_type_typeid(ft)) {
  445. return false;
  446. }
  447. return lb_is_elem_const(elem, ft);
  448. }
  449. gb_internal Slice<LLVMValueRef> lb_construct_const_union_flatten_values(lbModule *m, LLVMValueRef variant_value, Type *variant_type, LLVMTypeRef elem) {
  450. LLVMTypeRef llvm_variant_type = lb_type(m, variant_type);
  451. LLVMTypeKind variant_kind = LLVMGetTypeKind(llvm_variant_type);
  452. LLVMTypeKind elem_kind = LLVMGetTypeKind(elem);
  453. if (is_type_struct(variant_type)) {
  454. Type *st = base_type(variant_type);
  455. GB_ASSERT(st->kind == Type_Struct);
  456. if (st->Struct.fields.count == 1) {
  457. LLVMValueRef f = llvm_const_extract_value(m, variant_value, 0);
  458. return lb_construct_const_union_flatten_values(m, f, st->Struct.fields[0]->type, elem);
  459. }
  460. } else if (is_llvm_type_slice_like(llvm_variant_type)) {
  461. if (lb_sizeof(elem) == build_context.ptr_size) {
  462. LLVMValueRef *elems = temporary_alloc_array<LLVMValueRef>(2);
  463. elems[0] = llvm_const_extract_value(m, variant_value, 0);
  464. elems[0] = LLVMConstPtrToInt(elems[0], elem);
  465. elems[1] = llvm_const_extract_value(m, variant_value, 1);
  466. return {elems, 2};
  467. }
  468. } else if (is_type_array_like(variant_type)) {
  469. Type *array_elem = base_array_type(variant_type);
  470. isize array_count = get_array_type_count(variant_type);
  471. Slice<LLVMValueRef> array = temporary_slice_make<LLVMValueRef>(array_count);
  472. for (isize i = 0; i < array_count; i++) {
  473. LLVMValueRef v = llvm_const_extract_value(m, variant_value, 0);
  474. auto res = lb_construct_const_union_flatten_values(m, v, array_elem, elem);
  475. if (res.count != 1) {
  476. return {};
  477. }
  478. array[i] = res[0];
  479. }
  480. return array;
  481. } else if (variant_kind == LLVMIntegerTypeKind) {
  482. if (elem == llvm_variant_type) {
  483. LLVMValueRef *elems = temporary_alloc_array<LLVMValueRef>(1);
  484. elems[0] = variant_value;
  485. return {elems, 1};
  486. } else if (!is_type_different_to_arch_endianness(variant_type)) {
  487. i64 elem_size = lb_sizeof(elem);
  488. i64 variant_size = lb_sizeof(llvm_variant_type);
  489. if (elem_size > variant_size) {
  490. u64 val = LLVMConstIntGetZExtValue(variant_value);
  491. LLVMValueRef *elems = temporary_alloc_array<LLVMValueRef>(1);
  492. elems[0] = LLVMConstInt(elem, val, false);
  493. return {elems, 1};
  494. }
  495. }
  496. } else if (!is_type_different_to_arch_endianness(variant_type) &&
  497. elem_kind == LLVMIntegerTypeKind) {
  498. switch (variant_kind) {
  499. case LLVMHalfTypeKind:
  500. {
  501. LLVMBool loses = false;
  502. f64 res = LLVMConstRealGetDouble(variant_value, &loses);
  503. u16 val = f32_to_f16(cast(f32)res);
  504. LLVMValueRef *elems = temporary_alloc_array<LLVMValueRef>(1);
  505. elems[0] = LLVMConstInt(elem, val, false);
  506. return {elems, 1};
  507. }
  508. break;
  509. case LLVMFloatTypeKind:
  510. {
  511. LLVMBool loses = false;
  512. f64 res = LLVMConstRealGetDouble(variant_value, &loses);
  513. union { f32 f; u32 i; } val = {};
  514. val.f = cast(f32)res;
  515. LLVMValueRef *elems = temporary_alloc_array<LLVMValueRef>(1);
  516. elems[0] = LLVMConstInt(elem, val.i, false);
  517. return {elems, 1};
  518. }
  519. break;
  520. case LLVMDoubleTypeKind:
  521. {
  522. LLVMBool loses = false;
  523. f64 res = LLVMConstRealGetDouble(variant_value, &loses);
  524. union { f64 f; u64 i; } val = {};
  525. val.f = res;
  526. LLVMValueRef *elems = temporary_alloc_array<LLVMValueRef>(1);
  527. elems[0] = LLVMConstInt(elem, val.i, false);
  528. return {elems, 1};
  529. }
  530. break;
  531. }
  532. }
  533. return {};
  534. }
  535. gb_internal LLVMValueRef lb_construct_const_union(lbModule *m, LLVMValueRef variant_value, Type *variant_type, Type *union_type) {
  536. #if 1
  537. return nullptr;
  538. #else
  539. Type *bt = base_type(union_type);
  540. GB_ASSERT(bt->kind == Type_Union);
  541. GB_ASSERT(lb_type(m, variant_type) == LLVMTypeOf(variant_value));
  542. LLVMTypeRef llvm_type = lb_type(m, union_type);
  543. if (LLVMIsNull(variant_value)) {
  544. return LLVMConstNull(llvm_type);
  545. }
  546. if (bt->Union.variants.count == 0) {
  547. GB_ASSERT(LLVMIsNull(variant_value));
  548. return variant_value;
  549. }
  550. i64 block_size = bt->Union.variant_block_size;
  551. i64 variant_size = type_size_of(variant_type);
  552. LLVMTypeRef llvm_variant_type = lb_type(m, variant_type);
  553. if (is_type_union_maybe_pointer(bt)) {
  554. GB_ASSERT(lb_sizeof(LLVMTypeOf(variant_value)) == lb_sizeof(llvm_type));
  555. return LLVMConstBitCast(variant_value, llvm_type);
  556. }
  557. if (bt->Union.variants.count == 1) {
  558. unsigned long long the_tag = cast(unsigned long long)union_variant_index(union_type, variant_type);
  559. LLVMTypeRef tag_type = lb_type(m, union_tag_type(bt));
  560. LLVMValueRef values[3] = {};
  561. unsigned i = 0;
  562. values[i++] = variant_value;
  563. values[i++] = LLVMConstInt(tag_type, the_tag, false);
  564. i64 used_size = block_size + lb_sizeof(tag_type);
  565. i64 padding = type_size_of(union_type) - used_size;
  566. i64 align = type_align_of(union_type);
  567. if (padding > 0) {
  568. LLVMTypeRef padding_type = lb_type_padding_filler(m, padding, align);
  569. values[i++] = LLVMConstNull(padding_type);
  570. }
  571. return LLVMConstNamedStruct(llvm_type, values, i);
  572. } else if (true) {
  573. // TODO(bill): ignore this for the time being
  574. return nullptr;
  575. }
  576. LLVMTypeRef block_type = LLVMStructGetTypeAtIndex(llvm_type, 0);
  577. LLVMTypeRef tag_type = lb_type(m, union_tag_type(bt));
  578. i64 used_size = block_size + lb_sizeof(tag_type);
  579. i64 padding = type_size_of(union_type) - used_size;
  580. i64 align = type_align_of(union_type);
  581. LLVMTypeRef padding_type = nullptr;
  582. if (padding > 0) {
  583. padding_type = lb_type_padding_filler(m, padding, align);
  584. }
  585. unsigned i = 0;
  586. LLVMValueRef values[3] = {};
  587. LLVMValueRef block_value = variant_value;
  588. if (block_size == 0) {
  589. block_value = LLVMConstNull(block_type);
  590. } else if (lb_sizeof(llvm_variant_type) == 0) {
  591. block_value = LLVMConstNull(block_type);
  592. } else if (block_type != llvm_variant_type) {
  593. LLVMTypeKind block_kind = LLVMGetTypeKind(block_type);
  594. LLVMTypeKind variant_kind = LLVMGetTypeKind(llvm_variant_type);
  595. if (block_kind == LLVMArrayTypeKind) {
  596. LLVMTypeRef elem = LLVMGetElementType(block_type);
  597. unsigned count = LLVMGetArrayLength(block_type);
  598. Slice<LLVMValueRef> partial_elems = lb_construct_const_union_flatten_values(m, variant_value, variant_type, elem);
  599. if (partial_elems.count == count) {
  600. block_value = LLVMConstArray(elem, partial_elems.data, count);
  601. goto assign_value_wrapped;
  602. }
  603. Slice<LLVMValueRef> full_elems = temporary_slice_make<LLVMValueRef>(count);
  604. slice_copy(&full_elems, partial_elems);
  605. for (isize j = partial_elems.count; j < count; j++) {
  606. full_elems[j] = LLVMConstNull(elem);
  607. }
  608. block_value = LLVMConstArray(elem, full_elems.data, count);
  609. goto assign_value_wrapped;
  610. } else if (block_size != variant_size) {
  611. if (block_kind == LLVMIntegerTypeKind && !is_type_different_to_arch_endianness(variant_type)) {
  612. Slice<LLVMValueRef> partial_elems = lb_construct_const_union_flatten_values(m, variant_value, variant_type, block_type);
  613. if (partial_elems.count == 1) {
  614. block_value = partial_elems[0];
  615. goto assign_value_wrapped;
  616. }
  617. }
  618. return nullptr;
  619. }
  620. if (block_kind == LLVMIntegerTypeKind) {
  621. GB_ASSERT(block_size == variant_size);
  622. switch (variant_kind) {
  623. case LLVMHalfTypeKind:
  624. case LLVMFloatTypeKind:
  625. case LLVMDoubleTypeKind:
  626. block_value = LLVMConstBitCast(block_value, block_type);
  627. goto assign_value_wrapped;
  628. case LLVMPointerTypeKind:
  629. block_value = LLVMConstPtrToInt(block_value, block_type);
  630. goto assign_value_wrapped;
  631. }
  632. }
  633. return nullptr;
  634. } else {
  635. // TODO(bill): ignore this for the time being
  636. return nullptr;
  637. }
  638. assign_value_wrapped:;
  639. values[i++] = block_value;
  640. unsigned long long the_tag = cast(unsigned long long)union_variant_index(union_type, variant_type);
  641. values[i++] = LLVMConstInt(tag_type, the_tag, false);
  642. if (padding > 0) {
  643. values[i++] = LLVMConstNull(padding_type);
  644. }
  645. return LLVMConstNamedStruct(llvm_type, values, i);
  646. #endif
  647. }
  648. gb_internal bool lb_try_construct_const_union(lbModule *m, lbValue *value, Type *variant_type, Type *union_type) {
  649. if (lb_is_const(*value)) {
  650. LLVMValueRef res = lb_construct_const_union(m, value->value, variant_type, union_type);
  651. if (res != nullptr) {
  652. *value = {res, union_type};
  653. return true;
  654. }
  655. // gb_printf_err("%s -> %s\n", LLVMPrintValueToString(value->value), LLVMPrintTypeToString(lb_type(m, union_type)));
  656. }
  657. return false;
  658. }
  659. gb_internal lbValue lb_const_value(lbModule *m, Type *type, ExactValue value, lbConstContext cc, Type *value_type) {
  660. if (cc.allow_local) {
  661. cc.is_rodata = false;
  662. }
  663. LLVMContextRef ctx = m->ctx;
  664. type = default_type(type);
  665. Type *original_type = type;
  666. lbValue res = {};
  667. res.type = original_type;
  668. type = core_type(type);
  669. value = convert_exact_value_for_type(value, type);
  670. if (value.kind == ExactValue_Typeid) {
  671. return lb_typeid(m, value.value_typeid);
  672. }
  673. if (value.kind == ExactValue_Invalid) {
  674. return lb_const_nil(m, original_type);
  675. }
  676. if (value.kind == ExactValue_Procedure) {
  677. lbValue res = {};
  678. Ast *expr = unparen_expr(value.value_procedure);
  679. GB_ASSERT(expr != nullptr);
  680. if (expr->kind == Ast_ProcLit) {
  681. res = lb_generate_anonymous_proc_lit(m, str_lit("_proclit"), expr);
  682. } else {
  683. Entity *e = entity_from_expr(expr);
  684. res = lb_find_procedure_value_from_entity(m, e);
  685. }
  686. if (res.value == nullptr) {
  687. // This is an unspecialized polymorphic procedure, return nil or dummy value
  688. return lb_const_nil(m, original_type);
  689. }
  690. GB_ASSERT(LLVMGetValueKind(res.value) == LLVMFunctionValueKind);
  691. if (LLVMGetIntrinsicID(res.value) == 0) {
  692. // NOTE(bill): do not cast intrinsics as they are not really procedures that can be casted
  693. res.value = LLVMConstPointerCast(res.value, lb_type(m, res.type));
  694. }
  695. return res;
  696. }
  697. bool is_local = cc.allow_local && m->curr_procedure != nullptr;
  698. if (is_type_union(type) && is_type_union_constantable(type)) {
  699. Type *bt = base_type(type);
  700. GB_ASSERT(bt->kind == Type_Union);
  701. if (bt->Union.variants.count == 0) {
  702. return lb_const_nil(m, original_type);
  703. } else if (bt->Union.variants.count == 1) {
  704. Type *t = bt->Union.variants[0];
  705. lbValue cv = lb_const_value(m, t, value, cc);
  706. GB_ASSERT(LLVMIsConstant(cv.value));
  707. LLVMTypeRef llvm_type = lb_type(m, original_type);
  708. if (is_type_union_maybe_pointer(type)) {
  709. LLVMValueRef values[1] = {cv.value};
  710. res.value = llvm_const_named_struct_internal(m, llvm_type, values, 1);
  711. res.type = original_type;
  712. return res;
  713. } else {
  714. unsigned tag_value = 1;
  715. if (bt->Union.kind == UnionType_no_nil) {
  716. tag_value = 0;
  717. }
  718. LLVMValueRef tag = LLVMConstInt(LLVMStructGetTypeAtIndex(llvm_type, 1), tag_value, false);
  719. LLVMValueRef padding = nullptr;
  720. LLVMValueRef values[3] = {cv.value, tag, padding};
  721. isize value_count = 2;
  722. if (LLVMCountStructElementTypes(llvm_type) > 2) {
  723. value_count = 3;
  724. padding = LLVMConstNull(LLVMStructGetTypeAtIndex(llvm_type, 2));
  725. }
  726. res.value = llvm_const_named_struct_internal(m, llvm_type, values, value_count);
  727. res.type = original_type;
  728. return res;
  729. }
  730. } else {
  731. GB_ASSERT(value_type != nullptr);
  732. i64 block_size = bt->Union.variant_block_size;
  733. if (are_types_identical(value_type, original_type)) {
  734. if (value.kind == ExactValue_Compound) {
  735. ast_node(cl, CompoundLit, value.value_compound);
  736. GB_ASSERT(cl->elems.count == 0);
  737. return lb_const_nil(m, original_type);
  738. }
  739. GB_PANIC("%s vs %s", type_to_string(value_type), type_to_string(original_type));
  740. }
  741. lbValue cv = lb_const_value(m, value_type, value, cc, value_type);
  742. Type *variant_type = cv.type;
  743. LLVMValueRef values[4] = {};
  744. unsigned value_count = 0;
  745. values[value_count++] = cv.value;
  746. if (type_size_of(variant_type) != block_size) {
  747. LLVMTypeRef padding_type = lb_type_padding_filler(m, block_size - type_size_of(variant_type), 1);
  748. values[value_count++] = LLVMConstNull(padding_type);
  749. }
  750. Type *tag_type = union_tag_type(bt);
  751. LLVMTypeRef llvm_tag_type = lb_type(m, tag_type);
  752. i64 tag_index = union_variant_index(bt, variant_type);
  753. values[value_count++] = LLVMConstInt(llvm_tag_type, tag_index, false);
  754. i64 used_size = block_size + type_size_of(tag_type);
  755. i64 union_size = type_size_of(bt);
  756. i64 padding = union_size - used_size;
  757. if (padding > 0) {
  758. LLVMTypeRef padding_type = lb_type_padding_filler(m, padding, 1);
  759. values[value_count++] = LLVMConstNull(padding_type);
  760. }
  761. res.value = LLVMConstStructInContext(m->ctx, values, value_count, true);
  762. return res;
  763. }
  764. }
  765. // GB_ASSERT_MSG(is_type_typed(type), "%s", type_to_string(type));
  766. if (is_type_slice(type)) {
  767. if (value.kind == ExactValue_String) {
  768. GB_ASSERT(is_type_slice(type));
  769. res.value = lb_find_or_add_entity_string_byte_slice_with_type(m, value.value_string, original_type).value;
  770. return res;
  771. } else if (value.kind == ExactValue_String16) {
  772. GB_ASSERT(is_type_slice(type));
  773. res.value = lb_find_or_add_entity_string16_slice_with_type(m, value.value_string16, original_type).value;
  774. return res;
  775. }else {
  776. ast_node(cl, CompoundLit, value.value_compound);
  777. isize count = cl->elems.count;
  778. if (count == 0) {
  779. return lb_const_nil(m, type);
  780. }
  781. count = gb_max(cast(isize)cl->max_count, count);
  782. Type *elem = base_type(type)->Slice.elem;
  783. Type *t = alloc_type_array(elem, count);
  784. lbValue backing_array = lb_const_value(m, t, value, cc);
  785. LLVMValueRef array_data = nullptr;
  786. if (is_local) {
  787. // NOTE(bill, 2020-06-08): This is a bit of a hack but a "constant" slice needs
  788. // its backing data on the stack
  789. lbProcedure *p = m->curr_procedure;
  790. LLVMTypeRef llvm_type = lb_type(m, t);
  791. array_data = llvm_alloca(p, llvm_type, 16);
  792. {
  793. LLVMValueRef ptr = array_data;
  794. ptr = LLVMBuildPointerCast(p->builder, ptr, LLVMPointerType(LLVMTypeOf(backing_array.value), 0), "");
  795. LLVMBuildStore(p->builder, backing_array.value, ptr);
  796. }
  797. {
  798. LLVMValueRef indices[2] = {llvm_zero(m), llvm_zero(m)};
  799. LLVMValueRef ptr = LLVMBuildInBoundsGEP2(p->builder, llvm_type, array_data, indices, 2, "");
  800. LLVMValueRef len = LLVMConstInt(lb_type(m, t_int), count, true);
  801. lbAddr slice = lb_add_local_generated(p, original_type, false);
  802. map_set(&m->exact_value_compound_literal_addr_map, value.value_compound, slice);
  803. lb_fill_slice(p, slice, {ptr, alloc_type_pointer(elem)}, {len, t_int});
  804. return lb_addr_load(p, slice);
  805. }
  806. } else {
  807. u32 id = m->global_array_index.fetch_add(1);
  808. gbString str = gb_string_make(temporary_allocator(), "csba$");
  809. str = gb_string_appendc(str, m->module_name);
  810. str = gb_string_append_fmt(str, "$%x", id);
  811. String name = make_string(cast(u8 const *)str, gb_string_length(str));
  812. Entity *e = alloc_entity_constant(nullptr, make_token_ident(name), t, value);
  813. array_data = LLVMAddGlobal(m->mod, LLVMTypeOf(backing_array.value), str);
  814. LLVMSetInitializer(array_data, backing_array.value);
  815. if (cc.link_section.len > 0) {
  816. LLVMSetSection(array_data, alloc_cstring(permanent_allocator(), cc.link_section));
  817. }
  818. if (cc.is_rodata) {
  819. LLVMSetGlobalConstant(array_data, true);
  820. }
  821. lbValue g = {};
  822. g.value = LLVMConstPointerCast(array_data, LLVMPointerType(lb_type(m, t), 0));
  823. g.type = t;
  824. lb_add_entity(m, e, g);
  825. lb_add_member(m, name, g);
  826. {
  827. LLVMValueRef ptr = g.value;
  828. LLVMValueRef len = LLVMConstInt(lb_type(m, t_int), count, true);
  829. LLVMValueRef values[2] = {ptr, len};
  830. res.value = llvm_const_named_struct(m, original_type, values, 2);
  831. return res;
  832. }
  833. }
  834. }
  835. } else if (is_type_array(type) && value.kind == ExactValue_String && !is_type_u8(core_array_type(type))) {
  836. if (is_type_rune_array(type)) {
  837. i64 count = type->Array.count;
  838. Type *elem = type->Array.elem;
  839. LLVMTypeRef et = lb_type(m, elem);
  840. Rune rune;
  841. isize offset = 0;
  842. isize width = 1;
  843. String s = value.value_string;
  844. LLVMValueRef *elems = gb_alloc_array(permanent_allocator(), LLVMValueRef, cast(isize)count);
  845. for (i64 i = 0; i < count && offset < s.len; i++) {
  846. width = utf8_decode(s.text+offset, s.len-offset, &rune);
  847. offset += width;
  848. elems[i] = LLVMConstInt(et, rune, true);
  849. }
  850. GB_ASSERT(offset == s.len);
  851. res.value = llvm_const_array(m, et, elems, cast(unsigned)count);
  852. return res;
  853. }
  854. // NOTE(bill, 2021-10-07): Allow for array programming value constants
  855. Type *core_elem = core_array_type(type);
  856. return lb_const_value(m, core_elem, value, cc);
  857. } else if (is_type_u8_array(type) && value.kind == ExactValue_String) {
  858. GB_ASSERT(type->Array.count == value.value_string.len);
  859. LLVMValueRef data = LLVMConstStringInContext(ctx,
  860. cast(char const *)value.value_string.text,
  861. cast(unsigned)value.value_string.len,
  862. true /*DontNullTerminate*/);
  863. res.value = data;
  864. return res;
  865. } else if (is_type_array(type) &&
  866. value.kind != ExactValue_Invalid &&
  867. value.kind != ExactValue_String &&
  868. value.kind != ExactValue_Compound) {
  869. i64 count = type->Array.count;
  870. Type *elem = type->Array.elem;
  871. lbValue single_elem = lb_const_value(m, elem, value, cc);
  872. LLVMValueRef *elems = gb_alloc_array(permanent_allocator(), LLVMValueRef, cast(isize)count);
  873. for (i64 i = 0; i < count; i++) {
  874. elems[i] = single_elem.value;
  875. }
  876. res.value = llvm_const_array(m, lb_type(m, elem), elems, cast(unsigned)count);
  877. return res;
  878. } else if (is_type_matrix(type) &&
  879. value.kind != ExactValue_Invalid &&
  880. value.kind != ExactValue_Compound) {
  881. i64 row = type->Matrix.row_count;
  882. i64 column = type->Matrix.column_count;
  883. GB_ASSERT(row == column);
  884. Type *elem = type->Matrix.elem;
  885. lbValue single_elem = lb_const_value(m, elem, value, cc);
  886. single_elem.value = llvm_const_cast(single_elem.value, lb_type(m, elem), /*failure_*/nullptr);
  887. i64 total_elem_count = matrix_type_total_internal_elems(type);
  888. LLVMValueRef *elems = gb_alloc_array(permanent_allocator(), LLVMValueRef, cast(isize)total_elem_count);
  889. for (i64 i = 0; i < row; i++) {
  890. elems[matrix_indices_to_offset(type, i, i)] = single_elem.value;
  891. }
  892. for (i64 i = 0; i < total_elem_count; i++) {
  893. if (elems[i] == nullptr) {
  894. elems[i] = LLVMConstNull(lb_type(m, elem));
  895. }
  896. }
  897. res.value = LLVMConstArray(lb_type(m, elem), elems, cast(unsigned)total_elem_count);
  898. return res;
  899. } else if (is_type_simd_vector(type) &&
  900. value.kind != ExactValue_Invalid &&
  901. value.kind != ExactValue_Compound) {
  902. i64 count = type->SimdVector.count;
  903. Type *elem = type->SimdVector.elem;
  904. lbValue single_elem = lb_const_value(m, elem, value, cc);
  905. single_elem.value = llvm_const_cast(single_elem.value, lb_type(m, elem), /*failure_*/nullptr);
  906. LLVMValueRef *elems = gb_alloc_array(permanent_allocator(), LLVMValueRef, count);
  907. for (i64 i = 0; i < count; i++) {
  908. elems[i] = single_elem.value;
  909. }
  910. res.value = LLVMConstVector(elems, cast(unsigned)count);
  911. return res;
  912. }
  913. switch (value.kind) {
  914. case ExactValue_Invalid:
  915. res.value = LLVMConstNull(lb_type(m, original_type));
  916. return res;
  917. case ExactValue_Bool:
  918. res.value = LLVMConstInt(lb_type(m, original_type), value.value_bool, false);
  919. return res;
  920. case ExactValue_String:
  921. {
  922. bool custom_link_section = cc.link_section.len > 0;
  923. LLVMValueRef ptr = nullptr;
  924. lbValue res = {};
  925. res.type = default_type(original_type);
  926. isize len = value.value_string.len;
  927. if (is_type_string16(res.type) || is_type_cstring16(res.type)) {
  928. TEMPORARY_ALLOCATOR_GUARD();
  929. String16 s16 = string_to_string16(temporary_allocator(), value.value_string);
  930. len = s16.len;
  931. ptr = lb_find_or_add_entity_string16_ptr(m, s16, custom_link_section);
  932. } else {
  933. ptr = lb_find_or_add_entity_string_ptr(m, value.value_string, custom_link_section);
  934. }
  935. if (custom_link_section) {
  936. LLVMSetSection(ptr, alloc_cstring(permanent_allocator(), cc.link_section));
  937. }
  938. if (is_type_cstring(res.type) || is_type_cstring16(res.type)) {
  939. res.value = ptr;
  940. } else {
  941. if (len == 0) {
  942. if (is_type_string16(res.type)) {
  943. ptr = LLVMConstNull(lb_type(m, t_u16_ptr));
  944. } else {
  945. ptr = LLVMConstNull(lb_type(m, t_u8_ptr));
  946. }
  947. }
  948. LLVMValueRef str_len = LLVMConstInt(lb_type(m, t_int), len, true);
  949. GB_ASSERT(is_type_string(original_type));
  950. if (is_type_string16(res.type)) {
  951. res.value = llvm_const_string16_internal(m, original_type, ptr, str_len);
  952. } else {
  953. res.value = llvm_const_string_internal(m, original_type, ptr, str_len);
  954. }
  955. }
  956. return res;
  957. }
  958. case ExactValue_String16:
  959. {
  960. GB_ASSERT(is_type_string16(res.type) || is_type_cstring16(res.type));
  961. bool custom_link_section = cc.link_section.len > 0;
  962. LLVMValueRef ptr = lb_find_or_add_entity_string16_ptr(m, value.value_string16, custom_link_section);
  963. lbValue res = {};
  964. res.type = default_type(original_type);
  965. if (custom_link_section) {
  966. LLVMSetSection(ptr, alloc_cstring(permanent_allocator(), cc.link_section));
  967. }
  968. if (is_type_cstring16(res.type)) {
  969. res.value = ptr;
  970. } else {
  971. if (value.value_string16.len == 0) {
  972. ptr = LLVMConstNull(lb_type(m, t_u8_ptr));
  973. }
  974. LLVMValueRef str_len = LLVMConstInt(lb_type(m, t_int), value.value_string16.len, true);
  975. GB_ASSERT(is_type_string(original_type));
  976. res.value = llvm_const_string16_internal(m, original_type, ptr, str_len);
  977. }
  978. return res;
  979. }
  980. case ExactValue_Integer:
  981. if (is_type_pointer(type) || is_type_multi_pointer(type) || is_type_proc(type)) {
  982. LLVMTypeRef t = lb_type(m, original_type);
  983. LLVMValueRef i = lb_big_int_to_llvm(m, t_uintptr, &value.value_integer);
  984. res.value = LLVMConstIntToPtr(i, t);
  985. } else {
  986. res.value = lb_big_int_to_llvm(m, original_type, &value.value_integer);
  987. }
  988. return res;
  989. case ExactValue_Float:
  990. if (is_type_different_to_arch_endianness(type)) {
  991. if (type->Basic.kind == Basic_f32le || type->Basic.kind == Basic_f32be) {
  992. f32 f = static_cast<float>(value.value_float);
  993. u32 u = bit_cast<u32>(f);
  994. u = gb_endian_swap32(u);
  995. res.value = LLVMConstReal(lb_type(m, original_type), bit_cast<f32>(u));
  996. } else if (type->Basic.kind == Basic_f16le || type->Basic.kind == Basic_f16be) {
  997. f32 f = static_cast<float>(value.value_float);
  998. u16 u = f32_to_f16(f);
  999. u = gb_endian_swap16(u);
  1000. res.value = LLVMConstReal(lb_type(m, original_type), f16_to_f32(u));
  1001. } else {
  1002. u64 u = bit_cast<u64>(value.value_float);
  1003. u = gb_endian_swap64(u);
  1004. res.value = LLVMConstReal(lb_type(m, original_type), bit_cast<f64>(u));
  1005. }
  1006. } else {
  1007. res.value = LLVMConstReal(lb_type(m, original_type), value.value_float);
  1008. }
  1009. return res;
  1010. case ExactValue_Complex:
  1011. {
  1012. LLVMValueRef values[2] = {};
  1013. switch (8*type_size_of(type)) {
  1014. case 32:
  1015. values[0] = lb_const_f16(m, cast(f32)value.value_complex->real);
  1016. values[1] = lb_const_f16(m, cast(f32)value.value_complex->imag);
  1017. break;
  1018. case 64:
  1019. values[0] = lb_const_f32(m, cast(f32)value.value_complex->real);
  1020. values[1] = lb_const_f32(m, cast(f32)value.value_complex->imag);
  1021. break;
  1022. case 128:
  1023. values[0] = LLVMConstReal(lb_type(m, t_f64), value.value_complex->real);
  1024. values[1] = LLVMConstReal(lb_type(m, t_f64), value.value_complex->imag);
  1025. break;
  1026. }
  1027. res.value = llvm_const_named_struct(m, original_type, values, 2);
  1028. return res;
  1029. }
  1030. break;
  1031. case ExactValue_Quaternion:
  1032. {
  1033. LLVMValueRef values[4] = {};
  1034. switch (8*type_size_of(type)) {
  1035. case 64:
  1036. // @QuaternionLayout
  1037. values[3] = lb_const_f16(m, cast(f32)value.value_quaternion->real);
  1038. values[0] = lb_const_f16(m, cast(f32)value.value_quaternion->imag);
  1039. values[1] = lb_const_f16(m, cast(f32)value.value_quaternion->jmag);
  1040. values[2] = lb_const_f16(m, cast(f32)value.value_quaternion->kmag);
  1041. break;
  1042. case 128:
  1043. // @QuaternionLayout
  1044. values[3] = lb_const_f32(m, cast(f32)value.value_quaternion->real);
  1045. values[0] = lb_const_f32(m, cast(f32)value.value_quaternion->imag);
  1046. values[1] = lb_const_f32(m, cast(f32)value.value_quaternion->jmag);
  1047. values[2] = lb_const_f32(m, cast(f32)value.value_quaternion->kmag);
  1048. break;
  1049. case 256:
  1050. // @QuaternionLayout
  1051. values[3] = LLVMConstReal(lb_type(m, t_f64), value.value_quaternion->real);
  1052. values[0] = LLVMConstReal(lb_type(m, t_f64), value.value_quaternion->imag);
  1053. values[1] = LLVMConstReal(lb_type(m, t_f64), value.value_quaternion->jmag);
  1054. values[2] = LLVMConstReal(lb_type(m, t_f64), value.value_quaternion->kmag);
  1055. break;
  1056. }
  1057. res.value = llvm_const_named_struct(m, original_type, values, 4);
  1058. return res;
  1059. }
  1060. break;
  1061. case ExactValue_Pointer:
  1062. res.value = LLVMConstIntToPtr(LLVMConstInt(lb_type(m, t_uintptr), value.value_pointer, false), lb_type(m, original_type));
  1063. return res;
  1064. case ExactValue_Compound:
  1065. if (is_type_slice(type)) {
  1066. return lb_const_value(m, type, value, cc);
  1067. } else if (is_type_soa_struct(type)) {
  1068. GB_ASSERT(type->kind == Type_Struct);
  1069. GB_ASSERT(type->Struct.soa_kind == StructSoa_Fixed);
  1070. ast_node(cl, CompoundLit, value.value_compound);
  1071. Type *elem_type = type->Struct.soa_elem;
  1072. isize elem_count = cl->elems.count;
  1073. if (elem_count == 0 || !elem_type_can_be_constant(elem_type)) {
  1074. return lb_const_nil(m, original_type);
  1075. }
  1076. if (cl->elems[0]->kind == Ast_FieldValue) {
  1077. TEMPORARY_ALLOCATOR_GUARD();
  1078. // TODO(bill): This is O(N*M) and will be quite slow; it should probably be sorted before hand
  1079. isize elem_count = cast(isize)type->Struct.soa_count;
  1080. LLVMValueRef *aos_values = gb_alloc_array(temporary_allocator(), LLVMValueRef, elem_count);
  1081. isize value_index = 0;
  1082. for (i64 i = 0; i < elem_count; i++) {
  1083. bool found = false;
  1084. for (isize j = 0; j < elem_count; j++) {
  1085. Ast *elem = cl->elems[j];
  1086. ast_node(fv, FieldValue, elem);
  1087. if (is_ast_range(fv->field)) {
  1088. ast_node(ie, BinaryExpr, fv->field);
  1089. TypeAndValue lo_tav = ie->left->tav;
  1090. TypeAndValue hi_tav = ie->right->tav;
  1091. GB_ASSERT(lo_tav.mode == Addressing_Constant);
  1092. GB_ASSERT(hi_tav.mode == Addressing_Constant);
  1093. TokenKind op = ie->op.kind;
  1094. i64 lo = exact_value_to_i64(lo_tav.value);
  1095. i64 hi = exact_value_to_i64(hi_tav.value);
  1096. if (op != Token_RangeHalf) {
  1097. hi += 1;
  1098. }
  1099. if (lo == i) {
  1100. TypeAndValue tav = fv->value->tav;
  1101. LLVMValueRef val = lb_const_value(m, elem_type, tav.value, cc, tav.type).value;
  1102. for (i64 k = lo; k < hi; k++) {
  1103. aos_values[value_index++] = val;
  1104. }
  1105. found = true;
  1106. i += (hi-lo-1);
  1107. break;
  1108. }
  1109. } else {
  1110. TypeAndValue index_tav = fv->field->tav;
  1111. GB_ASSERT(index_tav.mode == Addressing_Constant);
  1112. i64 index = exact_value_to_i64(index_tav.value);
  1113. if (index == i) {
  1114. TypeAndValue tav = fv->value->tav;
  1115. LLVMValueRef val = lb_const_value(m, elem_type, tav.value, cc, tav.type).value;
  1116. aos_values[value_index++] = val;
  1117. found = true;
  1118. break;
  1119. }
  1120. }
  1121. }
  1122. if (!found) {
  1123. aos_values[value_index++] = nullptr;
  1124. }
  1125. }
  1126. isize field_count = type->Struct.fields.count;
  1127. LLVMValueRef *soa_values = gb_alloc_array(temporary_allocator(), LLVMValueRef, field_count);
  1128. for (isize i = 0; i < field_count; i++) {
  1129. TEMPORARY_ALLOCATOR_GUARD();
  1130. LLVMValueRef *values = gb_alloc_array(temporary_allocator(), LLVMValueRef, elem_count);
  1131. Entity *f = type->Struct.fields[i];
  1132. Type *array_type = f->type;
  1133. GB_ASSERT(array_type->kind == Type_Array);
  1134. Type *field_type = array_type->Array.elem;
  1135. for (isize j = 0; j < elem_count; j++) {
  1136. LLVMValueRef v = aos_values[j];
  1137. if (v != nullptr) {
  1138. values[j] = llvm_const_extract_value(m, v, cast(unsigned)i);
  1139. } else {
  1140. values[j] = LLVMConstNull(lb_type(m, field_type));
  1141. }
  1142. }
  1143. soa_values[i] = lb_build_constant_array_values(m, array_type, field_type, elem_count, values, cc);
  1144. }
  1145. res.value = llvm_const_named_struct(m, type, soa_values, field_count);
  1146. return res;
  1147. } else {
  1148. GB_ASSERT_MSG(elem_count == type->Struct.soa_count, "%td != %td", elem_count, type->Struct.soa_count);
  1149. TEMPORARY_ALLOCATOR_GUARD();
  1150. isize elem_count = cast(isize)type->Struct.soa_count;
  1151. LLVMValueRef *aos_values = gb_alloc_array(temporary_allocator(), LLVMValueRef, elem_count);
  1152. for (isize i = 0; i < elem_count; i++) {
  1153. TypeAndValue tav = cl->elems[i]->tav;
  1154. GB_ASSERT(tav.mode != Addressing_Invalid);
  1155. aos_values[i] = lb_const_value(m, elem_type, tav.value, cc, tav.type).value;
  1156. }
  1157. for (isize i = elem_count; i < type->Struct.soa_count; i++) {
  1158. aos_values[i] = nullptr;
  1159. }
  1160. isize field_count = type->Struct.fields.count;
  1161. LLVMValueRef *soa_values = gb_alloc_array(temporary_allocator(), LLVMValueRef, field_count);
  1162. for (isize i = 0; i < field_count; i++) {
  1163. TEMPORARY_ALLOCATOR_GUARD();
  1164. LLVMValueRef *values = gb_alloc_array(temporary_allocator(), LLVMValueRef, elem_count);
  1165. Entity *f = type->Struct.fields[i];
  1166. Type *array_type = f->type;
  1167. GB_ASSERT(array_type->kind == Type_Array);
  1168. Type *field_type = array_type->Array.elem;
  1169. for (isize j = 0; j < elem_count; j++) {
  1170. LLVMValueRef v = aos_values[j];
  1171. if (v != nullptr) {
  1172. values[j] = llvm_const_extract_value(m, v, cast(unsigned)i);
  1173. } else {
  1174. values[j] = LLVMConstNull(lb_type(m, field_type));
  1175. }
  1176. }
  1177. soa_values[i] = lb_build_constant_array_values(m, array_type, field_type, elem_count, values, cc);
  1178. }
  1179. res.value = llvm_const_named_struct(m, type, soa_values, field_count);
  1180. return res;
  1181. }
  1182. } else if (is_type_array(type)) {
  1183. ast_node(cl, CompoundLit, value.value_compound);
  1184. Type *elem_type = type->Array.elem;
  1185. isize elem_count = cl->elems.count;
  1186. if (elem_count == 0 || !elem_type_can_be_constant(elem_type)) {
  1187. return lb_const_nil(m, original_type);
  1188. }
  1189. if (cl->elems[0]->kind == Ast_FieldValue) {
  1190. // TODO(bill): This is O(N*M) and will be quite slow; it should probably be sorted before hand
  1191. LLVMValueRef *values = gb_alloc_array(temporary_allocator(), LLVMValueRef, cast(isize)type->Array.count);
  1192. isize value_index = 0;
  1193. for (i64 i = 0; i < type->Array.count; i++) {
  1194. bool found = false;
  1195. for (isize j = 0; j < elem_count; j++) {
  1196. Ast *elem = cl->elems[j];
  1197. ast_node(fv, FieldValue, elem);
  1198. if (is_ast_range(fv->field)) {
  1199. ast_node(ie, BinaryExpr, fv->field);
  1200. TypeAndValue lo_tav = ie->left->tav;
  1201. TypeAndValue hi_tav = ie->right->tav;
  1202. GB_ASSERT(lo_tav.mode == Addressing_Constant);
  1203. GB_ASSERT(hi_tav.mode == Addressing_Constant);
  1204. TokenKind op = ie->op.kind;
  1205. i64 lo = exact_value_to_i64(lo_tav.value);
  1206. i64 hi = exact_value_to_i64(hi_tav.value);
  1207. if (op != Token_RangeHalf) {
  1208. hi += 1;
  1209. }
  1210. if (lo == i) {
  1211. TypeAndValue tav = fv->value->tav;
  1212. LLVMValueRef val = lb_const_value(m, elem_type, tav.value, cc, tav.type).value;
  1213. for (i64 k = lo; k < hi; k++) {
  1214. values[value_index++] = val;
  1215. }
  1216. found = true;
  1217. i += (hi-lo-1);
  1218. break;
  1219. }
  1220. } else {
  1221. TypeAndValue index_tav = fv->field->tav;
  1222. GB_ASSERT(index_tav.mode == Addressing_Constant);
  1223. i64 index = exact_value_to_i64(index_tav.value);
  1224. if (index == i) {
  1225. TypeAndValue tav = fv->value->tav;
  1226. LLVMValueRef val = lb_const_value(m, elem_type, tav.value, cc, tav.type).value;
  1227. values[value_index++] = val;
  1228. found = true;
  1229. break;
  1230. }
  1231. }
  1232. }
  1233. if (!found) {
  1234. values[value_index++] = LLVMConstNull(lb_type(m, elem_type));
  1235. }
  1236. }
  1237. res.value = lb_build_constant_array_values(m, type, elem_type, cast(isize)type->Array.count, values, cc);
  1238. return res;
  1239. } else if (are_types_identical(value.value_compound->tav.type, elem_type)) {
  1240. // Compound is of array item type; expand its value to all items in array.
  1241. LLVMValueRef* values = gb_alloc_array(temporary_allocator(), LLVMValueRef, cast(isize)type->Array.count);
  1242. for (isize i = 0; i < type->Array.count; i++) {
  1243. values[i] = lb_const_value(m, elem_type, value, cc, elem_type).value;
  1244. }
  1245. res.value = lb_build_constant_array_values(m, type, elem_type, cast(isize)type->Array.count, values, cc);
  1246. return res;
  1247. } else {
  1248. // Assume that compound value is an array literal
  1249. GB_ASSERT_MSG(elem_count == type->Array.count, "%td != %td", elem_count, type->Array.count);
  1250. LLVMValueRef *values = gb_alloc_array(temporary_allocator(), LLVMValueRef, cast(isize)type->Array.count);
  1251. for (isize i = 0; i < elem_count; i++) {
  1252. TypeAndValue tav = cl->elems[i]->tav;
  1253. GB_ASSERT(tav.mode != Addressing_Invalid);
  1254. values[i] = lb_const_value(m, elem_type, tav.value, cc, tav.type).value;
  1255. }
  1256. for (isize i = elem_count; i < type->Array.count; i++) {
  1257. values[i] = LLVMConstNull(lb_type(m, elem_type));
  1258. }
  1259. res.value = lb_build_constant_array_values(m, type, elem_type, cast(isize)type->Array.count, values, cc);
  1260. return res;
  1261. }
  1262. } else if (is_type_enumerated_array(type)) {
  1263. ast_node(cl, CompoundLit, value.value_compound);
  1264. Type *elem_type = type->EnumeratedArray.elem;
		// Enumerated-array compound literal: materialize one LLVM constant per
		// slot of the backing array (indexed from min_value to max_value).
		isize elem_count = cl->elems.count;
		if (elem_count == 0 || !elem_type_can_be_constant(elem_type)) {
			return lb_const_nil(m, original_type);
		}
		if (cl->elems[0]->kind == Ast_FieldValue) {
			// TODO(bill): This is O(N*M) and will be quite slow; it should probably be sorted before hand
			LLVMValueRef *values = gb_alloc_array(temporary_allocator(), LLVMValueRef, cast(isize)type->EnumeratedArray.count);
			isize value_index = 0;
			// Walk the array's full logical index range; for each index, scan the
			// field-value elements for a matching single index or range key.
			i64 total_lo = exact_value_to_i64(*type->EnumeratedArray.min_value);
			i64 total_hi = exact_value_to_i64(*type->EnumeratedArray.max_value);
			for (i64 i = total_lo; i <= total_hi; i++) {
				bool found = false;
				for (isize j = 0; j < elem_count; j++) {
					Ast *elem = cl->elems[j];
					ast_node(fv, FieldValue, elem);
					if (is_ast_range(fv->field)) {
						// Range key (`lo..=hi = v` or `lo..<hi = v`), both ends constant.
						ast_node(ie, BinaryExpr, fv->field);
						TypeAndValue lo_tav = ie->left->tav;
						TypeAndValue hi_tav = ie->right->tav;
						GB_ASSERT(lo_tav.mode == Addressing_Constant);
						GB_ASSERT(hi_tav.mode == Addressing_Constant);
						TokenKind op = ie->op.kind;
						i64 lo = exact_value_to_i64(lo_tav.value);
						i64 hi = exact_value_to_i64(hi_tav.value);
						if (op != Token_RangeHalf) {
							// Inclusive range: make `hi` exclusive for the fill loop below.
							hi += 1;
						}
						if (lo == i) {
							TypeAndValue tav = fv->value->tav;
							LLVMValueRef val = lb_const_value(m, elem_type, tav.value, cc, tav.type).value;
							// Replicate the value across every index in [lo, hi).
							for (i64 k = lo; k < hi; k++) {
								values[value_index++] = val;
							}
							found = true;
							// Advance `i` past the indices this range just filled.
							i += (hi-lo-1);
							break;
						}
					} else {
						// Single constant index key.
						TypeAndValue index_tav = fv->field->tav;
						GB_ASSERT(index_tav.mode == Addressing_Constant);
						i64 index = exact_value_to_i64(index_tav.value);
						if (index == i) {
							TypeAndValue tav = fv->value->tav;
							LLVMValueRef val = lb_const_value(m, elem_type, tav.value, cc, tav.type).value;
							values[value_index++] = val;
							found = true;
							break;
						}
					}
				}
				if (!found) {
					// No element covers this index: zero-fill the slot.
					values[value_index++] = LLVMConstNull(lb_type(m, elem_type));
				}
			}
			res.value = lb_build_constant_array_values(m, type, elem_type, cast(isize)type->EnumeratedArray.count, values, cc);
			return res;
		} else {
			// Positional elements: the literal must supply every slot exactly
			// (asserted), then any trailing slots are zero-filled defensively.
			GB_ASSERT_MSG(elem_count == type->EnumeratedArray.count, "%td != %td", elem_count, type->EnumeratedArray.count);
			LLVMValueRef *values = gb_alloc_array(temporary_allocator(), LLVMValueRef, cast(isize)type->EnumeratedArray.count);
			for (isize i = 0; i < elem_count; i++) {
				TypeAndValue tav = cl->elems[i]->tav;
				GB_ASSERT(tav.mode != Addressing_Invalid);
				values[i] = lb_const_value(m, elem_type, tav.value, cc, tav.type).value;
			}
			for (isize i = elem_count; i < type->EnumeratedArray.count; i++) {
				values[i] = LLVMConstNull(lb_type(m, elem_type));
			}
			res.value = lb_build_constant_array_values(m, type, elem_type, cast(isize)type->EnumeratedArray.count, values, cc);
			return res;
		}
	} else if (is_type_simd_vector(type)) {
		// #simd vector compound literal: build an LLVMConstVector with one
		// constant per lane (type->SimdVector.count lanes in total).
		ast_node(cl, CompoundLit, value.value_compound);
		Type *elem_type = type->SimdVector.elem;
		isize elem_count = cl->elems.count;
		if (elem_count == 0) {
			return lb_const_nil(m, original_type);
		}
		GB_ASSERT(elem_type_can_be_constant(elem_type));
		isize total_elem_count = cast(isize)type->SimdVector.count;
		LLVMValueRef *values = gb_alloc_array(temporary_allocator(), LLVMValueRef, total_elem_count);
		if (cl->elems[0]->kind == Ast_FieldValue) {
			// TODO(bill): This is O(N*M) and will be quite slow; it should probably be sorted before hand
			// Same index/range matching scheme as the enumerated-array case above,
			// but lanes are indexed 0..<total_elem_count.
			isize value_index = 0;
			for (i64 i = 0; i < total_elem_count; i++) {
				bool found = false;
				for (isize j = 0; j < elem_count; j++) {
					Ast *elem = cl->elems[j];
					ast_node(fv, FieldValue, elem);
					if (is_ast_range(fv->field)) {
						// Range key: fill every lane in [lo, hi) with the same constant.
						ast_node(ie, BinaryExpr, fv->field);
						TypeAndValue lo_tav = ie->left->tav;
						TypeAndValue hi_tav = ie->right->tav;
						GB_ASSERT(lo_tav.mode == Addressing_Constant);
						GB_ASSERT(hi_tav.mode == Addressing_Constant);
						TokenKind op = ie->op.kind;
						i64 lo = exact_value_to_i64(lo_tav.value);
						i64 hi = exact_value_to_i64(hi_tav.value);
						if (op != Token_RangeHalf) {
							// Inclusive range: make `hi` exclusive for the fill loop.
							hi += 1;
						}
						if (lo == i) {
							TypeAndValue tav = fv->value->tav;
							LLVMValueRef val = lb_const_value(m, elem_type, tav.value, cc, tav.type).value;
							for (i64 k = lo; k < hi; k++) {
								values[value_index++] = val;
							}
							found = true;
							// Skip the lanes the range just filled.
							i += (hi-lo-1);
							break;
						}
					} else {
						// Single constant lane index.
						TypeAndValue index_tav = fv->field->tav;
						GB_ASSERT(index_tav.mode == Addressing_Constant);
						i64 index = exact_value_to_i64(index_tav.value);
						if (index == i) {
							TypeAndValue tav = fv->value->tav;
							LLVMValueRef val = lb_const_value(m, elem_type, tav.value, cc, tav.type).value;
							values[value_index++] = val;
							found = true;
							break;
						}
					}
				}
				if (!found) {
					// Unspecified lane: zero-fill.
					values[value_index++] = LLVMConstNull(lb_type(m, elem_type));
				}
			}
			res.value = LLVMConstVector(values, cast(unsigned)total_elem_count);
			return res;
		} else {
			// Positional lanes; trailing lanes are zero-filled.
			for (isize i = 0; i < elem_count; i++) {
				TypeAndValue tav = cl->elems[i]->tav;
				GB_ASSERT(tav.mode != Addressing_Invalid);
				values[i] = lb_const_value(m, elem_type, tav.value, cc, tav.type).value;
			}
			LLVMTypeRef et = lb_type(m, elem_type);
			for (isize i = elem_count; i < total_elem_count; i++) {
				values[i] = LLVMConstNull(et);
			}
			// Coerce every lane to the vector's element LLVM type so that
			// LLVMConstVector sees a homogeneous lane type.
			for (isize i = 0; i < total_elem_count; i++) {
				values[i] = llvm_const_cast(values[i], et, /*failure_*/nullptr);
			}
			res.value = LLVMConstVector(values, cast(unsigned)total_elem_count);
			return res;
		}
	} else if (is_type_struct(type)) {
		// Struct compound literal: build a named-struct constant. If any member
		// turns out to be non-constant (only permitted when `is_local`), fall
		// back to storing a constant skeleton into a generated local and
		// patching the non-constant members with explicit stores.
		ast_node(cl, CompoundLit, value.value_compound);
		if (cl->elems.count == 0) {
			return lb_const_nil(m, original_type);
		}
		if (is_type_raw_union(type)) {
			// Raw unions have no well-defined constant layout here; zero them.
			return lb_const_nil(m, original_type);
		}
		LLVMTypeRef struct_type = lb_type(m, original_type);
		// field_remapping translates a source field index into the element index
		// of the lowered LLVM struct (which may contain padding/reordering —
		// see lb_get_struct_remapping).
		auto field_remapping = lb_get_struct_remapping(m, type);
		unsigned value_count = LLVMCountStructElementTypes(struct_type);
		LLVMValueRef *values = gb_alloc_array(temporary_allocator(), LLVMValueRef, value_count);
		// visited[i] marks LLVM element i as explicitly initialized; unvisited
		// elements are zero-filled afterwards.
		bool *visited = gb_alloc_array(temporary_allocator(), bool, value_count);
		if (cl->elems[0]->kind == Ast_FieldValue) {
			// `field = value` elements; `field` may select a nested member.
			isize elem_count = cl->elems.count;
			for (isize i = 0; i < elem_count; i++) {
				ast_node(fv, FieldValue, cl->elems[i]);
				String name = fv->field->Ident.token.string;
				TypeAndValue tav = fv->value->tav;
				GB_ASSERT(tav.mode != Addressing_Invalid);
				Selection sel = lookup_field(type, name, false);
				GB_ASSERT(!sel.indirect);
				Entity *f = type->Struct.fields[sel.index[0]];
				i32 index = field_remapping[f->Variable.field_index];
				if (elem_type_can_be_constant(f->type)) {
					if (sel.index.count == 1) {
						// Direct (non-nested) field assignment.
						values[index] = lb_const_value(m, f->type, tav.value, cc, tav.type).value;
						visited[index] = true;
					} else {
						// Nested selection (e.g. `a.b.c = v`): first ensure the
						// top-level field has a base value to insert into.
						if (!visited[index]) {
							auto new_cc = cc;
							new_cc.allow_local = false;
							values[index] = lb_const_value(m, f->type, {}, new_cc).value;
							visited[index] = true;
						}
						// Build the LLVM index path for the nested member,
						// remapping struct indices through each level's layout.
						unsigned idx_list_len = cast(unsigned)sel.index.count-1;
						unsigned *idx_list = gb_alloc_array(temporary_allocator(), unsigned, idx_list_len);
						if (lb_is_nested_possibly_constant(type, sel, fv->value)) {
							bool is_constant = true;
							Type *cv_type = f->type;
							for (isize j = 1; j < sel.index.count; j++) {
								i32 index = sel.index[j];
								Type *cvt = base_type(cv_type);
								if (cvt->kind == Type_Struct) {
									if (cvt->Struct.is_raw_union) {
										// sanity check which should have been caught by `lb_is_nested_possibly_constant`
										is_constant = false;
										break;
									}
									cv_type = cvt->Struct.fields[index]->type;
									if (is_type_struct(cvt)) {
										auto cv_field_remapping = lb_get_struct_remapping(m, cvt);
										unsigned remapped_index = cast(unsigned)cv_field_remapping[index];
										idx_list[j-1] = remapped_index;
									} else {
										idx_list[j-1] = cast(unsigned)index;
									}
								} else if (cvt->kind == Type_Array) {
									cv_type = cvt->Array.elem;
									idx_list[j-1] = cast(unsigned)index;
								} else {
									GB_PANIC("UNKNOWN TYPE: %s", type_to_string(cv_type));
								}
							}
							if (is_constant) {
								// NOTE(review): the target type passed here is `tav.type`,
								// unlike sibling call sites which pass the field/element
								// type — presumably intentional for insert-value typing;
								// confirm against llvm_const_insert_value's expectations.
								LLVMValueRef elem_value = lb_const_value(m, tav.type, tav.value, cc, tav.type).value;
								if (LLVMIsConstant(elem_value) && LLVMIsConstant(values[index])) {
									values[index] = llvm_const_insert_value(m, values[index], elem_value, idx_list, idx_list_len);
								} else if (is_local) {
								#if 1
									// Non-constant nested value inside a local literal:
									// spill the field to a generated local, GEP to the
									// nested member, and store/memcpy the value into it.
									lbProcedure *p = m->curr_procedure;
									GB_ASSERT(p != nullptr);
									if (LLVMIsConstant(values[index])) {
										lbAddr addr = lb_add_local_generated(p, f->type, false);
										lb_addr_store(p, addr, lbValue{values[index], f->type});
										values[index] = lb_addr_load(p, addr).value;
									}
									GB_ASSERT(LLVMIsALoadInst(values[index]));
									LLVMValueRef ptr = LLVMGetOperand(values[index], 0);
									LLVMValueRef *indices = gb_alloc_array(temporary_allocator(), LLVMValueRef, idx_list_len);
									LLVMTypeRef lt_u32 = lb_type(m, t_u32);
									for (unsigned i = 0; i < idx_list_len; i++) {
										indices[i] = LLVMConstInt(lt_u32, idx_list[i], false);
									}
									ptr = LLVMBuildGEP2(p->builder, lb_type(m, f->type), ptr, indices, idx_list_len, "");
									ptr = LLVMBuildPointerCast(p->builder, ptr, lb_type(m, alloc_type_pointer(tav.type)), "");
									if (LLVMIsALoadInst(elem_value)) {
										// Value comes from memory: copy it byte-wise.
										i64 sz = type_size_of(tav.type);
										LLVMValueRef src = LLVMGetOperand(elem_value, 0);
										lb_mem_copy_non_overlapping(p, {ptr, t_rawptr}, {src, t_rawptr}, lb_const_int(m, t_int, sz), false);
									} else {
										LLVMBuildStore(p->builder, elem_value, ptr);
									}
								#endif
									is_constant = false;
								} else {
									is_constant = false;
								}
							}
						}
					}
				}
			}
		} else {
			// Positional elements: one value per declared field, in order.
			for_array(i, cl->elems) {
				Entity *f = type->Struct.fields[i];
				TypeAndValue tav = cl->elems[i]->tav;
				ExactValue val = {};
				if (tav.mode != Addressing_Invalid) {
					val = tav.value;
				}
				i32 index = field_remapping[f->Variable.field_index];
				if (elem_type_can_be_constant(f->type)) {
					values[index] = lb_const_value(m, f->type, val, cc, tav.type).value;
					visited[index] = true;
				}
			}
		}
		// Zero-fill every LLVM element the literal did not touch (including
		// any padding elements introduced by the lowering).
		for (isize i = 0; i < value_count; i++) {
			if (!visited[i]) {
				GB_ASSERT(values[i] == nullptr);
				LLVMTypeRef type = LLVMStructGetTypeAtIndex(struct_type, cast(unsigned)i);
				values[i] = LLVMConstNull(type);
			}
		}
		// Non-constant members are only legal for local literals, and must be
		// load instructions produced by the nested-value fallback above.
		bool is_constant = true;
		for (isize i = 0; i < value_count; i++) {
			LLVMValueRef val = values[i];
			if (!LLVMIsConstant(val)) {
				GB_ASSERT(is_local);
				GB_ASSERT(LLVMIsALoadInst(val));
				is_constant = false;
			}
		}
		if (is_constant) {
			res.value = llvm_const_named_struct_internal(m, struct_type, values, cast(unsigned)value_count);
			return res;
		} else {
			// TODO(bill): THIS IS HACK BUT IT WORKS FOR WHAT I NEED
			// Build a fully-constant skeleton (non-constant slots zeroed),
			// store it into a generated local, then overwrite the non-constant
			// slots with explicit stores and return a load of the local.
			LLVMValueRef *old_values = values;
			LLVMValueRef *new_values = gb_alloc_array(temporary_allocator(), LLVMValueRef, value_count);
			for (isize i = 0; i < value_count; i++) {
				LLVMValueRef old_value = old_values[i];
				if (LLVMIsConstant(old_value)) {
					new_values[i] = old_value;
				} else {
					new_values[i] = LLVMConstNull(LLVMTypeOf(old_value));
				}
			}
			LLVMValueRef constant_value = llvm_const_named_struct_internal(m, struct_type, new_values, cast(unsigned)value_count);
			GB_ASSERT(is_local);
			lbProcedure *p = m->curr_procedure;
			lbAddr v = lb_add_local_generated(p, res.type, true);
			// Cache the address so later references to this literal reuse it.
			map_set(&m->exact_value_compound_literal_addr_map, value.value_compound, v);
			LLVMBuildStore(p->builder, constant_value, v.addr.value);
			for (isize i = 0; i < value_count; i++) {
				LLVMValueRef val = old_values[i];
				if (!LLVMIsConstant(val)) {
					LLVMValueRef dst = LLVMBuildStructGEP2(p->builder, llvm_addr_type(p->module, v.addr), v.addr.value, cast(unsigned)i, "");
					// if (LLVMIsALoadInst(val)) {
					// 	Type *ptr_type = v.addr.type;
					// 	i64 sz = type_size_of(type_deref(ptr_type));
					// 	LLVMValueRef src = LLVMGetOperand(val, 0);
					// 	lb_mem_copy_non_overlapping(p, {dst, ptr_type}, {src, ptr_type}, lb_const_int(m, t_int, sz), false);
					// } else {
						LLVMBuildStore(p->builder, val, dst);
					// }
				}
			}
			return lb_addr_load(p, v);
		}
	} else if (is_type_bit_set(type)) {
		// Bit-set compound literal: OR together one bit per constant element,
		// where an element with value v occupies bit (v - BitSet.lower).
		ast_node(cl, CompoundLit, value.value_compound);
		if (cl->elems.count == 0) {
			return lb_const_nil(m, original_type);
		}
		i64 sz = type_size_of(type);
		if (sz == 0) {
			// Zero-sized bit set: nothing to encode.
			return lb_const_nil(m, original_type);
		}
		BigInt bits = {};
		BigInt one = {};
		big_int_from_u64(&one, 1);
		for_array(i, cl->elems) {
			Ast *e = cl->elems[i];
			GB_ASSERT(e->kind != Ast_FieldValue);
			TypeAndValue tav = e->tav;
			if (tav.mode != Addressing_Constant) {
				// Non-constant elements are skipped here; presumably handled
				// by runtime code elsewhere — TODO confirm with the caller.
				continue;
			}
			GB_ASSERT(tav.value.kind == ExactValue_Integer);
			i64 v = big_int_to_i64(&tav.value.value_integer);
			i64 lower = type->BitSet.lower;
			u64 index = cast(u64)(v-lower);
			// bits |= 1 << index, computed with BigInt arithmetic so the set
			// may be wider than 64 bits.
			BigInt bit = {};
			big_int_from_u64(&bit, index);
			big_int_shl(&bit, &one, &bit);
			big_int_or(&bits, &bits, &bit);
		}
		res.value = lb_big_int_to_llvm(m, original_type, &bits);
		return res;
	} else if (is_type_matrix(type)) {
		// Matrix compound literal: logical indices run over
		// row_count*column_count elements; matrix_row_major_index_to_offset
		// maps each one to its slot in the internal element storage
		// (matrix_type_total_internal_elems, which may exceed max_count).
		ast_node(cl, CompoundLit, value.value_compound);
		Type *elem_type = type->Matrix.elem;
		isize elem_count = cl->elems.count;
		if (elem_count == 0 || !elem_type_can_be_constant(elem_type)) {
			return lb_const_nil(m, original_type);
		}
		i64 max_count = type->Matrix.row_count*type->Matrix.column_count;
		i64 total_count = matrix_type_total_internal_elems(type);
		LLVMValueRef *values = gb_alloc_array(temporary_allocator(), LLVMValueRef, cast(isize)total_count);
		if (cl->elems[0]->kind == Ast_FieldValue) {
			// Indexed elements: each slot may be written at most once
			// (asserted via the nullptr check before each write).
			for_array(j, cl->elems) {
				Ast *elem = cl->elems[j];
				ast_node(fv, FieldValue, elem);
				if (is_ast_range(fv->field)) {
					// Range key with constant, bounds-checked endpoints.
					ast_node(ie, BinaryExpr, fv->field);
					TypeAndValue lo_tav = ie->left->tav;
					TypeAndValue hi_tav = ie->right->tav;
					GB_ASSERT(lo_tav.mode == Addressing_Constant);
					GB_ASSERT(hi_tav.mode == Addressing_Constant);
					TokenKind op = ie->op.kind;
					i64 lo = exact_value_to_i64(lo_tav.value);
					i64 hi = exact_value_to_i64(hi_tav.value);
					if (op != Token_RangeHalf) {
						// Inclusive range: make `hi` exclusive.
						hi += 1;
					}
					GB_ASSERT(0 <= lo && lo <= max_count);
					GB_ASSERT(0 <= hi && hi <= max_count);
					GB_ASSERT(lo <= hi);
					TypeAndValue tav = fv->value->tav;
					LLVMValueRef val = lb_const_value(m, elem_type, tav.value, cc, tav.type).value;
					for (i64 k = lo; k < hi; k++) {
						i64 offset = matrix_row_major_index_to_offset(type, k);
						GB_ASSERT(values[offset] == nullptr);
						values[offset] = val;
					}
				} else {
					// Single constant index key.
					TypeAndValue index_tav = fv->field->tav;
					GB_ASSERT(index_tav.mode == Addressing_Constant);
					i64 index = exact_value_to_i64(index_tav.value);
					GB_ASSERT(index < max_count);
					TypeAndValue tav = fv->value->tav;
					LLVMValueRef val = lb_const_value(m, elem_type, tav.value, cc, tav.type).value;
					i64 offset = matrix_row_major_index_to_offset(type, index);
					GB_ASSERT(values[offset] == nullptr);
					values[offset] = val;
				}
			}
			// Zero-fill every untouched internal slot (including padding).
			for (i64 i = 0; i < total_count; i++) {
				if (values[i] == nullptr) {
					values[i] = LLVMConstNull(lb_type(m, elem_type));
				}
			}
			res.value = lb_build_constant_array_values(m, type, elem_type, cast(isize)total_count, values, cc);
			return res;
		} else {
			// Positional elements: the literal must supply every logical slot.
			GB_ASSERT_MSG(elem_count == max_count, "%td != %td", elem_count, max_count);
			LLVMValueRef *values = gb_alloc_array(temporary_allocator(), LLVMValueRef, cast(isize)total_count);
			for_array(i, cl->elems) {
				TypeAndValue tav = cl->elems[i]->tav;
				GB_ASSERT(tav.mode != Addressing_Invalid);
				i64 offset = 0;
				offset = matrix_row_major_index_to_offset(type, i);
				values[offset] = lb_const_value(m, elem_type, tav.value, cc, tav.type).value;
			}
			// Zero-fill internal padding slots.
			for (isize i = 0; i < total_count; i++) {
				if (values[i] == nullptr) {
					values[i] = LLVMConstNull(lb_type(m, elem_type));
				}
			}
			res.value = lb_build_constant_array_values(m, type, elem_type, cast(isize)total_count, values, cc);
			return res;
		}
	} else {
		// Compound literal of a type with no constant lowering: zero value.
		return lb_const_nil(m, original_type);
	}
	break;
case ExactValue_Procedure:
	GB_PANIC("handled earlier");
	break;
case ExactValue_Typeid:
	return lb_typeid(m, value.value_typeid);
}
// Fallthrough for any ExactValue kind not handled above: zero value.
return lb_const_nil(m, original_type);
}