// llvm_backend_const.cpp — constant-value lowering for the LLVM backend.
  1. gb_internal bool lb_is_const(lbValue value) {
  2. LLVMValueRef v = value.value;
  3. if (is_type_untyped_nil(value.type)) {
  4. // TODO(bill): Is this correct behaviour?
  5. return true;
  6. }
  7. if (LLVMIsConstant(v)) {
  8. return true;
  9. }
  10. return false;
  11. }
// Reports whether `value` is a constant, or (in the disabled code path below)
// a global variable holding a procedure pointer.
// NOTE(review): with the `#if 0` region disabled this is currently equivalent
// to `lb_is_const` — the global-variable case always falls through to `false`.
gb_internal bool lb_is_const_or_global(lbValue value) {
	if (lb_is_const(value)) {
		return true;
	}
	// TODO remove use of LLVMGetElementType
	#if 0
	if (LLVMGetValueKind(value.value) == LLVMGlobalVariableValueKind) {
		LLVMTypeRef t = LLVMGetElementType(LLVMTypeOf(value.value));
		if (!lb_is_type_kind(t, LLVMPointerTypeKind)) {
			return false;
		}
		LLVMTypeRef elem = LLVMGetElementType(t);
		return lb_is_type_kind(elem, LLVMFunctionTypeKind);
	}
	#endif
	return false;
}
  29. gb_internal bool lb_is_elem_const(Ast *elem, Type *elem_type) {
  30. if (!elem_type_can_be_constant(elem_type)) {
  31. return false;
  32. }
  33. if (elem->kind == Ast_FieldValue) {
  34. elem = elem->FieldValue.value;
  35. }
  36. TypeAndValue tav = type_and_value_of_expr(elem);
  37. GB_ASSERT_MSG(tav.mode != Addressing_Invalid, "%s %s", expr_to_string(elem), type_to_string(tav.type));
  38. return tav.value.kind != ExactValue_Invalid;
  39. }
  40. gb_internal bool lb_is_const_nil(lbValue value) {
  41. LLVMValueRef v = value.value;
  42. if (LLVMIsConstant(v)) {
  43. if (LLVMIsAConstantAggregateZero(v)) {
  44. return true;
  45. } else if (LLVMIsAConstantPointerNull(v)) {
  46. return true;
  47. }
  48. }
  49. return false;
  50. }
  51. gb_internal bool lb_is_expr_constant_zero(Ast *expr) {
  52. GB_ASSERT(expr != nullptr);
  53. auto v = exact_value_to_integer(expr->tav.value);
  54. if (v.kind == ExactValue_Integer) {
  55. return big_int_cmp_zero(&v.value_integer) == 0;
  56. }
  57. return false;
  58. }
// Extracts the compile-time byte contents of a constant `string` value.
// Pulls out the {data ptr, len} fields of the string struct, follows the
// data pointer's GEP back to its backing global, and reads that global's
// string initializer; the length comes from the constant len field.
gb_internal String lb_get_const_string(lbModule *m, lbValue value) {
	GB_ASSERT(lb_is_const(value));
	GB_ASSERT(LLVMIsConstant(value.value));
	Type *t = base_type(value.type);
	GB_ASSERT(are_types_identical(t, t_string));
	// Field 0 is the data pointer, field 1 is the length.
	unsigned ptr_indices[1] = {0};
	unsigned len_indices[1] = {1};
	LLVMValueRef underlying_ptr = LLVMConstExtractValue(value.value, ptr_indices, gb_count_of(ptr_indices));
	LLVMValueRef underlying_len = LLVMConstExtractValue(value.value, len_indices, gb_count_of(len_indices));
	// The data pointer is expected to be a constant GEP into a global.
	GB_ASSERT(LLVMGetConstOpcode(underlying_ptr) == LLVMGetElementPtr);
	underlying_ptr = LLVMGetOperand(underlying_ptr, 0);
	GB_ASSERT(LLVMIsAGlobalVariable(underlying_ptr));
	underlying_ptr = LLVMGetInitializer(underlying_ptr);
	size_t length = 0;
	char const *text = LLVMGetAsString(underlying_ptr, &length);
	// Use the string struct's len field, not the global's raw size.
	isize real_length = cast(isize)LLVMConstIntGetSExtValue(underlying_len);
	return make_string(cast(u8 const *)text, real_length);
}
// Casts a constant `val` to type `dst`, where both types must have the same
// size. Handles only the cases the backend actually produces: null values,
// pointer casts, and struct "casts" (which are identity — see note below).
// Panics on any other kind, making unexpected cases loud during development.
gb_internal LLVMValueRef llvm_const_cast(LLVMValueRef val, LLVMTypeRef dst) {
	LLVMTypeRef src = LLVMTypeOf(val);
	if (src == dst) {
		return val;
	}
	if (LLVMIsNull(val)) {
		// A null of any type is just a null of the destination type.
		return LLVMConstNull(dst);
	}
	GB_ASSERT_MSG(lb_sizeof(dst) == lb_sizeof(src), "%s vs %s", LLVMPrintTypeToString(dst), LLVMPrintTypeToString(src));
	LLVMTypeKind kind = LLVMGetTypeKind(dst);
	switch (kind) {
	case LLVMPointerTypeKind:
		return LLVMConstPointerCast(val, dst);
	case LLVMStructTypeKind:
		// GB_PANIC("%s -> %s", LLVMPrintValueToString(val), LLVMPrintTypeToString(dst));
		// NOTE(bill): It's not possible to do a bit cast on a struct, why was this code even here in the first place?
		// It seems mostly to exist to get around the "anonymous -> named" struct assignments
		// return LLVMConstBitCast(val, dst);
		return val;
	default:
		GB_PANIC("Unhandled const cast %s to %s", LLVMPrintTypeToString(src), LLVMPrintTypeToString(dst));
	}
	return val; // unreachable: the default case panics
}
  101. gb_internal lbValue lb_const_ptr_cast(lbModule *m, lbValue value, Type *t) {
  102. GB_ASSERT(is_type_internally_pointer_like(value.type));
  103. GB_ASSERT(is_type_internally_pointer_like(t));
  104. GB_ASSERT(lb_is_const(value));
  105. lbValue res = {};
  106. res.value = LLVMConstPointerCast(value.value, lb_type(m, t));
  107. res.type = t;
  108. return res;
  109. }
  110. gb_internal LLVMValueRef llvm_const_string_internal(lbModule *m, Type *t, LLVMValueRef data, LLVMValueRef len) {
  111. if (build_context.metrics.ptr_size < build_context.metrics.int_size) {
  112. LLVMValueRef values[3] = {
  113. data,
  114. LLVMConstNull(lb_type(m, t_i32)),
  115. len,
  116. };
  117. return llvm_const_named_struct_internal(lb_type(m, t), values, 3);
  118. } else {
  119. LLVMValueRef values[2] = {
  120. data,
  121. len,
  122. };
  123. return llvm_const_named_struct_internal(lb_type(m, t), values, 2);
  124. }
  125. }
// Builds a constant for named struct type `t` from logical field values.
// If the LLVM struct has extra (padding) elements beyond the logical fields,
// the values are scattered to their remapped indices and the gaps are
// zero-filled.
gb_internal LLVMValueRef llvm_const_named_struct(lbModule *m, Type *t, LLVMValueRef *values, isize value_count_) {
	LLVMTypeRef struct_type = lb_type(m, t);
	GB_ASSERT(LLVMGetTypeKind(struct_type) == LLVMStructTypeKind);
	unsigned value_count = cast(unsigned)value_count_;
	unsigned elem_count = LLVMCountStructElementTypes(struct_type);
	if (elem_count == value_count) {
		// No padding fields — values map 1:1 onto the struct elements.
		return llvm_const_named_struct_internal(struct_type, values, value_count_);
	}
	Type *bt = base_type(t);
	GB_ASSERT(bt->kind == Type_Struct);
	GB_ASSERT(value_count_ == bt->Struct.fields.count);
	// Maps logical field index -> LLVM element index (skipping padding).
	auto field_remapping = lb_get_struct_remapping(m, t);
	unsigned values_with_padding_count = LLVMCountStructElementTypes(struct_type);
	LLVMValueRef *values_with_padding = gb_alloc_array(permanent_allocator(), LLVMValueRef, values_with_padding_count);
	for (unsigned i = 0; i < value_count; i++) {
		values_with_padding[field_remapping[i]] = values[i];
	}
	// Any element not covered by a logical field is padding: zero it.
	for (unsigned i = 0; i < values_with_padding_count; i++) {
		if (values_with_padding[i] == nullptr) {
			values_with_padding[i] = LLVMConstNull(LLVMStructGetTypeAtIndex(struct_type, i));
		}
	}
	return llvm_const_named_struct_internal(struct_type, values_with_padding, values_with_padding_count);
}
  150. gb_internal LLVMValueRef llvm_const_named_struct_internal(LLVMTypeRef t, LLVMValueRef *values, isize value_count_) {
  151. unsigned value_count = cast(unsigned)value_count_;
  152. unsigned elem_count = LLVMCountStructElementTypes(t);
  153. GB_ASSERT_MSG(value_count == elem_count, "%s %u %u", LLVMPrintTypeToString(t), value_count, elem_count);
  154. for (unsigned i = 0; i < elem_count; i++) {
  155. LLVMTypeRef elem_type = LLVMStructGetTypeAtIndex(t, i);
  156. values[i] = llvm_const_cast(values[i], elem_type);
  157. }
  158. return LLVMConstNamedStruct(t, values, value_count);
  159. }
  160. gb_internal LLVMValueRef llvm_const_array(LLVMTypeRef elem_type, LLVMValueRef *values, isize value_count_) {
  161. unsigned value_count = cast(unsigned)value_count_;
  162. for (unsigned i = 0; i < value_count; i++) {
  163. values[i] = llvm_const_cast(values[i], elem_type);
  164. }
  165. return LLVMConstArray(elem_type, values, value_count);
  166. }
  167. gb_internal LLVMValueRef llvm_const_slice_internal(lbModule *m, LLVMValueRef data, LLVMValueRef len) {
  168. if (build_context.metrics.ptr_size < build_context.metrics.int_size) {
  169. GB_ASSERT(build_context.metrics.ptr_size == 4);
  170. GB_ASSERT(build_context.metrics.int_size == 8);
  171. LLVMValueRef vals[3] = {
  172. data,
  173. LLVMConstNull(lb_type(m, t_u32)),
  174. len,
  175. };
  176. return LLVMConstStructInContext(m->ctx, vals, gb_count_of(vals), false);
  177. } else {
  178. LLVMValueRef vals[2] = {
  179. data,
  180. len,
  181. };
  182. return LLVMConstStructInContext(m->ctx, vals, gb_count_of(vals), false);
  183. }
  184. }
  185. gb_internal LLVMValueRef llvm_const_slice(lbModule *m, lbValue data, lbValue len) {
  186. GB_ASSERT(is_type_pointer(data.type) || is_type_multi_pointer(data.type));
  187. GB_ASSERT(are_types_identical(len.type, t_int));
  188. return llvm_const_slice_internal(m, data.value, len.value);
  189. }
  190. gb_internal lbValue lb_const_nil(lbModule *m, Type *type) {
  191. LLVMValueRef v = LLVMConstNull(lb_type(m, type));
  192. return lbValue{v, type};
  193. }
  194. gb_internal lbValue lb_const_undef(lbModule *m, Type *type) {
  195. LLVMValueRef v = LLVMGetUndef(lb_type(m, type));
  196. return lbValue{v, type};
  197. }
  198. gb_internal lbValue lb_const_int(lbModule *m, Type *type, u64 value) {
  199. lbValue res = {};
  200. res.value = LLVMConstInt(lb_type(m, type), cast(unsigned long long)value, !is_type_unsigned(type));
  201. res.type = type;
  202. return res;
  203. }
  204. gb_internal lbValue lb_const_string(lbModule *m, String const &value) {
  205. return lb_const_value(m, t_string, exact_value_string(value));
  206. }
  207. gb_internal lbValue lb_const_bool(lbModule *m, Type *type, bool value) {
  208. lbValue res = {};
  209. res.value = LLVMConstInt(lb_type(m, type), value, false);
  210. res.type = type;
  211. return res;
  212. }
  213. gb_internal LLVMValueRef lb_const_f16(lbModule *m, f32 f, Type *type=t_f16) {
  214. GB_ASSERT(type_size_of(type) == 2);
  215. u16 u = f32_to_f16(f);
  216. if (is_type_different_to_arch_endianness(type)) {
  217. u = gb_endian_swap16(u);
  218. }
  219. LLVMValueRef i = LLVMConstInt(LLVMInt16TypeInContext(m->ctx), u, false);
  220. return LLVMConstBitCast(i, lb_type(m, type));
  221. }
  222. gb_internal LLVMValueRef lb_const_f32(lbModule *m, f32 f, Type *type=t_f32) {
  223. GB_ASSERT(type_size_of(type) == 4);
  224. u32 u = bit_cast<u32>(f);
  225. if (is_type_different_to_arch_endianness(type)) {
  226. u = gb_endian_swap32(u);
  227. }
  228. LLVMValueRef i = LLVMConstInt(LLVMInt32TypeInContext(m->ctx), u, false);
  229. return LLVMConstBitCast(i, lb_type(m, type));
  230. }
  231. gb_internal bool lb_is_expr_untyped_const(Ast *expr) {
  232. auto const &tv = type_and_value_of_expr(expr);
  233. if (is_type_untyped(tv.type)) {
  234. return tv.value.kind != ExactValue_Invalid;
  235. }
  236. return false;
  237. }
  238. gb_internal lbValue lb_expr_untyped_const_to_typed(lbModule *m, Ast *expr, Type *t) {
  239. GB_ASSERT(is_type_typed(t));
  240. auto const &tv = type_and_value_of_expr(expr);
  241. return lb_const_value(m, t, tv.value);
  242. }
  243. gb_internal lbValue lb_emit_source_code_location_const(lbProcedure *p, String const &procedure, TokenPos const &pos) {
  244. lbModule *m = p->module;
  245. LLVMValueRef fields[4] = {};
  246. fields[0]/*file*/ = lb_find_or_add_entity_string(p->module, get_file_path_string(pos.file_id)).value;
  247. fields[1]/*line*/ = lb_const_int(m, t_i32, pos.line).value;
  248. fields[2]/*column*/ = lb_const_int(m, t_i32, pos.column).value;
  249. fields[3]/*procedure*/ = lb_find_or_add_entity_string(p->module, procedure).value;
  250. lbValue res = {};
  251. res.value = llvm_const_named_struct(m, t_source_code_location, fields, gb_count_of(fields));
  252. res.type = t_source_code_location;
  253. return res;
  254. }
  255. gb_internal lbValue lb_emit_source_code_location_const(lbProcedure *p, Ast *node) {
  256. String proc_name = {};
  257. if (p->entity) {
  258. proc_name = p->entity->token.string;
  259. }
  260. TokenPos pos = {};
  261. if (node) {
  262. pos = ast_token(node).pos;
  263. }
  264. return lb_emit_source_code_location_const(p, proc_name, pos);
  265. }
  266. gb_internal lbValue lb_emit_source_code_location_as_global_ptr(lbProcedure *p, String const &procedure, TokenPos const &pos) {
  267. lbValue loc = lb_emit_source_code_location_const(p, procedure, pos);
  268. lbAddr addr = lb_add_global_generated(p->module, loc.type, loc, nullptr);
  269. lb_make_global_private_const(addr);
  270. return addr.addr;
  271. }
  272. gb_internal lbValue lb_emit_source_code_location_as_global_ptr(lbProcedure *p, Ast *node) {
  273. lbValue loc = lb_emit_source_code_location_const(p, node);
  274. lbAddr addr = lb_add_global_generated(p->module, loc.type, loc, nullptr);
  275. lb_make_global_private_const(addr);
  276. return addr.addr;
  277. }
  278. gb_internal lbValue lb_emit_source_code_location_as_global(lbProcedure *p, String const &procedure, TokenPos const &pos) {
  279. return lb_emit_load(p, lb_emit_source_code_location_as_global_ptr(p, procedure, pos));
  280. }
  281. gb_internal lbValue lb_emit_source_code_location_as_global(lbProcedure *p, Ast *node) {
  282. return lb_emit_load(p, lb_emit_source_code_location_as_global_ptr(p, node));
  283. }
// Builds an array value from `count` element values.
// If all values are constant (or we are not inside a procedure), emits a
// constant array; otherwise falls back to a stack local and stores each
// element at runtime, returning the loaded aggregate.
gb_internal LLVMValueRef lb_build_constant_array_values(lbModule *m, Type *type, Type *elem_type, isize count, LLVMValueRef *values, bool allow_local) {
	bool is_local = allow_local && m->curr_procedure != nullptr;
	bool is_const = true;
	if (is_local) {
		// Only worth scanning when a runtime fallback is even possible.
		for (isize i = 0; i < count; i++) {
			GB_ASSERT(values[i] != nullptr);
			if (!LLVMIsConstant(values[i])) {
				is_const = false;
				break;
			}
		}
	}
	if (!is_const) {
		// Runtime path: allocate a local array and store each element.
		LLVMTypeRef llvm_elem_type = lb_type(m, elem_type);
		lbProcedure *p = m->curr_procedure;
		GB_ASSERT(p != nullptr);
		lbAddr v = lb_add_local_generated(p, type, false);
		lbValue ptr = lb_addr_get_ptr(p, v);
		for (isize i = 0; i < count; i++) {
			lbValue elem = lb_emit_array_epi(p, ptr, i);
			if (is_type_proc(elem_type)) {
				// Procedure values may need a pointer cast to the element type.
				values[i] = LLVMConstPointerCast(values[i], llvm_elem_type);
			}
			LLVMBuildStore(p->builder, values[i], elem.value);
		}
		return lb_addr_load(p, v).value;
	}
	return llvm_const_array(lb_type(m, elem_type), values, cast(unsigned int)count);
}
// Converts an arbitrary-precision integer `a` into an LLVM integer constant
// of `original_type`. The magnitude is packed into a little-endian byte
// buffer via mp_pack, byte-reversed for big-endian types, handed to
// LLVMConstIntOfArbitraryPrecision as u64 words, and negated at the end if
// `a` is negative.
gb_internal LLVMValueRef lb_big_int_to_llvm(lbModule *m, Type *original_type, BigInt const *a) {
	if (big_int_is_zero(a)) {
		return LLVMConstNull(lb_type(m, original_type));
	}
	size_t sz = cast(size_t)type_size_of(original_type);
	u64 rop64[4] = {}; // 2 u64 is the maximum we will ever need, so doubling it will be fine :P
	u8 *rop = cast(u8 *)rop64;
	size_t max_count = 0;
	size_t written = 0;
	size_t size = 1;   // pack one byte per "digit"
	size_t nails = 0;  // no unused bits per byte
	mp_endian endian = MP_LITTLE_ENDIAN;
	max_count = mp_pack_count(a, nails, size);
	if (sz < max_count) {
		// Value does not fit the target type — dump diagnostics before asserting.
		debug_print_big_int(a);
		gb_printf_err("%s -> %tu\n", type_to_string(original_type), sz);;
	}
	GB_ASSERT_MSG(sz >= max_count, "max_count: %tu, sz: %tu, written: %tu, type %s", max_count, sz, written, type_to_string(original_type));
	GB_ASSERT(gb_size_of(rop64) >= sz);
	mp_err err = mp_pack(rop, sz, &written,
	                     MP_LSB_FIRST,
	                     size, endian, nails,
	                     a);
	GB_ASSERT(err == MP_OKAY);
	if (!is_type_endian_little(original_type)) {
		// Reverse the bytes in place for big-endian target types.
		for (size_t i = 0; i < sz/2; i++) {
			u8 tmp = rop[i];
			rop[i] = rop[sz-1-i];
			rop[sz-1-i] = tmp;
		}
	}
	// (sz+7)/8 = number of u64 words covering sz bytes.
	LLVMValueRef value = LLVMConstIntOfArbitraryPrecision(lb_type(m, original_type), cast(unsigned)((sz+7)/8), cast(u64 *)rop);
	if (big_int_is_neg(a)) {
		value = LLVMConstNeg(value);
	}
	return value;
}
// Walks a selection path starting at `ft` and reports whether the element
// ultimately selected could be emitted as a constant. Bails out on raw
// unions (overlapping storage), typeids, and any non-struct/array step.
gb_internal bool lb_is_nested_possibly_constant(Type *ft, Selection const &sel, Ast *elem) {
	GB_ASSERT(!sel.indirect);
	// Descend one type level per selection index.
	for (i32 index : sel.index) {
		Type *bt = base_type(ft);
		switch (bt->kind) {
		case Type_Struct:
			if (bt->Struct.is_raw_union) {
				return false;
			}
			ft = bt->Struct.fields[index]->type;
			break;
		case Type_Array:
			ft = bt->Array.elem;
			break;
		default:
			return false;
		}
	}
	if (is_type_raw_union(ft) || is_type_typeid(ft)) {
		return false;
	}
	return lb_is_elem_const(elem, ft);
}
  373. gb_internal lbValue lb_const_value(lbModule *m, Type *type, ExactValue value, bool allow_local) {
  374. LLVMContextRef ctx = m->ctx;
  375. type = default_type(type);
  376. Type *original_type = type;
  377. lbValue res = {};
  378. res.type = original_type;
  379. type = core_type(type);
  380. value = convert_exact_value_for_type(value, type);
  381. if (value.kind == ExactValue_Typeid) {
  382. return lb_typeid(m, value.value_typeid);
  383. }
  384. if (value.kind == ExactValue_Invalid) {
  385. return lb_const_nil(m, type);
  386. }
  387. if (value.kind == ExactValue_Procedure) {
  388. lbValue res = {};
  389. Ast *expr = unparen_expr(value.value_procedure);
  390. GB_ASSERT(expr != nullptr);
  391. if (expr->kind == Ast_ProcLit) {
  392. res = lb_generate_anonymous_proc_lit(m, str_lit("_proclit"), expr);
  393. } else {
  394. Entity *e = entity_from_expr(expr);
  395. res = lb_find_procedure_value_from_entity(m, e);
  396. }
  397. GB_ASSERT(res.value != nullptr);
  398. GB_ASSERT(LLVMGetValueKind(res.value) == LLVMFunctionValueKind);
  399. if (LLVMGetIntrinsicID(res.value) == 0) {
  400. // NOTE(bill): do not cast intrinsics as they are not really procedures that can be casted
  401. res.value = LLVMConstPointerCast(res.value, lb_type(m, res.type));
  402. }
  403. return res;
  404. }
  405. bool is_local = allow_local && m->curr_procedure != nullptr;
  406. // GB_ASSERT_MSG(is_type_typed(type), "%s", type_to_string(type));
  407. if (is_type_slice(type)) {
  408. if (value.kind == ExactValue_String) {
  409. GB_ASSERT(is_type_slice(type));
  410. res.value = lb_find_or_add_entity_string_byte_slice_with_type(m, value.value_string, original_type).value;
  411. return res;
  412. } else {
  413. ast_node(cl, CompoundLit, value.value_compound);
  414. isize count = cl->elems.count;
  415. if (count == 0) {
  416. return lb_const_nil(m, type);
  417. }
  418. count = gb_max(cast(isize)cl->max_count, count);
  419. Type *elem = base_type(type)->Slice.elem;
  420. Type *t = alloc_type_array(elem, count);
  421. lbValue backing_array = lb_const_value(m, t, value, allow_local);
  422. LLVMValueRef array_data = nullptr;
  423. if (is_local) {
  424. // NOTE(bill, 2020-06-08): This is a bit of a hack but a "constant" slice needs
  425. // its backing data on the stack
  426. lbProcedure *p = m->curr_procedure;
  427. LLVMTypeRef llvm_type = lb_type(m, t);
  428. array_data = llvm_alloca(p, llvm_type, 16);
  429. LLVMBuildStore(p->builder, backing_array.value, array_data);
  430. {
  431. LLVMValueRef indices[2] = {llvm_zero(m), llvm_zero(m)};
  432. LLVMValueRef ptr = LLVMBuildInBoundsGEP2(p->builder, llvm_type, array_data, indices, 2, "");
  433. LLVMValueRef len = LLVMConstInt(lb_type(m, t_int), count, true);
  434. lbAddr slice = lb_add_local_generated(p, type, false);
  435. map_set(&m->exact_value_compound_literal_addr_map, value.value_compound, slice);
  436. lb_fill_slice(p, slice, {ptr, alloc_type_pointer(elem)}, {len, t_int});
  437. return lb_addr_load(p, slice);
  438. }
  439. } else {
  440. isize max_len = 7+8+1;
  441. char *str = gb_alloc_array(permanent_allocator(), char, max_len);
  442. u32 id = m->gen->global_array_index.fetch_add(1);
  443. isize len = gb_snprintf(str, max_len, "csba$%x", id);
  444. String name = make_string(cast(u8 *)str, len-1);
  445. Entity *e = alloc_entity_constant(nullptr, make_token_ident(name), t, value);
  446. array_data = LLVMAddGlobal(m->mod, lb_type(m, t), str);
  447. LLVMSetInitializer(array_data, backing_array.value);
  448. lbValue g = {};
  449. g.value = array_data;
  450. g.type = t;
  451. lb_add_entity(m, e, g);
  452. lb_add_member(m, name, g);
  453. {
  454. LLVMValueRef indices[2] = {llvm_zero(m), llvm_zero(m)};
  455. LLVMValueRef ptr = LLVMConstInBoundsGEP2(lb_type(m, t), array_data, indices, 2);
  456. LLVMValueRef len = LLVMConstInt(lb_type(m, t_int), count, true);
  457. LLVMValueRef values[2] = {ptr, len};
  458. res.value = llvm_const_named_struct(m, original_type, values, 2);
  459. return res;
  460. }
  461. }
  462. }
  463. } else if (is_type_array(type) && value.kind == ExactValue_String && !is_type_u8(core_array_type(type))) {
  464. if (is_type_rune_array(type) && value.kind == ExactValue_String) {
  465. i64 count = type->Array.count;
  466. Type *elem = type->Array.elem;
  467. LLVMTypeRef et = lb_type(m, elem);
  468. Rune rune;
  469. isize offset = 0;
  470. isize width = 1;
  471. String s = value.value_string;
  472. LLVMValueRef *elems = gb_alloc_array(permanent_allocator(), LLVMValueRef, cast(isize)count);
  473. for (i64 i = 0; i < count && offset < s.len; i++) {
  474. width = utf8_decode(s.text+offset, s.len-offset, &rune);
  475. offset += width;
  476. elems[i] = LLVMConstInt(et, rune, true);
  477. }
  478. GB_ASSERT(offset == s.len);
  479. res.value = llvm_const_array(et, elems, cast(unsigned)count);
  480. return res;
  481. }
  482. // NOTE(bill, 2021-10-07): Allow for array programming value constants
  483. Type *core_elem = core_array_type(type);
  484. return lb_const_value(m, core_elem, value, allow_local);
  485. } else if (is_type_u8_array(type) && value.kind == ExactValue_String) {
  486. GB_ASSERT(type->Array.count == value.value_string.len);
  487. LLVMValueRef data = LLVMConstStringInContext(ctx,
  488. cast(char const *)value.value_string.text,
  489. cast(unsigned)value.value_string.len,
  490. true /*DontNullTerminate*/);
  491. res.value = data;
  492. return res;
  493. } else if (is_type_array(type) &&
  494. value.kind != ExactValue_Invalid &&
  495. value.kind != ExactValue_String &&
  496. value.kind != ExactValue_Compound) {
  497. i64 count = type->Array.count;
  498. Type *elem = type->Array.elem;
  499. lbValue single_elem = lb_const_value(m, elem, value, allow_local);
  500. LLVMValueRef *elems = gb_alloc_array(permanent_allocator(), LLVMValueRef, cast(isize)count);
  501. for (i64 i = 0; i < count; i++) {
  502. elems[i] = single_elem.value;
  503. }
  504. res.value = llvm_const_array(lb_type(m, elem), elems, cast(unsigned)count);
  505. return res;
  506. } else if (is_type_matrix(type) &&
  507. value.kind != ExactValue_Invalid &&
  508. value.kind != ExactValue_Compound) {
  509. i64 row = type->Matrix.row_count;
  510. i64 column = type->Matrix.column_count;
  511. GB_ASSERT(row == column);
  512. Type *elem = type->Matrix.elem;
  513. lbValue single_elem = lb_const_value(m, elem, value, allow_local);
  514. single_elem.value = llvm_const_cast(single_elem.value, lb_type(m, elem));
  515. i64 total_elem_count = matrix_type_total_internal_elems(type);
  516. LLVMValueRef *elems = gb_alloc_array(permanent_allocator(), LLVMValueRef, cast(isize)total_elem_count);
  517. for (i64 i = 0; i < row; i++) {
  518. elems[matrix_indices_to_offset(type, i, i)] = single_elem.value;
  519. }
  520. for (i64 i = 0; i < total_elem_count; i++) {
  521. if (elems[i] == nullptr) {
  522. elems[i] = LLVMConstNull(lb_type(m, elem));
  523. }
  524. }
  525. res.value = LLVMConstArray(lb_type(m, elem), elems, cast(unsigned)total_elem_count);
  526. return res;
  527. } else if (is_type_simd_vector(type) &&
  528. value.kind != ExactValue_Invalid &&
  529. value.kind != ExactValue_Compound) {
  530. i64 count = type->SimdVector.count;
  531. Type *elem = type->SimdVector.elem;
  532. lbValue single_elem = lb_const_value(m, elem, value, allow_local);
  533. single_elem.value = llvm_const_cast(single_elem.value, lb_type(m, elem));
  534. LLVMValueRef *elems = gb_alloc_array(permanent_allocator(), LLVMValueRef, count);
  535. for (i64 i = 0; i < count; i++) {
  536. elems[i] = single_elem.value;
  537. }
  538. res.value = LLVMConstVector(elems, cast(unsigned)count);
  539. return res;
  540. }
  541. switch (value.kind) {
  542. case ExactValue_Invalid:
  543. res.value = LLVMConstNull(lb_type(m, original_type));
  544. return res;
  545. case ExactValue_Bool:
  546. res.value = LLVMConstInt(lb_type(m, original_type), value.value_bool, false);
  547. return res;
  548. case ExactValue_String:
  549. {
  550. LLVMValueRef ptr = lb_find_or_add_entity_string_ptr(m, value.value_string);
  551. lbValue res = {};
  552. res.type = default_type(original_type);
  553. if (is_type_cstring(res.type)) {
  554. res.value = ptr;
  555. } else {
  556. if (value.value_string.len == 0) {
  557. ptr = LLVMConstNull(lb_type(m, t_u8_ptr));
  558. }
  559. LLVMValueRef str_len = LLVMConstInt(lb_type(m, t_int), value.value_string.len, true);
  560. GB_ASSERT(is_type_string(original_type));
  561. res.value = llvm_const_string_internal(m, original_type, ptr, str_len);
  562. }
  563. return res;
  564. }
  565. case ExactValue_Integer:
  566. if (is_type_pointer(type) || is_type_multi_pointer(type)) {
  567. LLVMTypeRef t = lb_type(m, original_type);
  568. LLVMValueRef i = lb_big_int_to_llvm(m, t_uintptr, &value.value_integer);
  569. res.value = LLVMConstIntToPtr(i, t);
  570. } else {
  571. res.value = lb_big_int_to_llvm(m, original_type, &value.value_integer);
  572. }
  573. return res;
  574. case ExactValue_Float:
  575. if (is_type_different_to_arch_endianness(type)) {
  576. u64 u = bit_cast<u64>(value.value_float);
  577. u = gb_endian_swap64(u);
  578. res.value = LLVMConstReal(lb_type(m, original_type), bit_cast<f64>(u));
  579. } else {
  580. res.value = LLVMConstReal(lb_type(m, original_type), value.value_float);
  581. }
  582. return res;
  583. case ExactValue_Complex:
  584. {
  585. LLVMValueRef values[2] = {};
  586. switch (8*type_size_of(type)) {
  587. case 32:
  588. values[0] = lb_const_f16(m, cast(f32)value.value_complex->real);
  589. values[1] = lb_const_f16(m, cast(f32)value.value_complex->imag);
  590. break;
  591. case 64:
  592. values[0] = lb_const_f32(m, cast(f32)value.value_complex->real);
  593. values[1] = lb_const_f32(m, cast(f32)value.value_complex->imag);
  594. break;
  595. case 128:
  596. values[0] = LLVMConstReal(lb_type(m, t_f64), value.value_complex->real);
  597. values[1] = LLVMConstReal(lb_type(m, t_f64), value.value_complex->imag);
  598. break;
  599. }
  600. res.value = llvm_const_named_struct(m, original_type, values, 2);
  601. return res;
  602. }
  603. break;
  604. case ExactValue_Quaternion:
  605. {
  606. LLVMValueRef values[4] = {};
  607. switch (8*type_size_of(type)) {
  608. case 64:
  609. // @QuaternionLayout
  610. values[3] = lb_const_f16(m, cast(f32)value.value_quaternion->real);
  611. values[0] = lb_const_f16(m, cast(f32)value.value_quaternion->imag);
  612. values[1] = lb_const_f16(m, cast(f32)value.value_quaternion->jmag);
  613. values[2] = lb_const_f16(m, cast(f32)value.value_quaternion->kmag);
  614. break;
  615. case 128:
  616. // @QuaternionLayout
  617. values[3] = lb_const_f32(m, cast(f32)value.value_quaternion->real);
  618. values[0] = lb_const_f32(m, cast(f32)value.value_quaternion->imag);
  619. values[1] = lb_const_f32(m, cast(f32)value.value_quaternion->jmag);
  620. values[2] = lb_const_f32(m, cast(f32)value.value_quaternion->kmag);
  621. break;
  622. case 256:
  623. // @QuaternionLayout
  624. values[3] = LLVMConstReal(lb_type(m, t_f64), value.value_quaternion->real);
  625. values[0] = LLVMConstReal(lb_type(m, t_f64), value.value_quaternion->imag);
  626. values[1] = LLVMConstReal(lb_type(m, t_f64), value.value_quaternion->jmag);
  627. values[2] = LLVMConstReal(lb_type(m, t_f64), value.value_quaternion->kmag);
  628. break;
  629. }
  630. res.value = llvm_const_named_struct(m, original_type, values, 4);
  631. return res;
  632. }
  633. break;
  634. case ExactValue_Pointer:
  635. res.value = LLVMConstIntToPtr(LLVMConstInt(lb_type(m, t_uintptr), value.value_pointer, false), lb_type(m, original_type));
  636. return res;
  637. case ExactValue_Compound:
  638. if (is_type_slice(type)) {
  639. return lb_const_value(m, type, value, allow_local);
  640. } else if (is_type_array(type)) {
  641. ast_node(cl, CompoundLit, value.value_compound);
  642. Type *elem_type = type->Array.elem;
  643. isize elem_count = cl->elems.count;
  644. if (elem_count == 0 || !elem_type_can_be_constant(elem_type)) {
  645. return lb_const_nil(m, original_type);
  646. }
  647. if (cl->elems[0]->kind == Ast_FieldValue) {
  648. // TODO(bill): This is O(N*M) and will be quite slow; it should probably be sorted before hand
  649. LLVMValueRef *values = gb_alloc_array(temporary_allocator(), LLVMValueRef, cast(isize)type->Array.count);
  650. isize value_index = 0;
  651. for (i64 i = 0; i < type->Array.count; i++) {
  652. bool found = false;
  653. for (isize j = 0; j < elem_count; j++) {
  654. Ast *elem = cl->elems[j];
  655. ast_node(fv, FieldValue, elem);
  656. if (is_ast_range(fv->field)) {
  657. ast_node(ie, BinaryExpr, fv->field);
  658. TypeAndValue lo_tav = ie->left->tav;
  659. TypeAndValue hi_tav = ie->right->tav;
  660. GB_ASSERT(lo_tav.mode == Addressing_Constant);
  661. GB_ASSERT(hi_tav.mode == Addressing_Constant);
  662. TokenKind op = ie->op.kind;
  663. i64 lo = exact_value_to_i64(lo_tav.value);
  664. i64 hi = exact_value_to_i64(hi_tav.value);
  665. if (op != Token_RangeHalf) {
  666. hi += 1;
  667. }
  668. if (lo == i) {
  669. TypeAndValue tav = fv->value->tav;
  670. LLVMValueRef val = lb_const_value(m, elem_type, tav.value, allow_local).value;
  671. for (i64 k = lo; k < hi; k++) {
  672. values[value_index++] = val;
  673. }
  674. found = true;
  675. i += (hi-lo-1);
  676. break;
  677. }
  678. } else {
  679. TypeAndValue index_tav = fv->field->tav;
  680. GB_ASSERT(index_tav.mode == Addressing_Constant);
  681. i64 index = exact_value_to_i64(index_tav.value);
  682. if (index == i) {
  683. TypeAndValue tav = fv->value->tav;
  684. LLVMValueRef val = lb_const_value(m, elem_type, tav.value, allow_local).value;
  685. values[value_index++] = val;
  686. found = true;
  687. break;
  688. }
  689. }
  690. }
  691. if (!found) {
  692. values[value_index++] = LLVMConstNull(lb_type(m, elem_type));
  693. }
  694. }
  695. res.value = lb_build_constant_array_values(m, type, elem_type, cast(isize)type->Array.count, values, allow_local);
  696. return res;
  697. } else {
  698. GB_ASSERT_MSG(elem_count == type->Array.count, "%td != %td", elem_count, type->Array.count);
  699. LLVMValueRef *values = gb_alloc_array(temporary_allocator(), LLVMValueRef, cast(isize)type->Array.count);
  700. for (isize i = 0; i < elem_count; i++) {
  701. TypeAndValue tav = cl->elems[i]->tav;
  702. GB_ASSERT(tav.mode != Addressing_Invalid);
  703. values[i] = lb_const_value(m, elem_type, tav.value, allow_local).value;
  704. }
  705. for (isize i = elem_count; i < type->Array.count; i++) {
  706. values[i] = LLVMConstNull(lb_type(m, elem_type));
  707. }
  708. res.value = lb_build_constant_array_values(m, type, elem_type, cast(isize)type->Array.count, values, allow_local);
  709. return res;
  710. }
  711. } else if (is_type_enumerated_array(type)) {
  712. ast_node(cl, CompoundLit, value.value_compound);
  713. Type *elem_type = type->EnumeratedArray.elem;
  714. isize elem_count = cl->elems.count;
  715. if (elem_count == 0 || !elem_type_can_be_constant(elem_type)) {
  716. return lb_const_nil(m, original_type);
  717. }
  718. if (cl->elems[0]->kind == Ast_FieldValue) {
  719. // TODO(bill): This is O(N*M) and will be quite slow; it should probably be sorted before hand
  720. LLVMValueRef *values = gb_alloc_array(temporary_allocator(), LLVMValueRef, cast(isize)type->EnumeratedArray.count);
  721. isize value_index = 0;
  722. i64 total_lo = exact_value_to_i64(*type->EnumeratedArray.min_value);
  723. i64 total_hi = exact_value_to_i64(*type->EnumeratedArray.max_value);
  724. for (i64 i = total_lo; i <= total_hi; i++) {
  725. bool found = false;
  726. for (isize j = 0; j < elem_count; j++) {
  727. Ast *elem = cl->elems[j];
  728. ast_node(fv, FieldValue, elem);
  729. if (is_ast_range(fv->field)) {
  730. ast_node(ie, BinaryExpr, fv->field);
  731. TypeAndValue lo_tav = ie->left->tav;
  732. TypeAndValue hi_tav = ie->right->tav;
  733. GB_ASSERT(lo_tav.mode == Addressing_Constant);
  734. GB_ASSERT(hi_tav.mode == Addressing_Constant);
  735. TokenKind op = ie->op.kind;
  736. i64 lo = exact_value_to_i64(lo_tav.value);
  737. i64 hi = exact_value_to_i64(hi_tav.value);
  738. if (op != Token_RangeHalf) {
  739. hi += 1;
  740. }
  741. if (lo == i) {
  742. TypeAndValue tav = fv->value->tav;
  743. LLVMValueRef val = lb_const_value(m, elem_type, tav.value, allow_local).value;
  744. for (i64 k = lo; k < hi; k++) {
  745. values[value_index++] = val;
  746. }
  747. found = true;
  748. i += (hi-lo-1);
  749. break;
  750. }
  751. } else {
  752. TypeAndValue index_tav = fv->field->tav;
  753. GB_ASSERT(index_tav.mode == Addressing_Constant);
  754. i64 index = exact_value_to_i64(index_tav.value);
  755. if (index == i) {
  756. TypeAndValue tav = fv->value->tav;
  757. LLVMValueRef val = lb_const_value(m, elem_type, tav.value, allow_local).value;
  758. values[value_index++] = val;
  759. found = true;
  760. break;
  761. }
  762. }
  763. }
  764. if (!found) {
  765. values[value_index++] = LLVMConstNull(lb_type(m, elem_type));
  766. }
  767. }
  768. res.value = lb_build_constant_array_values(m, type, elem_type, cast(isize)type->EnumeratedArray.count, values, allow_local);
  769. return res;
  770. } else {
  771. GB_ASSERT_MSG(elem_count == type->EnumeratedArray.count, "%td != %td", elem_count, type->EnumeratedArray.count);
  772. LLVMValueRef *values = gb_alloc_array(temporary_allocator(), LLVMValueRef, cast(isize)type->EnumeratedArray.count);
  773. for (isize i = 0; i < elem_count; i++) {
  774. TypeAndValue tav = cl->elems[i]->tav;
  775. GB_ASSERT(tav.mode != Addressing_Invalid);
  776. values[i] = lb_const_value(m, elem_type, tav.value, allow_local).value;
  777. }
  778. for (isize i = elem_count; i < type->EnumeratedArray.count; i++) {
  779. values[i] = LLVMConstNull(lb_type(m, elem_type));
  780. }
  781. res.value = lb_build_constant_array_values(m, type, elem_type, cast(isize)type->EnumeratedArray.count, values, allow_local);
  782. return res;
  783. }
  784. } else if (is_type_simd_vector(type)) {
  785. ast_node(cl, CompoundLit, value.value_compound);
  786. Type *elem_type = type->SimdVector.elem;
  787. isize elem_count = cl->elems.count;
  788. if (elem_count == 0) {
  789. return lb_const_nil(m, original_type);
  790. }
  791. GB_ASSERT(elem_type_can_be_constant(elem_type));
  792. isize total_elem_count = cast(isize)type->SimdVector.count;
  793. LLVMValueRef *values = gb_alloc_array(temporary_allocator(), LLVMValueRef, total_elem_count);
  794. if (cl->elems[0]->kind == Ast_FieldValue) {
  795. // TODO(bill): This is O(N*M) and will be quite slow; it should probably be sorted before hand
  796. isize value_index = 0;
  797. for (i64 i = 0; i < total_elem_count; i++) {
  798. bool found = false;
  799. for (isize j = 0; j < elem_count; j++) {
  800. Ast *elem = cl->elems[j];
  801. ast_node(fv, FieldValue, elem);
  802. if (is_ast_range(fv->field)) {
  803. ast_node(ie, BinaryExpr, fv->field);
  804. TypeAndValue lo_tav = ie->left->tav;
  805. TypeAndValue hi_tav = ie->right->tav;
  806. GB_ASSERT(lo_tav.mode == Addressing_Constant);
  807. GB_ASSERT(hi_tav.mode == Addressing_Constant);
  808. TokenKind op = ie->op.kind;
  809. i64 lo = exact_value_to_i64(lo_tav.value);
  810. i64 hi = exact_value_to_i64(hi_tav.value);
  811. if (op != Token_RangeHalf) {
  812. hi += 1;
  813. }
  814. if (lo == i) {
  815. TypeAndValue tav = fv->value->tav;
  816. LLVMValueRef val = lb_const_value(m, elem_type, tav.value, allow_local).value;
  817. for (i64 k = lo; k < hi; k++) {
  818. values[value_index++] = val;
  819. }
  820. found = true;
  821. i += (hi-lo-1);
  822. break;
  823. }
  824. } else {
  825. TypeAndValue index_tav = fv->field->tav;
  826. GB_ASSERT(index_tav.mode == Addressing_Constant);
  827. i64 index = exact_value_to_i64(index_tav.value);
  828. if (index == i) {
  829. TypeAndValue tav = fv->value->tav;
  830. LLVMValueRef val = lb_const_value(m, elem_type, tav.value, allow_local).value;
  831. values[value_index++] = val;
  832. found = true;
  833. break;
  834. }
  835. }
  836. }
  837. if (!found) {
  838. values[value_index++] = LLVMConstNull(lb_type(m, elem_type));
  839. }
  840. }
  841. res.value = LLVMConstVector(values, cast(unsigned)total_elem_count);
  842. return res;
  843. } else {
  844. for (isize i = 0; i < elem_count; i++) {
  845. TypeAndValue tav = cl->elems[i]->tav;
  846. GB_ASSERT(tav.mode != Addressing_Invalid);
  847. values[i] = lb_const_value(m, elem_type, tav.value, allow_local).value;
  848. }
  849. LLVMTypeRef et = lb_type(m, elem_type);
  850. for (isize i = elem_count; i < total_elem_count; i++) {
  851. values[i] = LLVMConstNull(et);
  852. }
  853. for (isize i = 0; i < total_elem_count; i++) {
  854. values[i] = llvm_const_cast(values[i], et);
  855. }
  856. res.value = LLVMConstVector(values, cast(unsigned)total_elem_count);
  857. return res;
  858. }
  859. } else if (is_type_struct(type)) {
  860. ast_node(cl, CompoundLit, value.value_compound);
  861. if (cl->elems.count == 0) {
  862. return lb_const_nil(m, original_type);
  863. }
  864. if (is_type_raw_union(type)) {
  865. return lb_const_nil(m, original_type);
  866. }
  867. LLVMTypeRef struct_type = lb_type(m, original_type);
  868. auto field_remapping = lb_get_struct_remapping(m, type);
  869. unsigned value_count = LLVMCountStructElementTypes(struct_type);
  870. LLVMValueRef *values = gb_alloc_array(temporary_allocator(), LLVMValueRef, value_count);
  871. bool *visited = gb_alloc_array(temporary_allocator(), bool, value_count);
  872. if (cl->elems.count > 0) {
  873. if (cl->elems[0]->kind == Ast_FieldValue) {
  874. isize elem_count = cl->elems.count;
  875. for (isize i = 0; i < elem_count; i++) {
  876. ast_node(fv, FieldValue, cl->elems[i]);
  877. String name = fv->field->Ident.token.string;
  878. TypeAndValue tav = fv->value->tav;
  879. GB_ASSERT(tav.mode != Addressing_Invalid);
  880. Selection sel = lookup_field(type, name, false);
  881. GB_ASSERT(!sel.indirect);
  882. Entity *f = type->Struct.fields[sel.index[0]];
  883. i32 index = field_remapping[f->Variable.field_index];
  884. if (elem_type_can_be_constant(f->type)) {
  885. if (sel.index.count == 1) {
  886. values[index] = lb_const_value(m, f->type, tav.value, allow_local).value;
  887. visited[index] = true;
  888. } else {
  889. if (!visited[index]) {
  890. values[index] = lb_const_value(m, f->type, {}, false).value;
  891. visited[index] = true;
  892. }
  893. unsigned idx_list_len = cast(unsigned)sel.index.count-1;
  894. unsigned *idx_list = gb_alloc_array(temporary_allocator(), unsigned, idx_list_len);
  895. if (lb_is_nested_possibly_constant(type, sel, fv->value)) {
  896. bool is_constant = true;
  897. Type *cv_type = f->type;
  898. for (isize j = 1; j < sel.index.count; j++) {
  899. i32 index = sel.index[j];
  900. Type *cvt = base_type(cv_type);
  901. if (cvt->kind == Type_Struct) {
  902. if (cvt->Struct.is_raw_union) {
  903. // sanity check which should have been caught by `lb_is_nested_possibly_constant`
  904. is_constant = false;
  905. break;
  906. }
  907. cv_type = cvt->Struct.fields[index]->type;
  908. if (is_type_struct(cvt)) {
  909. auto cv_field_remapping = lb_get_struct_remapping(m, cvt);
  910. unsigned remapped_index = cast(unsigned)cv_field_remapping[index];
  911. idx_list[j-1] = remapped_index;
  912. } else {
  913. idx_list[j-1] = cast(unsigned)index;
  914. }
  915. } else if (cvt->kind == Type_Array) {
  916. cv_type = cvt->Array.elem;
  917. idx_list[j-1] = cast(unsigned)index;
  918. } else {
  919. GB_PANIC("UNKNOWN TYPE: %s", type_to_string(cv_type));
  920. }
  921. }
  922. if (is_constant) {
  923. LLVMValueRef elem_value = lb_const_value(m, tav.type, tav.value, allow_local).value;
  924. GB_ASSERT(LLVMIsConstant(elem_value));
  925. values[index] = LLVMConstInsertValue(values[index], elem_value, idx_list, idx_list_len);
  926. }
  927. }
  928. }
  929. }
  930. }
  931. } else {
  932. for_array(i, cl->elems) {
  933. Entity *f = type->Struct.fields[i];
  934. TypeAndValue tav = cl->elems[i]->tav;
  935. ExactValue val = {};
  936. if (tav.mode != Addressing_Invalid) {
  937. val = tav.value;
  938. }
  939. i32 index = field_remapping[f->Variable.field_index];
  940. if (elem_type_can_be_constant(f->type)) {
  941. values[index] = lb_const_value(m, f->type, val, allow_local).value;
  942. visited[index] = true;
  943. }
  944. }
  945. }
  946. }
  947. for (isize i = 0; i < value_count; i++) {
  948. if (!visited[i]) {
  949. GB_ASSERT(values[i] == nullptr);
  950. LLVMTypeRef type = LLVMStructGetTypeAtIndex(struct_type, cast(unsigned)i);
  951. values[i] = LLVMConstNull(type);
  952. }
  953. }
  954. bool is_constant = true;
  955. for (isize i = 0; i < value_count; i++) {
  956. LLVMValueRef val = values[i];
  957. if (!LLVMIsConstant(val)) {
  958. GB_ASSERT(is_local);
  959. GB_ASSERT(LLVMGetInstructionOpcode(val) == LLVMLoad);
  960. is_constant = false;
  961. }
  962. }
  963. if (is_constant) {
  964. res.value = llvm_const_named_struct_internal(struct_type, values, cast(unsigned)value_count);
  965. return res;
  966. } else {
  967. // TODO(bill): THIS IS HACK BUT IT WORKS FOR WHAT I NEED
  968. LLVMValueRef *old_values = values;
  969. LLVMValueRef *new_values = gb_alloc_array(temporary_allocator(), LLVMValueRef, value_count);
  970. for (isize i = 0; i < value_count; i++) {
  971. LLVMValueRef old_value = old_values[i];
  972. if (LLVMIsConstant(old_value)) {
  973. new_values[i] = old_value;
  974. } else {
  975. new_values[i] = LLVMConstNull(LLVMTypeOf(old_value));
  976. }
  977. }
  978. LLVMValueRef constant_value = llvm_const_named_struct_internal(struct_type, new_values, cast(unsigned)value_count);
  979. GB_ASSERT(is_local);
  980. lbProcedure *p = m->curr_procedure;
  981. lbAddr v = lb_add_local_generated(p, res.type, true);
  982. map_set(&m->exact_value_compound_literal_addr_map, value.value_compound, v);
  983. LLVMBuildStore(p->builder, constant_value, v.addr.value);
  984. for (isize i = 0; i < value_count; i++) {
  985. LLVMValueRef val = old_values[i];
  986. if (!LLVMIsConstant(val)) {
  987. LLVMValueRef dst = LLVMBuildStructGEP2(p->builder, llvm_addr_type(p->module, v.addr), v.addr.value, cast(unsigned)i, "");
  988. LLVMBuildStore(p->builder, val, dst);
  989. }
  990. }
  991. return lb_addr_load(p, v);
  992. }
  993. } else if (is_type_bit_set(type)) {
  994. ast_node(cl, CompoundLit, value.value_compound);
  995. if (cl->elems.count == 0) {
  996. return lb_const_nil(m, original_type);
  997. }
  998. i64 sz = type_size_of(type);
  999. if (sz == 0) {
  1000. return lb_const_nil(m, original_type);
  1001. }
  1002. BigInt bits = {};
  1003. BigInt one = {};
  1004. big_int_from_u64(&one, 1);
  1005. for_array(i, cl->elems) {
  1006. Ast *e = cl->elems[i];
  1007. GB_ASSERT(e->kind != Ast_FieldValue);
  1008. TypeAndValue tav = e->tav;
  1009. if (tav.mode != Addressing_Constant) {
  1010. continue;
  1011. }
  1012. GB_ASSERT(tav.value.kind == ExactValue_Integer);
  1013. i64 v = big_int_to_i64(&tav.value.value_integer);
  1014. i64 lower = type->BitSet.lower;
  1015. u64 index = cast(u64)(v-lower);
  1016. BigInt bit = {};
  1017. big_int_from_u64(&bit, index);
  1018. big_int_shl(&bit, &one, &bit);
  1019. big_int_or(&bits, &bits, &bit);
  1020. }
  1021. res.value = lb_big_int_to_llvm(m, original_type, &bits);
  1022. return res;
  1023. } else if (is_type_matrix(type)) {
  1024. ast_node(cl, CompoundLit, value.value_compound);
  1025. Type *elem_type = type->Matrix.elem;
  1026. isize elem_count = cl->elems.count;
  1027. if (elem_count == 0 || !elem_type_can_be_constant(elem_type)) {
  1028. return lb_const_nil(m, original_type);
  1029. }
  1030. i64 max_count = type->Matrix.row_count*type->Matrix.column_count;
  1031. i64 total_count = matrix_type_total_internal_elems(type);
  1032. LLVMValueRef *values = gb_alloc_array(temporary_allocator(), LLVMValueRef, cast(isize)total_count);
  1033. if (cl->elems[0]->kind == Ast_FieldValue) {
  1034. for_array(j, cl->elems) {
  1035. Ast *elem = cl->elems[j];
  1036. ast_node(fv, FieldValue, elem);
  1037. if (is_ast_range(fv->field)) {
  1038. ast_node(ie, BinaryExpr, fv->field);
  1039. TypeAndValue lo_tav = ie->left->tav;
  1040. TypeAndValue hi_tav = ie->right->tav;
  1041. GB_ASSERT(lo_tav.mode == Addressing_Constant);
  1042. GB_ASSERT(hi_tav.mode == Addressing_Constant);
  1043. TokenKind op = ie->op.kind;
  1044. i64 lo = exact_value_to_i64(lo_tav.value);
  1045. i64 hi = exact_value_to_i64(hi_tav.value);
  1046. if (op != Token_RangeHalf) {
  1047. hi += 1;
  1048. }
  1049. GB_ASSERT(0 <= lo && lo <= max_count);
  1050. GB_ASSERT(0 <= hi && hi <= max_count);
  1051. GB_ASSERT(lo <= hi);
  1052. TypeAndValue tav = fv->value->tav;
  1053. LLVMValueRef val = lb_const_value(m, elem_type, tav.value, allow_local).value;
  1054. for (i64 k = lo; k < hi; k++) {
  1055. i64 offset = matrix_row_major_index_to_offset(type, k);
  1056. GB_ASSERT(values[offset] == nullptr);
  1057. values[offset] = val;
  1058. }
  1059. } else {
  1060. TypeAndValue index_tav = fv->field->tav;
  1061. GB_ASSERT(index_tav.mode == Addressing_Constant);
  1062. i64 index = exact_value_to_i64(index_tav.value);
  1063. GB_ASSERT(index < max_count);
  1064. TypeAndValue tav = fv->value->tav;
  1065. LLVMValueRef val = lb_const_value(m, elem_type, tav.value, allow_local).value;
  1066. i64 offset = matrix_row_major_index_to_offset(type, index);
  1067. GB_ASSERT(values[offset] == nullptr);
  1068. values[offset] = val;
  1069. }
  1070. }
  1071. for (i64 i = 0; i < total_count; i++) {
  1072. if (values[i] == nullptr) {
  1073. values[i] = LLVMConstNull(lb_type(m, elem_type));
  1074. }
  1075. }
  1076. res.value = lb_build_constant_array_values(m, type, elem_type, cast(isize)total_count, values, allow_local);
  1077. return res;
  1078. } else {
  1079. GB_ASSERT_MSG(elem_count == max_count, "%td != %td", elem_count, max_count);
  1080. LLVMValueRef *values = gb_alloc_array(temporary_allocator(), LLVMValueRef, cast(isize)total_count);
  1081. for_array(i, cl->elems) {
  1082. TypeAndValue tav = cl->elems[i]->tav;
  1083. GB_ASSERT(tav.mode != Addressing_Invalid);
  1084. i64 offset = matrix_row_major_index_to_offset(type, i);
  1085. values[offset] = lb_const_value(m, elem_type, tav.value, allow_local).value;
  1086. }
  1087. for (isize i = 0; i < total_count; i++) {
  1088. if (values[i] == nullptr) {
  1089. values[i] = LLVMConstNull(lb_type(m, elem_type));
  1090. }
  1091. }
  1092. res.value = lb_build_constant_array_values(m, type, elem_type, cast(isize)total_count, values, allow_local);
  1093. return res;
  1094. }
  1095. } else {
  1096. return lb_const_nil(m, original_type);
  1097. }
  1098. break;
  1099. case ExactValue_Procedure:
  1100. GB_PANIC("handled earlier");
  1101. break;
  1102. case ExactValue_Typeid:
  1103. return lb_typeid(m, value.value_typeid);
  1104. }
  1105. return lb_const_nil(m, original_type);
  1106. }