// llvm_backend_const.cpp — constant-value lowering for the LLVM backend.
  1. gb_internal bool lb_is_const(lbValue value) {
  2. LLVMValueRef v = value.value;
  3. if (is_type_untyped_nil(value.type)) {
  4. // TODO(bill): Is this correct behaviour?
  5. return true;
  6. }
  7. if (LLVMIsConstant(v)) {
  8. return true;
  9. }
  10. return false;
  11. }
  12. gb_internal bool lb_is_const_or_global(lbValue value) {
  13. if (lb_is_const(value)) {
  14. return true;
  15. }
  16. return false;
  17. }
  18. gb_internal bool lb_is_elem_const(Ast *elem, Type *elem_type) {
  19. if (!elem_type_can_be_constant(elem_type)) {
  20. return false;
  21. }
  22. if (elem->kind == Ast_FieldValue) {
  23. elem = elem->FieldValue.value;
  24. }
  25. TypeAndValue tav = type_and_value_of_expr(elem);
  26. GB_ASSERT_MSG(tav.mode != Addressing_Invalid, "%s %s", expr_to_string(elem), type_to_string(tav.type));
  27. return tav.value.kind != ExactValue_Invalid;
  28. }
  29. gb_internal bool lb_is_const_nil(lbValue value) {
  30. LLVMValueRef v = value.value;
  31. if (LLVMIsConstant(v)) {
  32. if (LLVMIsAConstantAggregateZero(v)) {
  33. return true;
  34. } else if (LLVMIsAConstantPointerNull(v)) {
  35. return true;
  36. }
  37. }
  38. return false;
  39. }
  40. gb_internal bool lb_is_expr_constant_zero(Ast *expr) {
  41. GB_ASSERT(expr != nullptr);
  42. auto v = exact_value_to_integer(expr->tav.value);
  43. if (v.kind == ExactValue_Integer) {
  44. return big_int_cmp_zero(&v.value_integer) == 0;
  45. }
  46. return false;
  47. }
gb_internal String lb_get_const_string(lbModule *m, lbValue value) {
	// Extract the compile-time contents of a constant `string` value.
	// The value must be a constant {data ptr, len} struct whose pointer part
	// is a constant GEP into a global variable holding the bytes.
	GB_ASSERT(lb_is_const(value));
	GB_ASSERT(LLVMIsConstant(value.value));

	Type *t = base_type(value.type);
	GB_ASSERT(are_types_identical(t, t_string));

	// Field 0 is the data pointer, field 1 is the length.
	unsigned ptr_indices[1] = {0};
	unsigned len_indices[1] = {1};
	LLVMValueRef underlying_ptr = llvm_const_extract_value(m, value.value, ptr_indices, gb_count_of(ptr_indices));
	LLVMValueRef underlying_len = llvm_const_extract_value(m, value.value, len_indices, gb_count_of(len_indices));

	// Peel the constant GEP to reach the backing global, then read its
	// initializer (the raw character data).
	GB_ASSERT(LLVMGetConstOpcode(underlying_ptr) == LLVMGetElementPtr);
	underlying_ptr = LLVMGetOperand(underlying_ptr, 0);
	GB_ASSERT(LLVMIsAGlobalVariable(underlying_ptr));
	underlying_ptr = LLVMGetInitializer(underlying_ptr);

	size_t length = 0;
	char const *text = LLVMGetAsString(underlying_ptr, &length);

	// Use the length stored in the string struct rather than the initializer's
	// length (the initializer may be padded/NUL-terminated — TODO confirm).
	isize real_length = cast(isize)LLVMConstIntGetSExtValue(underlying_len);
	return make_string(cast(u8 const *)text, real_length);
}
gb_internal LLVMValueRef llvm_const_cast(LLVMValueRef val, LLVMTypeRef dst) {
	// Cast constant `val` to type `dst`; both types must have the same size.
	LLVMTypeRef src = LLVMTypeOf(val);
	if (src == dst) {
		return val;
	}
	if (LLVMIsNull(val)) {
		// A null constant of any type maps directly to the null of `dst`.
		return LLVMConstNull(dst);
	}
	GB_ASSERT_MSG(lb_sizeof(dst) == lb_sizeof(src), "%s vs %s", LLVMPrintTypeToString(dst), LLVMPrintTypeToString(src));
	LLVMTypeKind kind = LLVMGetTypeKind(dst);
	switch (kind) {
	case LLVMPointerTypeKind:
		if (LB_USE_NEW_PASS_SYSTEM) {
			// NOTE(review): no cast is performed under the new pass system —
			// presumably because pointers are opaque there; confirm.
			return val;
		}
		return LLVMConstPointerCast(val, dst);
	case LLVMStructTypeKind:
		// GB_PANIC("%s -> %s", LLVMPrintValueToString(val), LLVMPrintTypeToString(dst));
		// NOTE(bill): It's not possible to do a bit cast on a struct, why was this code even here in the first place?
		// It seems mostly to exist to get around the "anonymous -> named" struct assignments
		// return LLVMConstBitCast(val, dst);
		return val;
	default:
		GB_PANIC("Unhandled const cast %s to %s", LLVMPrintTypeToString(src), LLVMPrintTypeToString(dst));
	}
	return val;
}
  93. gb_internal lbValue lb_const_ptr_cast(lbModule *m, lbValue value, Type *t) {
  94. GB_ASSERT(is_type_internally_pointer_like(value.type));
  95. GB_ASSERT(is_type_internally_pointer_like(t));
  96. GB_ASSERT(lb_is_const(value));
  97. lbValue res = {};
  98. res.value = LLVMConstPointerCast(value.value, lb_type(m, t));
  99. res.type = t;
  100. return res;
  101. }
  102. gb_internal LLVMValueRef llvm_const_string_internal(lbModule *m, Type *t, LLVMValueRef data, LLVMValueRef len) {
  103. if (build_context.metrics.ptr_size < build_context.metrics.int_size) {
  104. LLVMValueRef values[3] = {
  105. data,
  106. LLVMConstNull(lb_type(m, t_i32)),
  107. len,
  108. };
  109. return llvm_const_named_struct_internal(lb_type(m, t), values, 3);
  110. } else {
  111. LLVMValueRef values[2] = {
  112. data,
  113. len,
  114. };
  115. return llvm_const_named_struct_internal(lb_type(m, t), values, 2);
  116. }
  117. }
gb_internal LLVMValueRef llvm_const_named_struct(lbModule *m, Type *t, LLVMValueRef *values, isize value_count_) {
	// Build a constant of named struct type `t` from per-field `values`.
	// When the LLVM struct carries extra (padding) elements, source fields are
	// remapped to their real element indices and the gaps are zero-filled.
	LLVMTypeRef struct_type = lb_type(m, t);
	GB_ASSERT(LLVMGetTypeKind(struct_type) == LLVMStructTypeKind);

	unsigned value_count = cast(unsigned)value_count_;
	unsigned elem_count = LLVMCountStructElementTypes(struct_type);
	if (elem_count == value_count) {
		// No padding: values map 1:1 onto the struct elements.
		return llvm_const_named_struct_internal(struct_type, values, value_count_);
	}
	Type *bt = base_type(t);
	GB_ASSERT(bt->kind == Type_Struct);
	GB_ASSERT(value_count_ == bt->Struct.fields.count);

	// Maps source field index -> LLVM element index.
	auto field_remapping = lb_get_struct_remapping(m, t);
	unsigned values_with_padding_count = LLVMCountStructElementTypes(struct_type);
	LLVMValueRef *values_with_padding = gb_alloc_array(permanent_allocator(), LLVMValueRef, values_with_padding_count);
	for (unsigned i = 0; i < value_count; i++) {
		values_with_padding[field_remapping[i]] = values[i];
	}
	// Zero-initialize every element that received no value (the padding).
	for (unsigned i = 0; i < values_with_padding_count; i++) {
		if (values_with_padding[i] == nullptr) {
			values_with_padding[i] = LLVMConstNull(LLVMStructGetTypeAtIndex(struct_type, i));
		}
	}
	return llvm_const_named_struct_internal(struct_type, values_with_padding, values_with_padding_count);
}
  142. gb_internal LLVMValueRef llvm_const_named_struct_internal(LLVMTypeRef t, LLVMValueRef *values, isize value_count_) {
  143. unsigned value_count = cast(unsigned)value_count_;
  144. unsigned elem_count = LLVMCountStructElementTypes(t);
  145. GB_ASSERT_MSG(value_count == elem_count, "%s %u %u", LLVMPrintTypeToString(t), value_count, elem_count);
  146. for (unsigned i = 0; i < elem_count; i++) {
  147. LLVMTypeRef elem_type = LLVMStructGetTypeAtIndex(t, i);
  148. values[i] = llvm_const_cast(values[i], elem_type);
  149. }
  150. return LLVMConstNamedStruct(t, values, value_count);
  151. }
  152. gb_internal LLVMValueRef llvm_const_array(LLVMTypeRef elem_type, LLVMValueRef *values, isize value_count_) {
  153. unsigned value_count = cast(unsigned)value_count_;
  154. for (unsigned i = 0; i < value_count; i++) {
  155. values[i] = llvm_const_cast(values[i], elem_type);
  156. }
  157. return LLVMConstArray(elem_type, values, value_count);
  158. }
  159. gb_internal LLVMValueRef llvm_const_slice_internal(lbModule *m, LLVMValueRef data, LLVMValueRef len) {
  160. if (build_context.metrics.ptr_size < build_context.metrics.int_size) {
  161. GB_ASSERT(build_context.metrics.ptr_size == 4);
  162. GB_ASSERT(build_context.metrics.int_size == 8);
  163. LLVMValueRef vals[3] = {
  164. data,
  165. LLVMConstNull(lb_type(m, t_u32)),
  166. len,
  167. };
  168. return LLVMConstStructInContext(m->ctx, vals, gb_count_of(vals), false);
  169. } else {
  170. LLVMValueRef vals[2] = {
  171. data,
  172. len,
  173. };
  174. return LLVMConstStructInContext(m->ctx, vals, gb_count_of(vals), false);
  175. }
  176. }
  177. gb_internal LLVMValueRef llvm_const_slice(lbModule *m, lbValue data, lbValue len) {
  178. GB_ASSERT(is_type_pointer(data.type) || is_type_multi_pointer(data.type));
  179. GB_ASSERT(are_types_identical(len.type, t_int));
  180. return llvm_const_slice_internal(m, data.value, len.value);
  181. }
  182. gb_internal lbValue lb_const_nil(lbModule *m, Type *type) {
  183. LLVMValueRef v = LLVMConstNull(lb_type(m, type));
  184. return lbValue{v, type};
  185. }
  186. gb_internal lbValue lb_const_undef(lbModule *m, Type *type) {
  187. LLVMValueRef v = LLVMGetUndef(lb_type(m, type));
  188. return lbValue{v, type};
  189. }
  190. gb_internal lbValue lb_const_int(lbModule *m, Type *type, u64 value) {
  191. lbValue res = {};
  192. res.value = LLVMConstInt(lb_type(m, type), cast(unsigned long long)value, !is_type_unsigned(type));
  193. res.type = type;
  194. return res;
  195. }
  196. gb_internal lbValue lb_const_string(lbModule *m, String const &value) {
  197. return lb_const_value(m, t_string, exact_value_string(value));
  198. }
  199. gb_internal lbValue lb_const_bool(lbModule *m, Type *type, bool value) {
  200. lbValue res = {};
  201. res.value = LLVMConstInt(lb_type(m, type), value, false);
  202. res.type = type;
  203. return res;
  204. }
  205. gb_internal LLVMValueRef lb_const_f16(lbModule *m, f32 f, Type *type=t_f16) {
  206. GB_ASSERT(type_size_of(type) == 2);
  207. u16 u = f32_to_f16(f);
  208. if (is_type_different_to_arch_endianness(type)) {
  209. u = gb_endian_swap16(u);
  210. }
  211. LLVMValueRef i = LLVMConstInt(LLVMInt16TypeInContext(m->ctx), u, false);
  212. return LLVMConstBitCast(i, lb_type(m, type));
  213. }
  214. gb_internal LLVMValueRef lb_const_f32(lbModule *m, f32 f, Type *type=t_f32) {
  215. GB_ASSERT(type_size_of(type) == 4);
  216. u32 u = bit_cast<u32>(f);
  217. if (is_type_different_to_arch_endianness(type)) {
  218. u = gb_endian_swap32(u);
  219. }
  220. LLVMValueRef i = LLVMConstInt(LLVMInt32TypeInContext(m->ctx), u, false);
  221. return LLVMConstBitCast(i, lb_type(m, type));
  222. }
  223. gb_internal bool lb_is_expr_untyped_const(Ast *expr) {
  224. auto const &tv = type_and_value_of_expr(expr);
  225. if (is_type_untyped(tv.type)) {
  226. return tv.value.kind != ExactValue_Invalid;
  227. }
  228. return false;
  229. }
  230. gb_internal lbValue lb_expr_untyped_const_to_typed(lbModule *m, Ast *expr, Type *t) {
  231. GB_ASSERT(is_type_typed(t));
  232. auto const &tv = type_and_value_of_expr(expr);
  233. return lb_const_value(m, t, tv.value);
  234. }
  235. gb_internal i32 lb_obfuscate_i32(i32 i) {
  236. i32 x = cast(i32)gb_fnv64a(&i, sizeof(i));
  237. if (x < 0) {
  238. x = 1-x;
  239. }
  240. return cast(i32)x;
  241. }
gb_internal lbValue lb_const_source_code_location_const(lbModule *m, String const &procedure_, TokenPos const &pos) {
	// Build a constant source-code-location value {file, line, column, procedure}.
	// With obfuscation enabled, the strings and the line/column numbers are
	// scrambled deterministically so locations remain distinct but unreadable.
	String file = get_file_path_string(pos.file_id);
	String procedure = procedure_;
	i32 line = pos.line;
	i32 column = pos.column;
	if (build_context.obfuscate_source_code_locations) {
		file = obfuscate_string(file, "F");
		procedure = obfuscate_string(procedure, "P");
		line = lb_obfuscate_i32(line);
		column = lb_obfuscate_i32(column);
	}
	LLVMValueRef fields[4] = {};
	fields[0]/*file*/ = lb_find_or_add_entity_string(m, file).value;
	fields[1]/*line*/ = lb_const_int(m, t_i32, line).value;
	fields[2]/*column*/ = lb_const_int(m, t_i32, column).value;
	fields[3]/*procedure*/ = lb_find_or_add_entity_string(m, procedure).value;

	lbValue res = {};
	res.value = llvm_const_named_struct(m, t_source_code_location, fields, gb_count_of(fields));
	res.type = t_source_code_location;
	return res;
}
  263. gb_internal lbValue lb_emit_source_code_location_const(lbProcedure *p, String const &procedure, TokenPos const &pos) {
  264. lbModule *m = p->module;
  265. return lb_const_source_code_location_const(m, procedure, pos);
  266. }
  267. gb_internal lbValue lb_emit_source_code_location_const(lbProcedure *p, Ast *node) {
  268. String proc_name = {};
  269. if (p->entity) {
  270. proc_name = p->entity->token.string;
  271. }
  272. TokenPos pos = {};
  273. if (node) {
  274. pos = ast_token(node).pos;
  275. }
  276. return lb_emit_source_code_location_const(p, proc_name, pos);
  277. }
  278. gb_internal lbValue lb_emit_source_code_location_as_global_ptr(lbProcedure *p, String const &procedure, TokenPos const &pos) {
  279. lbValue loc = lb_emit_source_code_location_const(p, procedure, pos);
  280. lbAddr addr = lb_add_global_generated(p->module, loc.type, loc, nullptr);
  281. lb_make_global_private_const(addr);
  282. return addr.addr;
  283. }
  284. gb_internal lbValue lb_emit_source_code_location_as_global_ptr(lbProcedure *p, Ast *node) {
  285. lbValue loc = lb_emit_source_code_location_const(p, node);
  286. lbAddr addr = lb_add_global_generated(p->module, loc.type, loc, nullptr);
  287. lb_make_global_private_const(addr);
  288. return addr.addr;
  289. }
  290. gb_internal lbValue lb_emit_source_code_location_as_global(lbProcedure *p, String const &procedure, TokenPos const &pos) {
  291. return lb_emit_load(p, lb_emit_source_code_location_as_global_ptr(p, procedure, pos));
  292. }
  293. gb_internal lbValue lb_emit_source_code_location_as_global(lbProcedure *p, Ast *node) {
  294. return lb_emit_load(p, lb_emit_source_code_location_as_global_ptr(p, node));
  295. }
gb_internal LLVMValueRef lb_build_constant_array_values(lbModule *m, Type *type, Type *elem_type, isize count, LLVMValueRef *values, bool allow_local) {
	// Build an array value from `values`.  If every element is constant, a
	// constant LLVM array is produced; otherwise (only possible when local
	// construction is allowed inside a procedure) the elements are stored
	// one-by-one into a stack temporary and the loaded aggregate is returned.
	bool is_local = allow_local && m->curr_procedure != nullptr;
	bool is_const = true;
	if (is_local) {
		// Only scan for non-constants when a runtime fallback is even possible.
		for (isize i = 0; i < count; i++) {
			GB_ASSERT(values[i] != nullptr);
			if (!LLVMIsConstant(values[i])) {
				is_const = false;
				break;
			}
		}
	}
	if (!is_const) {
		LLVMTypeRef llvm_elem_type = lb_type(m, elem_type);
		lbProcedure *p = m->curr_procedure;
		GB_ASSERT(p != nullptr);
		lbAddr v = lb_add_local_generated(p, type, false);
		lbValue ptr = lb_addr_get_ptr(p, v);
		for (isize i = 0; i < count; i++) {
			lbValue elem = lb_emit_array_epi(p, ptr, i);
			if (is_type_proc(elem_type)) {
				// Procedure values are pointer-cast to the element type before storing.
				values[i] = LLVMConstPointerCast(values[i], llvm_elem_type);
			}
			LLVMBuildStore(p->builder, values[i], elem.value);
		}
		return lb_addr_load(p, v).value;
	}
	return llvm_const_array(lb_type(m, elem_type), values, cast(unsigned int)count);
}
  325. gb_internal LLVMValueRef lb_big_int_to_llvm(lbModule *m, Type *original_type, BigInt const *a) {
  326. if (big_int_is_zero(a)) {
  327. return LLVMConstNull(lb_type(m, original_type));
  328. }
  329. size_t sz = cast(size_t)type_size_of(original_type);
  330. u64 rop64[4] = {}; // 2 u64 is the maximum we will ever need, so doubling it will be fine :P
  331. u8 *rop = cast(u8 *)rop64;
  332. size_t max_count = 0;
  333. size_t written = 0;
  334. size_t size = 1;
  335. size_t nails = 0;
  336. mp_endian endian = MP_LITTLE_ENDIAN;
  337. max_count = mp_pack_count(a, nails, size);
  338. if (sz < max_count) {
  339. debug_print_big_int(a);
  340. gb_printf_err("%s -> %tu\n", type_to_string(original_type), sz);;
  341. }
  342. GB_ASSERT_MSG(sz >= max_count, "max_count: %tu, sz: %tu, written: %tu, type %s", max_count, sz, written, type_to_string(original_type));
  343. GB_ASSERT(gb_size_of(rop64) >= sz);
  344. mp_err err = mp_pack(rop, sz, &written,
  345. MP_LSB_FIRST,
  346. size, endian, nails,
  347. a);
  348. GB_ASSERT(err == MP_OKAY);
  349. if (!is_type_endian_little(original_type)) {
  350. for (size_t i = 0; i < sz/2; i++) {
  351. u8 tmp = rop[i];
  352. rop[i] = rop[sz-1-i];
  353. rop[sz-1-i] = tmp;
  354. }
  355. }
  356. LLVMValueRef value = LLVMConstIntOfArbitraryPrecision(lb_type(m, original_type), cast(unsigned)((sz+7)/8), cast(u64 *)rop);
  357. if (big_int_is_neg(a)) {
  358. value = LLVMConstNeg(value);
  359. }
  360. return value;
  361. }
  362. gb_internal bool lb_is_nested_possibly_constant(Type *ft, Selection const &sel, Ast *elem) {
  363. GB_ASSERT(!sel.indirect);
  364. for (i32 index : sel.index) {
  365. Type *bt = base_type(ft);
  366. switch (bt->kind) {
  367. case Type_Struct:
  368. if (bt->Struct.is_raw_union) {
  369. return false;
  370. }
  371. ft = bt->Struct.fields[index]->type;
  372. break;
  373. case Type_Array:
  374. ft = bt->Array.elem;
  375. break;
  376. default:
  377. return false;
  378. }
  379. }
  380. if (is_type_raw_union(ft) || is_type_typeid(ft)) {
  381. return false;
  382. }
  383. return lb_is_elem_const(elem, ft);
  384. }
  385. gb_internal lbValue lb_const_value(lbModule *m, Type *type, ExactValue value, bool allow_local) {
  386. LLVMContextRef ctx = m->ctx;
  387. type = default_type(type);
  388. Type *original_type = type;
  389. lbValue res = {};
  390. res.type = original_type;
  391. type = core_type(type);
  392. value = convert_exact_value_for_type(value, type);
  393. if (value.kind == ExactValue_Typeid) {
  394. return lb_typeid(m, value.value_typeid);
  395. }
  396. if (value.kind == ExactValue_Invalid) {
  397. return lb_const_nil(m, original_type);
  398. }
  399. if (value.kind == ExactValue_Procedure) {
  400. lbValue res = {};
  401. Ast *expr = unparen_expr(value.value_procedure);
  402. GB_ASSERT(expr != nullptr);
  403. if (expr->kind == Ast_ProcLit) {
  404. res = lb_generate_anonymous_proc_lit(m, str_lit("_proclit"), expr);
  405. } else {
  406. Entity *e = entity_from_expr(expr);
  407. res = lb_find_procedure_value_from_entity(m, e);
  408. }
  409. GB_ASSERT(res.value != nullptr);
  410. GB_ASSERT(LLVMGetValueKind(res.value) == LLVMFunctionValueKind);
  411. if (LLVMGetIntrinsicID(res.value) == 0) {
  412. // NOTE(bill): do not cast intrinsics as they are not really procedures that can be casted
  413. res.value = LLVMConstPointerCast(res.value, lb_type(m, res.type));
  414. }
  415. return res;
  416. }
  417. bool is_local = allow_local && m->curr_procedure != nullptr;
  418. // GB_ASSERT_MSG(is_type_typed(type), "%s", type_to_string(type));
  419. if (is_type_slice(type)) {
  420. if (value.kind == ExactValue_String) {
  421. GB_ASSERT(is_type_slice(type));
  422. res.value = lb_find_or_add_entity_string_byte_slice_with_type(m, value.value_string, original_type).value;
  423. return res;
  424. } else {
  425. ast_node(cl, CompoundLit, value.value_compound);
  426. isize count = cl->elems.count;
  427. if (count == 0) {
  428. return lb_const_nil(m, type);
  429. }
  430. count = gb_max(cast(isize)cl->max_count, count);
  431. Type *elem = base_type(type)->Slice.elem;
  432. Type *t = alloc_type_array(elem, count);
  433. lbValue backing_array = lb_const_value(m, t, value, allow_local);
  434. LLVMValueRef array_data = nullptr;
  435. if (is_local) {
  436. // NOTE(bill, 2020-06-08): This is a bit of a hack but a "constant" slice needs
  437. // its backing data on the stack
  438. lbProcedure *p = m->curr_procedure;
  439. LLVMTypeRef llvm_type = lb_type(m, t);
  440. array_data = llvm_alloca(p, llvm_type, 16);
  441. LLVMBuildStore(p->builder, backing_array.value, array_data);
  442. {
  443. LLVMValueRef indices[2] = {llvm_zero(m), llvm_zero(m)};
  444. LLVMValueRef ptr = LLVMBuildInBoundsGEP2(p->builder, llvm_type, array_data, indices, 2, "");
  445. LLVMValueRef len = LLVMConstInt(lb_type(m, t_int), count, true);
  446. lbAddr slice = lb_add_local_generated(p, type, false);
  447. map_set(&m->exact_value_compound_literal_addr_map, value.value_compound, slice);
  448. lb_fill_slice(p, slice, {ptr, alloc_type_pointer(elem)}, {len, t_int});
  449. return lb_addr_load(p, slice);
  450. }
  451. } else {
  452. isize max_len = 7+8+1;
  453. char *str = gb_alloc_array(permanent_allocator(), char, max_len);
  454. u32 id = m->gen->global_array_index.fetch_add(1);
  455. isize len = gb_snprintf(str, max_len, "csba$%x", id);
  456. String name = make_string(cast(u8 *)str, len-1);
  457. Entity *e = alloc_entity_constant(nullptr, make_token_ident(name), t, value);
  458. array_data = LLVMAddGlobal(m->mod, lb_type(m, t), str);
  459. LLVMSetInitializer(array_data, backing_array.value);
  460. lbValue g = {};
  461. g.value = array_data;
  462. g.type = t;
  463. lb_add_entity(m, e, g);
  464. lb_add_member(m, name, g);
  465. {
  466. LLVMValueRef indices[2] = {llvm_zero(m), llvm_zero(m)};
  467. LLVMValueRef ptr = LLVMConstInBoundsGEP2(lb_type(m, t), array_data, indices, 2);
  468. LLVMValueRef len = LLVMConstInt(lb_type(m, t_int), count, true);
  469. LLVMValueRef values[2] = {ptr, len};
  470. res.value = llvm_const_named_struct(m, original_type, values, 2);
  471. return res;
  472. }
  473. }
  474. }
  475. } else if (is_type_array(type) && value.kind == ExactValue_String && !is_type_u8(core_array_type(type))) {
  476. if (is_type_rune_array(type)) {
  477. i64 count = type->Array.count;
  478. Type *elem = type->Array.elem;
  479. LLVMTypeRef et = lb_type(m, elem);
  480. Rune rune;
  481. isize offset = 0;
  482. isize width = 1;
  483. String s = value.value_string;
  484. LLVMValueRef *elems = gb_alloc_array(permanent_allocator(), LLVMValueRef, cast(isize)count);
  485. for (i64 i = 0; i < count && offset < s.len; i++) {
  486. width = utf8_decode(s.text+offset, s.len-offset, &rune);
  487. offset += width;
  488. elems[i] = LLVMConstInt(et, rune, true);
  489. }
  490. GB_ASSERT(offset == s.len);
  491. res.value = llvm_const_array(et, elems, cast(unsigned)count);
  492. return res;
  493. }
  494. // NOTE(bill, 2021-10-07): Allow for array programming value constants
  495. Type *core_elem = core_array_type(type);
  496. return lb_const_value(m, core_elem, value, allow_local);
  497. } else if (is_type_u8_array(type) && value.kind == ExactValue_String) {
  498. GB_ASSERT(type->Array.count == value.value_string.len);
  499. LLVMValueRef data = LLVMConstStringInContext(ctx,
  500. cast(char const *)value.value_string.text,
  501. cast(unsigned)value.value_string.len,
  502. true /*DontNullTerminate*/);
  503. res.value = data;
  504. return res;
  505. } else if (is_type_array(type) &&
  506. value.kind != ExactValue_Invalid &&
  507. value.kind != ExactValue_String &&
  508. value.kind != ExactValue_Compound) {
  509. i64 count = type->Array.count;
  510. Type *elem = type->Array.elem;
  511. lbValue single_elem = lb_const_value(m, elem, value, allow_local);
  512. LLVMValueRef *elems = gb_alloc_array(permanent_allocator(), LLVMValueRef, cast(isize)count);
  513. for (i64 i = 0; i < count; i++) {
  514. elems[i] = single_elem.value;
  515. }
  516. res.value = llvm_const_array(lb_type(m, elem), elems, cast(unsigned)count);
  517. return res;
  518. } else if (is_type_matrix(type) &&
  519. value.kind != ExactValue_Invalid &&
  520. value.kind != ExactValue_Compound) {
  521. i64 row = type->Matrix.row_count;
  522. i64 column = type->Matrix.column_count;
  523. GB_ASSERT(row == column);
  524. Type *elem = type->Matrix.elem;
  525. lbValue single_elem = lb_const_value(m, elem, value, allow_local);
  526. single_elem.value = llvm_const_cast(single_elem.value, lb_type(m, elem));
  527. i64 total_elem_count = matrix_type_total_internal_elems(type);
  528. LLVMValueRef *elems = gb_alloc_array(permanent_allocator(), LLVMValueRef, cast(isize)total_elem_count);
  529. for (i64 i = 0; i < row; i++) {
  530. elems[matrix_indices_to_offset(type, i, i)] = single_elem.value;
  531. }
  532. for (i64 i = 0; i < total_elem_count; i++) {
  533. if (elems[i] == nullptr) {
  534. elems[i] = LLVMConstNull(lb_type(m, elem));
  535. }
  536. }
  537. res.value = LLVMConstArray(lb_type(m, elem), elems, cast(unsigned)total_elem_count);
  538. return res;
  539. } else if (is_type_simd_vector(type) &&
  540. value.kind != ExactValue_Invalid &&
  541. value.kind != ExactValue_Compound) {
  542. i64 count = type->SimdVector.count;
  543. Type *elem = type->SimdVector.elem;
  544. lbValue single_elem = lb_const_value(m, elem, value, allow_local);
  545. single_elem.value = llvm_const_cast(single_elem.value, lb_type(m, elem));
  546. LLVMValueRef *elems = gb_alloc_array(permanent_allocator(), LLVMValueRef, count);
  547. for (i64 i = 0; i < count; i++) {
  548. elems[i] = single_elem.value;
  549. }
  550. res.value = LLVMConstVector(elems, cast(unsigned)count);
  551. return res;
  552. }
  553. switch (value.kind) {
  554. case ExactValue_Invalid:
  555. res.value = LLVMConstNull(lb_type(m, original_type));
  556. return res;
  557. case ExactValue_Bool:
  558. res.value = LLVMConstInt(lb_type(m, original_type), value.value_bool, false);
  559. return res;
  560. case ExactValue_String:
  561. {
  562. LLVMValueRef ptr = lb_find_or_add_entity_string_ptr(m, value.value_string);
  563. lbValue res = {};
  564. res.type = default_type(original_type);
  565. if (is_type_cstring(res.type)) {
  566. res.value = ptr;
  567. } else {
  568. if (value.value_string.len == 0) {
  569. ptr = LLVMConstNull(lb_type(m, t_u8_ptr));
  570. }
  571. LLVMValueRef str_len = LLVMConstInt(lb_type(m, t_int), value.value_string.len, true);
  572. GB_ASSERT(is_type_string(original_type));
  573. res.value = llvm_const_string_internal(m, original_type, ptr, str_len);
  574. }
  575. return res;
  576. }
  577. case ExactValue_Integer:
  578. if (is_type_pointer(type) || is_type_multi_pointer(type)) {
  579. LLVMTypeRef t = lb_type(m, original_type);
  580. LLVMValueRef i = lb_big_int_to_llvm(m, t_uintptr, &value.value_integer);
  581. res.value = LLVMConstIntToPtr(i, t);
  582. } else {
  583. res.value = lb_big_int_to_llvm(m, original_type, &value.value_integer);
  584. }
  585. return res;
  586. case ExactValue_Float:
  587. if (is_type_different_to_arch_endianness(type)) {
  588. if (type->Basic.kind == Basic_f32le || type->Basic.kind == Basic_f32be) {
  589. f32 f = static_cast<float>(value.value_float);
  590. u32 u = bit_cast<u32>(f);
  591. u = gb_endian_swap32(u);
  592. res.value = LLVMConstReal(lb_type(m, original_type), bit_cast<f32>(u));
  593. } else if (type->Basic.kind == Basic_f16le || type->Basic.kind == Basic_f16be) {
  594. f32 f = static_cast<float>(value.value_float);
  595. u16 u = f32_to_f16(f);
  596. u = gb_endian_swap16(u);
  597. res.value = LLVMConstReal(lb_type(m, original_type), f16_to_f32(u));
  598. } else {
  599. u64 u = bit_cast<u64>(value.value_float);
  600. u = gb_endian_swap64(u);
  601. res.value = LLVMConstReal(lb_type(m, original_type), bit_cast<f64>(u));
  602. }
  603. } else {
  604. res.value = LLVMConstReal(lb_type(m, original_type), value.value_float);
  605. }
  606. return res;
  607. case ExactValue_Complex:
  608. {
  609. LLVMValueRef values[2] = {};
  610. switch (8*type_size_of(type)) {
  611. case 32:
  612. values[0] = lb_const_f16(m, cast(f32)value.value_complex->real);
  613. values[1] = lb_const_f16(m, cast(f32)value.value_complex->imag);
  614. break;
  615. case 64:
  616. values[0] = lb_const_f32(m, cast(f32)value.value_complex->real);
  617. values[1] = lb_const_f32(m, cast(f32)value.value_complex->imag);
  618. break;
  619. case 128:
  620. values[0] = LLVMConstReal(lb_type(m, t_f64), value.value_complex->real);
  621. values[1] = LLVMConstReal(lb_type(m, t_f64), value.value_complex->imag);
  622. break;
  623. }
  624. res.value = llvm_const_named_struct(m, original_type, values, 2);
  625. return res;
  626. }
  627. break;
// A quaternion constant is a four-field struct. Per @QuaternionLayout the
// in-memory order is {imag, jmag, kmag, real}: the real part goes to index 3.
// Component width is a quarter of the total size (64->f16, 128->f32, 256->f64).
  628. case ExactValue_Quaternion:
  629. {
  630. LLVMValueRef values[4] = {};
  631. switch (8*type_size_of(type)) {
  632. case 64:
  633. // @QuaternionLayout
  634. values[3] = lb_const_f16(m, cast(f32)value.value_quaternion->real);
  635. values[0] = lb_const_f16(m, cast(f32)value.value_quaternion->imag);
  636. values[1] = lb_const_f16(m, cast(f32)value.value_quaternion->jmag);
  637. values[2] = lb_const_f16(m, cast(f32)value.value_quaternion->kmag);
  638. break;
  639. case 128:
  640. // @QuaternionLayout
  641. values[3] = lb_const_f32(m, cast(f32)value.value_quaternion->real);
  642. values[0] = lb_const_f32(m, cast(f32)value.value_quaternion->imag);
  643. values[1] = lb_const_f32(m, cast(f32)value.value_quaternion->jmag);
  644. values[2] = lb_const_f32(m, cast(f32)value.value_quaternion->kmag);
  645. break;
  646. case 256:
  647. // @QuaternionLayout
  648. values[3] = LLVMConstReal(lb_type(m, t_f64), value.value_quaternion->real);
  649. values[0] = LLVMConstReal(lb_type(m, t_f64), value.value_quaternion->imag);
  650. values[1] = LLVMConstReal(lb_type(m, t_f64), value.value_quaternion->jmag);
  651. values[2] = LLVMConstReal(lb_type(m, t_f64), value.value_quaternion->kmag);
  652. break;
  653. }
  654. res.value = llvm_const_named_struct(m, original_type, values, 4);
  655. return res;
  656. }
// Unreachable after the return above; kept for switch-case hygiene.
  657. break;
// Pointer constant: materialize the raw address as a uintptr integer literal
// and inttoptr-cast it to the requested pointer type.
  658. case ExactValue_Pointer:
  659. res.value = LLVMConstIntToPtr(LLVMConstInt(lb_type(m, t_uintptr), value.value_pointer, false), lb_type(m, original_type));
  660. return res;
// Compound literals: dispatch on the literal's (core) type.
  661. case ExactValue_Compound:
// NOTE(review): this recurses with `type` (not `original_type`) and otherwise
// identical arguments — presumably `type` differs from the argument this call
// received (e.g. it was core/base-stripped earlier) so a different path is
// taken on re-entry; verify against the function head, which is outside this
// view, that this cannot self-recurse indefinitely.
  662. if (is_type_slice(type)) {
  663. return lb_const_value(m, type, value, allow_local);
// Fixed-length array literal.
  664. } else if (is_type_array(type)) {
  665. ast_node(cl, CompoundLit, value.value_compound);
  666. Type *elem_type = type->Array.elem;
  667. isize elem_count = cl->elems.count;
// Empty literal, or elements that cannot be constants -> zero value.
  668. if (elem_count == 0 || !elem_type_can_be_constant(elem_type)) {
  669. return lb_const_nil(m, original_type);
  670. }
// Keyed form: `{index = value, lo..hi = value, ...}`. For each slot i, scan
// all field-values for a matching index or range; unmatched slots get null.
  671. if (cl->elems[0]->kind == Ast_FieldValue) {
  672. // TODO(bill): This is O(N*M) and will be quite slow; it should probably be sorted before hand
  673. LLVMValueRef *values = gb_alloc_array(temporary_allocator(), LLVMValueRef, cast(isize)type->Array.count);
  674. isize value_index = 0;
  675. for (i64 i = 0; i < type->Array.count; i++) {
  676. bool found = false;
  677. for (isize j = 0; j < elem_count; j++) {
  678. Ast *elem = cl->elems[j];
  679. ast_node(fv, FieldValue, elem);
  680. if (is_ast_range(fv->field)) {
  681. ast_node(ie, BinaryExpr, fv->field);
  682. TypeAndValue lo_tav = ie->left->tav;
  683. TypeAndValue hi_tav = ie->right->tav;
  684. GB_ASSERT(lo_tav.mode == Addressing_Constant);
  685. GB_ASSERT(hi_tav.mode == Addressing_Constant);
  686. TokenKind op = ie->op.kind;
  687. i64 lo = exact_value_to_i64(lo_tav.value);
  688. i64 hi = exact_value_to_i64(hi_tav.value);
// `a..b` is inclusive; only the half-open `a..<b` leaves hi untouched.
  689. if (op != Token_RangeHalf) {
  690. hi += 1;
  691. }
  692. if (lo == i) {
  693. TypeAndValue tav = fv->value->tav;
  694. LLVMValueRef val = lb_const_value(m, elem_type, tav.value, allow_local).value;
// Fill the whole range with the same constant value.
  695. for (i64 k = lo; k < hi; k++) {
  696. values[value_index++] = val;
  697. }
  698. found = true;
// Skip the slots just filled (loop's i++ accounts for the final one).
  699. i += (hi-lo-1);
  700. break;
  701. }
  702. } else {
  703. TypeAndValue index_tav = fv->field->tav;
  704. GB_ASSERT(index_tav.mode == Addressing_Constant);
  705. i64 index = exact_value_to_i64(index_tav.value);
  706. if (index == i) {
  707. TypeAndValue tav = fv->value->tav;
  708. LLVMValueRef val = lb_const_value(m, elem_type, tav.value, allow_local).value;
  709. values[value_index++] = val;
  710. found = true;
  711. break;
  712. }
  713. }
  714. }
// No field-value targeted slot i: zero-initialize it.
  715. if (!found) {
  716. values[value_index++] = LLVMConstNull(lb_type(m, elem_type));
  717. }
  718. }
  719. res.value = lb_build_constant_array_values(m, type, elem_type, cast(isize)type->Array.count, values, allow_local);
  720. return res;
// Positional form: elements in order, remainder zero-filled.
  721. } else {
  722. GB_ASSERT_MSG(elem_count == type->Array.count, "%td != %td", elem_count, type->Array.count);
  723. LLVMValueRef *values = gb_alloc_array(temporary_allocator(), LLVMValueRef, cast(isize)type->Array.count);
  724. for (isize i = 0; i < elem_count; i++) {
  725. TypeAndValue tav = cl->elems[i]->tav;
  726. GB_ASSERT(tav.mode != Addressing_Invalid);
  727. values[i] = lb_const_value(m, elem_type, tav.value, allow_local).value;
  728. }
  729. for (isize i = elem_count; i < type->Array.count; i++) {
  730. values[i] = LLVMConstNull(lb_type(m, elem_type));
  731. }
  732. res.value = lb_build_constant_array_values(m, type, elem_type, cast(isize)type->Array.count, values, allow_local);
  733. return res;
  734. }
// Enumerated array literal: same structure as the plain-array path above,
// except slot indices run over the enum's ordinal range [min_value, max_value]
// rather than [0, count).
  735. } else if (is_type_enumerated_array(type)) {
  736. ast_node(cl, CompoundLit, value.value_compound);
  737. Type *elem_type = type->EnumeratedArray.elem;
  738. isize elem_count = cl->elems.count;
  739. if (elem_count == 0 || !elem_type_can_be_constant(elem_type)) {
  740. return lb_const_nil(m, original_type);
  741. }
// Keyed form: `{.A = value, .A .. .B = value, ...}`.
  742. if (cl->elems[0]->kind == Ast_FieldValue) {
  743. // TODO(bill): This is O(N*M) and will be quite slow; it should probably be sorted before hand
  744. LLVMValueRef *values = gb_alloc_array(temporary_allocator(), LLVMValueRef, cast(isize)type->EnumeratedArray.count);
  745. isize value_index = 0;
// Iterate the enum's value range inclusively; keys are compared as i64s.
  746. i64 total_lo = exact_value_to_i64(*type->EnumeratedArray.min_value);
  747. i64 total_hi = exact_value_to_i64(*type->EnumeratedArray.max_value);
  748. for (i64 i = total_lo; i <= total_hi; i++) {
  749. bool found = false;
  750. for (isize j = 0; j < elem_count; j++) {
  751. Ast *elem = cl->elems[j];
  752. ast_node(fv, FieldValue, elem);
  753. if (is_ast_range(fv->field)) {
  754. ast_node(ie, BinaryExpr, fv->field);
  755. TypeAndValue lo_tav = ie->left->tav;
  756. TypeAndValue hi_tav = ie->right->tav;
  757. GB_ASSERT(lo_tav.mode == Addressing_Constant);
  758. GB_ASSERT(hi_tav.mode == Addressing_Constant);
  759. TokenKind op = ie->op.kind;
  760. i64 lo = exact_value_to_i64(lo_tav.value);
  761. i64 hi = exact_value_to_i64(hi_tav.value);
// `a..b` is inclusive; only `a..<b` (Token_RangeHalf) leaves hi untouched.
  762. if (op != Token_RangeHalf) {
  763. hi += 1;
  764. }
  765. if (lo == i) {
  766. TypeAndValue tav = fv->value->tav;
  767. LLVMValueRef val = lb_const_value(m, elem_type, tav.value, allow_local).value;
  768. for (i64 k = lo; k < hi; k++) {
  769. values[value_index++] = val;
  770. }
  771. found = true;
// Skip the slots just filled (loop's i++ accounts for the final one).
  772. i += (hi-lo-1);
  773. break;
  774. }
  775. } else {
  776. TypeAndValue index_tav = fv->field->tav;
  777. GB_ASSERT(index_tav.mode == Addressing_Constant);
  778. i64 index = exact_value_to_i64(index_tav.value);
  779. if (index == i) {
  780. TypeAndValue tav = fv->value->tav;
  781. LLVMValueRef val = lb_const_value(m, elem_type, tav.value, allow_local).value;
  782. values[value_index++] = val;
  783. found = true;
  784. break;
  785. }
  786. }
  787. }
// Slots not named by any field-value are zero-initialized.
  788. if (!found) {
  789. values[value_index++] = LLVMConstNull(lb_type(m, elem_type));
  790. }
  791. }
  792. res.value = lb_build_constant_array_values(m, type, elem_type, cast(isize)type->EnumeratedArray.count, values, allow_local);
  793. return res;
// Positional form: elements in order, remainder zero-filled.
  794. } else {
  795. GB_ASSERT_MSG(elem_count == type->EnumeratedArray.count, "%td != %td", elem_count, type->EnumeratedArray.count);
  796. LLVMValueRef *values = gb_alloc_array(temporary_allocator(), LLVMValueRef, cast(isize)type->EnumeratedArray.count);
  797. for (isize i = 0; i < elem_count; i++) {
  798. TypeAndValue tav = cl->elems[i]->tav;
  799. GB_ASSERT(tav.mode != Addressing_Invalid);
  800. values[i] = lb_const_value(m, elem_type, tav.value, allow_local).value;
  801. }
  802. for (isize i = elem_count; i < type->EnumeratedArray.count; i++) {
  803. values[i] = LLVMConstNull(lb_type(m, elem_type));
  804. }
  805. res.value = lb_build_constant_array_values(m, type, elem_type, cast(isize)type->EnumeratedArray.count, values, allow_local);
  806. return res;
  807. }
// #simd vector literal: same fill logic as arrays, but the result is an
// LLVM vector constant (LLVMConstVector) instead of an array aggregate.
  808. } else if (is_type_simd_vector(type)) {
  809. ast_node(cl, CompoundLit, value.value_compound);
  810. Type *elem_type = type->SimdVector.elem;
  811. isize elem_count = cl->elems.count;
  812. if (elem_count == 0) {
  813. return lb_const_nil(m, original_type);
  814. }
  815. GB_ASSERT(elem_type_can_be_constant(elem_type));
  816. isize total_elem_count = cast(isize)type->SimdVector.count;
  817. LLVMValueRef *values = gb_alloc_array(temporary_allocator(), LLVMValueRef, total_elem_count);
// Keyed form: `{index = value, lo..hi = value, ...}`.
  818. if (cl->elems[0]->kind == Ast_FieldValue) {
  819. // TODO(bill): This is O(N*M) and will be quite slow; it should probably be sorted before hand
  820. isize value_index = 0;
  821. for (i64 i = 0; i < total_elem_count; i++) {
  822. bool found = false;
  823. for (isize j = 0; j < elem_count; j++) {
  824. Ast *elem = cl->elems[j];
  825. ast_node(fv, FieldValue, elem);
  826. if (is_ast_range(fv->field)) {
  827. ast_node(ie, BinaryExpr, fv->field);
  828. TypeAndValue lo_tav = ie->left->tav;
  829. TypeAndValue hi_tav = ie->right->tav;
  830. GB_ASSERT(lo_tav.mode == Addressing_Constant);
  831. GB_ASSERT(hi_tav.mode == Addressing_Constant);
  832. TokenKind op = ie->op.kind;
  833. i64 lo = exact_value_to_i64(lo_tav.value);
  834. i64 hi = exact_value_to_i64(hi_tav.value);
// `a..b` is inclusive; only `a..<b` (Token_RangeHalf) leaves hi untouched.
  835. if (op != Token_RangeHalf) {
  836. hi += 1;
  837. }
  838. if (lo == i) {
  839. TypeAndValue tav = fv->value->tav;
  840. LLVMValueRef val = lb_const_value(m, elem_type, tav.value, allow_local).value;
  841. for (i64 k = lo; k < hi; k++) {
  842. values[value_index++] = val;
  843. }
  844. found = true;
// Skip the lanes just filled (loop's i++ accounts for the final one).
  845. i += (hi-lo-1);
  846. break;
  847. }
  848. } else {
  849. TypeAndValue index_tav = fv->field->tav;
  850. GB_ASSERT(index_tav.mode == Addressing_Constant);
  851. i64 index = exact_value_to_i64(index_tav.value);
  852. if (index == i) {
  853. TypeAndValue tav = fv->value->tav;
  854. LLVMValueRef val = lb_const_value(m, elem_type, tav.value, allow_local).value;
  855. values[value_index++] = val;
  856. found = true;
  857. break;
  858. }
  859. }
  860. }
// Lanes not named by any field-value are zero-initialized.
  861. if (!found) {
  862. values[value_index++] = LLVMConstNull(lb_type(m, elem_type));
  863. }
  864. }
  865. res.value = LLVMConstVector(values, cast(unsigned)total_elem_count);
  866. return res;
// Positional form: elements in order, trailing lanes zero-filled, then every
// lane coerced to the vector's element type before building the constant.
  867. } else {
  868. for (isize i = 0; i < elem_count; i++) {
  869. TypeAndValue tav = cl->elems[i]->tav;
  870. GB_ASSERT(tav.mode != Addressing_Invalid);
  871. values[i] = lb_const_value(m, elem_type, tav.value, allow_local).value;
  872. }
  873. LLVMTypeRef et = lb_type(m, elem_type);
  874. for (isize i = elem_count; i < total_elem_count; i++) {
  875. values[i] = LLVMConstNull(et);
  876. }
// LLVMConstVector requires uniform lane types; normalize every value.
  877. for (isize i = 0; i < total_elem_count; i++) {
  878. values[i] = llvm_const_cast(values[i], et);
  879. }
  880. res.value = LLVMConstVector(values, cast(unsigned)total_elem_count);
  881. return res;
  882. }
// Struct literal. Source field order can differ from the LLVM struct layout,
// so field_remapping translates Odin field indices to LLVM element indices.
  883. } else if (is_type_struct(type)) {
  884. ast_node(cl, CompoundLit, value.value_compound);
  885. if (cl->elems.count == 0) {
  886. return lb_const_nil(m, original_type);
  887. }
// Raw unions cannot be expressed as LLVM struct constants here; zero them.
  888. if (is_type_raw_union(type)) {
  889. return lb_const_nil(m, original_type);
  890. }
  891. LLVMTypeRef struct_type = lb_type(m, original_type);
  892. auto field_remapping = lb_get_struct_remapping(m, type);
  893. unsigned value_count = LLVMCountStructElementTypes(struct_type);
// `values[i]` holds the constant for LLVM element i; `visited[i]` marks
// elements that were explicitly assigned (the rest get null later).
  894. LLVMValueRef *values = gb_alloc_array(temporary_allocator(), LLVMValueRef, value_count);
  895. bool *visited = gb_alloc_array(temporary_allocator(), bool, value_count);
// Keyed form: `{field = value, ...}`. A selection depth > 1 means the key
// addresses a field of a nested using/anonymous member.
  896. if (cl->elems[0]->kind == Ast_FieldValue) {
  897. isize elem_count = cl->elems.count;
  898. for (isize i = 0; i < elem_count; i++) {
  899. ast_node(fv, FieldValue, cl->elems[i]);
  900. String name = fv->field->Ident.token.string;
  901. TypeAndValue tav = fv->value->tav;
  902. GB_ASSERT(tav.mode != Addressing_Invalid);
  903. Selection sel = lookup_field(type, name, false);
  904. GB_ASSERT(!sel.indirect);
  905. Entity *f = type->Struct.fields[sel.index[0]];
  906. i32 index = field_remapping[f->Variable.field_index];
  907. if (elem_type_can_be_constant(f->type)) {
// Direct field: assign the constant straight into its remapped slot.
  908. if (sel.index.count == 1) {
  909. values[index] = lb_const_value(m, f->type, tav.value, allow_local).value;
  910. visited[index] = true;
  911. } else {
// Nested field: first materialize the outer field's zero value once, then
// insert the element at the nested index path.
  912. if (!visited[index]) {
  913. values[index] = lb_const_value(m, f->type, {}, false).value;
  914. visited[index] = true;
  915. }
  916. unsigned idx_list_len = cast(unsigned)sel.index.count-1;
  917. unsigned *idx_list = gb_alloc_array(temporary_allocator(), unsigned, idx_list_len);
  918. if (lb_is_nested_possibly_constant(type, sel, fv->value)) {
  919. bool is_constant = true;
// Walk the selection path, translating each struct hop through that struct's
// own field remapping; array hops use the index as-is.
  920. Type *cv_type = f->type;
  921. for (isize j = 1; j < sel.index.count; j++) {
// NOTE: this inner `index` intentionally shadows the outer remapped slot index.
  922. i32 index = sel.index[j];
  923. Type *cvt = base_type(cv_type);
  924. if (cvt->kind == Type_Struct) {
  925. if (cvt->Struct.is_raw_union) {
  926. // sanity check which should have been caught by `lb_is_nested_possibly_constant`
  927. is_constant = false;
  928. break;
  929. }
  930. cv_type = cvt->Struct.fields[index]->type;
  931. if (is_type_struct(cvt)) {
  932. auto cv_field_remapping = lb_get_struct_remapping(m, cvt);
  933. unsigned remapped_index = cast(unsigned)cv_field_remapping[index];
  934. idx_list[j-1] = remapped_index;
  935. } else {
  936. idx_list[j-1] = cast(unsigned)index;
  937. }
  938. } else if (cvt->kind == Type_Array) {
  939. cv_type = cvt->Array.elem;
  940. idx_list[j-1] = cast(unsigned)index;
  941. } else {
  942. GB_PANIC("UNKNOWN TYPE: %s", type_to_string(cv_type));
  943. }
  944. }
  945. if (is_constant) {
  946. LLVMValueRef elem_value = lb_const_value(m, tav.type, tav.value, allow_local).value;
  947. if (LLVMIsConstant(elem_value)) {
  948. values[index] = llvm_const_insert_value(m, values[index], elem_value, idx_list, idx_list_len);
  949. } else {
// NOTE(review): setting is_constant here has no further effect — the nested
// non-constant value is simply dropped; confirm this is the intended fallback.
  950. is_constant = false;
  951. }
  952. }
  953. }
  954. }
  955. }
  956. }
// Positional form: literal elements pair 1:1 with declared struct fields.
  957. } else {
  958. for_array(i, cl->elems) {
  959. Entity *f = type->Struct.fields[i];
  960. TypeAndValue tav = cl->elems[i]->tav;
  961. ExactValue val = {};
  962. if (tav.mode != Addressing_Invalid) {
  963. val = tav.value;
  964. }
  965. i32 index = field_remapping[f->Variable.field_index];
  966. if (elem_type_can_be_constant(f->type)) {
  967. values[index] = lb_const_value(m, f->type, val, allow_local).value;
  968. visited[index] = true;
  969. }
  970. }
  971. }
// Zero-fill every LLVM element (including padding slots) never assigned above.
  972. for (isize i = 0; i < value_count; i++) {
  973. if (!visited[i]) {
  974. GB_ASSERT(values[i] == nullptr);
  975. LLVMTypeRef type = LLVMStructGetTypeAtIndex(struct_type, cast(unsigned)i);
  976. values[i] = LLVMConstNull(type);
  977. }
  978. }
// Any non-constant element (only legal locally, and only as a load) forces
// the runtime-store fallback below.
  979. bool is_constant = true;
  980. for (isize i = 0; i < value_count; i++) {
  981. LLVMValueRef val = values[i];
  982. if (!LLVMIsConstant(val)) {
  983. GB_ASSERT(is_local);
  984. GB_ASSERT(LLVMGetInstructionOpcode(val) == LLVMLoad);
  985. is_constant = false;
  986. }
  987. }
  988. if (is_constant) {
  989. res.value = llvm_const_named_struct_internal(struct_type, values, cast(unsigned)value_count);
  990. return res;
  991. } else {
  992. // TODO(bill): THIS IS HACK BUT IT WORKS FOR WHAT I NEED
// Fallback: build a constant with nulls in the non-constant slots, store it
// into a fresh stack local, then patch each non-constant slot with an explicit
// store through a struct GEP, and return the loaded aggregate.
  993. LLVMValueRef *old_values = values;
  994. LLVMValueRef *new_values = gb_alloc_array(temporary_allocator(), LLVMValueRef, value_count);
  995. for (isize i = 0; i < value_count; i++) {
  996. LLVMValueRef old_value = old_values[i];
  997. if (LLVMIsConstant(old_value)) {
  998. new_values[i] = old_value;
  999. } else {
  1000. new_values[i] = LLVMConstNull(LLVMTypeOf(old_value));
  1001. }
  1002. }
  1003. LLVMValueRef constant_value = llvm_const_named_struct_internal(struct_type, new_values, cast(unsigned)value_count);
  1004. GB_ASSERT(is_local);
  1005. lbProcedure *p = m->curr_procedure;
  1006. lbAddr v = lb_add_local_generated(p, res.type, true);
// Remember the address so later references to this literal reuse the local.
  1007. map_set(&m->exact_value_compound_literal_addr_map, value.value_compound, v);
  1008. LLVMBuildStore(p->builder, constant_value, v.addr.value);
  1009. for (isize i = 0; i < value_count; i++) {
  1010. LLVMValueRef val = old_values[i];
  1011. if (!LLVMIsConstant(val)) {
  1012. LLVMValueRef dst = LLVMBuildStructGEP2(p->builder, llvm_addr_type(p->module, v.addr), v.addr.value, cast(unsigned)i, "");
  1013. LLVMBuildStore(p->builder, val, dst);
  1014. }
  1015. }
  1016. return lb_addr_load(p, v);
  1017. }
// bit_set literal: OR together `1 << (element - lower)` for every constant
// element, using BigInt arithmetic so sets wider than 64 bits work too.
  1018. } else if (is_type_bit_set(type)) {
  1019. ast_node(cl, CompoundLit, value.value_compound);
  1020. if (cl->elems.count == 0) {
  1021. return lb_const_nil(m, original_type);
  1022. }
  1023. i64 sz = type_size_of(type);
  1024. if (sz == 0) {
  1025. return lb_const_nil(m, original_type);
  1026. }
  1027. BigInt bits = {};
  1028. BigInt one = {};
  1029. big_int_from_u64(&one, 1);
  1030. for_array(i, cl->elems) {
  1031. Ast *e = cl->elems[i];
  1032. GB_ASSERT(e->kind != Ast_FieldValue);
  1033. TypeAndValue tav = e->tav;
// Non-constant elements are silently skipped here (handled elsewhere).
  1034. if (tav.mode != Addressing_Constant) {
  1035. continue;
  1036. }
  1037. GB_ASSERT(tav.value.kind == ExactValue_Integer);
  1038. i64 v = big_int_to_i64(&tav.value.value_integer);
// Bit position is relative to the set's lower bound.
  1039. i64 lower = type->BitSet.lower;
  1040. u64 index = cast(u64)(v-lower);
  1041. BigInt bit = {};
  1042. big_int_from_u64(&bit, index);
  1043. big_int_shl(&bit, &one, &bit);
  1044. big_int_or(&bits, &bits, &bit);
  1045. }
  1046. res.value = lb_big_int_to_llvm(m, original_type, &bits);
  1047. return res;
// Matrix literal. Logical element k (row-major) is mapped to its padded
// internal storage offset via matrix_row_major_index_to_offset; total_count
// covers the full internal storage, which may exceed row_count*column_count.
  1048. } else if (is_type_matrix(type)) {
  1049. ast_node(cl, CompoundLit, value.value_compound);
  1050. Type *elem_type = type->Matrix.elem;
  1051. isize elem_count = cl->elems.count;
  1052. if (elem_count == 0 || !elem_type_can_be_constant(elem_type)) {
  1053. return lb_const_nil(m, original_type);
  1054. }
  1055. i64 max_count = type->Matrix.row_count*type->Matrix.column_count;
  1056. i64 total_count = matrix_type_total_internal_elems(type);
  1057. LLVMValueRef *values = gb_alloc_array(temporary_allocator(), LLVMValueRef, cast(isize)total_count);
// Keyed form: unlike the array paths, this iterates field-values directly and
// asserts that no storage slot is written twice.
  1058. if (cl->elems[0]->kind == Ast_FieldValue) {
  1059. for_array(j, cl->elems) {
  1060. Ast *elem = cl->elems[j];
  1061. ast_node(fv, FieldValue, elem);
  1062. if (is_ast_range(fv->field)) {
  1063. ast_node(ie, BinaryExpr, fv->field);
  1064. TypeAndValue lo_tav = ie->left->tav;
  1065. TypeAndValue hi_tav = ie->right->tav;
  1066. GB_ASSERT(lo_tav.mode == Addressing_Constant);
  1067. GB_ASSERT(hi_tav.mode == Addressing_Constant);
  1068. TokenKind op = ie->op.kind;
  1069. i64 lo = exact_value_to_i64(lo_tav.value);
  1070. i64 hi = exact_value_to_i64(hi_tav.value);
// `a..b` is inclusive; only `a..<b` (Token_RangeHalf) leaves hi untouched.
  1071. if (op != Token_RangeHalf) {
  1072. hi += 1;
  1073. }
  1074. GB_ASSERT(0 <= lo && lo <= max_count);
  1075. GB_ASSERT(0 <= hi && hi <= max_count);
  1076. GB_ASSERT(lo <= hi);
  1077. TypeAndValue tav = fv->value->tav;
  1078. LLVMValueRef val = lb_const_value(m, elem_type, tav.value, allow_local).value;
  1079. for (i64 k = lo; k < hi; k++) {
  1080. i64 offset = matrix_row_major_index_to_offset(type, k);
// Duplicate keys would overwrite a slot; assert it is still empty.
  1081. GB_ASSERT(values[offset] == nullptr);
  1082. values[offset] = val;
  1083. }
  1084. } else {
  1085. TypeAndValue index_tav = fv->field->tav;
  1086. GB_ASSERT(index_tav.mode == Addressing_Constant);
  1087. i64 index = exact_value_to_i64(index_tav.value);
  1088. GB_ASSERT(index < max_count);
  1089. TypeAndValue tav = fv->value->tav;
  1090. LLVMValueRef val = lb_const_value(m, elem_type, tav.value, allow_local).value;
  1091. i64 offset = matrix_row_major_index_to_offset(type, index);
  1092. GB_ASSERT(values[offset] == nullptr);
  1093. values[offset] = val;
  1094. }
  1095. }
// Unassigned internal slots (including padding) are zero-initialized.
  1096. for (i64 i = 0; i < total_count; i++) {
  1097. if (values[i] == nullptr) {
  1098. values[i] = LLVMConstNull(lb_type(m, elem_type));
  1099. }
  1100. }
  1101. res.value = lb_build_constant_array_values(m, type, elem_type, cast(isize)total_count, values, allow_local);
  1102. return res;
// Positional form: element i lands at its row-major internal offset.
  1103. } else {
  1104. GB_ASSERT_MSG(elem_count == max_count, "%td != %td", elem_count, max_count);
  1105. LLVMValueRef *values = gb_alloc_array(temporary_allocator(), LLVMValueRef, cast(isize)total_count);
  1106. for_array(i, cl->elems) {
  1107. TypeAndValue tav = cl->elems[i]->tav;
  1108. GB_ASSERT(tav.mode != Addressing_Invalid);
  1109. i64 offset = 0;
  1110. offset = matrix_row_major_index_to_offset(type, i);
  1111. values[offset] = lb_const_value(m, elem_type, tav.value, allow_local).value;
  1112. }
  1113. for (isize i = 0; i < total_count; i++) {
  1114. if (values[i] == nullptr) {
  1115. values[i] = LLVMConstNull(lb_type(m, elem_type));
  1116. }
  1117. }
  1118. res.value = lb_build_constant_array_values(m, type, elem_type, cast(isize)total_count, values, allow_local);
  1119. return res;
  1120. }
// Compound literal of any other type: fall back to the zero value.
  1121. } else {
  1122. return lb_const_nil(m, original_type);
  1123. }
  1124. break;
// Procedure constants are resolved before reaching this switch.
  1125. case ExactValue_Procedure:
  1126. GB_PANIC("handled earlier");
  1127. break;
  1128. case ExactValue_Typeid:
  1129. return lb_typeid(m, value.value_typeid);
  1130. }
// Any value kind not handled above degrades to the type's zero value.
  1131. return lb_const_nil(m, original_type);
  1132. }