// types.cpp

struct Scope;
struct AstNode;

enum BasicKind {
    Basic_Invalid,
    Basic_bool,
    Basic_i8,
    Basic_u8,
    Basic_i16,
    Basic_u16,
    Basic_i32,
    Basic_u32,
    Basic_i64,
    Basic_u64,
    Basic_i128,
    Basic_u128,
    Basic_rune,
    // Basic_f16,
    Basic_f32,
    Basic_f64,
    // Basic_complex32,
    Basic_complex64,
    Basic_complex128,
    Basic_int,
    Basic_uint,
    Basic_rawptr,
    Basic_string,  // ^u8 + int
    Basic_any,     // ^Type_Info + rawptr
    Basic_UntypedBool,
    Basic_UntypedInteger,
    Basic_UntypedFloat,
    Basic_UntypedComplex,
    Basic_UntypedString,
    Basic_UntypedRune,
    Basic_UntypedNil,
    Basic_UntypedUndef,
    Basic_COUNT,
    Basic_byte = Basic_u8,
};

enum BasicFlag {
    BasicFlag_Boolean  = GB_BIT(0),
    BasicFlag_Integer  = GB_BIT(1),
    BasicFlag_Unsigned = GB_BIT(2),
    BasicFlag_Float    = GB_BIT(3),
    BasicFlag_Complex  = GB_BIT(4),
    BasicFlag_Pointer  = GB_BIT(5),
    BasicFlag_String   = GB_BIT(6),
    BasicFlag_Rune     = GB_BIT(7),
    BasicFlag_Untyped  = GB_BIT(8),

    BasicFlag_Numeric      = BasicFlag_Integer | BasicFlag_Float | BasicFlag_Complex,
    BasicFlag_Ordered      = BasicFlag_Integer | BasicFlag_Float | BasicFlag_String | BasicFlag_Pointer | BasicFlag_Rune,
    BasicFlag_ConstantType = BasicFlag_Boolean | BasicFlag_Numeric | BasicFlag_String | BasicFlag_Pointer | BasicFlag_Rune,
};
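// NOTE: BasicFlag_Numeric, BasicFlag_Ordered and BasicFlag_ConstantType are
// composite masks; the is_type_* predicates further down test against them directly.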
struct BasicType {
    BasicKind kind;
    u32       flags;
    i64       size; // -1 if arch. dep.
    String    name;
};

struct TypeStruct {
    Array<Entity *> fields;
    Array<Entity *> fields_in_src_order;
    AstNode *node;
    Scope *  scope;
    Array<i64> offsets;
    bool are_offsets_set;
    bool are_offsets_being_processed;
    bool is_packed;
    bool is_ordered;
    bool is_raw_union;
    bool is_polymorphic;
    bool is_poly_specialized;
    Type * polymorphic_params; // Type_Tuple
    Type * polymorphic_parent;
    i64 custom_align; // NOTE(bill): Only used in structs at the moment
    Entity * names;
};

#define TYPE_KINDS \
    TYPE_KIND(Basic, BasicType) \
    TYPE_KIND(Generic, struct { \
        i64 id; \
        String name; \
        Type * specialized; \
    }) \
    TYPE_KIND(Pointer, struct { Type *elem; }) \
    TYPE_KIND(Array, struct { Type *elem; i64 count; }) \
    TYPE_KIND(DynamicArray, struct { Type *elem; }) \
    TYPE_KIND(Vector, struct { Type *elem; i64 count; }) \
    TYPE_KIND(Slice, struct { Type *elem; }) \
    TYPE_KIND(Struct, TypeStruct) \
    TYPE_KIND(Enum, struct { \
        Entity **fields; \
        i32 field_count; \
        AstNode *node; \
        Scope * scope; \
        Entity * names; \
        Type * base_type; \
        Entity * count; \
        Entity * min_value; \
        Entity * max_value; \
    }) \
    TYPE_KIND(Union, struct { \
        Array<Type *> variants; \
        AstNode *node; \
        Scope * scope; \
        Entity * union__type_info; \
        i64 variant_block_size; \
        i64 custom_align; \
    }) \
    TYPE_KIND(Named, struct { \
        String name; \
        Type * base; \
        Entity *type_name; /* Entity_TypeName */ \
    }) \
    TYPE_KIND(Tuple, struct { \
        Array<Entity *> variables; /* Entity_Variable */ \
        Array<i64> offsets; \
        bool are_offsets_set; \
    }) \
    TYPE_KIND(Proc, struct { \
        AstNode *node; \
        Scope * scope; \
        Type * params; /* Type_Tuple */ \
        Type * results; /* Type_Tuple */ \
        i32 param_count; \
        i32 result_count; \
        bool return_by_pointer; \
        Type ** abi_compat_params; \
        Type * abi_compat_result_type; \
        bool variadic; \
        bool require_results; \
        bool c_vararg; \
        bool is_polymorphic; \
        bool is_poly_specialized; \
        isize specialization_count; \
        ProcCallingConvention calling_convention; \
    }) \
    TYPE_KIND(Map, struct { \
        Type * key; \
        Type * value; \
        Type * entry_type; \
        Type * generated_struct_type; \
        Type * lookup_result_type; \
    }) \
    TYPE_KIND(BitFieldValue, struct { u32 bits; }) \
    TYPE_KIND(BitField, struct { \
        Scope * scope; \
        Entity **fields; \
        i32 field_count; \
        u32 * offsets; \
        u32 * sizes; \
        i64 custom_align; \
    })
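// NOTE: TYPE_KINDS is an X-macro list. It is expanded several times below to
// generate the TypeKind enum values, the type_strings name table, one typedef
// per kind, and the variant members of the union inside struct Type.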
enum TypeKind {
    Type_Invalid,
#define TYPE_KIND(k, ...) GB_JOIN2(Type_, k),
    TYPE_KINDS
#undef TYPE_KIND
    Type_Count,
};

String const type_strings[] = {
    {cast(u8 *)"Invalid", gb_size_of("Invalid")},
#define TYPE_KIND(k, ...) {cast(u8 *)#k, gb_size_of(#k)-1},
    TYPE_KINDS
#undef TYPE_KIND
};

#define TYPE_KIND(k, ...) typedef __VA_ARGS__ GB_JOIN2(Type, k);
    TYPE_KINDS
#undef TYPE_KIND

struct Type {
    TypeKind kind;
    union {
#define TYPE_KIND(k, ...) GB_JOIN2(Type, k) k;
        TYPE_KINDS
#undef TYPE_KIND
    };
    bool failure;
};
  178. // TODO(bill): Should I add extra information here specifying the kind of selection?
  179. // e.g. field, constant, vector field, type field, etc.
  180. struct Selection {
  181. Entity * entity;
  182. Array<i32> index;
  183. bool indirect; // Set if there was a pointer deref anywhere down the line
  184. };
  185. Selection empty_selection = {0};
  186. Selection make_selection(Entity *entity, Array<i32> index, bool indirect) {
  187. Selection s = {entity, index, indirect};
  188. return s;
  189. }
  190. void selection_add_index(Selection *s, isize index) {
  191. // IMPORTANT NOTE(bill): this requires a stretchy buffer/dynamic array so it requires some form
  192. // of heap allocation
  193. // TODO(bill): Find a way to use a backing buffer for initial use as the general case is probably .count<3
  194. if (s->index.data == nullptr) {
  195. array_init(&s->index, heap_allocator());
  196. }
  197. array_add(&s->index, cast(i32)index);
  198. }
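// NOTE: basic_types is indexed by BasicKind (see the t_* pointer aliases that
// follow it); a size of -1 marks a size that depends on the target architecture.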
  199. gb_global Type basic_types[] = {
  200. {Type_Basic, {Basic_Invalid, 0, 0, STR_LIT("invalid type")}},
  201. {Type_Basic, {Basic_bool, BasicFlag_Boolean, 1, STR_LIT("bool")}},
  202. {Type_Basic, {Basic_i8, BasicFlag_Integer, 1, STR_LIT("i8")}},
  203. {Type_Basic, {Basic_u8, BasicFlag_Integer | BasicFlag_Unsigned, 1, STR_LIT("u8")}},
  204. {Type_Basic, {Basic_i16, BasicFlag_Integer, 2, STR_LIT("i16")}},
  205. {Type_Basic, {Basic_u16, BasicFlag_Integer | BasicFlag_Unsigned, 2, STR_LIT("u16")}},
  206. {Type_Basic, {Basic_i32, BasicFlag_Integer, 4, STR_LIT("i32")}},
  207. {Type_Basic, {Basic_u32, BasicFlag_Integer | BasicFlag_Unsigned, 4, STR_LIT("u32")}},
  208. {Type_Basic, {Basic_i64, BasicFlag_Integer, 8, STR_LIT("i64")}},
  209. {Type_Basic, {Basic_u64, BasicFlag_Integer | BasicFlag_Unsigned, 8, STR_LIT("u64")}},
  210. {Type_Basic, {Basic_i128, BasicFlag_Integer, 16, STR_LIT("i128")}},
  211. {Type_Basic, {Basic_u128, BasicFlag_Integer | BasicFlag_Unsigned, 16, STR_LIT("u128")}},
  212. {Type_Basic, {Basic_rune, BasicFlag_Integer | BasicFlag_Rune, 4, STR_LIT("rune")}},
  213. // {Type_Basic, {Basic_f16, BasicFlag_Float, 2, STR_LIT("f16")}},
  214. {Type_Basic, {Basic_f32, BasicFlag_Float, 4, STR_LIT("f32")}},
  215. {Type_Basic, {Basic_f64, BasicFlag_Float, 8, STR_LIT("f64")}},
  216. // {Type_Basic, {Basic_complex32, BasicFlag_Complex, 4, STR_LIT("complex32")}},
  217. {Type_Basic, {Basic_complex64, BasicFlag_Complex, 8, STR_LIT("complex64")}},
  218. {Type_Basic, {Basic_complex128, BasicFlag_Complex, 16, STR_LIT("complex128")}},
  219. {Type_Basic, {Basic_int, BasicFlag_Integer, -1, STR_LIT("int")}},
  220. {Type_Basic, {Basic_uint, BasicFlag_Integer | BasicFlag_Unsigned, -1, STR_LIT("uint")}},
  221. {Type_Basic, {Basic_rawptr, BasicFlag_Pointer, -1, STR_LIT("rawptr")}},
  222. {Type_Basic, {Basic_string, BasicFlag_String, -1, STR_LIT("string")}},
  223. {Type_Basic, {Basic_any, 0, -1, STR_LIT("any")}},
  224. {Type_Basic, {Basic_UntypedBool, BasicFlag_Boolean | BasicFlag_Untyped, 0, STR_LIT("untyped bool")}},
  225. {Type_Basic, {Basic_UntypedInteger, BasicFlag_Integer | BasicFlag_Untyped, 0, STR_LIT("untyped integer")}},
  226. {Type_Basic, {Basic_UntypedFloat, BasicFlag_Float | BasicFlag_Untyped, 0, STR_LIT("untyped float")}},
  227. {Type_Basic, {Basic_UntypedComplex, BasicFlag_Complex | BasicFlag_Untyped, 0, STR_LIT("untyped complex")}},
  228. {Type_Basic, {Basic_UntypedString, BasicFlag_String | BasicFlag_Untyped, 0, STR_LIT("untyped string")}},
  229. {Type_Basic, {Basic_UntypedRune, BasicFlag_Integer | BasicFlag_Untyped, 0, STR_LIT("untyped rune")}},
  230. {Type_Basic, {Basic_UntypedNil, BasicFlag_Untyped, 0, STR_LIT("untyped nil")}},
  231. {Type_Basic, {Basic_UntypedUndef, BasicFlag_Untyped, 0, STR_LIT("untyped undefined")}},
  232. };
  233. // gb_global Type basic_type_aliases[] = {
  234. // // {Type_Basic, {Basic_byte, BasicFlag_Integer | BasicFlag_Unsigned, 1, STR_LIT("byte")}},
  235. // // {Type_Basic, {Basic_rune, BasicFlag_Integer, 4, STR_LIT("rune")}},
  236. // };
  237. gb_global Type *t_invalid = &basic_types[Basic_Invalid];
  238. gb_global Type *t_bool = &basic_types[Basic_bool];
  239. gb_global Type *t_i8 = &basic_types[Basic_i8];
  240. gb_global Type *t_u8 = &basic_types[Basic_u8];
  241. gb_global Type *t_i16 = &basic_types[Basic_i16];
  242. gb_global Type *t_u16 = &basic_types[Basic_u16];
  243. gb_global Type *t_i32 = &basic_types[Basic_i32];
  244. gb_global Type *t_u32 = &basic_types[Basic_u32];
  245. gb_global Type *t_i64 = &basic_types[Basic_i64];
  246. gb_global Type *t_u64 = &basic_types[Basic_u64];
  247. gb_global Type *t_i128 = &basic_types[Basic_i128];
  248. gb_global Type *t_u128 = &basic_types[Basic_u128];
  249. gb_global Type *t_rune = &basic_types[Basic_rune];
  250. // gb_global Type *t_f16 = &basic_types[Basic_f16];
  251. gb_global Type *t_f32 = &basic_types[Basic_f32];
  252. gb_global Type *t_f64 = &basic_types[Basic_f64];
  253. // gb_global Type *t_complex32 = &basic_types[Basic_complex32];
  254. gb_global Type *t_complex64 = &basic_types[Basic_complex64];
  255. gb_global Type *t_complex128 = &basic_types[Basic_complex128];
  256. gb_global Type *t_int = &basic_types[Basic_int];
  257. gb_global Type *t_uint = &basic_types[Basic_uint];
  258. gb_global Type *t_rawptr = &basic_types[Basic_rawptr];
  259. gb_global Type *t_string = &basic_types[Basic_string];
  260. gb_global Type *t_any = &basic_types[Basic_any];
  261. gb_global Type *t_untyped_bool = &basic_types[Basic_UntypedBool];
  262. gb_global Type *t_untyped_integer = &basic_types[Basic_UntypedInteger];
  263. gb_global Type *t_untyped_float = &basic_types[Basic_UntypedFloat];
  264. gb_global Type *t_untyped_complex = &basic_types[Basic_UntypedComplex];
  265. gb_global Type *t_untyped_string = &basic_types[Basic_UntypedString];
  266. gb_global Type *t_untyped_rune = &basic_types[Basic_UntypedRune];
  267. gb_global Type *t_untyped_nil = &basic_types[Basic_UntypedNil];
  268. gb_global Type *t_untyped_undef = &basic_types[Basic_UntypedUndef];
  269. gb_global Type *t_u8_ptr = nullptr;
  270. gb_global Type *t_int_ptr = nullptr;
  271. gb_global Type *t_i64_ptr = nullptr;
  272. gb_global Type *t_i128_ptr = nullptr;
  273. gb_global Type *t_f64_ptr = nullptr;
  274. gb_global Type *t_u8_slice = nullptr;
  275. gb_global Type *t_string_slice = nullptr;
  276. // Type generated for the "preload" file
  277. gb_global Type *t_type_info = nullptr;
  278. gb_global Type *t_type_info_enum_value = nullptr;
  279. gb_global Type *t_type_info_ptr = nullptr;
  280. gb_global Type *t_type_info_enum_value_ptr = nullptr;
  281. gb_global Type *t_type_info_named = nullptr;
  282. gb_global Type *t_type_info_integer = nullptr;
  283. gb_global Type *t_type_info_rune = nullptr;
  284. gb_global Type *t_type_info_float = nullptr;
  285. gb_global Type *t_type_info_complex = nullptr;
  286. gb_global Type *t_type_info_any = nullptr;
  287. gb_global Type *t_type_info_string = nullptr;
  288. gb_global Type *t_type_info_boolean = nullptr;
  289. gb_global Type *t_type_info_pointer = nullptr;
  290. gb_global Type *t_type_info_procedure = nullptr;
  291. gb_global Type *t_type_info_array = nullptr;
  292. gb_global Type *t_type_info_dynamic_array = nullptr;
  293. gb_global Type *t_type_info_slice = nullptr;
  294. gb_global Type *t_type_info_vector = nullptr;
  295. gb_global Type *t_type_info_tuple = nullptr;
  296. gb_global Type *t_type_info_struct = nullptr;
  297. gb_global Type *t_type_info_union = nullptr;
  298. gb_global Type *t_type_info_enum = nullptr;
  299. gb_global Type *t_type_info_map = nullptr;
  300. gb_global Type *t_type_info_bit_field = nullptr;
  301. gb_global Type *t_type_info_named_ptr = nullptr;
  302. gb_global Type *t_type_info_integer_ptr = nullptr;
  303. gb_global Type *t_type_info_rune_ptr = nullptr;
  304. gb_global Type *t_type_info_float_ptr = nullptr;
  305. gb_global Type *t_type_info_complex_ptr = nullptr;
  306. gb_global Type *t_type_info_quaternion_ptr = nullptr;
  307. gb_global Type *t_type_info_any_ptr = nullptr;
  308. gb_global Type *t_type_info_string_ptr = nullptr;
  309. gb_global Type *t_type_info_boolean_ptr = nullptr;
  310. gb_global Type *t_type_info_pointer_ptr = nullptr;
  311. gb_global Type *t_type_info_procedure_ptr = nullptr;
  312. gb_global Type *t_type_info_array_ptr = nullptr;
  313. gb_global Type *t_type_info_dynamic_array_ptr = nullptr;
  314. gb_global Type *t_type_info_slice_ptr = nullptr;
  315. gb_global Type *t_type_info_vector_ptr = nullptr;
  316. gb_global Type *t_type_info_tuple_ptr = nullptr;
  317. gb_global Type *t_type_info_struct_ptr = nullptr;
  318. gb_global Type *t_type_info_union_ptr = nullptr;
  319. gb_global Type *t_type_info_enum_ptr = nullptr;
  320. gb_global Type *t_type_info_map_ptr = nullptr;
  321. gb_global Type *t_type_info_bit_field_ptr = nullptr;
  322. gb_global Type *t_allocator = nullptr;
  323. gb_global Type *t_allocator_ptr = nullptr;
  324. gb_global Type *t_context = nullptr;
  325. gb_global Type *t_context_ptr = nullptr;
  326. gb_global Type *t_source_code_location = nullptr;
  327. gb_global Type *t_source_code_location_ptr = nullptr;
  328. gb_global Type *t_map_key = nullptr;
  329. gb_global Type *t_map_header = nullptr;
  330. i64 type_size_of (gbAllocator allocator, Type *t);
  331. i64 type_align_of (gbAllocator allocator, Type *t);
  332. i64 type_offset_of (gbAllocator allocator, Type *t, i32 index);
  333. gbString type_to_string (Type *type);
  334. void generate_map_internal_types(gbAllocator a, Type *type);
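// NOTE: base_type unwraps Type_Named wrappers (returning t_invalid on a
// self-referential named type), base_enum_type additionally resolves an enum
// to its declared base type, and core_type strips both named and enum layers.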
  335. Type *base_type(Type *t) {
  336. for (;;) {
  337. if (t == nullptr) {
  338. break;
  339. }
  340. if (t->kind != Type_Named) {
  341. break;
  342. }
  343. if (t == t->Named.base) {
  344. return t_invalid;
  345. }
  346. t = t->Named.base;
  347. }
  348. return t;
  349. }
  350. Type *base_enum_type(Type *t) {
  351. Type *bt = base_type(t);
  352. if (bt != nullptr &&
  353. bt->kind == Type_Enum) {
  354. return bt->Enum.base_type;
  355. }
  356. return t;
  357. }
  358. Type *core_type(Type *t) {
  359. for (;;) {
  360. if (t == nullptr) {
  361. break;
  362. }
  363. switch (t->kind) {
  364. case Type_Named:
  365. if (t == t->Named.base) {
  366. return t_invalid;
  367. }
  368. t = t->Named.base;
  369. continue;
  370. case Type_Enum:
  371. t = t->Enum.base_type;
  372. continue;
  373. }
  374. break;
  375. }
  376. return t;
  377. }
  378. void set_base_type(Type *t, Type *base) {
  379. if (t && t->kind == Type_Named) {
  380. t->Named.base = base;
  381. }
  382. }
  383. Type *alloc_type(gbAllocator a, TypeKind kind) {
  384. Type *t = gb_alloc_item(a, Type);
  385. gb_zero_item(t);
  386. t->kind = kind;
  387. return t;
  388. }
  389. Type *make_type_basic(gbAllocator a, BasicType basic) {
  390. Type *t = alloc_type(a, Type_Basic);
  391. t->Basic = basic;
  392. return t;
  393. }
  394. Type *make_type_generic(gbAllocator a, i64 id, String name, Type *specialized) {
  395. Type *t = alloc_type(a, Type_Generic);
  396. t->Generic.id = id;
  397. t->Generic.name = name;
  398. t->Generic.specialized = specialized;
  399. return t;
  400. }
  401. Type *make_type_pointer(gbAllocator a, Type *elem) {
  402. Type *t = alloc_type(a, Type_Pointer);
  403. t->Pointer.elem = elem;
  404. return t;
  405. }
  406. Type *make_type_array(gbAllocator a, Type *elem, i64 count) {
  407. Type *t = alloc_type(a, Type_Array);
  408. t->Array.elem = elem;
  409. t->Array.count = count;
  410. return t;
  411. }
  412. Type *make_type_dynamic_array(gbAllocator a, Type *elem) {
  413. Type *t = alloc_type(a, Type_DynamicArray);
  414. t->DynamicArray.elem = elem;
  415. return t;
  416. }
  417. Type *make_type_vector(gbAllocator a, Type *elem, i64 count) {
  418. Type *t = alloc_type(a, Type_Vector);
  419. t->Vector.elem = elem;
  420. t->Vector.count = count;
  421. return t;
  422. }
Type *make_type_slice(gbAllocator a, Type *elem) {
    Type *t = alloc_type(a, Type_Slice);
    t->Slice.elem = elem;
    return t;
}
  428. Type *make_type_struct(gbAllocator a) {
  429. Type *t = alloc_type(a, Type_Struct);
  430. return t;
  431. }
  432. Type *make_type_union(gbAllocator a) {
  433. Type *t = alloc_type(a, Type_Union);
  434. return t;
  435. }
  436. Type *make_type_enum(gbAllocator a) {
  437. Type *t = alloc_type(a, Type_Enum);
  438. return t;
  439. }
  440. Type *make_type_named(gbAllocator a, String name, Type *base, Entity *type_name) {
  441. Type *t = alloc_type(a, Type_Named);
  442. t->Named.name = name;
  443. t->Named.base = base;
  444. t->Named.type_name = type_name;
  445. return t;
  446. }
  447. Type *make_type_tuple(gbAllocator a) {
  448. Type *t = alloc_type(a, Type_Tuple);
  449. return t;
  450. }
  451. Type *make_type_proc(gbAllocator a, Scope *scope, Type *params, isize param_count, Type *results, isize result_count, bool variadic, ProcCallingConvention calling_convention) {
  452. Type *t = alloc_type(a, Type_Proc);
  453. if (variadic) {
  454. if (param_count == 0) {
  455. GB_PANIC("variadic procedure must have at least one parameter");
  456. }
  457. GB_ASSERT(params != nullptr && params->kind == Type_Tuple);
  458. Entity *e = params->Tuple.variables[param_count-1];
  459. if (base_type(e->type)->kind != Type_Slice) {
  460. // NOTE(bill): For custom calling convention
  461. GB_PANIC("variadic parameter must be of type slice");
  462. }
  463. }
  464. t->Proc.scope = scope;
  465. t->Proc.params = params;
  466. t->Proc.param_count = cast(i32)param_count;
  467. t->Proc.results = results;
  468. t->Proc.result_count = cast(i32)result_count;
  469. t->Proc.variadic = variadic;
  470. t->Proc.calling_convention = calling_convention;
  471. return t;
  472. }
  473. bool is_type_valid_for_keys(Type *t);
  474. Type *make_type_map(gbAllocator a, i64 count, Type *key, Type *value) {
  475. Type *t = alloc_type(a, Type_Map);
  476. if (key != nullptr) {
  477. GB_ASSERT(is_type_valid_for_keys(key));
  478. }
  479. t->Map.key = key;
  480. t->Map.value = value;
  481. return t;
  482. }
  483. Type *make_type_bit_field_value(gbAllocator a, u32 bits) {
  484. Type *t = alloc_type(a, Type_BitFieldValue);
  485. t->BitFieldValue.bits = bits;
  486. return t;
  487. }
  488. Type *make_type_bit_field(gbAllocator a) {
  489. Type *t = alloc_type(a, Type_BitField);
  490. return t;
  491. }
  492. ////////////////////////////////////////////////////////////////
  493. Type *type_deref(Type *t) {
  494. if (t != nullptr) {
  495. Type *bt = base_type(t);
  496. if (bt == nullptr)
  497. return nullptr;
  498. if (bt != nullptr && bt->kind == Type_Pointer)
  499. return bt->Pointer.elem;
  500. }
  501. return t;
  502. }
  503. bool is_type_named(Type *t) {
  504. if (t->kind == Type_Basic) {
  505. return true;
  506. }
  507. return t->kind == Type_Named;
  508. }
  509. bool is_type_named_alias(Type *t) {
  510. if (!is_type_named(t)) {
  511. return false;
  512. }
  513. Entity *e = t->Named.type_name;
  514. if (e == nullptr) {
  515. return false;
  516. }
  517. if (e->kind != Entity_TypeName) {
  518. return false;
  519. }
  520. return e->TypeName.is_type_alias;
  521. }
  522. bool is_type_boolean(Type *t) {
  523. t = core_type(t);
  524. if (t->kind == Type_Basic) {
  525. return (t->Basic.flags & BasicFlag_Boolean) != 0;
  526. }
  527. return false;
  528. }
  529. bool is_type_integer(Type *t) {
  530. t = core_type(t);
  531. if (t->kind == Type_Basic) {
  532. return (t->Basic.flags & BasicFlag_Integer) != 0;
  533. }
  534. return false;
  535. }
  536. bool is_type_unsigned(Type *t) {
  537. t = core_type(t);
  538. if (t->kind == Type_Basic) {
  539. return (t->Basic.flags & BasicFlag_Unsigned) != 0;
  540. }
  541. return false;
  542. }
  543. bool is_type_rune(Type *t) {
  544. t = core_type(t);
  545. if (t->kind == Type_Basic) {
  546. return (t->Basic.flags & BasicFlag_Rune) != 0;
  547. }
  548. return false;
  549. }
  550. bool is_type_numeric(Type *t) {
  551. t = core_type(t);
  552. if (t->kind == Type_Basic) {
  553. return (t->Basic.flags & BasicFlag_Numeric) != 0;
  554. }
  555. // TODO(bill): Should this be here?
  556. if (t->kind == Type_Vector) {
  557. return is_type_numeric(t->Vector.elem);
  558. }
  559. return false;
  560. }
  561. bool is_type_string(Type *t) {
  562. t = base_type(t);
  563. if (t->kind == Type_Basic) {
  564. return (t->Basic.flags & BasicFlag_String) != 0;
  565. }
  566. return false;
  567. }
  568. bool is_type_typed(Type *t) {
  569. t = base_type(t);
  570. if (t == nullptr) {
  571. return false;
  572. }
  573. if (t->kind == Type_Basic) {
  574. return (t->Basic.flags & BasicFlag_Untyped) == 0;
  575. }
  576. return true;
  577. }
  578. bool is_type_untyped(Type *t) {
  579. t = base_type(t);
  580. if (t->kind == Type_Basic) {
  581. return (t->Basic.flags & BasicFlag_Untyped) != 0;
  582. }
  583. return false;
  584. }
  585. bool is_type_ordered(Type *t) {
  586. t = core_type(t);
  587. switch (t->kind) {
  588. case Type_Basic:
  589. return (t->Basic.flags & BasicFlag_Ordered) != 0;
  590. case Type_Pointer:
  591. return true;
  592. case Type_Vector:
  593. return is_type_ordered(t->Vector.elem);
  594. }
  595. return false;
  596. }
  597. bool is_type_constant_type(Type *t) {
  598. t = core_type(t);
  599. if (t->kind == Type_Basic) {
  600. return (t->Basic.flags & BasicFlag_ConstantType) != 0;
  601. }
  602. return false;
  603. }
  604. bool is_type_float(Type *t) {
  605. t = core_type(t);
  606. if (t->kind == Type_Basic) {
  607. return (t->Basic.flags & BasicFlag_Float) != 0;
  608. }
  609. return false;
  610. }
  611. bool is_type_complex(Type *t) {
  612. t = core_type(t);
  613. if (t->kind == Type_Basic) {
  614. return (t->Basic.flags & BasicFlag_Complex) != 0;
  615. }
  616. return false;
  617. }
  618. bool is_type_f32(Type *t) {
  619. t = core_type(t);
  620. if (t->kind == Type_Basic) {
  621. return t->Basic.kind == Basic_f32;
  622. }
  623. return false;
  624. }
  625. bool is_type_f64(Type *t) {
  626. t = core_type(t);
  627. if (t->kind == Type_Basic) {
  628. return t->Basic.kind == Basic_f64;
  629. }
  630. return false;
  631. }
  632. bool is_type_pointer(Type *t) {
  633. t = base_type(t);
  634. if (t->kind == Type_Basic) {
  635. return (t->Basic.flags & BasicFlag_Pointer) != 0;
  636. }
  637. return t->kind == Type_Pointer;
  638. }
  639. bool is_type_tuple(Type *t) {
  640. t = base_type(t);
  641. return t->kind == Type_Tuple;
  642. }
  643. bool is_type_int_or_uint(Type *t) {
  644. if (t->kind == Type_Basic) {
  645. return (t->Basic.kind == Basic_int) || (t->Basic.kind == Basic_uint);
  646. }
  647. return false;
  648. }
  649. bool is_type_i128_or_u128(Type *t) {
  650. if (t->kind == Type_Basic) {
  651. return (t->Basic.kind == Basic_i128) || (t->Basic.kind == Basic_u128);
  652. }
  653. return false;
  654. }
  655. bool is_type_rawptr(Type *t) {
  656. if (t->kind == Type_Basic) {
  657. return t->Basic.kind == Basic_rawptr;
  658. }
  659. return false;
  660. }
  661. bool is_type_u8(Type *t) {
  662. if (t->kind == Type_Basic) {
  663. return t->Basic.kind == Basic_u8;
  664. }
  665. return false;
  666. }
  667. bool is_type_array(Type *t) {
  668. t = base_type(t);
  669. return t->kind == Type_Array;
  670. }
  671. bool is_type_dynamic_array(Type *t) {
  672. t = base_type(t);
  673. return t->kind == Type_DynamicArray;
  674. }
  675. bool is_type_slice(Type *t) {
  676. t = base_type(t);
  677. return t->kind == Type_Slice;
  678. }
  679. bool is_type_u8_slice(Type *t) {
  680. t = base_type(t);
  681. if (t->kind == Type_Slice) {
  682. return is_type_u8(t->Slice.elem);
  683. }
  684. return false;
  685. }
  686. bool is_type_vector(Type *t) {
  687. t = base_type(t);
  688. return t->kind == Type_Vector;
  689. }
  690. bool is_type_proc(Type *t) {
  691. t = base_type(t);
  692. return t->kind == Type_Proc;
  693. }
  694. bool is_type_poly_proc(Type *t) {
  695. t = base_type(t);
  696. return t->kind == Type_Proc && t->Proc.is_polymorphic;
  697. }
  698. Type *base_vector_type(Type *t) {
  699. if (is_type_vector(t)) {
  700. t = base_type(t);
  701. return t->Vector.elem;
  702. }
  703. return t;
  704. }
  705. Type *base_complex_elem_type(Type *t) {
  706. t = core_type(t);
  707. if (is_type_complex(t)) {
  708. switch (t->Basic.kind) {
  709. // case Basic_complex32: return t_f16;
  710. case Basic_complex64: return t_f32;
  711. case Basic_complex128: return t_f64;
  712. case Basic_UntypedComplex: return t_untyped_float;
  713. }
  714. }
  715. GB_PANIC("Invalid complex type");
  716. return t_invalid;
  717. }
  718. bool is_type_struct(Type *t) {
  719. t = base_type(t);
  720. return (t->kind == Type_Struct && !t->Struct.is_raw_union);
  721. }
  722. bool is_type_union(Type *t) {
  723. t = base_type(t);
  724. return t->kind == Type_Union;
  725. }
  726. bool is_type_raw_union(Type *t) {
  727. t = base_type(t);
  728. return (t->kind == Type_Struct && t->Struct.is_raw_union);
  729. }
  730. bool is_type_enum(Type *t) {
  731. t = base_type(t);
  732. return (t->kind == Type_Enum);
  733. }
  734. bool is_type_bit_field(Type *t) {
  735. t = base_type(t);
  736. return (t->kind == Type_BitField);
  737. }
  738. bool is_type_bit_field_value(Type *t) {
  739. t = base_type(t);
  740. return (t->kind == Type_BitFieldValue);
  741. }
  742. bool is_type_map(Type *t) {
  743. t = base_type(t);
  744. return t->kind == Type_Map;
  745. }
  746. bool is_type_any(Type *t) {
  747. t = base_type(t);
  748. return (t->kind == Type_Basic && t->Basic.kind == Basic_any);
  749. }
  750. bool is_type_untyped_nil(Type *t) {
  751. t = base_type(t);
  752. return (t->kind == Type_Basic && t->Basic.kind == Basic_UntypedNil);
  753. }
  754. bool is_type_untyped_undef(Type *t) {
  755. t = base_type(t);
  756. return (t->kind == Type_Basic && t->Basic.kind == Basic_UntypedUndef);
  757. }
  758. bool is_type_empty_union(Type *t) {
  759. t = base_type(t);
  760. return t->kind == Type_Union && t->Union.variants.count == 0;
  761. }
  762. bool is_type_empty_struct(Type *t) {
  763. t = base_type(t);
  764. return t->kind == Type_Struct && !t->Struct.is_raw_union && t->Struct.fields.count == 0;
  765. }
  766. bool is_type_valid_for_keys(Type *t) {
  767. t = core_type(t);
  768. if (t->kind == Type_Generic) {
  769. return true;
  770. }
  771. if (is_type_untyped(t)) {
  772. return false;
  773. }
  774. if (is_type_integer(t)) {
  775. return true;
  776. }
  777. if (is_type_float(t)) {
  778. return true;
  779. }
  780. if (is_type_string(t)) {
  781. return true;
  782. }
  783. if (is_type_pointer(t)) {
  784. return true;
  785. }
  786. return false;
  787. }
  788. bool is_type_indexable(Type *t) {
  789. return is_type_array(t) || is_type_slice(t) || is_type_vector(t) || is_type_string(t);
  790. }
  791. bool is_type_polymorphic_struct(Type *t) {
  792. t = base_type(t);
  793. if (t->kind == Type_Struct) {
  794. return t->Struct.is_polymorphic;
  795. }
  796. return false;
  797. }
  798. bool is_type_polymorphic_struct_specialized(Type *t) {
  799. t = base_type(t);
  800. if (t->kind == Type_Struct) {
  801. return t->Struct.is_polymorphic && t->Struct.is_poly_specialized;
  802. }
  803. return false;
  804. }
  805. bool is_type_polymorphic(Type *t) {
  806. switch (t->kind) {
  807. case Type_Generic:
  808. return true;
  809. case Type_Named:
  810. return is_type_polymorphic_struct(t->Named.base);
  811. case Type_Pointer:
  812. return is_type_polymorphic(t->Pointer.elem);
  813. case Type_Array:
  814. return is_type_polymorphic(t->Array.elem);
  815. case Type_DynamicArray:
  816. return is_type_polymorphic(t->DynamicArray.elem);
  817. case Type_Vector:
  818. return is_type_polymorphic(t->Vector.elem);
  819. case Type_Slice:
  820. return is_type_polymorphic(t->Slice.elem);
  821. case Type_Tuple:
  822. for_array(i, t->Tuple.variables) {
  823. if (is_type_polymorphic(t->Tuple.variables[i]->type)) {
  824. return true;
  825. }
  826. }
  827. break;
  828. case Type_Proc:
  829. if (t->Proc.is_polymorphic) {
  830. return true;
  831. }
  832. #if 1
  833. if (t->Proc.param_count > 0 &&
  834. is_type_polymorphic(t->Proc.params)) {
  835. return true;
  836. }
  837. if (t->Proc.result_count > 0 &&
  838. is_type_polymorphic(t->Proc.results)) {
  839. return true;
  840. }
  841. #endif
  842. break;
case Type_Enum:
    if (t->Enum.base_type != nullptr) {
        return is_type_polymorphic(t->Enum.base_type);
    }
    return false;
  851. case Type_Union:
  852. for_array(i, t->Union.variants) {
  853. if (is_type_polymorphic(t->Union.variants[i])) {
  854. return true;
  855. }
  856. }
  857. break;
  858. case Type_Struct:
  859. if (t->Struct.is_polymorphic) {
  860. return true;
  861. }
  862. for_array(i, t->Struct.fields) {
  863. if (is_type_polymorphic(t->Struct.fields[i]->type)) {
  864. return true;
  865. }
  866. }
  867. break;
  868. case Type_Map:
  869. if (is_type_polymorphic(t->Map.key)) {
  870. return true;
  871. }
  872. if (is_type_polymorphic(t->Map.value)) {
  873. return true;
  874. }
  875. break;
  876. }
  877. return false;
  878. }
  879. bool type_has_undef(Type *t) {
  880. t = base_type(t);
  881. return true;
  882. }
  883. bool type_has_nil(Type *t) {
  884. t = base_type(t);
  885. switch (t->kind) {
  886. case Type_Basic: {
  887. switch (t->Basic.kind) {
  888. case Basic_rawptr:
  889. case Basic_any:
  890. return true;
  891. }
  892. return false;
  893. } break;
  894. case Type_Slice:
  895. case Type_Proc:
  896. case Type_Pointer:
  897. case Type_DynamicArray:
  898. case Type_Map:
  899. return true;
  900. case Type_Union:
  901. return true;
  902. case Type_Struct:
  903. return false;
  904. }
  905. return false;
  906. }
  907. bool is_type_comparable(Type *t) {
  908. t = base_type(t);
  909. switch (t->kind) {
  910. case Type_Basic:
  911. switch (t->Basic.kind) {
  912. case Basic_UntypedNil:
  913. case Basic_any:
  914. return false;
  915. case Basic_rune:
  916. return true;
  917. }
  918. return true;
  919. case Type_Pointer:
  920. return true;
  921. case Type_Enum:
  922. return is_type_comparable(core_type(t));
  923. case Type_Array:
  924. return false;
  925. case Type_Vector:
  926. return is_type_comparable(t->Vector.elem);
  927. case Type_Proc:
  928. return true;
  929. }
  930. return false;
  931. }
  932. bool are_types_identical(Type *x, Type *y) {
  933. if (x == y) {
  934. return true;
  935. }
  936. if ((x == nullptr && y != nullptr) ||
  937. (x != nullptr && y == nullptr)) {
  938. return false;
  939. }
  940. switch (x->kind) {
  941. case Type_Generic:
  942. if (y->kind == Type_Generic) {
  943. return are_types_identical(x->Generic.specialized, y->Generic.specialized);
  944. }
  945. break;
  946. case Type_Basic:
  947. if (y->kind == Type_Basic) {
  948. return x->Basic.kind == y->Basic.kind;
  949. }
  950. break;
  951. case Type_Array:
  952. if (y->kind == Type_Array) {
  953. return (x->Array.count == y->Array.count) && are_types_identical(x->Array.elem, y->Array.elem);
  954. }
  955. break;
  956. case Type_DynamicArray:
  957. if (y->kind == Type_DynamicArray) {
  958. return are_types_identical(x->DynamicArray.elem, y->DynamicArray.elem);
  959. }
  960. break;
  961. case Type_Vector:
  962. if (y->kind == Type_Vector) {
  963. return (x->Vector.count == y->Vector.count) && are_types_identical(x->Vector.elem, y->Vector.elem);
  964. }
  965. break;
  966. case Type_Slice:
  967. if (y->kind == Type_Slice) {
  968. return are_types_identical(x->Slice.elem, y->Slice.elem);
  969. }
  970. break;
  971. case Type_Enum:
  972. return x == y; // NOTE(bill): All enums are unique
  973. case Type_Union:
  974. if (y->kind == Type_Union) {
  975. if (x->Union.variants.count == y->Union.variants.count &&
  976. x->Union.custom_align == y->Union.custom_align) {
  977. // NOTE(bill): zeroth variant is nullptr
  978. for_array(i, x->Union.variants) {
  979. if (!are_types_identical(x->Union.variants[i], y->Union.variants[i])) {
  980. return false;
  981. }
  982. }
  983. return true;
  984. }
  985. }
  986. break;
  987. case Type_Struct:
  988. if (y->kind == Type_Struct) {
  989. if (x->Struct.is_raw_union == y->Struct.is_raw_union &&
  990. x->Struct.fields.count == y->Struct.fields.count &&
  991. x->Struct.is_packed == y->Struct.is_packed &&
  992. x->Struct.is_ordered == y->Struct.is_ordered &&
  993. x->Struct.custom_align == y->Struct.custom_align) {
// TODO(bill): Fix the custom alignment rule
  995. for_array(i, x->Struct.fields) {
  996. Entity *xf = x->Struct.fields[i];
  997. Entity *yf = y->Struct.fields[i];
  998. if (!are_types_identical(xf->type, yf->type)) {
  999. return false;
  1000. }
  1001. if (xf->token.string != yf->token.string) {
  1002. return false;
  1003. }
  1004. bool xf_is_using = (xf->flags&EntityFlag_Using) != 0;
  1005. bool yf_is_using = (yf->flags&EntityFlag_Using) != 0;
  1006. if (xf_is_using ^ yf_is_using) {
  1007. return false;
  1008. }
  1009. }
  1010. return true;
  1011. }
  1012. }
  1013. break;
  1014. case Type_Pointer:
  1015. if (y->kind == Type_Pointer) {
  1016. return are_types_identical(x->Pointer.elem, y->Pointer.elem);
  1017. }
  1018. break;
  1019. case Type_Named:
  1020. if (y->kind == Type_Named) {
  1021. return x->Named.type_name == y->Named.type_name;
  1022. }
  1023. break;
  1024. case Type_Tuple:
  1025. if (y->kind == Type_Tuple) {
  1026. if (x->Tuple.variables.count == y->Tuple.variables.count) {
  1027. for_array(i, x->Tuple.variables) {
  1028. Entity *xe = x->Tuple.variables[i];
  1029. Entity *ye = y->Tuple.variables[i];
  1030. if (xe->kind != ye->kind || !are_types_identical(xe->type, ye->type)) {
  1031. return false;
  1032. }
  1033. }
  1034. return true;
  1035. }
  1036. }
  1037. break;
  1038. case Type_Proc:
  1039. if (y->kind == Type_Proc) {
  1040. return x->Proc.calling_convention == y->Proc.calling_convention &&
  1041. x->Proc.c_vararg == y->Proc.c_vararg &&
  1042. x->Proc.variadic == y->Proc.variadic &&
  1043. are_types_identical(x->Proc.params, y->Proc.params) &&
  1044. are_types_identical(x->Proc.results, y->Proc.results);
  1045. }
  1046. break;
  1047. case Type_Map:
  1048. if (y->kind == Type_Map) {
  1049. return are_types_identical(x->Map.key, y->Map.key) &&
  1050. are_types_identical(x->Map.value, y->Map.value);
  1051. }
  1052. break;
  1053. }
  1054. return false;
  1055. }
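// NOTE: default_bit_field_value_type widens a bit-field value to the smallest
// unsigned type whose size is a power-of-two number of bytes, assuming
// next_pow2 rounds up to the next power of two: e.g. 12 bits -> (12+7)/8 = 2
// bytes -> u16.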
  1056. Type *default_bit_field_value_type(Type *type) {
  1057. if (type == nullptr) {
  1058. return t_invalid;
  1059. }
  1060. Type *t = base_type(type);
  1061. if (t->kind == Type_BitFieldValue) {
  1062. i32 bits = t->BitFieldValue.bits;
  1063. i32 size = 8*next_pow2((bits+7)/8);
  1064. switch (size) {
  1065. case 8: return t_u8;
  1066. case 16: return t_u16;
  1067. case 32: return t_u32;
  1068. case 64: return t_u64;
  1069. case 128: return t_u128;
  1070. default: GB_PANIC("Too big of a bit size!"); break;
  1071. }
  1072. }
  1073. return type;
  1074. }
  1075. Type *default_type(Type *type) {
  1076. if (type == nullptr) {
  1077. return t_invalid;
  1078. }
  1079. if (type->kind == Type_Basic) {
  1080. switch (type->Basic.kind) {
  1081. case Basic_UntypedBool: return t_bool;
  1082. case Basic_UntypedInteger: return t_int;
  1083. case Basic_UntypedFloat: return t_f64;
  1084. case Basic_UntypedComplex: return t_complex128;
  1085. case Basic_UntypedString: return t_string;
  1086. case Basic_UntypedRune: return t_rune;
  1087. }
  1088. }
  1089. if (type->kind == Type_BitFieldValue) {
  1090. return default_bit_field_value_type(type);
  1091. }
  1092. return type;
  1093. }
  1094. // NOTE(bill): Valid Compile time execution #run type
  1095. bool is_type_cte_safe(Type *type) {
  1096. type = default_type(base_type(type));
  1097. switch (type->kind) {
  1098. case Type_Basic:
  1099. switch (type->Basic.kind) {
  1100. case Basic_rawptr:
  1101. case Basic_any:
  1102. return false;
  1103. }
  1104. return true;
  1105. case Type_Pointer:
  1106. return false;
  1107. case Type_Array:
  1108. return is_type_cte_safe(type->Array.elem);
  1109. case Type_DynamicArray:
  1110. return false;
  1111. case Type_Map:
  1112. return false;
case Type_Vector: // NOTE(bill): This should always be true, but it is checked for sanity reasons
    return is_type_cte_safe(type->Vector.elem);
  1115. case Type_Slice:
  1116. return false;
  1117. case Type_Struct: {
  1118. if (type->Struct.is_raw_union) {
  1119. return false;
  1120. }
  1121. for_array(i, type->Struct.fields) {
  1122. Entity *v = type->Struct.fields[i];
  1123. if (!is_type_cte_safe(v->type)) {
  1124. return false;
  1125. }
  1126. }
  1127. return true;
  1128. }
  1129. case Type_Tuple: {
  1130. for_array(i, type->Tuple.variables) {
  1131. Entity *v = type->Tuple.variables[i];
  1132. if (!is_type_cte_safe(v->type)) {
  1133. return false;
  1134. }
  1135. }
  1136. return true;
  1137. }
  1138. case Type_Proc:
  1139. // TODO(bill): How should I handle procedures in the CTE stage?
  1140. // return type->Proc.calling_convention == ProcCC_Odin;
  1141. return false;
  1142. }
  1143. return false;
  1144. }
  1145. enum ProcTypeOverloadKind {
  1146. ProcOverload_Identical, // The types are identical
  1147. ProcOverload_CallingConvention,
  1148. ProcOverload_ParamCount,
  1149. ProcOverload_ParamVariadic,
  1150. ProcOverload_ParamTypes,
  1151. ProcOverload_ResultCount,
  1152. ProcOverload_ResultTypes,
  1153. ProcOverload_Polymorphic,
  1154. ProcOverload_NotProcedure,
  1155. };
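// NOTE: are_proc_types_overload_safe reports the first mismatch it finds
// between two procedure signatures; ProcOverload_Identical means no
// distinguishing difference was found.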
  1156. ProcTypeOverloadKind are_proc_types_overload_safe(Type *x, Type *y) {
  1157. if (x == nullptr && y == nullptr) return ProcOverload_NotProcedure;
  1158. if (x == nullptr && y != nullptr) return ProcOverload_NotProcedure;
  1159. if (x != nullptr && y == nullptr) return ProcOverload_NotProcedure;
  1160. if (!is_type_proc(x)) return ProcOverload_NotProcedure;
  1161. if (!is_type_proc(y)) return ProcOverload_NotProcedure;
  1162. TypeProc px = base_type(x)->Proc;
  1163. TypeProc py = base_type(y)->Proc;
  1164. // if (px.calling_convention != py.calling_convention) {
  1165. // return ProcOverload_CallingConvention;
  1166. // }
  1167. // if (px.is_polymorphic != py.is_polymorphic) {
  1168. // return ProcOverload_Polymorphic;
  1169. // }
  1170. if (px.param_count != py.param_count) {
  1171. return ProcOverload_ParamCount;
  1172. }
  1173. for (isize i = 0; i < px.param_count; i++) {
  1174. Entity *ex = px.params->Tuple.variables[i];
  1175. Entity *ey = py.params->Tuple.variables[i];
  1176. if (!are_types_identical(ex->type, ey->type)) {
  1177. return ProcOverload_ParamTypes;
  1178. }
  1179. }
  1180. // IMPORTANT TODO(bill): Determine the rules for overloading procedures with variadic parameters
  1181. if (px.variadic != py.variadic) {
  1182. return ProcOverload_ParamVariadic;
  1183. }
  1184. if (px.is_polymorphic != py.is_polymorphic) {
  1185. return ProcOverload_Polymorphic;
  1186. }
  1187. if (px.result_count != py.result_count) {
  1188. return ProcOverload_ResultCount;
  1189. }
  1190. for (isize i = 0; i < px.result_count; i++) {
  1191. Entity *ex = px.results->Tuple.variables[i];
  1192. Entity *ey = py.results->Tuple.variables[i];
  1193. if (!are_types_identical(ex->type, ey->type)) {
  1194. return ProcOverload_ResultTypes;
  1195. }
  1196. }
  1197. if (px.params != nullptr && py.params != nullptr) {
  1198. Entity *ex = px.params->Tuple.variables[0];
  1199. Entity *ey = py.params->Tuple.variables[0];
  1200. bool ok = are_types_identical(ex->type, ey->type);
  1201. if (ok) {
  1202. }
  1203. }
  1204. return ProcOverload_Identical;
  1205. }
  1206. Selection lookup_field_with_selection(gbAllocator a, Type *type_, String field_name, bool is_type, Selection sel);
  1207. Selection lookup_field(gbAllocator a, Type *type_, String field_name, bool is_type) {
  1208. return lookup_field_with_selection(a, type_, field_name, is_type, empty_selection);
  1209. }
  1210. Selection lookup_field_from_index(gbAllocator a, Type *type, i64 index) {
  1211. GB_ASSERT(is_type_struct(type) || is_type_union(type) || is_type_tuple(type));
  1212. type = base_type(type);
  1213. isize max_count = 0;
  1214. switch (type->kind) {
  1215. case Type_Struct: max_count = type->Struct.fields.count; break;
  1216. case Type_Tuple: max_count = type->Tuple.variables.count; break;
  1217. case Type_BitField: max_count = type->BitField.field_count; break;
  1218. }
  1219. if (index >= max_count) {
  1220. return empty_selection;
  1221. }
  1222. switch (type->kind) {
  1223. case Type_Struct:
  1224. for (isize i = 0; i < max_count; i++) {
  1225. Entity *f = type->Struct.fields[i];
  1226. if (f->kind == Entity_Variable) {
  1227. if (f->Variable.field_src_index == index) {
  1228. Array<i32> sel_array = {0};
  1229. array_init_count(&sel_array, a, 1);
  1230. sel_array[0] = cast(i32)i;
  1231. return make_selection(f, sel_array, false);
  1232. }
  1233. }
  1234. }
  1235. break;
  1236. case Type_Tuple:
  1237. for (isize i = 0; i < max_count; i++) {
  1238. Entity *f = type->Tuple.variables[i];
  1239. if (i == index) {
  1240. Array<i32> sel_array = {0};
  1241. array_init_count(&sel_array, a, 1);
  1242. sel_array[0] = cast(i32)i;
  1243. return make_selection(f, sel_array, false);
  1244. }
  1245. }
  1246. break;
  1247. case Type_BitField: {
  1248. Array<i32> sel_array = {0};
  1249. array_init_count(&sel_array, a, 1);
  1250. sel_array[0] = cast(i32)index;
  1251. return make_selection(type->BitField.fields[index], sel_array, false);
  1252. } break;
  1253. }
  1254. GB_PANIC("Illegal index");
  1255. return empty_selection;
  1256. }
  1257. gb_global Entity *entity__any_data = nullptr;
  1258. gb_global Entity *entity__any_type_info = nullptr;
  1259. Entity *current_scope_lookup_entity(Scope *s, String name);
  1260. Selection lookup_field_with_selection(gbAllocator a, Type *type_, String field_name, bool is_type, Selection sel) {
  1261. GB_ASSERT(type_ != nullptr);
  1262. if (is_blank_ident(field_name)) {
  1263. return empty_selection;
  1264. }
  1265. Type *type = type_deref(type_);
  1266. bool is_ptr = type != type_;
  1267. sel.indirect = sel.indirect || is_ptr;
  1268. type = base_type(type);
  1269. if (type->kind == Type_Basic) {
  1270. switch (type->Basic.kind) {
  1271. case Basic_any: {
  1272. #if 1
// IMPORTANT TODO(bill): Should these members be available, or should I only allow them with
// a `Raw_Any` type?
  1275. String data_str = str_lit("data");
  1276. String type_info_str = str_lit("type_info");
  1277. if (entity__any_data == nullptr) {
  1278. entity__any_data = make_entity_field(a, nullptr, make_token_ident(data_str), t_rawptr, false, 0);
  1279. }
  1280. if (entity__any_type_info == nullptr) {
  1281. entity__any_type_info = make_entity_field(a, nullptr, make_token_ident(type_info_str), t_type_info_ptr, false, 1);
  1282. }
if (field_name == data_str) {
    selection_add_index(&sel, 0);
    sel.entity = entity__any_data;
    return sel;
} else if (field_name == type_info_str) {
    selection_add_index(&sel, 1);
    sel.entity = entity__any_type_info;
    return sel;
}
  1292. #endif
  1293. } break;
  1294. }
  1295. return sel;
  1296. } else if (type->kind == Type_Vector) {
  1297. if (type->Vector.count <= 4 && !is_type_boolean(type->Vector.elem)) {
  1298. // HACK(bill): Memory leak
  1299. switch (type->Vector.count) {
  1300. #define _VECTOR_FIELD_CASE(_length, _name) \
  1301. case (_length): \
  1302. if (field_name == _name) { \
  1303. selection_add_index(&sel, (_length)-1); \
  1304. sel.entity = make_entity_vector_elem(a, nullptr, make_token_ident(str_lit(_name)), type->Vector.elem, (_length)-1); \
  1305. return sel; \
  1306. } \
  1307. /*fallthrough*/
  1308. _VECTOR_FIELD_CASE(4, "w");
  1309. _VECTOR_FIELD_CASE(3, "z");
  1310. _VECTOR_FIELD_CASE(2, "y");
  1311. _VECTOR_FIELD_CASE(1, "x");
  1312. default: break;
  1313. #undef _VECTOR_FIELD_CASE
  1314. }
  1315. }
  1316. }
	if (is_type) {
		switch (type->kind) {
		case Type_Struct:
			if (type->Struct.names != nullptr &&
			    field_name == "names") {
				sel.entity = type->Struct.names;
				return sel;
			}
			break;
		case Type_Enum:
			if (type->Enum.names != nullptr &&
			    field_name == "names") {
				sel.entity = type->Enum.names;
				return sel;
			}
			break;
		}

		if (is_type_enum(type)) {
			// NOTE(bill): These may not have been added yet, so check in case
			if (type->Enum.count != nullptr) {
				if (field_name == "count") {
					sel.entity = type->Enum.count;
					return sel;
				}
				if (field_name == "min_value") {
					sel.entity = type->Enum.min_value;
					return sel;
				}
				if (field_name == "max_value") {
					sel.entity = type->Enum.max_value;
					return sel;
				}
			}
			for (isize i = 0; i < type->Enum.field_count; i++) {
				Entity *f = type->Enum.fields[i];
				GB_ASSERT(f->kind == Entity_Constant);
				String str = f->token.string;
				if (field_name == str) {
					sel.entity = f;
					// selection_add_index(&sel, i);
					return sel;
				}
			}
		}

		if (type->kind == Type_Struct) {
			Scope *s = type->Struct.scope;
			if (s != nullptr) {
				Entity *found = current_scope_lookup_entity(s, field_name);
				if (found != nullptr && found->kind != Entity_Variable) {
					sel.entity = found;
					return sel;
				}
			}
		}

		if (type->kind == Type_Generic && type->Generic.specialized != nullptr) {
			Type *specialized = type->Generic.specialized;
			return lookup_field_with_selection(a, specialized, field_name, is_type, sel);
		}
	} else if (type->kind == Type_Union) {
		if (field_name == "__type_info") {
			Entity *e = type->Union.union__type_info;
			if (e == nullptr) {
				Entity *__type_info = make_entity_field(a, nullptr, make_token_ident(str_lit("__type_info")), t_type_info_ptr, false, -1);
				type->Union.union__type_info = __type_info;
				e = __type_info;
			}
			GB_ASSERT(e != nullptr);
			selection_add_index(&sel, -1); // HACK(bill): Leaky memory
			sel.entity = e;
			return sel;
		}
	} else if (type->kind == Type_Struct) {
		for_array(i, type->Struct.fields) {
			Entity *f = type->Struct.fields[i];
			if (f->kind != Entity_Variable || (f->flags & EntityFlag_Field) == 0) {
				continue;
			}
			String str = f->token.string;
			if (field_name == str) {
				selection_add_index(&sel, i); // HACK(bill): Leaky memory
				sel.entity = f;
				return sel;
			}

			if (f->flags & EntityFlag_Using) {
				isize prev_count = sel.index.count;
				selection_add_index(&sel, i); // HACK(bill): Leaky memory

				sel = lookup_field_with_selection(a, f->type, field_name, is_type, sel);

				if (sel.entity != nullptr) {
					if (is_type_pointer(f->type)) {
						sel.indirect = true;
					}
					return sel;
				}
				sel.index.count = prev_count;
			}
		}
	} else if (type->kind == Type_BitField) {
		for (isize i = 0; i < type->BitField.field_count; i++) {
			Entity *f = type->BitField.fields[i];
			if (f->kind != Entity_Variable ||
			    (f->flags & EntityFlag_BitFieldValue) == 0) {
				continue;
			}
			String str = f->token.string;
			if (field_name == str) {
				selection_add_index(&sel, i); // HACK(bill): Leaky memory
				sel.entity = f;
				return sel;
			}
		}
	}

	return sel;
}
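// NOTE(editor, illustrative): a hypothetical lookup through a `using` field, e.g.
//     Foo :: struct {using pos: Vec2, id: int}
// resolving "x" on Foo recurses through `pos` and yields a Selection whose index path
// is {0, 0} (Foo -> pos -> x); sel.indirect would only be set if `pos` were a pointer.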
struct TypePath {
	Array<Type *> path; // Entity_TypeName;
	bool failure;
};

void type_path_init(TypePath *tp) {
	// TODO(bill): Use an allocator that uses a backing array if it can and then use alternative allocator when exhausted
	array_init(&tp->path, heap_allocator());
}

void type_path_free(TypePath *tp) {
	array_free(&tp->path);
}

void type_path_print_illegal_cycle(TypePath *tp, isize start_index) {
	GB_ASSERT(tp != nullptr);
	GB_ASSERT(start_index < tp->path.count);
	Type *t = tp->path[start_index];
	GB_ASSERT(t != nullptr);

	GB_ASSERT_MSG(is_type_named(t), "%s", type_to_string(t));
	Entity *e = t->Named.type_name;
	error(e->token, "Illegal declaration cycle of `%.*s`", LIT(t->Named.name));
	// NOTE(bill): Print cycle, if it's deep enough
	for (isize j = start_index; j < tp->path.count; j++) {
		Type *t = tp->path[j];
		GB_ASSERT_MSG(is_type_named(t), "%s", type_to_string(t));
		Entity *e = t->Named.type_name;
		error(e->token, "\t%.*s refers to", LIT(t->Named.name));
	}
	// NOTE(bill): This will only print if the path count > 1
	error(e->token, "\t%.*s", LIT(t->Named.name));

	tp->failure = true;
	t->failure = true;
}
TypePath *type_path_push(TypePath *tp, Type *t) {
	GB_ASSERT(tp != nullptr);

	for (isize i = 0; i < tp->path.count; i++) {
		if (tp->path[i] == t) {
			type_path_print_illegal_cycle(tp, i);
		}
	}
	if (!tp->failure && is_type_named(t)) {
		array_add(&tp->path, t);
	}
	return tp;
}

void type_path_pop(TypePath *tp) {
	if (tp != nullptr && tp->path.count > 0) {
		array_pop(&tp->path);
	}
}
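// NOTE(editor): The intended usage, as seen in the size/align routines below, is to call
// type_path_push before recursing into a component type and type_path_pop once the
// recursion returns; pushing a type that is already on the path reports an illegal
// declaration cycle and flags both the path and the offending type as failed.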
#define FAILURE_SIZE      0
#define FAILURE_ALIGNMENT 0

i64 type_size_of_internal (gbAllocator allocator, Type *t, TypePath *path);
i64 type_align_of_internal(gbAllocator allocator, Type *t, TypePath *path);

i64 align_formula(i64 size, i64 align) {
	if (align > 0) {
		i64 result = size + align-1;
		return result - result%align;
	}
	return size;
}
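// NOTE(editor, illustrative): align_formula rounds `size` up to the next multiple of `align`:
// align_formula(10, 8) == 16, align_formula(16, 8) == 16, and a non-positive `align` returns
// `size` unchanged.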
i64 type_size_of(gbAllocator allocator, Type *t) {
	if (t == nullptr) {
		return 0;
	}
	i64 size;
	TypePath path = {0};
	type_path_init(&path);
	size = type_size_of_internal(allocator, t, &path);
	type_path_free(&path);
	return size;
}

i64 type_align_of(gbAllocator allocator, Type *t) {
	if (t == nullptr) {
		return 1;
	}
	i64 align;
	TypePath path = {0};
	type_path_init(&path);
	align = type_align_of_internal(allocator, t, &path);
	type_path_free(&path);
	return align;
}
i64 type_align_of_internal(gbAllocator allocator, Type *t, TypePath *path) {
	if (t->failure) {
		return FAILURE_ALIGNMENT;
	}
	t = base_type(t);

	switch (t->kind) {
	case Type_Basic: {
		GB_ASSERT(is_type_typed(t));
		switch (t->Basic.kind) {
		case Basic_string: return build_context.word_size;
		case Basic_any:    return build_context.word_size;

		case Basic_int: case Basic_uint: case Basic_rawptr:
			return build_context.word_size;

		case Basic_complex64: case Basic_complex128:
			return type_size_of_internal(allocator, t, path) / 2;
		}
	} break;

	case Type_Array: {
		Type *elem = t->Array.elem;
		type_path_push(path, elem);
		if (path->failure) {
			return FAILURE_ALIGNMENT;
		}
		i64 align = type_align_of_internal(allocator, t->Array.elem, path);
		type_path_pop(path);
		return align;
	}

	case Type_DynamicArray:
		// data, count, capacity, allocator
		return build_context.word_size;

	case Type_Slice:
		return build_context.word_size;

	case Type_Vector: {
		Type *elem = t->Vector.elem;
		type_path_push(path, elem);
		if (path->failure) {
			return FAILURE_ALIGNMENT;
		}
		i64 size = type_size_of_internal(allocator, t->Vector.elem, path);
		type_path_pop(path);
		i64 count = gb_max(prev_pow2(t->Vector.count), 1);
		i64 total = size * count;
		return gb_clamp(total, 1, build_context.max_align);
	} break;

	case Type_Tuple: {
		i64 max = 1;
		for_array(i, t->Tuple.variables) {
			i64 align = type_align_of_internal(allocator, t->Tuple.variables[i]->type, path);
			if (max < align) {
				max = align;
			}
		}
		return max;
	} break;

	case Type_Map:
		generate_map_internal_types(allocator, t);
		return type_align_of_internal(allocator, t->Map.generated_struct_type, path);

	case Type_Enum:
		return type_align_of_internal(allocator, t->Enum.base_type, path);

	case Type_Union: {
		if (t->Union.variants.count == 0) {
			return 1;
		}
		if (t->Union.custom_align > 0) {
			return gb_clamp(t->Union.custom_align, 1, build_context.max_align);
		}

		i64 max = build_context.word_size;
		for_array(i, t->Union.variants) {
			Type *variant = t->Union.variants[i];
			type_path_push(path, variant);
			if (path->failure) {
				return FAILURE_ALIGNMENT;
			}
			i64 align = type_align_of_internal(allocator, variant, path);
			type_path_pop(path);
			if (max < align) {
				max = align;
			}
		}
		return max;
	} break;

	case Type_Struct: {
		if (t->Struct.custom_align > 0) {
			return gb_clamp(t->Struct.custom_align, 1, build_context.max_align);
		}
		if (t->Struct.is_raw_union) {
			i64 max = 1;
			for_array(i, t->Struct.fields) {
				Type *field_type = t->Struct.fields[i]->type;
				type_path_push(path, field_type);
				if (path->failure) {
					return FAILURE_ALIGNMENT;
				}
				i64 align = type_align_of_internal(allocator, field_type, path);
				type_path_pop(path);
				if (max < align) {
					max = align;
				}
			}
			return max;
		} else if (t->Struct.fields.count > 0) {
			i64 max = 1;
			// NOTE(bill): Check the fields to check for cyclic definitions
			for_array(i, t->Struct.fields) {
				Type *field_type = t->Struct.fields[i]->type;
				type_path_push(path, field_type);
				if (path->failure) return FAILURE_ALIGNMENT;
				i64 align = type_align_of_internal(allocator, field_type, path);
				type_path_pop(path);
				if (max < align) {
					max = align;
				}
			}
			if (t->Struct.is_packed) {
				return 1;
			}
			return max;
		}
	} break;

	case Type_BitField: {
		i64 align = 1;
		if (t->BitField.custom_align > 0) {
			align = t->BitField.custom_align;
		}
		return gb_clamp(next_pow2(align), 1, build_context.max_align);
	} break;
	}
	// return gb_clamp(next_pow2(type_size_of(allocator, t)), 1, build_context.max_align);
	// NOTE(bill): Things that are bigger than build_context.word_size are actually comprised of smaller types
	// TODO(bill): Is this correct for 128-bit types (integers)?
	return gb_clamp(next_pow2(type_size_of_internal(allocator, t, path)), 1, build_context.word_size);
}
Array<i64> type_set_offsets_of(gbAllocator allocator, Array<Entity *> fields, bool is_packed, bool is_raw_union) {
	Array<i64> offsets = {};
	array_init_count(&offsets, allocator, fields.count);

	i64 curr_offset = 0;
	if (is_raw_union) {
		for_array(i, fields) {
			offsets[i] = 0;
		}
	} else if (is_packed) {
		for_array(i, fields) {
			i64 size = type_size_of(allocator, fields[i]->type);
			offsets[i] = curr_offset;
			curr_offset += size;
		}
	} else {
		for_array(i, fields) {
			i64 align = gb_max(type_align_of(allocator, fields[i]->type), 1);
			i64 size  = gb_max(type_size_of(allocator, fields[i]->type), 0);
			curr_offset = align_formula(curr_offset, align);
			offsets[i] = curr_offset;
			curr_offset += size;
		}
	}
	return offsets;
}
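// NOTE(editor, illustrative): for a non-packed struct {a: u8, b: i32, c: u8} on a 64-bit
// target the loop above yields offsets {0, 4, 8}: `a` sits at 0, the cursor is rounded up
// to b's 4-byte alignment, and `c` follows at 8. The same fields in a #packed struct get
// {0, 1, 5}, and in a raw union every offset is 0.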
bool type_set_offsets(gbAllocator allocator, Type *t) {
	t = base_type(t);
	if (t->kind == Type_Struct) {
		if (!t->Struct.are_offsets_set) {
			t->Struct.are_offsets_being_processed = true;
			t->Struct.offsets = type_set_offsets_of(allocator, t->Struct.fields, t->Struct.is_packed, t->Struct.is_raw_union);
			t->Struct.are_offsets_set = true;
			return true;
		}
	} else if (is_type_tuple(t)) {
		if (!t->Tuple.are_offsets_set) {
			t->Struct.are_offsets_being_processed = true;
			t->Tuple.offsets = type_set_offsets_of(allocator, t->Tuple.variables, false, false);
			t->Tuple.are_offsets_set = true;
			return true;
		}
	} else {
		GB_PANIC("Invalid type for setting offsets");
	}
	return false;
}
i64 type_size_of_internal(gbAllocator allocator, Type *t, TypePath *path) {
	if (t->failure) {
		return FAILURE_SIZE;
	}

	switch (t->kind) {
	case Type_Named: {
		type_path_push(path, t);
		if (path->failure) {
			return FAILURE_SIZE;
		}
		i64 size = type_size_of_internal(allocator, t->Named.base, path);
		type_path_pop(path);
		return size;
	} break;
	case Type_Basic: {
		GB_ASSERT_MSG(is_type_typed(t), "%s", type_to_string(t));
		BasicKind kind = t->Basic.kind;
		i64 size = t->Basic.size;
		if (size > 0) {
			return size;
		}
		switch (kind) {
		case Basic_string: return 2*build_context.word_size;
		case Basic_any:    return 2*build_context.word_size;

		case Basic_int: case Basic_uint: case Basic_rawptr:
			return build_context.word_size;
		}
	} break;

	case Type_Array: {
		i64 count, align, size, alignment;
		count = t->Array.count;
		if (count == 0) {
			return 0;
		}
		align = type_align_of_internal(allocator, t->Array.elem, path);
		if (path->failure) {
			return FAILURE_SIZE;
		}
		size = type_size_of_internal(allocator, t->Array.elem, path);
		alignment = align_formula(size, align);
		return alignment*(count-1) + size;
	} break;
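	// NOTE(editor, illustrative): the Type_Array case above uses stride*(count-1) + size
	// rather than stride*count; e.g. a 3-element array of a 12-byte element aligned to 8
	// has a 16-byte stride but needs only 16*2 + 12 = 44 bytes, since the final element
	// carries no tail padding.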
	case Type_Vector: {
#if 0
		i64 count, bit_size, total_size_in_bits, total_size;
		count = t->Vector.count;
		if (count == 0) {
			return 0;
		}
		type_path_push(path, t->Vector.elem);
		if (path->failure) {
			return FAILURE_SIZE;
		}
		bit_size = 8*type_size_of_internal(allocator, t->Vector.elem, path);
		type_path_pop(path);
		if (is_type_boolean(t->Vector.elem)) {
			bit_size = 1; // NOTE(bill): LLVM can store booleans as 1 bit because a boolean _is_ an `i1`
			              // Silly LLVM spec
		}
		total_size_in_bits = bit_size * count;
		total_size = (total_size_in_bits+7)/8;
		return total_size;
#else
		i64 count = t->Vector.count;
		if (count == 0) {
			return 0;
		}
		i64 elem_align = type_align_of_internal(allocator, t->Vector.elem, path);
		if (path->failure) {
			return FAILURE_SIZE;
		}
		i64 vector_align = type_align_of_internal(allocator, t, path);
		i64 elem_size = type_size_of_internal(allocator, t->Vector.elem, path);
		i64 alignment = align_formula(elem_size, elem_align);
		return align_formula(alignment*(count-1) + elem_size, vector_align);
#endif
	} break;
	case Type_Slice: // data + len + cap
		return 3 * build_context.word_size;
	case Type_DynamicArray:
		// data + len + cap + allocator(procedure+data)
		return 3*build_context.word_size + 2*build_context.word_size;

	case Type_Map:
		generate_map_internal_types(allocator, t);
		return type_size_of_internal(allocator, t->Map.generated_struct_type, path);

	case Type_Tuple: {
		i64 count, align, size;
		count = t->Tuple.variables.count;
		if (count == 0) {
			return 0;
		}
		align = type_align_of_internal(allocator, t, path);
		type_set_offsets(allocator, t);
		size = t->Tuple.offsets[count-1] + type_size_of_internal(allocator, t->Tuple.variables[count-1]->type, path);
		return align_formula(size, align);
	} break;

	case Type_Enum:
		return type_size_of_internal(allocator, t->Enum.base_type, path);

	case Type_Union: {
		if (t->Union.variants.count == 0) {
			return 0;
		}
		i64 align = type_align_of_internal(allocator, t, path);
		if (path->failure) {
			return FAILURE_SIZE;
		}

		i64 max = 0;
		i64 field_size = 0;

		for_array(i, t->Union.variants) {
			Type *variant_type = t->Union.variants[i];
			i64 size = type_size_of_internal(allocator, variant_type, path);
			if (max < size) {
				max = size;
			}
		}

		// NOTE(bill): Align to int
		i64 size = align_formula(max, build_context.word_size);
		// NOTE(bill): Calculate the padding between the common fields and the tag
		t->Union.variant_block_size = size - field_size;

		size += type_size_of(allocator, t_int);
		size = align_formula(size, align);
		return size;
	} break;
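	// NOTE(editor, illustrative): for variants of sizes {4, 24} on a 64-bit target the
	// Type_Union case above keeps a word-aligned 24-byte block for the largest variant
	// (variant_block_size = 24), then appends an int tag: 24 + 8 = 32 bytes, finally
	// rounded up to the union's alignment.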
	case Type_Struct: {
		if (t->Struct.is_raw_union) {
			i64 count = t->Struct.fields.count;
			i64 align = type_align_of_internal(allocator, t, path);
			if (path->failure) {
				return FAILURE_SIZE;
			}
			i64 max = 0;
			for (isize i = 0; i < count; i++) {
				i64 size = type_size_of_internal(allocator, t->Struct.fields[i]->type, path);
				if (max < size) {
					max = size;
				}
			}
			// TODO(bill): Is this how it should work?
			return align_formula(max, align);
		} else {
			i64 count = t->Struct.fields.count;
			if (count == 0) {
				return 0;
			}
			i64 align = type_align_of_internal(allocator, t, path);
			if (path->failure) {
				return FAILURE_SIZE;
			}
			if (t->Struct.are_offsets_being_processed && t->Struct.offsets.data == nullptr) {
				type_path_print_illegal_cycle(path, path->path.count-1);
				return FAILURE_SIZE;
			}
			type_set_offsets(allocator, t);
			i64 size = t->Struct.offsets[count-1] + type_size_of_internal(allocator, t->Struct.fields[count-1]->type, path);
			return align_formula(size, align);
		}
	} break;

	case Type_BitField: {
		i64 align = 8*type_align_of_internal(allocator, t, path);
		i64 end = 0;
		if (t->BitField.field_count > 0) {
			i64 last = t->BitField.field_count-1;
			end = t->BitField.offsets[last] + t->BitField.sizes[last];
		}
		i64 bits = align_formula(end, align);
		GB_ASSERT((bits%8) == 0);
		return bits/8;
	} break;
	}

	// Catch all
	return build_context.word_size;
}
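// NOTE(editor, illustrative): for an ordinary struct the size computed above is
// "offset of last field + size of last field", rounded up to the struct's alignment;
// e.g. struct {a: i32, b: u8} has offsets {0, 4}, a raw end of 5, and a final size of 8.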
i64 type_offset_of(gbAllocator allocator, Type *t, i32 index) {
	t = base_type(t);
	if (t->kind == Type_Struct) {
		type_set_offsets(allocator, t);
		if (gb_is_between(index, 0, t->Struct.fields.count-1)) {
			return t->Struct.offsets[index];
		}
	} else if (t->kind == Type_Tuple) {
		type_set_offsets(allocator, t);
		if (gb_is_between(index, 0, t->Tuple.variables.count-1)) {
			return t->Tuple.offsets[index];
		}
	} else if (t->kind == Type_Basic) {
		if (t->Basic.kind == Basic_string) {
			switch (index) {
			case 0: return 0;                       // data
			case 1: return build_context.word_size; // len
			}
		} else if (t->Basic.kind == Basic_any) {
			switch (index) {
			case 0: return 0;                       // type_info
			case 1: return build_context.word_size; // data
			}
		}
	} else if (t->kind == Type_Slice) {
		switch (index) {
		case 0: return 0;                         // data
		case 1: return 1*build_context.word_size; // len
		case 2: return 2*build_context.word_size; // cap
		}
	} else if (t->kind == Type_DynamicArray) {
		switch (index) {
		case 0: return 0;                         // data
		case 1: return 1*build_context.word_size; // len
		case 2: return 2*build_context.word_size; // cap
		case 3: return 3*build_context.word_size; // allocator
		}
	} else if (t->kind == Type_Union) {
		i64 s = type_size_of(allocator, t);
		switch (index) {
		case -1: return align_formula(t->Union.variant_block_size, build_context.word_size); // __type_info
		}
	}
	return 0;
}
i64 type_offset_of_from_selection(gbAllocator allocator, Type *type, Selection sel) {
	GB_ASSERT(sel.indirect == false);

	Type *t = type;
	i64 offset = 0;
	for_array(i, sel.index) {
		i32 index = sel.index[i];
		t = base_type(t);
		offset += type_offset_of(allocator, t, index);
		if (t->kind == Type_Struct && !t->Struct.is_raw_union) {
			t = t->Struct.fields[index]->type;
		} else {
			// NOTE(bill): No need to worry about custom types, just need the alignment
			switch (t->kind) {
			case Type_Basic:
				if (t->Basic.kind == Basic_string) {
					switch (index) {
					case 0: t = t_rawptr; break;
					case 1: t = t_int;    break;
					}
				} else if (t->Basic.kind == Basic_any) {
					switch (index) {
					case 0: t = t_type_info_ptr; break;
					case 1: t = t_rawptr;        break;
					}
				}
				break;
			case Type_Slice:
				switch (index) {
				case 0: t = t_rawptr; break;
				case 1: t = t_int;    break;
				case 2: t = t_int;    break;
				}
				break;
			case Type_DynamicArray:
				switch (index) {
				case 0: t = t_rawptr;    break;
				case 1: t = t_int;       break;
				case 2: t = t_int;       break;
				case 3: t = t_allocator; break;
				}
				break;
			}
		}
	}
	return offset;
}
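// NOTE(editor, illustrative): given a hypothetical
//     Outer :: struct {pad: u8, inner: struct {x: i32, y: i32}}
// a Selection with index path {1, 0} accumulates type_offset_of(Outer, 1) (4 under the
// default non-packed layout) plus type_offset_of(inner, 0) (0), giving a 4-byte offset
// for Outer.inner.x.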
gbString write_type_to_string(gbString str, Type *type) {
	if (type == nullptr) {
		return gb_string_appendc(str, "<no type>");
	}

	switch (type->kind) {
	case Type_Basic:
		str = gb_string_append_length(str, type->Basic.name.text, type->Basic.name.len);
		break;

	case Type_Generic:
		if (type->Generic.name.len == 0) {
			str = gb_string_appendc(str, "type");
		} else {
			String name = type->Generic.name;
			str = gb_string_append_rune(str, '$');
			str = gb_string_append_length(str, name.text, name.len);
			if (type->Generic.specialized != nullptr) {
				str = gb_string_append_rune(str, '/');
				str = write_type_to_string(str, type->Generic.specialized);
			}
		}
		break;

	case Type_Pointer:
		str = gb_string_append_rune(str, '^');
		str = write_type_to_string(str, type->Pointer.elem);
		break;

	case Type_Array:
		str = gb_string_appendc(str, gb_bprintf("[%d]", cast(int)type->Array.count));
		str = write_type_to_string(str, type->Array.elem);
		break;

	case Type_Vector:
		str = gb_string_appendc(str, gb_bprintf("[vector %d]", cast(int)type->Vector.count));
		str = write_type_to_string(str, type->Vector.elem);
		break;
	case Type_Slice:
		str = gb_string_appendc(str, "[]");
		str = write_type_to_string(str, type->Slice.elem);
		break;
	case Type_DynamicArray:
		str = gb_string_appendc(str, "[dynamic]");
		str = write_type_to_string(str, type->DynamicArray.elem);
		break;

	case Type_Enum:
		str = gb_string_appendc(str, "enum");
		if (type->Enum.base_type != nullptr) {
			str = gb_string_appendc(str, " ");
			str = write_type_to_string(str, type->Enum.base_type);
		}
		str = gb_string_appendc(str, " {");
		for (isize i = 0; i < type->Enum.field_count; i++) {
			Entity *f = type->Enum.fields[i];
			GB_ASSERT(f->kind == Entity_Constant);
			if (i > 0) {
				str = gb_string_appendc(str, ", ");
			}
			str = gb_string_append_length(str, f->token.string.text, f->token.string.len);
			// str = gb_string_appendc(str, " = ");
		}
		str = gb_string_append_rune(str, '}');
		break;

	case Type_Union:
		str = gb_string_appendc(str, "union {");
		for_array(i, type->Union.variants) {
			Type *t = type->Union.variants[i];
			if (i > 0) str = gb_string_appendc(str, ", ");
			str = write_type_to_string(str, t);
		}
		str = gb_string_append_rune(str, '}');
		break;

	case Type_Struct: {
		str = gb_string_appendc(str, "struct");
		if (type->Struct.is_packed)    str = gb_string_appendc(str, " #packed");
		if (type->Struct.is_ordered)   str = gb_string_appendc(str, " #ordered");
		if (type->Struct.is_raw_union) str = gb_string_appendc(str, " #raw_union");
		str = gb_string_appendc(str, " {");
		for_array(i, type->Struct.fields) {
			Entity *f = type->Struct.fields[i];
			GB_ASSERT(f->kind == Entity_Variable);
			if (i > 0) {
				str = gb_string_appendc(str, ", ");
			}
			str = gb_string_append_length(str, f->token.string.text, f->token.string.len);
			str = gb_string_appendc(str, ": ");
			str = write_type_to_string(str, f->type);
		}
		str = gb_string_append_rune(str, '}');
	} break;

	case Type_Map: {
		str = gb_string_appendc(str, "map[");
		str = write_type_to_string(str, type->Map.key);
		str = gb_string_append_rune(str, ']');
		str = write_type_to_string(str, type->Map.value);
	} break;

	case Type_Named:
		if (type->Named.type_name != nullptr) {
			str = gb_string_append_length(str, type->Named.name.text, type->Named.name.len);
		} else {
			// NOTE(bill): Just in case
			str = gb_string_appendc(str, "<named type>");
		}
		break;

	case Type_Tuple:
		if (type->Tuple.variables.count > 0) {
			for_array(i, type->Tuple.variables) {
				Entity *var = type->Tuple.variables[i];
				if (var != nullptr) {
					if (i > 0) {
						str = gb_string_appendc(str, ", ");
					}
					if (var->kind == Entity_Variable) {
						if (var->flags&EntityFlag_CVarArg) {
							str = gb_string_appendc(str, "#c_vararg ");
						}
						if (var->flags&EntityFlag_Ellipsis) {
							Type *slice = base_type(var->type);
							str = gb_string_appendc(str, "...");
							GB_ASSERT(var->type->kind == Type_Slice);
							str = write_type_to_string(str, slice->Slice.elem);
						} else {
							str = write_type_to_string(str, var->type);
						}
					} else {
						GB_ASSERT(var->kind == Entity_TypeName);
						if (var->type->kind == Type_Generic) {
							str = gb_string_appendc(str, "type/");
							str = write_type_to_string(str, var->type);
						} else {
							str = gb_string_appendc(str, "type");
						}
					}
				}
			}
		}
		break;

	case Type_Proc:
		str = gb_string_appendc(str, "proc(");
		if (type->Proc.params) {
			str = write_type_to_string(str, type->Proc.params);
		}
		str = gb_string_appendc(str, ")");
		if (type->Proc.results) {
			str = gb_string_appendc(str, " -> ");
			str = write_type_to_string(str, type->Proc.results);
		}
		switch (type->Proc.calling_convention) {
		case ProcCC_Odin:
			// str = gb_string_appendc(str, " #cc_odin");
			break;
		case ProcCC_C:
			str = gb_string_appendc(str, " #cc_c");
			break;
		case ProcCC_Std:
			str = gb_string_appendc(str, " #cc_std");
			break;
		case ProcCC_Fast:
			str = gb_string_appendc(str, " #cc_fast");
			break;
		}
		break;
	case Type_BitField:
		str = gb_string_appendc(str, "bit_field ");
		if (type->BitField.custom_align != 0) {
			str = gb_string_append_fmt(str, "#align %d ", cast(int)type->BitField.custom_align);
		}
		str = gb_string_append_rune(str, '{');
		for (isize i = 0; i < type->BitField.field_count; i++) {
			Entity *f = type->BitField.fields[i];
			GB_ASSERT(f->kind == Entity_Variable);
			GB_ASSERT(f->type != nullptr && f->type->kind == Type_BitFieldValue);
			if (i > 0) {
				str = gb_string_appendc(str, ", ");
			}
			str = gb_string_append_length(str, f->token.string.text, f->token.string.len);
			str = gb_string_appendc(str, ": ");
			str = gb_string_append_fmt(str, "%lld", cast(long long)f->type->BitFieldValue.bits);
		}
		str = gb_string_append_rune(str, '}');
		break;
	case Type_BitFieldValue:
		str = gb_string_append_fmt(str, "(bit field value with %d bits)", cast(int)type->BitFieldValue.bits);
		break;
	}

	return str;
}
gbString type_to_string(Type *type) {
	return write_type_to_string(gb_string_make(heap_allocator(), ""), type);
}
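// NOTE(editor, illustrative): a few renderings the writer above is expected to produce,
// assuming the usual basic type names are in the type graph:
//   ^int
//   [4]f32
//   []string
//   struct #packed {x: i32, y: i32}
//   proc(int, int) -> bool #cc_c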