// types.cpp

struct Scope;
struct Ast;
enum BasicKind {
    Basic_Invalid,
    Basic_llvm_bool,
    Basic_bool,
    Basic_b8,
    Basic_b16,
    Basic_b32,
    Basic_b64,
    Basic_i8,
    Basic_u8,
    Basic_i16,
    Basic_u16,
    Basic_i32,
    Basic_u32,
    Basic_i64,
    Basic_u64,
    Basic_rune,
    // Basic_f16,
    Basic_f32,
    Basic_f64,
    // Basic_complex32,
    Basic_complex64,
    Basic_complex128,
    Basic_int,
    Basic_uint,
    Basic_uintptr,
    Basic_rawptr,
    Basic_string, // ^u8 + int
    Basic_cstring, // ^u8
    Basic_any, // rawptr + ^Type_Info
    Basic_typeid,
    Basic_UntypedBool,
    Basic_UntypedInteger,
    Basic_UntypedFloat,
    Basic_UntypedComplex,
    Basic_UntypedString,
    Basic_UntypedRune,
    Basic_UntypedNil,
    Basic_UntypedUndef,
    Basic_COUNT,
    Basic_byte = Basic_u8,
};
enum BasicFlag {
    BasicFlag_Boolean = GB_BIT(0),
    BasicFlag_Integer = GB_BIT(1),
    BasicFlag_Unsigned = GB_BIT(2),
    BasicFlag_Float = GB_BIT(3),
    BasicFlag_Complex = GB_BIT(4),
    BasicFlag_Pointer = GB_BIT(5),
    BasicFlag_String = GB_BIT(6),
    BasicFlag_Rune = GB_BIT(7),
    BasicFlag_Untyped = GB_BIT(8),
    BasicFlag_LLVM = GB_BIT(10),
    BasicFlag_Numeric = BasicFlag_Integer | BasicFlag_Float | BasicFlag_Complex,
    BasicFlag_Ordered = BasicFlag_Integer | BasicFlag_Float | BasicFlag_String | BasicFlag_Pointer | BasicFlag_Rune,
    BasicFlag_OrderedNumeric = BasicFlag_Integer | BasicFlag_Float | BasicFlag_Rune,
    BasicFlag_ConstantType = BasicFlag_Boolean | BasicFlag_Numeric | BasicFlag_String | BasicFlag_Pointer | BasicFlag_Rune,
};
struct BasicType {
    BasicKind kind;
    u32 flags;
    i64 size; // -1 if arch. dep.
    String name;
};
struct TypeStruct {
    Array<Entity *> fields;
    Ast *node;
    Scope * scope;
    Array<i64> offsets;
    bool are_offsets_set;
    bool are_offsets_being_processed;
    bool is_packed;
    bool is_raw_union;
    bool is_polymorphic;
    bool is_poly_specialized;
    Type * polymorphic_params; // Type_Tuple
    Type * polymorphic_parent;
    i64 custom_align; // NOTE(bill): Only used in structs at the moment
    Entity * names;
};
struct TypeUnion {
    Array<Type *> variants;
    Ast * node;
    Scope * scope;
    i64 variant_block_size;
    i64 custom_align;
    i64 tag_size;
    bool is_polymorphic;
    bool is_poly_specialized;
    Type * polymorphic_params; // Type_Tuple
    Type * polymorphic_parent;
};
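// X-macro listing every type kind and its payload struct. It is expanded below to
// generate the TypeKind enum, the type_strings table, the TypeBasic/TypeNamed/...
// typedefs, and the variant union inside struct Type.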
#define TYPE_KINDS \
    TYPE_KIND(Basic, BasicType) \
    TYPE_KIND(Named, struct { \
        String name; \
        Type * base; \
        Entity *type_name; /* Entity_TypeName */ \
    }) \
    TYPE_KIND(Generic, struct { \
        i64 id; \
        String name; \
        Type * specialized; \
        Scope *scope; \
    }) \
    TYPE_KIND(Pointer, struct { Type *elem; }) \
    TYPE_KIND(Opaque, struct { Type *elem; }) \
    TYPE_KIND(Array, struct { \
        Type *elem; \
        i64 count; \
        Type *generic_count; \
    }) \
    TYPE_KIND(Slice, struct { Type *elem; }) \
    TYPE_KIND(DynamicArray, struct { Type *elem; }) \
    TYPE_KIND(Map, struct { \
        Type *key; \
        Type *value; \
        Type *entry_type; \
        Type *generated_struct_type; \
        Type *internal_type; \
        Type *lookup_result_type; \
    }) \
    TYPE_KIND(Struct, TypeStruct) \
    TYPE_KIND(Union, TypeUnion) \
    TYPE_KIND(Enum, struct { \
        Array<Entity *> fields; \
        Ast *node; \
        Scope * scope; \
        Entity * names; \
        Type * base_type; \
    }) \
    TYPE_KIND(Tuple, struct { \
        Array<Entity *> variables; /* Entity_Variable */ \
        Array<i64> offsets; \
        bool are_offsets_set; \
    }) \
    TYPE_KIND(Proc, struct { \
        Ast *node; \
        Scope * scope; \
        Type * params; /* Type_Tuple */ \
        Type * results; /* Type_Tuple */ \
        i32 param_count; \
        i32 result_count; \
        Array<Type *> abi_compat_params; \
        Type * abi_compat_result_type; \
        bool return_by_pointer; \
        bool variadic; \
        i32 variadic_index; \
        bool require_results; \
        bool c_vararg; \
        bool is_polymorphic; \
        bool is_poly_specialized; \
        bool has_proc_default_values; \
        bool has_named_results; \
        bool diverging; /* no return */ \
        isize specialization_count; \
        ProcCallingConvention calling_convention; \
    }) \
    TYPE_KIND(BitFieldValue, struct { u32 bits; }) \
    TYPE_KIND(BitField, struct { \
        Array<Entity *> fields; \
        Array<u32> offsets; \
        Array<u32> sizes; \
        Scope * scope; \
        i64 custom_align; \
    }) \
    TYPE_KIND(BitSet, struct { \
        Type *elem; \
        Type *underlying; \
        i64 lower; \
        i64 upper; \
    })
enum TypeKind {
    Type_Invalid,
#define TYPE_KIND(k, ...) GB_JOIN2(Type_, k),
    TYPE_KINDS
#undef TYPE_KIND
    Type_Count,
};
String const type_strings[] = {
    {cast(u8 *)"Invalid", gb_size_of("Invalid")},
#define TYPE_KIND(k, ...) {cast(u8 *)#k, gb_size_of(#k)-1},
    TYPE_KINDS
#undef TYPE_KIND
};
#define TYPE_KIND(k, ...) typedef __VA_ARGS__ GB_JOIN2(Type, k);
TYPE_KINDS
#undef TYPE_KIND
struct Type {
    TypeKind kind;
    union {
#define TYPE_KIND(k, ...) GB_JOIN2(Type, k) k;
        TYPE_KINDS
#undef TYPE_KIND
    };
    // NOTE(bill): These need to be at the end to not affect the unionized data
    i64 cached_size;
    i64 cached_align;
    bool failure;
};
// TODO(bill): Should I add extra information here specifying the kind of selection?
// e.g. field, constant, array field, type field, etc.
struct Selection {
    Entity * entity;
    Array<i32> index;
    bool indirect; // Set if there was a pointer deref anywhere down the line
};
Selection empty_selection = {0};
Selection make_selection(Entity *entity, Array<i32> index, bool indirect) {
    Selection s = {entity, index, indirect};
    return s;
}
void selection_add_index(Selection *s, isize index) {
    // IMPORTANT NOTE(bill): this requires a stretchy buffer/dynamic array so it requires some form
    // of heap allocation
    // TODO(bill): Find a way to use a backing buffer for initial use as the general case is probably .count<3
    if (s->index.data == nullptr) {
        array_init(&s->index, heap_allocator());
    }
    array_add(&s->index, cast(i32)index);
}
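// Combines two selection paths: lhs's field indices followed by rhs's; the result
// is marked indirect if either side went through a pointer dereference.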
Selection selection_combine(Selection const &lhs, Selection const &rhs) {
    Selection new_sel = lhs;
    new_sel.indirect = lhs.indirect || rhs.indirect;
    new_sel.index = array_make<i32>(heap_allocator(), lhs.index.count+rhs.index.count);
    array_copy(&new_sel.index, lhs.index, 0);
    array_copy(&new_sel.index, rhs.index, lhs.index.count);
    return new_sel;
}
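// Canonical Type values for every BasicKind, in BasicKind order (the t_* globals
// below index into this table by kind). A size of -1 marks types whose size is
// architecture/word-size dependent.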
gb_global Type basic_types[] = {
    {Type_Basic, {Basic_Invalid, 0, 0, STR_LIT("invalid type")}},
    {Type_Basic, {Basic_llvm_bool, BasicFlag_Boolean | BasicFlag_LLVM, 1, STR_LIT("llvm bool")}},
    {Type_Basic, {Basic_bool, BasicFlag_Boolean, 1, STR_LIT("bool")}},
    {Type_Basic, {Basic_b8, BasicFlag_Boolean, 1, STR_LIT("b8")}},
    {Type_Basic, {Basic_b16, BasicFlag_Boolean, 2, STR_LIT("b16")}},
    {Type_Basic, {Basic_b32, BasicFlag_Boolean, 4, STR_LIT("b32")}},
    {Type_Basic, {Basic_b64, BasicFlag_Boolean, 8, STR_LIT("b64")}},
    {Type_Basic, {Basic_i8, BasicFlag_Integer, 1, STR_LIT("i8")}},
    {Type_Basic, {Basic_u8, BasicFlag_Integer | BasicFlag_Unsigned, 1, STR_LIT("u8")}},
    {Type_Basic, {Basic_i16, BasicFlag_Integer, 2, STR_LIT("i16")}},
    {Type_Basic, {Basic_u16, BasicFlag_Integer | BasicFlag_Unsigned, 2, STR_LIT("u16")}},
    {Type_Basic, {Basic_i32, BasicFlag_Integer, 4, STR_LIT("i32")}},
    {Type_Basic, {Basic_u32, BasicFlag_Integer | BasicFlag_Unsigned, 4, STR_LIT("u32")}},
    {Type_Basic, {Basic_i64, BasicFlag_Integer, 8, STR_LIT("i64")}},
    {Type_Basic, {Basic_u64, BasicFlag_Integer | BasicFlag_Unsigned, 8, STR_LIT("u64")}},
    {Type_Basic, {Basic_rune, BasicFlag_Integer | BasicFlag_Rune, 4, STR_LIT("rune")}},
    // {Type_Basic, {Basic_f16, BasicFlag_Float, 2, STR_LIT("f16")}},
    {Type_Basic, {Basic_f32, BasicFlag_Float, 4, STR_LIT("f32")}},
    {Type_Basic, {Basic_f64, BasicFlag_Float, 8, STR_LIT("f64")}},
    // {Type_Basic, {Basic_complex32, BasicFlag_Complex, 4, STR_LIT("complex32")}},
    {Type_Basic, {Basic_complex64, BasicFlag_Complex, 8, STR_LIT("complex64")}},
    {Type_Basic, {Basic_complex128, BasicFlag_Complex, 16, STR_LIT("complex128")}},
    {Type_Basic, {Basic_int, BasicFlag_Integer, -1, STR_LIT("int")}},
    {Type_Basic, {Basic_uint, BasicFlag_Integer | BasicFlag_Unsigned, -1, STR_LIT("uint")}},
    {Type_Basic, {Basic_uintptr, BasicFlag_Integer | BasicFlag_Unsigned, -1, STR_LIT("uintptr")}},
    {Type_Basic, {Basic_rawptr, BasicFlag_Pointer, -1, STR_LIT("rawptr")}},
    {Type_Basic, {Basic_string, BasicFlag_String, -1, STR_LIT("string")}},
    {Type_Basic, {Basic_cstring, BasicFlag_String, -1, STR_LIT("cstring")}},
    {Type_Basic, {Basic_any, 0, -1, STR_LIT("any")}},
    {Type_Basic, {Basic_typeid, 0, -1, STR_LIT("typeid")}},
    {Type_Basic, {Basic_UntypedBool, BasicFlag_Boolean | BasicFlag_Untyped, 0, STR_LIT("untyped bool")}},
    {Type_Basic, {Basic_UntypedInteger, BasicFlag_Integer | BasicFlag_Untyped, 0, STR_LIT("untyped integer")}},
    {Type_Basic, {Basic_UntypedFloat, BasicFlag_Float | BasicFlag_Untyped, 0, STR_LIT("untyped float")}},
    {Type_Basic, {Basic_UntypedComplex, BasicFlag_Complex | BasicFlag_Untyped, 0, STR_LIT("untyped complex")}},
    {Type_Basic, {Basic_UntypedString, BasicFlag_String | BasicFlag_Untyped, 0, STR_LIT("untyped string")}},
    {Type_Basic, {Basic_UntypedRune, BasicFlag_Integer | BasicFlag_Untyped, 0, STR_LIT("untyped rune")}},
    {Type_Basic, {Basic_UntypedNil, BasicFlag_Untyped, 0, STR_LIT("untyped nil")}},
    {Type_Basic, {Basic_UntypedUndef, BasicFlag_Untyped, 0, STR_LIT("untyped undefined")}},
};
// gb_global Type basic_type_aliases[] = {
// // {Type_Basic, {Basic_byte, BasicFlag_Integer | BasicFlag_Unsigned, 1, STR_LIT("byte")}},
// // {Type_Basic, {Basic_rune, BasicFlag_Integer, 4, STR_LIT("rune")}},
// };
gb_global Type *t_invalid = &basic_types[Basic_Invalid];
gb_global Type *t_llvm_bool = &basic_types[Basic_llvm_bool];
gb_global Type *t_bool = &basic_types[Basic_bool];
gb_global Type *t_i8 = &basic_types[Basic_i8];
gb_global Type *t_u8 = &basic_types[Basic_u8];
gb_global Type *t_i16 = &basic_types[Basic_i16];
gb_global Type *t_u16 = &basic_types[Basic_u16];
gb_global Type *t_i32 = &basic_types[Basic_i32];
gb_global Type *t_u32 = &basic_types[Basic_u32];
gb_global Type *t_i64 = &basic_types[Basic_i64];
gb_global Type *t_u64 = &basic_types[Basic_u64];
gb_global Type *t_rune = &basic_types[Basic_rune];
// gb_global Type *t_f16 = &basic_types[Basic_f16];
gb_global Type *t_f32 = &basic_types[Basic_f32];
gb_global Type *t_f64 = &basic_types[Basic_f64];
// gb_global Type *t_complex32 = &basic_types[Basic_complex32];
gb_global Type *t_complex64 = &basic_types[Basic_complex64];
gb_global Type *t_complex128 = &basic_types[Basic_complex128];
gb_global Type *t_int = &basic_types[Basic_int];
gb_global Type *t_uint = &basic_types[Basic_uint];
gb_global Type *t_uintptr = &basic_types[Basic_uintptr];
gb_global Type *t_rawptr = &basic_types[Basic_rawptr];
gb_global Type *t_string = &basic_types[Basic_string];
gb_global Type *t_cstring = &basic_types[Basic_cstring];
gb_global Type *t_any = &basic_types[Basic_any];
gb_global Type *t_typeid = &basic_types[Basic_typeid];
gb_global Type *t_untyped_bool = &basic_types[Basic_UntypedBool];
gb_global Type *t_untyped_integer = &basic_types[Basic_UntypedInteger];
gb_global Type *t_untyped_float = &basic_types[Basic_UntypedFloat];
gb_global Type *t_untyped_complex = &basic_types[Basic_UntypedComplex];
gb_global Type *t_untyped_string = &basic_types[Basic_UntypedString];
gb_global Type *t_untyped_rune = &basic_types[Basic_UntypedRune];
gb_global Type *t_untyped_nil = &basic_types[Basic_UntypedNil];
gb_global Type *t_untyped_undef = &basic_types[Basic_UntypedUndef];
gb_global Type *t_u8_ptr = nullptr;
gb_global Type *t_int_ptr = nullptr;
gb_global Type *t_i64_ptr = nullptr;
gb_global Type *t_f64_ptr = nullptr;
gb_global Type *t_u8_slice = nullptr;
gb_global Type *t_string_slice = nullptr;
// Type generated for the "preload" file
gb_global Type *t_type_info = nullptr;
gb_global Type *t_type_info_enum_value = nullptr;
gb_global Type *t_type_info_ptr = nullptr;
gb_global Type *t_type_info_enum_value_ptr = nullptr;
gb_global Type *t_type_info_named = nullptr;
gb_global Type *t_type_info_integer = nullptr;
gb_global Type *t_type_info_rune = nullptr;
gb_global Type *t_type_info_float = nullptr;
gb_global Type *t_type_info_complex = nullptr;
gb_global Type *t_type_info_any = nullptr;
gb_global Type *t_type_info_typeid = nullptr;
gb_global Type *t_type_info_string = nullptr;
gb_global Type *t_type_info_boolean = nullptr;
gb_global Type *t_type_info_pointer = nullptr;
gb_global Type *t_type_info_procedure = nullptr;
gb_global Type *t_type_info_array = nullptr;
gb_global Type *t_type_info_dynamic_array = nullptr;
gb_global Type *t_type_info_slice = nullptr;
gb_global Type *t_type_info_tuple = nullptr;
gb_global Type *t_type_info_struct = nullptr;
gb_global Type *t_type_info_union = nullptr;
gb_global Type *t_type_info_enum = nullptr;
gb_global Type *t_type_info_map = nullptr;
gb_global Type *t_type_info_bit_field = nullptr;
gb_global Type *t_type_info_bit_set = nullptr;
gb_global Type *t_type_info_opaque = nullptr;
gb_global Type *t_type_info_named_ptr = nullptr;
gb_global Type *t_type_info_integer_ptr = nullptr;
gb_global Type *t_type_info_rune_ptr = nullptr;
gb_global Type *t_type_info_float_ptr = nullptr;
gb_global Type *t_type_info_complex_ptr = nullptr;
gb_global Type *t_type_info_quaternion_ptr = nullptr;
gb_global Type *t_type_info_any_ptr = nullptr;
gb_global Type *t_type_info_typeid_ptr = nullptr;
gb_global Type *t_type_info_string_ptr = nullptr;
gb_global Type *t_type_info_boolean_ptr = nullptr;
gb_global Type *t_type_info_pointer_ptr = nullptr;
gb_global Type *t_type_info_procedure_ptr = nullptr;
gb_global Type *t_type_info_array_ptr = nullptr;
gb_global Type *t_type_info_dynamic_array_ptr = nullptr;
gb_global Type *t_type_info_slice_ptr = nullptr;
gb_global Type *t_type_info_tuple_ptr = nullptr;
gb_global Type *t_type_info_struct_ptr = nullptr;
gb_global Type *t_type_info_union_ptr = nullptr;
gb_global Type *t_type_info_enum_ptr = nullptr;
gb_global Type *t_type_info_map_ptr = nullptr;
gb_global Type *t_type_info_bit_field_ptr = nullptr;
gb_global Type *t_type_info_bit_set_ptr = nullptr;
gb_global Type *t_type_info_opaque_ptr = nullptr;
gb_global Type *t_allocator = nullptr;
gb_global Type *t_allocator_ptr = nullptr;
gb_global Type *t_context = nullptr;
gb_global Type *t_context_ptr = nullptr;
gb_global Type *t_source_code_location = nullptr;
gb_global Type *t_source_code_location_ptr = nullptr;
gb_global Type *t_map_key = nullptr;
gb_global Type *t_map_header = nullptr;
i64 type_size_of (Type *t);
i64 type_align_of (Type *t);
i64 type_offset_of (Type *t, i32 index);
gbString type_to_string (Type *type);
void init_map_internal_types(Type *type);
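// Follows the Named chain down to the underlying, unnamed type. A Named type whose
// base is itself resolves to t_invalid (guards against cyclic definitions).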
Type *base_type(Type *t) {
    for (;;) {
        if (t == nullptr) {
            break;
        }
        if (t->kind != Type_Named) {
            break;
        }
        if (t == t->Named.base) {
            return t_invalid;
        }
        t = t->Named.base;
    }
    return t;
}
Type *strip_opaque_type(Type *t) {
    for (;;) {
        if (t == nullptr) {
            break;
        }
        if (t->kind != Type_Opaque) {
            break;
        }
        t = t->Opaque.elem;
    }
    return t;
}
Type *base_enum_type(Type *t) {
    Type *bt = base_type(t);
    if (bt != nullptr &&
        bt->kind == Type_Enum) {
        return bt->Enum.base_type;
    }
    return t;
}
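// Like base_type, but additionally unwraps enums to their base type and opaque
// types to their element type.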
Type *core_type(Type *t) {
    for (;;) {
        if (t == nullptr) {
            break;
        }
        switch (t->kind) {
        case Type_Named:
            if (t == t->Named.base) {
                return t_invalid;
            }
            t = t->Named.base;
            continue;
        case Type_Enum:
            t = t->Enum.base_type;
            continue;
        case Type_Opaque:
            t = t->Opaque.elem;
            continue;
        }
        break;
    }
    return t;
}
void set_base_type(Type *t, Type *base) {
    if (t && t->kind == Type_Named) {
        t->Named.base = base;
    }
}
Type *alloc_type(TypeKind kind) {
    gbAllocator a = heap_allocator();
    Type *t = gb_alloc_item(a, Type);
    gb_zero_item(t);
    t->kind = kind;
    t->cached_size = -1;
    t->cached_align = -1;
    return t;
}
Type *alloc_type_generic(Scope *scope, i64 id, String name, Type *specialized) {
    Type *t = alloc_type(Type_Generic);
    t->Generic.id = id;
    t->Generic.name = name;
    t->Generic.specialized = specialized;
    t->Generic.scope = scope;
    return t;
}
Type *alloc_type_opaque(Type *elem) {
    Type *t = alloc_type(Type_Opaque);
    t->Opaque.elem = elem;
    return t;
}
Type *alloc_type_pointer(Type *elem) {
    Type *t = alloc_type(Type_Pointer);
    t->Pointer.elem = elem;
    return t;
}
Type *alloc_type_array(Type *elem, i64 count, Type *generic_count = nullptr) {
    Type *t = alloc_type(Type_Array);
    t->Array.elem = elem;
    t->Array.count = count;
    t->Array.generic_count = generic_count; // nullptr for non-generic array counts
    return t;
}
Type *alloc_type_slice(Type *elem) {
    Type *t = alloc_type(Type_Slice);
    t->Slice.elem = elem;
    return t;
}
Type *alloc_type_dynamic_array(Type *elem) {
    Type *t = alloc_type(Type_DynamicArray);
    t->DynamicArray.elem = elem;
    return t;
}
Type *alloc_type_struct() {
    Type *t = alloc_type(Type_Struct);
    return t;
}
Type *alloc_type_union() {
    Type *t = alloc_type(Type_Union);
    return t;
}
Type *alloc_type_enum() {
    Type *t = alloc_type(Type_Enum);
    return t;
}
Type *alloc_type_named(String name, Type *base, Entity *type_name) {
    Type *t = alloc_type(Type_Named);
    t->Named.name = name;
    t->Named.base = base;
    t->Named.type_name = type_name;
    return t;
}
Type *alloc_type_tuple() {
    Type *t = alloc_type(Type_Tuple);
    return t;
}
Type *alloc_type_proc(Scope *scope, Type *params, isize param_count, Type *results, isize result_count, bool variadic, ProcCallingConvention calling_convention) {
    Type *t = alloc_type(Type_Proc);
    if (variadic) {
        if (param_count == 0) {
            GB_PANIC("variadic procedure must have at least one parameter");
        }
        GB_ASSERT(params != nullptr && params->kind == Type_Tuple);
        Entity *e = params->Tuple.variables[param_count-1];
        if (base_type(e->type)->kind != Type_Slice) {
            // NOTE(bill): For custom calling convention
            GB_PANIC("variadic parameter must be of type slice");
        }
    }
    t->Proc.scope = scope;
    t->Proc.params = params;
    t->Proc.param_count = cast(i32)param_count;
    t->Proc.results = results;
    t->Proc.result_count = cast(i32)result_count;
    t->Proc.variadic = variadic;
    t->Proc.calling_convention = calling_convention;
    return t;
}
bool is_type_valid_for_keys(Type *t);
Type *alloc_type_map(i64 count, Type *key, Type *value) {
    if (key != nullptr) {
        GB_ASSERT(is_type_valid_for_keys(key));
        GB_ASSERT(value != nullptr);
    }
    Type *t = alloc_type(Type_Map);
    t->Map.key = key;
    t->Map.value = value;
    return t;
}
Type *alloc_type_bit_field_value(u32 bits) {
    Type *t = alloc_type(Type_BitFieldValue);
    t->BitFieldValue.bits = bits;
    return t;
}
Type *alloc_type_bit_field() {
    Type *t = alloc_type(Type_BitField);
    return t;
}
Type *alloc_type_bit_set() {
    Type *t = alloc_type(Type_BitSet);
    return t;
}
////////////////////////////////////////////////////////////////
Type *type_deref(Type *t) {
    if (t != nullptr) {
        Type *bt = base_type(t);
        if (bt == nullptr) {
            return nullptr;
        }
        if (bt->kind == Type_Pointer) {
            return bt->Pointer.elem;
        }
    }
    return t;
}
bool is_type_named(Type *t) {
    if (t->kind == Type_Basic) {
        return true;
    }
    return t->kind == Type_Named;
}
bool is_type_named_alias(Type *t) {
    if (!is_type_named(t)) {
        return false;
    }
    Entity *e = t->Named.type_name;
    if (e == nullptr) {
        return false;
    }
    if (e->kind != Entity_TypeName) {
        return false;
    }
    return e->TypeName.is_type_alias;
}
bool is_type_boolean(Type *t) {
    // t = core_type(t);
    t = base_type(t);
    if (t->kind == Type_Basic) {
        return (t->Basic.flags & BasicFlag_Boolean) != 0;
    }
    return false;
}
bool is_type_integer(Type *t) {
    // t = core_type(t);
    t = base_type(t);
    if (t->kind == Type_Basic) {
        return (t->Basic.flags & BasicFlag_Integer) != 0;
    }
    return false;
}
bool is_type_unsigned(Type *t) {
    t = base_type(t);
    // t = core_type(t);
    if (t->kind == Type_Basic) {
        return (t->Basic.flags & BasicFlag_Unsigned) != 0;
    }
    return false;
}
bool is_type_rune(Type *t) {
    // t = core_type(t);
    t = base_type(t);
    if (t->kind == Type_Basic) {
        return (t->Basic.flags & BasicFlag_Rune) != 0;
    }
    return false;
}
bool is_type_numeric(Type *t) {
    // t = core_type(t);
    t = base_type(t);
    if (t->kind == Type_Basic) {
        return (t->Basic.flags & BasicFlag_Numeric) != 0;
    } else if (t->kind == Type_Enum) {
        return is_type_numeric(t->Enum.base_type);
    }
    // TODO(bill): Should this be here?
    if (t->kind == Type_Array) {
        return is_type_numeric(t->Array.elem);
    }
    return false;
}
bool is_type_string(Type *t) {
    t = base_type(t);
    if (t->kind == Type_Basic) {
        return (t->Basic.flags & BasicFlag_String) != 0;
    }
    return false;
}
bool is_type_cstring(Type *t) {
    t = base_type(t);
    if (t->kind == Type_Basic) {
        return t->Basic.kind == Basic_cstring;
    }
    return false;
}
bool is_type_typed(Type *t) {
    t = base_type(t);
    if (t == nullptr) {
        return false;
    }
    if (t->kind == Type_Basic) {
        return (t->Basic.flags & BasicFlag_Untyped) == 0;
    }
    return true;
}
bool is_type_untyped(Type *t) {
    t = base_type(t);
    if (t->kind == Type_Basic) {
        return (t->Basic.flags & BasicFlag_Untyped) != 0;
    }
    return false;
}
bool is_type_ordered(Type *t) {
    t = core_type(t);
    switch (t->kind) {
    case Type_Basic:
        return (t->Basic.flags & BasicFlag_Ordered) != 0;
    case Type_Pointer:
        return true;
    }
    return false;
}
bool is_type_ordered_numeric(Type *t) {
    t = core_type(t);
    switch (t->kind) {
    case Type_Basic:
        return (t->Basic.flags & BasicFlag_OrderedNumeric) != 0;
    }
    return false;
}
bool is_type_constant_type(Type *t) {
    t = core_type(t);
    if (t->kind == Type_Basic) {
        return (t->Basic.flags & BasicFlag_ConstantType) != 0;
    }
    if (t->kind == Type_BitSet) {
        return true;
    }
    return false;
}
bool is_type_float(Type *t) {
    t = core_type(t);
    if (t->kind == Type_Basic) {
        return (t->Basic.flags & BasicFlag_Float) != 0;
    }
    return false;
}
bool is_type_complex(Type *t) {
    t = core_type(t);
    if (t->kind == Type_Basic) {
        return (t->Basic.flags & BasicFlag_Complex) != 0;
    }
    return false;
}
bool is_type_f32(Type *t) {
    t = core_type(t);
    if (t->kind == Type_Basic) {
        return t->Basic.kind == Basic_f32;
    }
    return false;
}
bool is_type_f64(Type *t) {
    t = core_type(t);
    if (t->kind == Type_Basic) {
        return t->Basic.kind == Basic_f64;
    }
    return false;
}
bool is_type_pointer(Type *t) {
    t = base_type(t);
    if (t->kind == Type_Basic) {
        return (t->Basic.flags & BasicFlag_Pointer) != 0;
    }
    return t->kind == Type_Pointer;
}
bool is_type_tuple(Type *t) {
    t = base_type(t);
    return t->kind == Type_Tuple;
}
bool is_type_opaque(Type *t) {
    t = base_type(t);
    return t->kind == Type_Opaque;
}
bool is_type_uintptr(Type *t) {
    if (t->kind == Type_Basic) {
        return (t->Basic.kind == Basic_uintptr);
    }
    return false;
}
bool is_type_rawptr(Type *t) {
    if (t->kind == Type_Basic) {
        return t->Basic.kind == Basic_rawptr;
    }
    return false;
}
bool is_type_u8(Type *t) {
    if (t->kind == Type_Basic) {
        return t->Basic.kind == Basic_u8;
    }
    return false;
}
bool is_type_array(Type *t) {
    t = base_type(t);
    return t->kind == Type_Array;
}
bool is_type_dynamic_array(Type *t) {
    t = base_type(t);
    return t->kind == Type_DynamicArray;
}
bool is_type_slice(Type *t) {
    t = base_type(t);
    return t->kind == Type_Slice;
}
bool is_type_u8_slice(Type *t) {
    t = base_type(t);
    if (t->kind == Type_Slice) {
        return is_type_u8(t->Slice.elem);
    }
    return false;
}
bool is_type_u8_ptr(Type *t) {
    t = base_type(t);
    if (t->kind == Type_Pointer) {
        return is_type_u8(t->Pointer.elem);
    }
    return false;
}
bool is_type_proc(Type *t) {
    t = base_type(t);
    return t->kind == Type_Proc;
}
bool is_type_poly_proc(Type *t) {
    t = base_type(t);
    return t->kind == Type_Proc && t->Proc.is_polymorphic;
}
Type *base_array_type(Type *t) {
    if (is_type_array(t)) {
        t = base_type(t);
        return t->Array.elem;
    }
    return t;
}
bool is_type_generic(Type *t) {
    t = base_type(t);
    return t->kind == Type_Generic;
}
Type *core_array_type(Type *t) {
    for (;;) {
        Type *prev = t;
        t = base_array_type(t);
        if (prev == t) break;
    }
    return t;
}
Type *base_complex_elem_type(Type *t) {
    t = core_type(t);
    if (is_type_complex(t)) {
        switch (t->Basic.kind) {
        // case Basic_complex32: return t_f16;
        case Basic_complex64: return t_f32;
        case Basic_complex128: return t_f64;
        case Basic_UntypedComplex: return t_untyped_float;
        }
    }
    GB_PANIC("Invalid complex type");
    return t_invalid;
}
bool is_type_struct(Type *t) {
    t = base_type(t);
    return t->kind == Type_Struct;
}
bool is_type_union(Type *t) {
    t = base_type(t);
    return t->kind == Type_Union;
}
bool is_type_raw_union(Type *t) {
    t = base_type(t);
    return (t->kind == Type_Struct && t->Struct.is_raw_union);
}
bool is_type_enum(Type *t) {
    t = base_type(t);
    return (t->kind == Type_Enum);
}
bool is_type_bit_field(Type *t) {
    t = base_type(t);
    return (t->kind == Type_BitField);
}
bool is_type_bit_field_value(Type *t) {
    t = base_type(t);
    return (t->kind == Type_BitFieldValue);
}
bool is_type_bit_set(Type *t) {
    t = base_type(t);
    return (t->kind == Type_BitSet);
}
bool is_type_map(Type *t) {
    t = base_type(t);
    return t->kind == Type_Map;
}
bool is_type_any(Type *t) {
    t = base_type(t);
    return (t->kind == Type_Basic && t->Basic.kind == Basic_any);
}
bool is_type_typeid(Type *t) {
    t = base_type(t);
    return (t->kind == Type_Basic && t->Basic.kind == Basic_typeid);
}
bool is_type_untyped_nil(Type *t) {
    t = base_type(t);
    return (t->kind == Type_Basic && t->Basic.kind == Basic_UntypedNil);
}
bool is_type_untyped_undef(Type *t) {
    t = base_type(t);
    return (t->kind == Type_Basic && t->Basic.kind == Basic_UntypedUndef);
}
bool is_type_empty_union(Type *t) {
    t = base_type(t);
    return t->kind == Type_Union && t->Union.variants.count == 0;
}
bool is_type_empty_struct(Type *t) {
    t = base_type(t);
    return t->kind == Type_Struct && !t->Struct.is_raw_union && t->Struct.fields.count == 0;
}
bool is_type_valid_for_keys(Type *t) {
    t = core_type(t);
    if (t->kind == Type_Generic) {
        return true;
    }
    if (is_type_untyped(t)) {
        return false;
    }
    if (is_type_integer(t)) {
        return true;
    }
    if (is_type_float(t)) {
        return true;
    }
    if (is_type_string(t)) {
        return true;
    }
    if (is_type_pointer(t)) {
        return true;
    }
    return false;
}
bool is_type_valid_bit_set_elem(Type *t) {
    if (is_type_enum(t)) {
        return true;
    }
    t = core_type(t);
    if (t->kind == Type_Generic) {
        return true;
    }
    return false;
}
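// Backing integer type of a bit_set: the explicit underlying integer if one was
// declared, otherwise the smallest unsigned integer that covers the bit_set's size.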
Type *bit_set_to_int(Type *t) {
    GB_ASSERT(is_type_bit_set(t));
    Type *bt = base_type(t);
    Type *underlying = bt->BitSet.underlying;
    if (underlying != nullptr && is_type_integer(underlying)) {
        return underlying;
    }
    i64 sz = type_size_of(t);
    switch (sz) {
    case 0: return t_u8;
    case 1: return t_u8;
    case 2: return t_u16;
    case 4: return t_u32;
    case 8: return t_u64;
    }
    GB_PANIC("Unknown bit_set size");
    return nullptr;
}
bool is_type_indexable(Type *t) {
    Type *bt = base_type(t);
    switch (bt->kind) {
    case Type_Basic:
        return bt->Basic.kind == Basic_string;
    case Type_Array:
    case Type_Slice:
    case Type_DynamicArray:
    case Type_Map:
        return true;
    }
    return false;
}
bool is_type_polymorphic_record(Type *t) {
    t = base_type(t);
    if (t->kind == Type_Struct) {
        return t->Struct.is_polymorphic;
    } else if (t->kind == Type_Union) {
        return t->Union.is_polymorphic;
    }
    return false;
}
bool is_type_polymorphic_record_specialized(Type *t) {
    t = base_type(t);
    if (t->kind == Type_Struct) {
        return t->Struct.is_polymorphic && t->Struct.is_poly_specialized;
    } else if (t->kind == Type_Union) {
        return t->Union.is_polymorphic && t->Union.is_poly_specialized;
    }
    return false;
}
bool is_type_polymorphic_record_unspecialized(Type *t) {
    t = base_type(t);
    if (t->kind == Type_Struct) {
        return t->Struct.is_polymorphic && !t->Struct.is_poly_specialized;
    } else if (t->kind == Type_Union) {
        return t->Union.is_polymorphic && !t->Union.is_poly_specialized;
    }
    return false;
}
TypeTuple *get_record_polymorphic_params(Type *t) {
    t = base_type(t);
    switch (t->kind) {
    case Type_Struct:
        if (t->Struct.polymorphic_params) {
            return &t->Struct.polymorphic_params->Tuple;
        }
        break;
    case Type_Union:
        if (t->Union.polymorphic_params) {
            return &t->Union.polymorphic_params->Tuple;
        }
        break;
    }
    return nullptr;
}
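// True if the type is, or contains anywhere within it, an unresolved polymorphic
// (generic) component.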
bool is_type_polymorphic(Type *t) {
    switch (t->kind) {
    case Type_Generic:
        return true;
    case Type_Named:
        return is_type_polymorphic(t->Named.base);
    case Type_Opaque:
        return is_type_polymorphic(t->Opaque.elem);
    case Type_Pointer:
        return is_type_polymorphic(t->Pointer.elem);
    case Type_Array:
        if (t->Array.generic_count != nullptr) {
            return true;
        }
        return is_type_polymorphic(t->Array.elem);
    case Type_DynamicArray:
        return is_type_polymorphic(t->DynamicArray.elem);
    case Type_Slice:
        return is_type_polymorphic(t->Slice.elem);
    case Type_Tuple:
        for_array(i, t->Tuple.variables) {
            if (is_type_polymorphic(t->Tuple.variables[i]->type)) {
                return true;
            }
        }
        break;
    case Type_Proc:
        if (t->Proc.is_polymorphic) {
            return true;
        }
#if 1
        if (t->Proc.param_count > 0 &&
            is_type_polymorphic(t->Proc.params)) {
            return true;
        }
        if (t->Proc.result_count > 0 &&
            is_type_polymorphic(t->Proc.results)) {
            return true;
        }
#endif
        break;
    case Type_Enum:
        if (t->Enum.base_type != nullptr) {
            return is_type_polymorphic(t->Enum.base_type);
        }
        return false;
    case Type_Union:
        for_array(i, t->Union.variants) {
            if (is_type_polymorphic(t->Union.variants[i])) {
                return true;
            }
        }
        break;
    case Type_Struct:
        if (t->Struct.is_polymorphic) {
            return true;
        }
        for_array(i, t->Struct.fields) {
            if (is_type_polymorphic(t->Struct.fields[i]->type)) {
                return true;
            }
        }
        break;
    case Type_Map:
        if (is_type_polymorphic(t->Map.key)) {
            return true;
        }
        if (is_type_polymorphic(t->Map.value)) {
            return true;
        }
        break;
    }
    return false;
}
bool type_has_undef(Type *t) {
    // t = base_type(t);
    return true;
}
bool type_has_nil(Type *t) {
    t = base_type(t);
    switch (t->kind) {
    case Type_Basic: {
        switch (t->Basic.kind) {
        case Basic_rawptr:
        case Basic_any:
            return true;
        case Basic_cstring:
            return true;
        case Basic_typeid:
            return true;
        }
        return false;
    } break;
    case Type_Enum:
    case Type_BitSet:
    case Type_BitField:
        return true;
    case Type_Slice:
    case Type_Proc:
    case Type_Pointer:
    case Type_DynamicArray:
    case Type_Map:
        return true;
    case Type_Union:
        return true;
    case Type_Struct:
        return false;
    case Type_Opaque:
        return true;
    }
    return false;
}
bool elem_type_can_be_constant(Type *t) {
    t = base_type(t);
    if (t == t_invalid) {
        return false;
    }
    if (is_type_any(t) || is_type_union(t)) {
        return false;
    }
    return true;
}
bool is_type_comparable(Type *t) {
    t = base_type(t);
    switch (t->kind) {
    case Type_Basic:
        switch (t->Basic.kind) {
        case Basic_UntypedNil:
        case Basic_any:
            return false;
        case Basic_rune:
            return true;
        case Basic_string:
            return true;
        case Basic_cstring:
            return true;
        case Basic_typeid:
            return true;
        }
        return true;
    case Type_Pointer:
        return true;
    case Type_Enum:
        return is_type_comparable(core_type(t));
    case Type_Array:
        return is_type_comparable(t->Array.elem);
    case Type_Proc:
        return true;
    case Type_BitSet:
        return true;
    case Type_Opaque:
        return is_type_comparable(t->Opaque.elem);
    }
    return false;
}
Type *strip_type_aliasing(Type *x) {
    if (x == nullptr) {
        return x;
    }
    if (x->kind == Type_Named) {
        Entity *e = x->Named.type_name;
        if (e != nullptr && e->kind == Entity_TypeName && e->TypeName.is_type_alias) {
            return x->Named.base;
        }
    }
    return x;
}
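// Structural identity. Aliases are stripped first; named types compare by their
// TypeName entity, and enums are only ever identical to themselves.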
bool are_types_identical(Type *x, Type *y) {
    if (x == y) {
        return true;
    }
    if ((x == nullptr && y != nullptr) ||
        (x != nullptr && y == nullptr)) {
        return false;
    }
    x = strip_type_aliasing(x);
    y = strip_type_aliasing(y);
    switch (x->kind) {
    case Type_Generic:
        if (y->kind == Type_Generic) {
            return are_types_identical(x->Generic.specialized, y->Generic.specialized);
        }
        break;
    case Type_Opaque:
        if (y->kind == Type_Opaque) {
            return are_types_identical(x->Opaque.elem, y->Opaque.elem);
        }
        break;
    case Type_Basic:
        if (y->kind == Type_Basic) {
            return x->Basic.kind == y->Basic.kind;
        }
        break;
    case Type_Array:
        if (y->kind == Type_Array) {
            return (x->Array.count == y->Array.count) && are_types_identical(x->Array.elem, y->Array.elem);
        }
        break;
    case Type_DynamicArray:
        if (y->kind == Type_DynamicArray) {
            return are_types_identical(x->DynamicArray.elem, y->DynamicArray.elem);
        }
        break;
    case Type_Slice:
        if (y->kind == Type_Slice) {
            return are_types_identical(x->Slice.elem, y->Slice.elem);
        }
        break;
    case Type_BitField:
        if (y->kind == Type_BitField) {
            if (x->BitField.fields.count == y->BitField.fields.count &&
                x->BitField.custom_align == y->BitField.custom_align) {
                for (i32 i = 0; i < x->BitField.fields.count; i++) {
                    if (x->BitField.offsets[i] != y->BitField.offsets[i]) {
                        return false;
                    }
                    if (x->BitField.sizes[i] != y->BitField.sizes[i]) {
                        return false;
                    }
                }
                return true;
            }
        }
        break;
    case Type_BitSet:
        if (y->kind == Type_BitSet) {
            return are_types_identical(x->BitSet.elem, y->BitSet.elem) &&
                   are_types_identical(x->BitSet.underlying, y->BitSet.underlying) &&
                   x->BitSet.lower == y->BitSet.lower &&
                   x->BitSet.upper == y->BitSet.upper;
        }
        break;
    case Type_Enum:
        return x == y; // NOTE(bill): All enums are unique
    case Type_Union:
        if (y->kind == Type_Union) {
            if (x->Union.variants.count == y->Union.variants.count &&
                x->Union.custom_align == y->Union.custom_align) {
                // NOTE(bill): zeroth variant is nullptr
                for_array(i, x->Union.variants) {
                    if (!are_types_identical(x->Union.variants[i], y->Union.variants[i])) {
                        return false;
                    }
                }
                return true;
            }
        }
        break;
    case Type_Struct:
        if (y->kind == Type_Struct) {
            if (x->Struct.is_raw_union == y->Struct.is_raw_union &&
                x->Struct.fields.count == y->Struct.fields.count &&
                x->Struct.is_packed == y->Struct.is_packed &&
                x->Struct.custom_align == y->Struct.custom_align) {
                // TODO(bill): Fix the custom alignment rule
                for_array(i, x->Struct.fields) {
                    Entity *xf = x->Struct.fields[i];
                    Entity *yf = y->Struct.fields[i];
                    if (xf->kind != yf->kind) {
                        return false;
                    }
                    if (!are_types_identical(xf->type, yf->type)) {
                        return false;
                    }
                    if (xf->token.string != yf->token.string) {
                        return false;
                    }
                    bool xf_is_using = (xf->flags&EntityFlag_Using) != 0;
                    bool yf_is_using = (yf->flags&EntityFlag_Using) != 0;
                    if (xf_is_using ^ yf_is_using) {
                        return false;
                    }
                }
                return true;
            }
        }
        break;
    case Type_Pointer:
        if (y->kind == Type_Pointer) {
            return are_types_identical(x->Pointer.elem, y->Pointer.elem);
        }
        break;
    case Type_Named:
        if (y->kind == Type_Named) {
            return x->Named.type_name == y->Named.type_name;
        }
        break;
    case Type_Tuple:
        if (y->kind == Type_Tuple) {
            if (x->Tuple.variables.count == y->Tuple.variables.count) {
                for_array(i, x->Tuple.variables) {
                    Entity *xe = x->Tuple.variables[i];
                    Entity *ye = y->Tuple.variables[i];
                    if (xe->kind != ye->kind || !are_types_identical(xe->type, ye->type)) {
                        return false;
                    }
                    if (xe->kind == Entity_Constant && !compare_exact_values(Token_CmpEq, xe->Constant.value, ye->Constant.value)) {
                        // NOTE(bill): This is needed for polymorphic procedures
                        return false;
                    }
                }
                return true;
            }
        }
        break;
    case Type_Proc:
        if (y->kind == Type_Proc) {
            return x->Proc.calling_convention == y->Proc.calling_convention &&
                   x->Proc.c_vararg == y->Proc.c_vararg &&
                   x->Proc.variadic == y->Proc.variadic &&
                   x->Proc.diverging == y->Proc.diverging &&
                   are_types_identical(x->Proc.params, y->Proc.params) &&
                   are_types_identical(x->Proc.results, y->Proc.results);
        }
        break;
    case Type_Map:
        if (y->kind == Type_Map) {
            return are_types_identical(x->Map.key, y->Map.key) &&
                   are_types_identical(x->Map.value, y->Map.value);
        }
        break;
    }
    return false;
}
Type *default_bit_field_value_type(Type *type) {
    if (type == nullptr) {
        return t_invalid;
    }
    Type *t = base_type(type);
    if (t->kind == Type_BitFieldValue) {
        i32 bits = t->BitFieldValue.bits;
        i32 size = 8*next_pow2((bits+7)/8);
        switch (size) {
        case 8: return t_u8;
        case 16: return t_u16;
        case 32: return t_u32;
        case 64: return t_u64;
        default: GB_PANIC("Too big of a bit size!"); break;
        }
    }
    return type;
}
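// Maps untyped constant types to their default concrete types (untyped integer -> int,
// untyped float -> f64, etc.); bit-field values default to the smallest fitting
// unsigned integer via default_bit_field_value_type.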
Type *default_type(Type *type) {
    if (type == nullptr) {
        return t_invalid;
    }
    if (type->kind == Type_Basic) {
        switch (type->Basic.kind) {
        case Basic_UntypedBool: return t_bool;
        case Basic_UntypedInteger: return t_int;
        case Basic_UntypedFloat: return t_f64;
        case Basic_UntypedComplex: return t_complex128;
        case Basic_UntypedString: return t_string;
        case Basic_UntypedRune: return t_rune;
        }
    }
    if (type->kind == Type_BitFieldValue) {
        return default_bit_field_value_type(type);
    }
    return type;
}
/*
// NOTE(bill): Valid Compile time execution #run type
bool is_type_cte_safe(Type *type) {
    type = default_type(base_type(type));
    switch (type->kind) {
    case Type_Basic:
        switch (type->Basic.kind) {
        case Basic_rawptr:
        case Basic_any:
            return false;
        }
        return true;
    case Type_Pointer:
        return false;
    case Type_Array:
        return is_type_cte_safe(type->Array.elem);
    case Type_DynamicArray:
        return false;
    case Type_Map:
        return false;
    case Type_Slice:
        return false;
    case Type_Struct: {
        if (type->Struct.is_raw_union) {
            return false;
        }
        for_array(i, type->Struct.fields) {
            Entity *v = type->Struct.fields[i];
            if (!is_type_cte_safe(v->type)) {
                return false;
            }
        }
        return true;
    }
    case Type_Tuple: {
        for_array(i, type->Tuple.variables) {
            Entity *v = type->Tuple.variables[i];
            if (!is_type_cte_safe(v->type)) {
                return false;
            }
        }
        return true;
    }
    case Type_Proc:
        // TODO(bill): How should I handle procedures in the CTE stage?
        // return type->Proc.calling_convention == ProcCC_Odin;
        return false;
    }
    return false;
}
*/
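// 1-based index of variant v within union u; 0 means v is not a variant (the
// zeroth, nil variant).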
  1400. i64 union_variant_index(Type *u, Type *v) {
  1401. u = base_type(u);
  1402. GB_ASSERT(u->kind == Type_Union);
  1403. for_array(i, u->Union.variants) {
  1404. Type *vt = u->Union.variants[i];
  1405. if (are_types_identical(v, vt)) {
  1406. return cast(i64)(i+1);
  1407. }
  1408. }
  1409. return 0;
  1410. }
i64 union_tag_size(Type *u) {
	u = base_type(u);
	GB_ASSERT(u->kind == Type_Union);
	if (u->Union.tag_size > 0) {
		return u->Union.tag_size;
	}
	u64 n = cast(u64)u->Union.variants.count;
	if (n == 0) {
		return 0;
	}
	i64 bytes = next_pow2(cast(i64)(floor_log2(n)/8 + 1));
	i64 tag_size = gb_max(bytes, 1);
	u->Union.tag_size = tag_size;
	return tag_size;
}

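// NOTE: Worked example of the tag-size arithmetic above: for n = 3 variants,
// floor_log2(3) = 1, so 1/8 + 1 = 1 and next_pow2(1) = 1 byte of tag; for
// n = 300 variants, floor_log2(300) = 8, so 8/8 + 1 = 2 and next_pow2(2) = 2
// bytes. The result is cached in u->Union.tag_size on first use.
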
Type *union_tag_type(Type *u) {
	i64 s = union_tag_size(u);
	switch (s) {
	case 1: return t_u8;
	case 2: return t_u16;
	case 4: return t_u32;
	case 8: return t_u64;
	}
	GB_PANIC("Invalid union_tag_size");
	return t_uint;
}

enum ProcTypeOverloadKind {
	ProcOverload_Identical, // The types are identical
	ProcOverload_CallingConvention,
	ProcOverload_ParamCount,
	ProcOverload_ParamVariadic,
	ProcOverload_ParamTypes,
	ProcOverload_ResultCount,
	ProcOverload_ResultTypes,
	ProcOverload_Polymorphic,
	ProcOverload_NotProcedure,
};

ProcTypeOverloadKind are_proc_types_overload_safe(Type *x, Type *y) {
	if (x == nullptr && y == nullptr) return ProcOverload_NotProcedure;
	if (x == nullptr && y != nullptr) return ProcOverload_NotProcedure;
	if (x != nullptr && y == nullptr) return ProcOverload_NotProcedure;

	if (!is_type_proc(x)) return ProcOverload_NotProcedure;
	if (!is_type_proc(y)) return ProcOverload_NotProcedure;

	TypeProc px = base_type(x)->Proc;
	TypeProc py = base_type(y)->Proc;

	// if (px.calling_convention != py.calling_convention) {
	// 	return ProcOverload_CallingConvention;
	// }
	// if (px.is_polymorphic != py.is_polymorphic) {
	// 	return ProcOverload_Polymorphic;
	// }

	if (px.param_count != py.param_count) {
		return ProcOverload_ParamCount;
	}

	for (isize i = 0; i < px.param_count; i++) {
		Entity *ex = px.params->Tuple.variables[i];
		Entity *ey = py.params->Tuple.variables[i];
		if (!are_types_identical(ex->type, ey->type)) {
			return ProcOverload_ParamTypes;
		}
	}
	// IMPORTANT TODO(bill): Determine the rules for overloading procedures with variadic parameters
	if (px.variadic != py.variadic) {
		return ProcOverload_ParamVariadic;
	}

	if (px.is_polymorphic != py.is_polymorphic) {
		return ProcOverload_Polymorphic;
	}

	if (px.result_count != py.result_count) {
		return ProcOverload_ResultCount;
	}

	for (isize i = 0; i < px.result_count; i++) {
		Entity *ex = px.results->Tuple.variables[i];
		Entity *ey = py.results->Tuple.variables[i];
		if (!are_types_identical(ex->type, ey->type)) {
			return ProcOverload_ResultTypes;
		}
	}

	if (px.params != nullptr && py.params != nullptr) {
		Entity *ex = px.params->Tuple.variables[0];
		Entity *ey = py.params->Tuple.variables[0];
		bool ok = are_types_identical(ex->type, ey->type);
		if (ok) {
		}
	}

	return ProcOverload_Identical;
}

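// A hypothetical caller (not part of this file) would typically treat any
// result other than ProcOverload_Identical and ProcOverload_NotProcedure as
// "these two procedure types may coexist in an overload set", e.g.:
//
//     ProcTypeOverloadKind kind = are_proc_types_overload_safe(a->type, b->type);
//     bool can_overload = kind != ProcOverload_Identical &&
//                         kind != ProcOverload_NotProcedure;
//
// Any diagnostics for the specific kinds are expected to live with the
// caller (the checker), not here.
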
Selection lookup_field_with_selection(Type *type_, String field_name, bool is_type, Selection sel);

Selection lookup_field(Type *type_, String field_name, bool is_type) {
	return lookup_field_with_selection(type_, field_name, is_type, empty_selection);
}

Selection lookup_field_from_index(Type *type, i64 index) {
	GB_ASSERT(is_type_struct(type) || is_type_union(type) || is_type_tuple(type));
	type = base_type(type);

	gbAllocator a = heap_allocator();
	isize max_count = 0;
	switch (type->kind) {
	case Type_Struct:   max_count = type->Struct.fields.count;   break;
	case Type_Tuple:    max_count = type->Tuple.variables.count; break;
	case Type_BitField: max_count = type->BitField.fields.count; break;
	}

	if (index >= max_count) {
		return empty_selection;
	}

	switch (type->kind) {
	case Type_Struct:
		for (isize i = 0; i < max_count; i++) {
			Entity *f = type->Struct.fields[i];
			if (f->kind == Entity_Variable) {
				if (f->Variable.field_src_index == index) {
					auto sel_array = array_make<i32>(a, 1);
					sel_array[0] = cast(i32)i;
					return make_selection(f, sel_array, false);
				}
			}
		}
		break;
	case Type_Tuple:
		for (isize i = 0; i < max_count; i++) {
			Entity *f = type->Tuple.variables[i];
			if (i == index) {
				auto sel_array = array_make<i32>(a, 1);
				sel_array[0] = cast(i32)i;
				return make_selection(f, sel_array, false);
			}
		}
		break;
	case Type_BitField: {
		auto sel_array = array_make<i32>(a, 1);
		sel_array[0] = cast(i32)index;
		return make_selection(type->BitField.fields[cast(isize)index], sel_array, false);
	} break;
	}

	GB_PANIC("Illegal index");
	return empty_selection;
}

Entity *scope_lookup_current(Scope *s, String name);

Selection lookup_field_with_selection(Type *type_, String field_name, bool is_type, Selection sel) {
	GB_ASSERT(type_ != nullptr);

	if (is_blank_ident(field_name)) {
		return empty_selection;
	}

	gbAllocator a = heap_allocator();

	Type *type = type_deref(type_);
	bool is_ptr = type != type_;
	sel.indirect = sel.indirect || is_ptr;

	type = base_type(type);

	if (is_type) {
		switch (type->kind) {
		case Type_Struct:
			if (type->Struct.names != nullptr &&
			    field_name == "names") {
				sel.entity = type->Struct.names;
				return sel;
			}
			break;
		case Type_Enum:
			if (type->Enum.names != nullptr &&
			    field_name == "names") {
				sel.entity = type->Enum.names;
				return sel;
			}
			break;
		}

		if (is_type_enum(type)) {
			// NOTE(bill): These may not have been added yet, so check in case
			for_array(i, type->Enum.fields) {
				Entity *f = type->Enum.fields[i];
				GB_ASSERT(f->kind == Entity_Constant);
				String str = f->token.string;
				if (field_name == str) {
					sel.entity = f;
					// selection_add_index(&sel, i);
					return sel;
				}
			}
		}

		if (type->kind == Type_Struct) {
			Scope *s = type->Struct.scope;
			if (s != nullptr) {
				Entity *found = scope_lookup_current(s, field_name);
				if (found != nullptr && found->kind != Entity_Variable) {
					sel.entity = found;
					return sel;
				}
			}
		} else if (type->kind == Type_BitSet) {
			return lookup_field_with_selection(type->BitSet.elem, field_name, true, sel);
		}

		if (type->kind == Type_Generic && type->Generic.specialized != nullptr) {
			Type *specialized = type->Generic.specialized;
			return lookup_field_with_selection(specialized, field_name, is_type, sel);
		}
	} else if (type->kind == Type_Union) {
	} else if (type->kind == Type_Struct) {
		for_array(i, type->Struct.fields) {
			Entity *f = type->Struct.fields[i];
			if (f->kind != Entity_Variable || (f->flags & EntityFlag_Field) == 0) {
				continue;
			}
			String str = f->token.string;
			if (field_name == str) {
				selection_add_index(&sel, i); // HACK(bill): Leaky memory
				sel.entity = f;
				return sel;
			}

			if (f->flags & EntityFlag_Using) {
				isize prev_count = sel.index.count;
				selection_add_index(&sel, i); // HACK(bill): Leaky memory

				sel = lookup_field_with_selection(f->type, field_name, is_type, sel);

				if (sel.entity != nullptr) {
					if (is_type_pointer(f->type)) {
						sel.indirect = true;
					}
					return sel;
				}
				sel.index.count = prev_count;
			}
		}
	} else if (type->kind == Type_BitField) {
		for_array(i, type->BitField.fields) {
			Entity *f = type->BitField.fields[i];
			if (f->kind != Entity_Variable ||
			    (f->flags & EntityFlag_BitFieldValue) == 0) {
				continue;
			}
			String str = f->token.string;
			if (field_name == str) {
				selection_add_index(&sel, i); // HACK(bill): Leaky memory
				sel.entity = f;
				return sel;
			}
		}
	} else if (type->kind == Type_Basic) {
		switch (type->Basic.kind) {
		case Basic_any: {
		#if 1
			// IMPORTANT TODO(bill): Should these members be available, or should I only allow them with
			// `Raw_Any` type?
			String data_str = str_lit("data");
			String id_str = str_lit("id");
			gb_local_persist Entity *entity__any_data = alloc_entity_field(nullptr, make_token_ident(data_str), t_rawptr, false, 0);
			gb_local_persist Entity *entity__any_id = alloc_entity_field(nullptr, make_token_ident(id_str), t_typeid, false, 1);

			if (field_name == data_str) {
				selection_add_index(&sel, 0);
				sel.entity = entity__any_data;
				return sel;
			} else if (field_name == id_str) {
				selection_add_index(&sel, 1);
				sel.entity = entity__any_id;
				return sel;
			}
		#endif
		} break;
		}

		return sel;
	} else if (type->kind == Type_Array) {
		if (type->Array.count <= 4) {
			// HACK(bill): Memory leak
			switch (type->Array.count) {
			#define _ARRAY_FIELD_CASE(_length, _name) \
			case (_length): \
				if (field_name == _name) { \
					selection_add_index(&sel, (_length)-1); \
					sel.entity = alloc_entity_array_elem(nullptr, make_token_ident(str_lit(_name)), type->Array.elem, (_length)-1); \
					return sel; \
				} \
				/*fallthrough*/

			_ARRAY_FIELD_CASE(4, "w");
			_ARRAY_FIELD_CASE(3, "z");
			_ARRAY_FIELD_CASE(2, "y");
			_ARRAY_FIELD_CASE(1, "x");
			default: break;
			#undef _ARRAY_FIELD_CASE
			}
		}
	} else if (type->kind == Type_DynamicArray) {
		// IMPORTANT TODO(bill): Should these members be available, or should I only allow them with
		// `Raw_Dynamic_Array` type?
		GB_ASSERT(t_allocator != nullptr);
		String allocator_str = str_lit("allocator");
		gb_local_persist Entity *entity__allocator = alloc_entity_field(nullptr, make_token_ident(allocator_str), t_allocator, false, 3);

		if (field_name == allocator_str) {
			selection_add_index(&sel, 3);
			sel.entity = entity__allocator;
			return sel;
		}
	} else if (type->kind == Type_Map) {
		// IMPORTANT TODO(bill): Should these members be available, or should I only allow them with
		// `Raw_Map` type?
		GB_ASSERT(t_allocator != nullptr);
		String allocator_str = str_lit("allocator");
		gb_local_persist Entity *entity__allocator = alloc_entity_field(nullptr, make_token_ident(allocator_str), t_allocator, false, 3);

		if (field_name == allocator_str) {
			selection_add_index(&sel, 1);
			selection_add_index(&sel, 3);
			sel.entity = entity__allocator;
			return sel;
		}
	}

	return sel;
}

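// A minimal usage sketch (hypothetical caller, names assumed): resolving
// `x.allocator` on a value of dynamic-array type would go through
//
//     Selection sel = lookup_field(type_of_x, str_lit("allocator"), false);
//     if (sel.entity != nullptr) { /* sel.index holds the field path */ }
//
// with `is_type` set to true instead when the operand is a type (for
// example an enum name) rather than a value.
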
// IMPORTANT TODO(bill): Should this TypePath code be removed since type cycle checking is handled much earlier on?
struct TypePath {
	Array<Entity *> path; // Entity_TypeName;
	bool failure;
};

void type_path_init(TypePath *tp) {
	tp->path.allocator = heap_allocator();
}

void type_path_free(TypePath *tp) {
	array_free(&tp->path);
}

void type_path_print_illegal_cycle(TypePath *tp, isize start_index) {
	GB_ASSERT(tp != nullptr);
	GB_ASSERT(start_index < tp->path.count);
	Entity *e = tp->path[start_index];
	GB_ASSERT(e != nullptr);
	error(e->token, "Illegal declaration cycle of `%.*s`", LIT(e->token.string));
	// NOTE(bill): Print cycle, if it's deep enough
	for (isize j = start_index; j < tp->path.count; j++) {
		Entity *e = tp->path[j];
		error(e->token, "\t%.*s refers to", LIT(e->token.string));
	}
	// NOTE(bill): This will only print if the path count > 1
	error(e->token, "\t%.*s", LIT(e->token.string));
	tp->failure = true;
	e->type->failure = true;
	base_type(e->type)->failure = true;
}

bool type_path_push(TypePath *tp, Type *t) {
	GB_ASSERT(tp != nullptr);
	if (t->kind != Type_Named) {
		return false;
	}
	Entity *e = t->Named.type_name;
	for (isize i = 0; i < tp->path.count; i++) {
		Entity *p = tp->path[i];
		if (p == e) {
			type_path_print_illegal_cycle(tp, i);
		}
	}
	array_add(&tp->path, e);
	return true;
}

void type_path_pop(TypePath *tp) {
	if (tp != nullptr && tp->path.count > 0) {
		array_pop(&tp->path);
	}
}

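// NOTE: The intended pairing, as used throughout the size/alignment code
// below, is:
//
//     bool pop = type_path_push(path, t);
//     if (path->failure) return ...;   // a cycle has been reported
//     /* ... recurse ... */
//     if (pop) type_path_pop(path);
//
// i.e. only pop when the matching push actually added a Type_Named entry.
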
#define FAILURE_SIZE      0
#define FAILURE_ALIGNMENT 0

i64 type_size_of_internal (Type *t, TypePath *path);
i64 type_align_of_internal(Type *t, TypePath *path);

i64 type_size_of(Type *t) {
	if (t == nullptr) {
		return 0;
	}
	// NOTE(bill): Always calculate the size when it is a Type_Basic
	if (t->kind != Type_Basic && t->cached_size >= 0) {
		return t->cached_size;
	}
	TypePath path = {0};
	type_path_init(&path);
	t->cached_size = type_size_of_internal(t, &path);
	type_path_free(&path);
	return t->cached_size;
}

i64 type_align_of(Type *t) {
	if (t == nullptr) {
		return 1;
	}
	// NOTE(bill): Always calculate the alignment when it is a Type_Basic
	if (t->kind != Type_Basic && t->cached_align > 0) {
		return t->cached_align;
	}
	TypePath path = {0};
	type_path_init(&path);
	t->cached_align = type_align_of_internal(t, &path);
	type_path_free(&path);
	return t->cached_align;
}

i64 type_align_of_internal(Type *t, TypePath *path) {
	GB_ASSERT(path != nullptr);
	if (t->failure) {
		return FAILURE_ALIGNMENT;
	}
	t = base_type(t);

	switch (t->kind) {
	case Type_Basic: {
		GB_ASSERT(is_type_typed(t));
		switch (t->Basic.kind) {
		case Basic_string:  return build_context.word_size;
		case Basic_cstring: return build_context.word_size;
		case Basic_any:     return build_context.word_size;
		case Basic_typeid:  return build_context.word_size;

		case Basic_int: case Basic_uint: case Basic_uintptr: case Basic_rawptr:
			return build_context.word_size;

		case Basic_complex64: case Basic_complex128:
			return type_size_of_internal(t, path) / 2;
		}
	} break;

	case Type_Array: {
		Type *elem = t->Array.elem;
		bool pop = type_path_push(path, elem);
		if (path->failure) {
			return FAILURE_ALIGNMENT;
		}
		i64 align = type_align_of_internal(t->Array.elem, path);
		if (pop) type_path_pop(path);
		return align;
	}

	case Type_Opaque:
		return type_align_of_internal(t->Opaque.elem, path);

	case Type_DynamicArray:
		// data, count, capacity, allocator
		return build_context.word_size;

	case Type_Slice:
		return build_context.word_size;

	case Type_Tuple: {
		i64 max = 1;
		for_array(i, t->Tuple.variables) {
			i64 align = type_align_of_internal(t->Tuple.variables[i]->type, path);
			if (max < align) {
				max = align;
			}
		}
		return max;
	} break;

	case Type_Map:
		init_map_internal_types(t);
		return type_align_of_internal(t->Map.internal_type, path);

	case Type_Enum:
		return type_align_of_internal(t->Enum.base_type, path);

	case Type_Union: {
		if (t->Union.variants.count == 0) {
			return 1;
		}
		if (t->Union.custom_align > 0) {
			return gb_clamp(t->Union.custom_align, 1, build_context.max_align);
		}

		i64 max = 1;
		for_array(i, t->Union.variants) {
			Type *variant = t->Union.variants[i];
			bool pop = type_path_push(path, variant);
			if (path->failure) {
				return FAILURE_ALIGNMENT;
			}
			i64 align = type_align_of_internal(variant, path);
			if (pop) type_path_pop(path);
			if (max < align) {
				max = align;
			}
		}
		return max;
	} break;

	case Type_Struct: {
		if (t->Struct.custom_align > 0) {
			return gb_clamp(t->Struct.custom_align, 1, build_context.max_align);
		}
		if (t->Struct.is_raw_union) {
			i64 max = 1;
			for_array(i, t->Struct.fields) {
				Type *field_type = t->Struct.fields[i]->type;
				bool pop = type_path_push(path, field_type);
				if (path->failure) {
					return FAILURE_ALIGNMENT;
				}
				i64 align = type_align_of_internal(field_type, path);
				if (pop) type_path_pop(path);
				if (max < align) {
					max = align;
				}
			}
			return max;
		} else if (t->Struct.fields.count > 0) {
			i64 max = 1;
			// NOTE(bill): Check the fields to check for cyclic definitions
			for_array(i, t->Struct.fields) {
				Type *field_type = t->Struct.fields[i]->type;
				bool pop = type_path_push(path, field_type);
				if (path->failure) return FAILURE_ALIGNMENT;
				i64 align = type_align_of_internal(field_type, path);
				if (pop) type_path_pop(path);
				if (max < align) {
					max = align;
				}
			}
			if (t->Struct.is_packed) {
				return 1;
			}
			return max;
		}
	} break;

	case Type_BitField: {
		i64 align = 1;
		if (t->BitField.custom_align > 0) {
			align = t->BitField.custom_align;
		}
		return gb_clamp(next_pow2(align), 1, build_context.max_align);
	} break;

	case Type_BitSet: {
		if (t->BitSet.underlying != nullptr) {
			return type_align_of(t->BitSet.underlying);
		}
		i64 bits = t->BitSet.upper - t->BitSet.lower + 1;
		if (bits <= 8)  return 1;
		if (bits <= 16) return 2;
		if (bits <= 32) return 4;
		if (bits <= 64) return 8;
		return 8; // NOTE(bill): Could be an invalid range so limit it for now
	}
	}

	// return gb_clamp(next_pow2(type_size_of(t)), 1, build_context.max_align);
	// NOTE(bill): Things that are bigger than build_context.word_size, are actually comprised of smaller types
	// TODO(bill): Is this correct for 128-bit types (integers)?
	return gb_clamp(next_pow2(type_size_of_internal(t, path)), 1, build_context.word_size);
}

Array<i64> type_set_offsets_of(Array<Entity *> fields, bool is_packed, bool is_raw_union) {
	gbAllocator a = heap_allocator();
	auto offsets = array_make<i64>(a, fields.count);
	i64 curr_offset = 0;
	if (is_raw_union) {
		for_array(i, fields) {
			offsets[i] = 0;
		}
	} else if (is_packed) {
		for_array(i, fields) {
			i64 size = type_size_of(fields[i]->type);
			offsets[i] = curr_offset;
			curr_offset += size;
		}
	} else {
		for_array(i, fields) {
			Type *t = fields[i]->type;
			i64 align = gb_max(type_align_of(t), 1);
			i64 size  = gb_max(type_size_of( t), 0);
			curr_offset = align_formula(curr_offset, align);
			offsets[i] = curr_offset;
			curr_offset += size;
		}
	}
	return offsets;
}

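// NOTE: Worked example for the non-packed path above (pure arithmetic, not a
// guarantee about any particular target): fields {u8, u32, u16} with sizes
// 1/4/2 and alignments 1/4/2 get offsets 0, 4 and 8, because the running
// offset 1 is rounded up to 4 by align_formula before the u32 is placed.
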
bool type_set_offsets(Type *t) {
	t = base_type(t);
	if (t->kind == Type_Struct) {
		if (!t->Struct.are_offsets_set) {
			t->Struct.are_offsets_being_processed = true;
			t->Struct.offsets = type_set_offsets_of(t->Struct.fields, t->Struct.is_packed, t->Struct.is_raw_union);
			t->Struct.are_offsets_being_processed = false;
			t->Struct.are_offsets_set = true;
			return true;
		}
	} else if (is_type_tuple(t)) {
		if (!t->Tuple.are_offsets_set) {
			t->Struct.are_offsets_being_processed = true;
			t->Tuple.offsets = type_set_offsets_of(t->Tuple.variables, false, false);
			t->Struct.are_offsets_being_processed = false;
			t->Tuple.are_offsets_set = true;
			return true;
		}
	} else {
		GB_PANIC("Invalid type for setting offsets");
	}
	return false;
}

i64 type_size_of_internal(Type *t, TypePath *path) {
	if (t->failure) {
		return FAILURE_SIZE;
	}

	switch (t->kind) {
	case Type_Named: {
		bool pop = type_path_push(path, t);
		if (path->failure) {
			return FAILURE_SIZE;
		}
		i64 size = type_size_of_internal(t->Named.base, path);
		if (pop) type_path_pop(path);
		return size;
	} break;

	case Type_Basic: {
		GB_ASSERT_MSG(is_type_typed(t), "%s", type_to_string(t));
		BasicKind kind = t->Basic.kind;
		i64 size = t->Basic.size;
		if (size > 0) {
			return size;
		}
		switch (kind) {
		case Basic_string:  return 2*build_context.word_size;
		case Basic_cstring: return build_context.word_size;
		case Basic_any:     return 2*build_context.word_size;
		case Basic_typeid:  return build_context.word_size;

		case Basic_int: case Basic_uint: case Basic_uintptr: case Basic_rawptr:
			return build_context.word_size;
		}
	} break;

	case Type_Pointer:
		return build_context.word_size;

	case Type_Opaque:
		return type_size_of_internal(t->Opaque.elem, path);

	case Type_Array: {
		i64 count, align, size, alignment;
		count = t->Array.count;
		if (count == 0) {
			return 0;
		}
		align = type_align_of_internal(t->Array.elem, path);
		if (path->failure) {
			return FAILURE_SIZE;
		}
		size = type_size_of_internal( t->Array.elem, path);
		alignment = align_formula(size, align);
		return alignment*(count-1) + size;
	} break;

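	// NOTE: Worked example of the array formula above (arithmetic only, for a
	// hypothetical element with size 6 and alignment 4): the padded stride is
	// align_formula(6, 4) = 8, so a 3-element array occupies 8*(3-1) + 6 = 22
	// bytes; the final element does not pay for its trailing padding.
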
	case Type_Slice: // ptr + len
		return 2 * build_context.word_size;

	case Type_DynamicArray:
		// data + len + cap + allocator(procedure+data)
		return 3*build_context.word_size + 2*build_context.word_size;

	case Type_Map:
		init_map_internal_types(t);
		return type_size_of_internal(t->Map.internal_type, path);

	case Type_Tuple: {
		i64 count, align, size;
		count = t->Tuple.variables.count;
		if (count == 0) {
			return 0;
		}
		align = type_align_of_internal(t, path);
		type_set_offsets(t);
		size = t->Tuple.offsets[cast(isize)count-1] + type_size_of_internal(t->Tuple.variables[cast(isize)count-1]->type, path);
		return align_formula(size, align);
	} break;

	case Type_Enum:
		return type_size_of_internal(t->Enum.base_type, path);

	case Type_Union: {
		if (t->Union.variants.count == 0) {
			return 0;
		}
		i64 align = type_align_of_internal(t, path);
		if (path->failure) {
			return FAILURE_SIZE;
		}

		i64 max = 0;
		i64 field_size = 0;

		for_array(i, t->Union.variants) {
			Type *variant_type = t->Union.variants[i];
			i64 size = type_size_of_internal(variant_type, path);
			if (max < size) {
				max = size;
			}
		}

		// NOTE(bill): Align to tag
		i64 tag_size = union_tag_size(t);
		i64 size = align_formula(max, tag_size);
		// NOTE(bill): Calculate the padding between the common fields and the tag
		t->Union.tag_size = tag_size;
		t->Union.variant_block_size = size - field_size;

		return align_formula(size + tag_size, align);
	} break;

	case Type_Struct: {
		if (t->Struct.is_raw_union) {
			i64 count = t->Struct.fields.count;
			i64 align = type_align_of_internal(t, path);
			if (path->failure) {
				return FAILURE_SIZE;
			}
			i64 max = 0;
			for (isize i = 0; i < count; i++) {
				i64 size = type_size_of_internal(t->Struct.fields[i]->type, path);
				if (max < size) {
					max = size;
				}
			}
			// TODO(bill): Is this how it should work?
			return align_formula(max, align);
		} else {
			i64 count = 0, size = 0, align = 0;

			count = t->Struct.fields.count;
			if (count == 0) {
				return 0;
			}
			align = type_align_of_internal(t, path);
			if (path->failure) {
				return FAILURE_SIZE;
			}
			if (t->Struct.are_offsets_being_processed && t->Struct.offsets.data == nullptr) {
				type_path_print_illegal_cycle(path, path->path.count-1);
				return FAILURE_SIZE;
			}
			type_set_offsets(t);
			size = t->Struct.offsets[cast(isize)count-1] + type_size_of_internal(t->Struct.fields[cast(isize)count-1]->type, path);
			return align_formula(size, align);
		}
	} break;

	case Type_BitField: {
		i64 align = 8*type_align_of_internal(t, path);
		i64 end = 0;
		if (t->BitField.fields.count > 0) {
			i64 last = t->BitField.fields.count-1;
			end = t->BitField.offsets[cast(isize)last] + t->BitField.sizes[cast(isize)last];
		}
		i64 bits = align_formula(end, align);
		GB_ASSERT((bits%8) == 0);
		return bits/8;
	} break;

	case Type_BitSet: {
		if (t->BitSet.underlying != nullptr) {
			return type_size_of(t->BitSet.underlying);
		}
		i64 bits = t->BitSet.upper - t->BitSet.lower + 1;
		if (bits <= 8)  return 1;
		if (bits <= 16) return 2;
		if (bits <= 32) return 4;
		if (bits <= 64) return 8;
		return 8; // NOTE(bill): Could be an invalid range so limit it for now
	}
	}

	// Catch all
	return build_context.word_size;
}

i64 type_offset_of(Type *t, i32 index) {
	t = base_type(t);
	if (t->kind == Type_Struct) {
		type_set_offsets(t);
		if (gb_is_between(index, 0, t->Struct.fields.count-1)) {
			return t->Struct.offsets[index];
		}
	} else if (t->kind == Type_Tuple) {
		type_set_offsets(t);
		if (gb_is_between(index, 0, t->Tuple.variables.count-1)) {
			return t->Tuple.offsets[index];
		}
	} else if (t->kind == Type_Basic) {
		if (t->Basic.kind == Basic_string) {
			switch (index) {
			case 0: return 0;                       // data
			case 1: return build_context.word_size; // len
			}
		} else if (t->Basic.kind == Basic_any) {
			switch (index) {
			case 0: return 0;                       // type_info
			case 1: return build_context.word_size; // data
			}
		}
	} else if (t->kind == Type_Slice) {
		switch (index) {
		case 0: return 0;                         // data
		case 1: return 1*build_context.word_size; // len
		case 2: return 2*build_context.word_size; // cap
		}
	} else if (t->kind == Type_DynamicArray) {
		switch (index) {
		case 0: return 0;                         // data
		case 1: return 1*build_context.word_size; // len
		case 2: return 2*build_context.word_size; // cap
		case 3: return 3*build_context.word_size; // allocator
		}
	} else if (t->kind == Type_Union) {
		/* i64 s = */ type_size_of(t);
		switch (index) {
		case -1: return align_formula(t->Union.variant_block_size, build_context.word_size); // __type_info
		}
	}
	return 0;
}

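// NOTE: Small illustration of the struct path above (hypothetical type,
// assuming an 8-byte word size): for `struct {a: u8, b: u32, c: ^int}`,
// type_offset_of returns 0 for index 0, 4 for index 1 and 8 for index 2,
// which are exactly the offsets computed by type_set_offsets_of.
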
i64 type_offset_of_from_selection(Type *type, Selection sel) {
	GB_ASSERT(sel.indirect == false);

	Type *t = type;
	i64 offset = 0;
	for_array(i, sel.index) {
		i32 index = sel.index[i];
		t = base_type(t);
		offset += type_offset_of(t, index);
		if (t->kind == Type_Struct && !t->Struct.is_raw_union) {
			t = t->Struct.fields[index]->type;
		} else {
			// NOTE(bill): No need to worry about custom types, just need the alignment
			switch (t->kind) {
			case Type_Basic:
				if (t->Basic.kind == Basic_string) {
					switch (index) {
					case 0: t = t_rawptr; break;
					case 1: t = t_int;    break;
					}
				} else if (t->Basic.kind == Basic_any) {
					switch (index) {
					case 0: t = t_type_info_ptr; break;
					case 1: t = t_rawptr;        break;
					}
				}
				break;
			case Type_Slice:
				switch (index) {
				case 0: t = t_rawptr; break;
				case 1: t = t_int;    break;
				case 2: t = t_int;    break;
				}
				break;
			case Type_DynamicArray:
				switch (index) {
				case 0: t = t_rawptr;    break;
				case 1: t = t_int;       break;
				case 2: t = t_int;       break;
				case 3: t = t_allocator; break;
				}
				break;
			}
		}
	}
	return offset;
}

gbString write_type_to_string(gbString str, Type *type) {
	if (type == nullptr) {
		return gb_string_appendc(str, "<no type>");
	}

	switch (type->kind) {
	case Type_Basic:
		str = gb_string_append_length(str, type->Basic.name.text, type->Basic.name.len);
		break;

	case Type_Generic:
		if (type->Generic.name.len == 0) {
			str = gb_string_appendc(str, "type");
		} else {
			String name = type->Generic.name;
			str = gb_string_append_rune(str, '$');
			str = gb_string_append_length(str, name.text, name.len);
			if (type->Generic.specialized != nullptr) {
				str = gb_string_append_rune(str, '/');
				str = write_type_to_string(str, type->Generic.specialized);
			}
		}
		break;

	case Type_Pointer:
		str = gb_string_append_rune(str, '^');
		str = write_type_to_string(str, type->Pointer.elem);
		break;

	case Type_Opaque:
		str = gb_string_appendc(str, "opaque ");
		str = write_type_to_string(str, type->Opaque.elem);
		break;

	case Type_Array:
		str = gb_string_appendc(str, gb_bprintf("[%d]", cast(int)type->Array.count));
		str = write_type_to_string(str, type->Array.elem);
		break;

	case Type_Slice:
		str = gb_string_appendc(str, "[]");
		str = write_type_to_string(str, type->Array.elem);
		break;

	case Type_DynamicArray:
		str = gb_string_appendc(str, "[dynamic]");
		str = write_type_to_string(str, type->DynamicArray.elem);
		break;

	case Type_Enum:
		str = gb_string_appendc(str, "enum");
		if (type->Enum.base_type != nullptr) {
			str = gb_string_appendc(str, " ");
			str = write_type_to_string(str, type->Enum.base_type);
		}
		str = gb_string_appendc(str, " {");
		for_array(i, type->Enum.fields) {
			Entity *f = type->Enum.fields[i];
			GB_ASSERT(f->kind == Entity_Constant);
			if (i > 0) {
				str = gb_string_appendc(str, ", ");
			}
			str = gb_string_append_length(str, f->token.string.text, f->token.string.len);
			// str = gb_string_appendc(str, " = ");
		}
		str = gb_string_append_rune(str, '}');
		break;

	case Type_Union:
		str = gb_string_appendc(str, "union {");
		for_array(i, type->Union.variants) {
			Type *t = type->Union.variants[i];
			if (i > 0) str = gb_string_appendc(str, ", ");
			str = write_type_to_string(str, t);
		}
		str = gb_string_append_rune(str, '}');
		break;

	case Type_Struct: {
		str = gb_string_appendc(str, "struct");
		if (type->Struct.is_packed)    str = gb_string_appendc(str, " #packed");
		if (type->Struct.is_raw_union) str = gb_string_appendc(str, " #raw_union");
		str = gb_string_appendc(str, " {");
		for_array(i, type->Struct.fields) {
			Entity *f = type->Struct.fields[i];
			GB_ASSERT(f->kind == Entity_Variable);
			if (i > 0) {
				str = gb_string_appendc(str, ", ");
			}
			str = gb_string_append_length(str, f->token.string.text, f->token.string.len);
			str = gb_string_appendc(str, ": ");
			str = write_type_to_string(str, f->type);
		}
		str = gb_string_append_rune(str, '}');
	} break;

	case Type_Map: {
		str = gb_string_appendc(str, "map[");
		str = write_type_to_string(str, type->Map.key);
		str = gb_string_append_rune(str, ']');
		str = write_type_to_string(str, type->Map.value);
	} break;

	case Type_Named:
		if (type->Named.type_name != nullptr) {
			str = gb_string_append_length(str, type->Named.name.text, type->Named.name.len);
		} else {
			// NOTE(bill): Just in case
			str = gb_string_appendc(str, "<named type>");
		}
		break;

	case Type_Tuple:
		if (type->Tuple.variables.count > 0) {
			isize comma_index = 0;
			for_array(i, type->Tuple.variables) {
				Entity *var = type->Tuple.variables[i];
				if (var != nullptr) {
					if (var->kind == Entity_Constant) {
						// Ignore
						continue;
					}
					if (comma_index++ > 0) {
						str = gb_string_appendc(str, ", ");
					}

					if (var->kind == Entity_Variable) {
						if (var->flags&EntityFlag_CVarArg) {
							str = gb_string_appendc(str, "#c_vararg ");
						}
						if (var->flags&EntityFlag_Ellipsis) {
							Type *slice = base_type(var->type);
							str = gb_string_appendc(str, "..");
							GB_ASSERT(var->type->kind == Type_Slice);
							str = write_type_to_string(str, slice->Slice.elem);
						} else {
							str = write_type_to_string(str, var->type);
						}
					} else {
						GB_ASSERT(var->kind == Entity_TypeName);
						if (var->type->kind == Type_Generic) {
							str = gb_string_appendc(str, "type/");
							str = write_type_to_string(str, var->type);
						} else {
							str = gb_string_appendc(str, "type");
						}
					}
				}
			}
		}
		break;

	case Type_Proc:
		str = gb_string_appendc(str, "proc");

		switch (type->Proc.calling_convention) {
		case ProcCC_Odin:
			break;
		case ProcCC_Contextless:
			str = gb_string_appendc(str, " \"contextless\" ");
			break;
		case ProcCC_CDecl:
			str = gb_string_appendc(str, " \"cdecl\" ");
			break;
		case ProcCC_StdCall:
			str = gb_string_appendc(str, " \"stdcall\" ");
			break;
		case ProcCC_FastCall:
			str = gb_string_appendc(str, " \"fastcall\" ");
			break;
		// case ProcCC_VectorCall:
		// 	str = gb_string_appendc(str, " \"vectorcall\" ");
		// 	break;
		// case ProcCC_ClrCall:
		// 	str = gb_string_appendc(str, " \"clrcall\" ");
		// 	break;
		}

		str = gb_string_appendc(str, "(");
		if (type->Proc.params) {
			str = write_type_to_string(str, type->Proc.params);
		}
		str = gb_string_appendc(str, ")");
		if (type->Proc.results) {
			str = gb_string_appendc(str, " -> ");
			str = write_type_to_string(str, type->Proc.results);
		}
		break;

	case Type_BitField:
		str = gb_string_appendc(str, "bit_field ");
		if (type->BitField.custom_align != 0) {
			str = gb_string_append_fmt(str, "#align %d ", cast(int)type->BitField.custom_align);
		}
		str = gb_string_append_rune(str, '{');
		for_array(i, type->BitField.fields) {
			Entity *f = type->BitField.fields[i];
			GB_ASSERT(f->kind == Entity_Variable);
			GB_ASSERT(f->type != nullptr && f->type->kind == Type_BitFieldValue);
			str = gb_string_append_rune(str, '{');
			if (i > 0) {
				str = gb_string_appendc(str, ", ");
			}
			str = gb_string_append_length(str, f->token.string.text, f->token.string.len);
			str = gb_string_appendc(str, ": ");
			str = gb_string_append_fmt(str, "%lld", cast(long long)f->type->BitFieldValue.bits);
		}
		str = gb_string_append_rune(str, '}');
		break;

	case Type_BitFieldValue:
		str = gb_string_append_fmt(str, "(bit field value with %d bits)", cast(int)type->BitFieldValue.bits);
		break;

	case Type_BitSet:
		str = gb_string_appendc(str, "bit_set[");
		str = write_type_to_string(str, type->BitSet.elem);
		str = gb_string_appendc(str, "]");
		break;
	}

	return str;
}

gbString type_to_string(Type *type) {
	return write_type_to_string(gb_string_make(heap_allocator(), ""), type);
}
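
// Minimal usage sketch (hypothetical call site; gb_string_free is the gb
// library's matching deallocation for gb_string_make):
//
//     gbString s = type_to_string(some_type);
//     error(token, "Cannot convert to `%s`", s);
//     gb_string_free(s);
//
// Callers own the returned gbString, since type_to_string allocates a fresh
// string from the heap allocator on every call.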