Browse Source

`map` is internally backed by a pointer (i.e. a "reference type")

gingerBill 7 years ago
parent
commit
a69ea58388
7 changed files with 156 additions and 93 deletions
  1. core/_preload.odin (+36, −9)
  2. core/fmt.odin (+20, −20)
  3. core/raw.odin (+5, −1)
  4. src/check_type.cpp (+2, −1)
  5. src/ir.cpp (+80, −52)
  6. src/ir_print.cpp (+2, −2)
  7. src/types.cpp (+11, −8)

+ 36 - 9
core/_preload.odin

@@ -487,9 +487,13 @@ free_slice :: proc(array: $T/[]$E, loc := #caller_location) {
 	free_ptr(raw.data(array), loc);
 }
 free_map :: proc(m: $T/map[$K]$V, loc := #caller_location) {
-	raw := cast(^raw.Map)&m;
-	free_dynamic_array(raw.hashes, loc);
-	free_ptr(raw.entries.data, loc);
+	raw := transmute(raw.Map)m;
+	if raw.internal != nil {
+		free_dynamic_array(raw.hashes, loc);
+		free_ptr(raw.entries.data, loc);
+		free_ptr(raw.internal, loc);
+		raw.internal = nil;
+	}
 }
 
 free :: proc[
@@ -911,14 +915,22 @@ __default_hash :: proc(data: []byte) -> u128 {
 }
 __default_hash_string :: proc(s: string) -> u128 do return __default_hash(cast([]byte)s);
 
+__dynamic_map_check_init :: proc(h: __Map_Header) {
+	if h.m.internal == nil {
+		h.m.internal = new(raw.Map_Internal);
+	}
+}
+
 __dynamic_map_reserve :: proc(using header: __Map_Header, cap: int, loc := #caller_location)  {
+	__dynamic_map_check_init(header);
 	__dynamic_array_reserve(&m.hashes, size_of(int), align_of(int), cap, loc);
 	__dynamic_array_reserve(&m.entries, entry_size, entry_align,    cap, loc);
 }
-
 __dynamic_map_rehash :: proc(using header: __Map_Header, new_count: int, loc := #caller_location) {
+	__dynamic_map_check_init(header);
 	new_header: __Map_Header = header;
 	nm: raw.Map;
+	nm.internal = new(raw.Map_Internal);
 	new_header.m = &nm;
 
 	header_hashes := cast(^raw.Dynamic_Array)&header.m.hashes;
@@ -956,6 +968,7 @@ __dynamic_map_rehash :: proc(using header: __Map_Header, new_count: int, loc :=
 }
 
 __dynamic_map_get :: proc(h: __Map_Header, key: __Map_Key) -> rawptr {
+	__dynamic_map_check_init(h);
 	index := __dynamic_map_find(h, key).entry_index;
 	if index >= 0 {
 		data := cast(^byte)__dynamic_map_get_entry(h, index);
@@ -964,11 +977,13 @@ __dynamic_map_get :: proc(h: __Map_Header, key: __Map_Key) -> rawptr {
 	return nil;
 }
 
-__dynamic_map_set :: proc(using h: __Map_Header, key: __Map_Key, value: rawptr, loc := #caller_location) {
+__dynamic_map_set :: proc(h: __Map_Header, key: __Map_Key, value: rawptr, loc := #caller_location) {
+	__dynamic_map_check_init(h);
+
 	index: int;
 	assert(value != nil);
 
-	if len(m.hashes) == 0 {
+	if len(h.m.hashes) == 0 {
 		__dynamic_map_reserve(h, __INITIAL_MAP_CAP, loc);
 		__dynamic_map_grow(h, loc);
 	}
@@ -982,14 +997,14 @@ __dynamic_map_set :: proc(using h: __Map_Header, key: __Map_Key, value: rawptr,
 			entry := __dynamic_map_get_entry(h, fr.entry_prev);
 			entry.next = index;
 		} else {
-			m.hashes[fr.hash_index] = index;
+			h.m.hashes[fr.hash_index] = index;
 		}
 	}
 	{
 		e := __dynamic_map_get_entry(h, index);
 		e.key = key;
-		val := cast(^byte)e + value_offset;
-		__mem_copy(val, value, value_size);
+		val := cast(^byte)(uintptr(e) + h.value_offset);
+		__mem_copy(val, value, h.value_size);
 	}
 
 	if __dynamic_map_full(h) {
@@ -999,11 +1014,13 @@ __dynamic_map_set :: proc(using h: __Map_Header, key: __Map_Key, value: rawptr,
 
 
 __dynamic_map_grow :: proc(using h: __Map_Header, loc := #caller_location) {
+	__dynamic_map_check_init(h);
 	new_count := max(2*m.entries.cap + 8, __INITIAL_MAP_CAP);
 	__dynamic_map_rehash(h, new_count, loc);
 }
 
 __dynamic_map_full :: inline proc(using h: __Map_Header) -> bool {
+	__dynamic_map_check_init(h);
 	return int(0.75 * f64(len(m.hashes))) <= m.entries.cap;
 }
 
@@ -1017,6 +1034,7 @@ __dynamic_map_hash_equal :: proc(h: __Map_Header, a, b: __Map_Key) -> bool {
 }
 
 __dynamic_map_find :: proc(using h: __Map_Header, key: __Map_Key) -> __Map_Find_Result {
+	__dynamic_map_check_init(h);
 	fr := __Map_Find_Result{-1, -1, -1};
 	if len(m.hashes) > 0 {
 		fr.hash_index = int(key.hash % u128(len(m.hashes)));
@@ -1032,6 +1050,7 @@ __dynamic_map_find :: proc(using h: __Map_Header, key: __Map_Key) -> __Map_Find_
 }
 
 __dynamic_map_add_entry :: proc(using h: __Map_Header, key: __Map_Key, loc := #caller_location) -> int {
+	__dynamic_map_check_init(h);
 	prev := m.entries.len;
 	c := __dynamic_array_append_nothing(&m.entries, entry_size, entry_align, loc);
 	if c != prev {
@@ -1043,6 +1062,7 @@ __dynamic_map_add_entry :: proc(using h: __Map_Header, key: __Map_Key, loc := #c
 }
 
 __dynamic_map_delete :: proc(using h: __Map_Header, key: __Map_Key) {
+	__dynamic_map_check_init(h);
 	fr := __dynamic_map_find(h, key);
 	if fr.entry_index >= 0 {
 		__dynamic_map_erase(h, fr);
@@ -1050,10 +1070,13 @@ __dynamic_map_delete :: proc(using h: __Map_Header, key: __Map_Key) {
 }
 
 __dynamic_map_get_entry :: proc(using h: __Map_Header, index: int) -> ^__Map_Entry_Header {
+	__dynamic_map_check_init(h);
+	assert(0 <= index && index < m.entries.len);
 	return cast(^__Map_Entry_Header)(cast(^byte)m.entries.data + index*entry_size);
 }
 
 __dynamic_map_erase :: proc(using h: __Map_Header, fr: __Map_Find_Result) {
+	__dynamic_map_check_init(h);
 	if fr.entry_prev < 0 {
 		m.hashes[fr.hash_index] = __dynamic_map_get_entry(h, fr.entry_index).next;
 	} else {
@@ -1071,3 +1094,7 @@ __dynamic_map_erase :: proc(using h: __Map_Header, fr: __Map_Find_Result) {
 		m.hashes[last.hash_index] = fr.entry_index;
 	}
 }
+
+__map_for_test :: proc "c" (s: string) {
+	fmt.printf("__map_for_test '%s'\n", s);
+}

+ 20 - 20
core/fmt.odin

@@ -815,33 +815,33 @@ fmt_value :: proc(fi: ^Fmt_Info, v: any, verb: rune) {
 		write_string(fi.buf, "map[");
 		defer write_byte(fi.buf, ']');
 
-		entries    := &(^raw.Map)(v.data).entries;
-		gs         := type_info_base(info.generated_struct).variant.(Type_Info_Struct);
-		ed         := type_info_base(gs.types[1]).variant.(Type_Info_Dynamic_Array);
-		entry_type := ed.elem.variant.(Type_Info_Struct);
-		entry_size := ed.elem_size;
+		if (^raw.Map)(v.data).internal != nil {
+			entries    := &(^raw.Map)(v.data).entries;
+			gs         := type_info_base(info.generated_struct).variant.(Type_Info_Struct);
+			ed         := type_info_base(gs.types[1]).variant.(Type_Info_Dynamic_Array);
+			entry_type := ed.elem.variant.(Type_Info_Struct);
+			entry_size := ed.elem_size;
 
-		for i in 0..entries.len {
-			if i > 0 do write_string(fi.buf, ", ");
+			for i in 0..entries.len {
+				if i > 0 do write_string(fi.buf, ", ");
 
-			data := uintptr(entries.data) + uintptr(i*entry_size);
-			header := cast(^__Map_Entry_Header)data;
+				data := uintptr(entries.data) + uintptr(i*entry_size);
+				header := cast(^__Map_Entry_Header)data;
 
-			if types.is_string(info.key) {
-				write_string(fi.buf, header.key.str);
-			} else {
-				fi := Fmt_Info{buf = fi.buf};
-				fmt_arg(&fi, any{rawptr(&header.key.hash), info.key}, 'v');
-			}
+				if types.is_string(info.key) {
+					write_string(fi.buf, header.key.str);
+				} else {
+					fi := Fmt_Info{buf = fi.buf};
+					fmt_arg(&fi, any{rawptr(&header.key.hash), info.key}, 'v');
+				}
 
-			write_string(fi.buf, "=");
+				write_string(fi.buf, "=");
 
-			value := data + entry_type.offsets[2];
-			fmt_arg(fi, any{rawptr(value), info.value}, 'v');
+				value := data + entry_type.offsets[2];
+				fmt_arg(fi, any{rawptr(value), info.value}, 'v');
+			}
 		}
 
-
-
 	case Type_Info_Struct:
 		if info.is_raw_union {
 			write_string(fi.buf, "(raw_union)");

+ 5 - 1
core/raw.odin

@@ -20,11 +20,15 @@ Dynamic_Array :: struct {
 	allocator: Allocator,
 }
 
-Map :: struct {
+Map_Internal :: struct {
 	hashes:  [dynamic]int,
 	entries: Dynamic_Array,
 }
 
+Map :: struct {
+	using internal: ^Map_Internal,
+}
+
 make_any :: inline proc(data: rawptr, type_info: ^Type_Info) -> any {
 	return transmute(any)Any{data, type_info};
 }

+ 2 - 1
src/check_type.cpp

@@ -1844,7 +1844,8 @@ void generate_map_internal_types(gbAllocator a, Type *type) {
 
 	type_set_offsets(a, generated_struct_type);
 	type->Map.generated_struct_type = generated_struct_type;
-	type->Map.lookup_result_type = make_optional_ok_type(a, value);
+	type->Map.internal_type         = make_type_pointer(a, generated_struct_type);
+	type->Map.lookup_result_type    = make_optional_ok_type(a, value);
 }
 
 void check_map_type(Checker *c, Type *type, AstNode *node) {

+ 80 - 52
src/ir.cpp

@@ -1733,8 +1733,8 @@ void ir_emit_startup_runtime(irProcedure *proc) {
 irValue *ir_emit_struct_ep(irProcedure *proc, irValue *s, i32 index);
 irValue *ir_emit_comp(irProcedure *proc, TokenKind op_kind, irValue *left, irValue *right);
 
-irValue *ir_gen_map_header(irProcedure *proc, irValue *map_val, Type *map_type) {
-	GB_ASSERT_MSG(is_type_pointer(ir_type(map_val)), "%s", type_to_string(ir_type(map_val)));
+irValue *ir_gen_map_header(irProcedure *proc, irValue *map_val_ptr, Type *map_type) {
+	GB_ASSERT_MSG(is_type_pointer(ir_type(map_val_ptr)), "%s", type_to_string(ir_type(map_val_ptr)));
 	gbAllocator a = proc->module->allocator;
 	irValue *h = ir_add_local_generated(proc, t_map_header);
 	map_type = base_type(map_type);
@@ -1744,7 +1744,7 @@ irValue *ir_gen_map_header(irProcedure *proc, irValue *map_val, Type *map_type)
 
 	// NOTE(bill): Removes unnecessary allocation if split gep
 	irValue *gep0 = ir_emit_struct_ep(proc, h, 0);
-	irValue *m = ir_emit_conv(proc, map_val, type_deref(ir_type(gep0)));
+	irValue *m = ir_emit_conv(proc, map_val_ptr, type_deref(ir_type(gep0)));
 	ir_emit_store(proc, gep0, m);
 
 	if (is_type_string(key_type)) {
@@ -2536,14 +2536,18 @@ irValue *ir_emit_struct_ep(irProcedure *proc, irValue *s, i32 index) {
 		case 2: result_type = t_int_ptr;                                      break;
 		case 3: result_type = t_allocator_ptr;                                break;
 		}
-	} else if (is_type_map(t)) {
+	} /* else if (is_type_map(t)) {
 		generate_map_internal_types(a, t);
+		Type *itp = make_type_pointer(a, t->Map.internal_type);
+		s = ir_emit_load(proc, ir_emit_transmute(proc, s, itp));
+
 		Type *gst = t->Map.generated_struct_type;
+		GB_ASSERT(gst->kind == Type_Struct);
 		switch (index) {
 		case 0: result_type = make_type_pointer(a, gst->Struct.fields[0]->type); break;
 		case 1: result_type = make_type_pointer(a, gst->Struct.fields[1]->type); break;
 		}
-	}else {
+	} */else {
 		GB_PANIC("TODO(bill): struct_gep type: %s, %d", type_to_string(ir_type(s)), index);
 	}
 
@@ -2611,15 +2615,15 @@ irValue *ir_emit_struct_ev(irProcedure *proc, irValue *s, i32 index) {
 		}
 		break;
 
-	case Type_Map: {
-		generate_map_internal_types(a, t);
-		Type *gst = t->Map.generated_struct_type;
-		switch (index) {
-		case 0: result_type = gst->Struct.fields[0]->type; break;
-		case 1: result_type = gst->Struct.fields[1]->type; break;
-		}
-		break;
-	}
+	// case Type_Map: {
+	// 	generate_map_internal_types(a, t);
+	// 	Type *gst = t->Map.generated_struct_type;
+	// 	switch (index) {
+	// 	case 0: result_type = gst->Struct.fields[0]->type; break;
+	// 	case 1: result_type = gst->Struct.fields[1]->type; break;
+	// 	}
+	// 	break;
+	// }
 
 	default:
 		GB_PANIC("TODO(bill): struct_ev type: %s, %d", type_to_string(ir_type(s)), index);
@@ -6192,10 +6196,6 @@ void ir_build_range_indexed(irProcedure *proc, irValue *expr, Type *val_type, ir
 	irBlock *done = nullptr;
 	irBlock *body = nullptr;
 
-	irValue *key = nullptr;
-	if (expr_type->kind == Type_Map) {
-		key = ir_add_local_generated(proc, expr_type->Map.key);
-	}
 
 	irValue *index = ir_add_local_generated(proc, t_int);
 	ir_emit_store(proc, index, ir_const_int(proc->module->allocator, -1));
@@ -6210,6 +6210,7 @@ void ir_build_range_indexed(irProcedure *proc, irValue *expr, Type *val_type, ir
 	body = ir_new_block(proc, nullptr, "for.index.body");
 	done = ir_new_block(proc, nullptr, "for.index.done");
 	if (count == nullptr) {
+		GB_ASSERT(count_ptr != nullptr);
 		count = ir_emit_load(proc, count_ptr);
 	}
 	irValue *cond = ir_emit_comp(proc, Token_Lt, incr, count);
@@ -6217,52 +6218,59 @@ void ir_build_range_indexed(irProcedure *proc, irValue *expr, Type *val_type, ir
 	ir_start_block(proc, body);
 
 	idx = ir_emit_load(proc, index);
-	if (val_type != nullptr) {
-		switch (expr_type->kind) {
-		case Type_Array: {
+	switch (expr_type->kind) {
+	case Type_Array: {
+		if (val_type != nullptr) {
 			val = ir_emit_load(proc, ir_emit_array_ep(proc, expr, idx));
-			break;
 		}
-		case Type_Slice: {
+		break;
+	}
+	case Type_Slice: {
+		if (val_type != nullptr) {
 			irValue *elem = ir_slice_elem(proc, expr);
 			val = ir_emit_load(proc, ir_emit_ptr_offset(proc, elem, idx));
-			break;
 		}
-		case Type_DynamicArray: {
+		break;
+	}
+	case Type_DynamicArray: {
+		if (val_type != nullptr) {
 			irValue *elem = ir_emit_struct_ep(proc, expr, 0);
 			elem = ir_emit_load(proc, elem);
 			val = ir_emit_load(proc, ir_emit_ptr_offset(proc, elem, idx));
-			break;
 		}
-		case Type_Map: {
-			irValue *entries = ir_emit_struct_ep(proc, expr, 1);
-			irValue *elem = ir_emit_struct_ep(proc, entries, 0);
-			elem = ir_emit_load(proc, elem);
+		break;
+	}
+	case Type_Map: {
+		irValue *key = ir_add_local_generated(proc, expr_type->Map.key);
 
-			irValue *entry = ir_emit_ptr_offset(proc, elem, idx);
-			val = ir_emit_load(proc, ir_emit_struct_ep(proc, entry, 2));
+		Type *itp = make_type_pointer(proc->module->allocator, expr_type->Map.internal_type);
+		irValue *data_ptr = ir_emit_transmute(proc, expr, itp);
+		irValue *internal_ptr = ir_emit_load(proc, data_ptr);
 
-			irValue *hash = ir_emit_struct_ep(proc, entry, 0);
-			if (is_type_string(expr_type->Map.key)) {
-				irValue *str = ir_emit_struct_ep(proc, hash, 1);
-				ir_emit_store(proc, key, ir_emit_load(proc, str));
-			} else {
-				irValue *hash_ptr = ir_emit_struct_ep(proc, hash, 0);
-				hash_ptr = ir_emit_conv(proc, hash_ptr, ir_type(key));
-				ir_emit_store(proc, key, ir_emit_load(proc, hash_ptr));
-			}
+		irValue *entries = ir_emit_struct_ep(proc, internal_ptr, 1);
+		irValue *elem = ir_emit_struct_ep(proc, entries, 0);
+		elem = ir_emit_load(proc, elem);
 
+		irValue *entry = ir_emit_ptr_offset(proc, elem, idx);
+		val = ir_emit_load(proc, ir_emit_struct_ep(proc, entry, 2));
 
-			break;
-		}
-		default:
-			GB_PANIC("Cannot do range_indexed of %s", type_to_string(expr_type));
-			break;
+		irValue *hash = ir_emit_struct_ep(proc, entry, 0);
+		if (is_type_string(expr_type->Map.key)) {
+			irValue *str = ir_emit_struct_ep(proc, hash, 1);
+			ir_emit_store(proc, key, ir_emit_load(proc, str));
+		} else {
+			irValue *hash_ptr = ir_emit_struct_ep(proc, hash, 0);
+			hash_ptr = ir_emit_conv(proc, hash_ptr, ir_type(key));
+			ir_emit_store(proc, key, ir_emit_load(proc, hash_ptr));
 		}
-	}
 
-	if (key != nullptr) {
 		idx = ir_emit_load(proc, key);
+
+		break;
+	}
+	default:
+		GB_PANIC("Cannot do range_indexed of %s", type_to_string(expr_type));
+		break;
 	}
 
 	if (val_)  *val_  = val;
@@ -6863,7 +6871,6 @@ void ir_build_stmt_internal(irProcedure *proc, AstNode *node) {
 			ir_emit_if(proc, cond, body, done);
 			ir_start_block(proc, body);
 
-
 			irValue *val_ptr = ir_emit_ptr_offset(proc, values_data, offset);
 			ir_emit_increment(proc, offset_);
 
@@ -6885,14 +6892,34 @@ void ir_build_stmt_internal(irProcedure *proc, AstNode *node) {
 			switch (et->kind) {
 			case Type_Map: {
 				is_map = true;
+				gbAllocator a = proc->module->allocator;
 				irAddr addr = ir_build_addr(proc, rs->expr);
 				irValue *map = ir_addr_get_ptr(proc, addr);
 				if (is_type_pointer(type_deref(ir_addr_type(addr)))) {
 					map = ir_addr_load(proc, addr);
 				}
-				irValue *entries_ptr = ir_emit_struct_ep(proc, map, 1);
-				irValue *count_ptr = ir_emit_struct_ep(proc, entries_ptr, 1);
-				ir_build_range_indexed(proc, map, val1_type, count_ptr, &val, &key, &loop, &done);
+				irValue *count_ptr = ir_add_local_generated(proc, t_int);
+				irValue *count_ptr_ptr = ir_add_local_generated(proc, t_int_ptr);
+				ir_emit_store(proc, count_ptr_ptr, count_ptr);
+
+				irBlock *not_nil_block = ir_new_block(proc, nullptr, "map.not.nil.block");
+				irBlock *end_nil_block = ir_new_block(proc, nullptr, "map.end.nil.block");
+				{
+					Type *itp = make_type_pointer(a, et->Map.internal_type);
+					irValue *data_ptr = ir_emit_transmute(proc, map, itp);
+					irValue *internal_ptr = ir_emit_load(proc, data_ptr);
+
+					irValue *cond = ir_emit_comp(proc, Token_NotEq, internal_ptr, v_raw_nil);
+					ir_emit_if(proc, cond, not_nil_block, end_nil_block);
+					ir_start_block(proc, not_nil_block);
+
+					irValue *entries_ptr = ir_emit_struct_ep(proc, internal_ptr, 1);
+					irValue *cp = ir_emit_struct_ep(proc, entries_ptr, 1);
+					ir_emit_store(proc, count_ptr_ptr, cp);
+					ir_emit_jump(proc, end_nil_block);
+				}
+				ir_start_block(proc, end_nil_block);
+				ir_build_range_indexed(proc, map, val1_type, ir_emit_load(proc, count_ptr_ptr), &val, &key, &loop, &done);
 				break;
 			}
 			case Type_Array: {
@@ -6948,6 +6975,7 @@ void ir_build_stmt_internal(irProcedure *proc, AstNode *node) {
 			}
 		}
 
+
 		irAddr val0_addr = {};
 		irAddr val1_addr = {};
 		if (val0_type) val0_addr = ir_build_addr(proc, rs->val0);

+ 2 - 2
src/ir_print.cpp

@@ -427,8 +427,8 @@ void ir_print_type(irFileBuffer *f, irModule *m, Type *t) {
 
 	case Type_Map: {
 		generate_map_internal_types(m->allocator, t);
-		GB_ASSERT(t->Map.generated_struct_type != nullptr);
-		ir_print_type(f, m, t->Map.generated_struct_type);
+		GB_ASSERT(t->Map.internal_type != nullptr);
+		ir_print_type(f, m, t->Map.internal_type);
 		break;
 	}
 

+ 11 - 8
src/types.cpp

@@ -162,6 +162,7 @@ struct TypeStruct {
 		Type * value;                                     \
 		Type * entry_type;                                \
 		Type * generated_struct_type;                     \
+		Type * internal_type;                             \
 		Type * lookup_result_type;                        \
 	})                                                    \
 	TYPE_KIND(BitFieldValue, struct { u32 bits; })        \
@@ -1365,10 +1366,10 @@ i64 union_tag_size(gbAllocator a, Type *u) {
 		return u->Union.tag_size;
 	}
 
-	i64 tag_size = type_align_of(a, u);
-	if (tag_size < 1) {
-		tag_size = build_context.word_size;
-	}
+	u64 n = cast(u64)u->Union.variants.count;
+	i64 bytes = next_pow2(cast(i64)(floor_log2(n)/8 + 1));
+	i64 tag_size = gb_max(bytes, 1);
+
 	u->Union.tag_size = tag_size;
 	return tag_size;
 }
@@ -1383,7 +1384,7 @@ Type *union_tag_type(gbAllocator a, Type *u) {
 	case 16: return t_u128;
 	}
 	GB_PANIC("Invalid union_tag_size");
-	return t_int;
+	return t_uint;
 }
 
 
@@ -1870,7 +1871,8 @@ i64 type_align_of_internal(gbAllocator allocator, Type *t, TypePath *path) {
 
 	case Type_Map:
 		generate_map_internal_types(allocator, t);
-		return type_align_of_internal(allocator, t->Map.generated_struct_type, path);
+		// return type_align_of_internal(allocator, t->Map.generated_struct_type, path);
+		return build_context.word_size;
 
 	case Type_Enum:
 		return type_align_of_internal(allocator, t->Enum.base_type, path);
@@ -2063,7 +2065,8 @@ i64 type_size_of_internal(gbAllocator allocator, Type *t, TypePath *path) {
 
 	case Type_Map:
 		generate_map_internal_types(allocator, t);
-		return type_size_of_internal(allocator, t->Map.generated_struct_type, path);
+		// return type_size_of_internal(allocator, t->Map.generated_struct_type, path);
+		return build_context.word_size;
 
 	case Type_Tuple: {
 		i64 count, align, size;
@@ -2101,7 +2104,7 @@ i64 type_size_of_internal(gbAllocator allocator, Type *t, TypePath *path) {
 		}
 
 		// NOTE(bill): Align to tag
-		i64 tag_size = gb_max(align, 1);
+		i64 tag_size = union_tag_size(allocator, t);
 		i64 size = align_formula(max, tag_size);
 		// NOTE(bill): Calculate the padding between the common fields and the tag
 		t->Union.tag_size = tag_size;