Răsfoiți Sursa

Add `ArenaTemp` to the compiler

gingerBill 2 ani în urmă
părinte
comite
520ff731de
8 fișiere modificate, cu 107 adăugiri și 12 ștergeri
  1. src/big_int.cpp (+1 −0)
  2. src/build_settings.cpp (+1 −0)
  3. src/check_builtin.cpp (+2 −0)
  4. src/check_decl.cpp (+1 −0)
  5. src/check_expr.cpp (+27 −10)
  6. src/check_stmt.cpp (+7 −0)
  7. src/common.cpp (+4 −0)
  8. src/common_memory.cpp (+64 −2)

+ 1 - 0
src/big_int.cpp

@@ -477,6 +477,7 @@ gb_internal void big_int_or(BigInt *dst, BigInt const *x, BigInt const *y) {
 }
 
 gb_internal void debug_print_big_int(BigInt const *x) {
+	TEMPORARY_ALLOCATOR_GUARD();
 	String s = big_int_to_string(temporary_allocator(), x, 10);
 	gb_printf_err("[DEBUG] %.*s\n", LIT(s));
 }

+ 1 - 0
src/build_settings.cpp

@@ -1045,6 +1045,7 @@ gb_internal bool has_asm_extension(String const &path) {
 
 // temporary
 gb_internal char *token_pos_to_string(TokenPos const &pos) {
+	TEMPORARY_ALLOCATOR_GUARD();
 	gbString s = gb_string_make_reserve(temporary_allocator(), 128);
 	String file = get_file_path_string(pos.file_id);
 	switch (build_context.ODIN_ERROR_POS_STYLE) {

+ 2 - 0
src/check_builtin.cpp

@@ -1118,6 +1118,7 @@ gb_internal bool cache_load_file_directive(CheckerContext *c, Ast *call, String
 		}
 	});
 
+	TEMPORARY_ALLOCATOR_GUARD();
 	char *c_str = alloc_cstring(temporary_allocator(), path);
 
 	gbFile f = {};
@@ -3062,6 +3063,7 @@ gb_internal bool check_builtin_procedure(CheckerContext *c, Operand *operand, As
 	}
 
 	case BuiltinProc_soa_zip: {
+		TEMPORARY_ALLOCATOR_GUARD();
 		auto types = array_make<Type *>(temporary_allocator(), 0, ce->args.count);
 		auto names = array_make<String>(temporary_allocator(), 0, ce->args.count);
 

+ 1 - 0
src/check_decl.cpp

@@ -117,6 +117,7 @@ gb_internal void check_init_variables(CheckerContext *ctx, Entity **lhs, isize l
 
 	// NOTE(bill): If there is a bad syntax error, rhs > lhs which would mean there would need to be
 	// an extra allocation
+	TEMPORARY_ALLOCATOR_GUARD();
 	auto operands = array_make<Operand>(temporary_allocator(), 0, 2*lhs_count);
 	check_unpack_arguments(ctx, lhs, lhs_count, &operands, inits, true, false);
 

+ 27 - 10
src/check_expr.cpp

@@ -86,6 +86,7 @@ gb_internal Entity * find_polymorphic_record_entity (CheckerContext *c, Type *or
 gb_internal void     check_not_tuple                (CheckerContext *c, Operand *operand);
 gb_internal void     convert_to_typed               (CheckerContext *c, Operand *operand, Type *target_type);
 gb_internal gbString expr_to_string                 (Ast *expression);
+gb_internal gbString expr_to_string                 (Ast *expression, gbAllocator allocator);
 gb_internal void     update_untyped_expr_type       (CheckerContext *c, Ast *e, Type *type, bool final);
 gb_internal bool     check_is_terminating           (Ast *node, String const &label);
 gb_internal bool     check_has_break                (Ast *stmt, String const &label, bool implicit);
@@ -2404,8 +2405,8 @@ gb_internal void check_comparison(CheckerContext *c, Operand *x, Operand *y, Tok
 			if (x->type == err_type && is_operand_nil(*x)) {
 				err_type = y->type;
 			}
-			gbString type_string = type_to_string(err_type);
-			defer (gb_string_free(type_string));
+			TEMPORARY_ALLOCATOR_GUARD();
+			gbString type_string = type_to_string(err_type, temporary_allocator());
 			err_str = gb_string_make(temporary_allocator(),
 				gb_bprintf("operator '%.*s' not defined for type '%s'", LIT(token_strings[op]), type_string));
 		} else {
@@ -2417,20 +2418,19 @@ gb_internal void check_comparison(CheckerContext *c, Operand *x, Operand *y, Tok
 			add_comparison_procedures_for_fields(c, comparison_type);
 		}
 	} else {
+		TEMPORARY_ALLOCATOR_GUARD();
 		gbString xt, yt;
 		if (x->mode == Addressing_ProcGroup) {
-			xt = gb_string_make(heap_allocator(), "procedure group");
+			xt = gb_string_make(temporary_allocator(), "procedure group");
 		} else {
 			xt = type_to_string(x->type);
 		}
 		if (y->mode == Addressing_ProcGroup) {
-			yt = gb_string_make(heap_allocator(), "procedure group");
+			yt = gb_string_make(temporary_allocator(), "procedure group");
 		} else {
 			yt = type_to_string(y->type);
 		}
 		err_str = gb_string_make(temporary_allocator(), gb_bprintf("mismatched types '%s' and '%s'", xt, yt));
-		gb_string_free(yt);
-		gb_string_free(xt);
 	}
 
 	if (err_str != nullptr) {
@@ -3893,6 +3893,8 @@ gb_internal void convert_to_typed(CheckerContext *c, Operand *operand, Type *tar
 
 	case Type_Union:
 		if (!is_operand_nil(*operand) && !is_operand_undef(*operand)) {
+			TEMPORARY_ALLOCATOR_GUARD();
+
 			isize count = t->Union.variants.count;
 			ValidIndexAndScore *valids = gb_alloc_array(temporary_allocator(), ValidIndexAndScore, count);
 			isize valid_count = 0;
@@ -4057,10 +4059,10 @@ gb_internal bool check_index_value(CheckerContext *c, Type *main_type, bool open
 	    (c->state_flags & StateFlag_no_bounds_check) == 0) {
 		BigInt i = exact_value_to_integer(operand.value).value_integer;
 		if (i.sign && !is_type_enum(index_type) && !is_type_multi_pointer(main_type)) {
+			TEMPORARY_ALLOCATOR_GUARD();
 			String idx_str = big_int_to_string(temporary_allocator(), &i);
-			gbString expr_str = expr_to_string(operand.expr);
+			gbString expr_str = expr_to_string(operand.expr, temporary_allocator());
 			error(operand.expr, "Index '%s' cannot be a negative value, got %.*s", expr_str, LIT(idx_str));
-			gb_string_free(expr_str);
 			if (value) *value = 0;
 			return false;
 		}
@@ -4120,10 +4122,10 @@ gb_internal bool check_index_value(CheckerContext *c, Type *main_type, bool open
 				}
 
 				if (out_of_bounds) {
+					TEMPORARY_ALLOCATOR_GUARD();
 					String idx_str = big_int_to_string(temporary_allocator(), &i);
-					gbString expr_str = expr_to_string(operand.expr);
+					gbString expr_str = expr_to_string(operand.expr, temporary_allocator());
 					error(operand.expr, "Index '%s' is out of bounds range 0..<%lld, got %.*s", expr_str, max_count, LIT(idx_str));
-					gb_string_free(expr_str);
 					return false;
 				}
 
@@ -5446,6 +5448,8 @@ gb_internal CALL_ARGUMENT_CHECKER(check_named_call_arguments) {
 	bool show_error = show_error_mode == CallArgumentMode_ShowErrors;
 	CallArgumentError err = CallArgumentError_None;
 
+	TEMPORARY_ALLOCATOR_GUARD();
+
 	isize param_count = pt->param_count;
 	bool *visited = gb_alloc_array(temporary_allocator(), bool, param_count);
 	auto ordered_operands = array_make<Operand>(temporary_allocator(), param_count);
@@ -6358,6 +6362,8 @@ gb_internal CallArgumentError check_polymorphic_record_type(CheckerContext *c, O
 		ordered_operands = array_make<Operand>(permanent_allocator(), param_count);
 		array_copy(&ordered_operands, operands, 0);
 	} else {
+		TEMPORARY_ALLOCATOR_GUARD();
+
 		bool *visited = gb_alloc_array(temporary_allocator(), bool, param_count);
 
 		// LEAK(bill)
@@ -7146,6 +7152,8 @@ gb_internal bool attempt_implicit_selector_expr(CheckerContext *c, Operand *o, A
 		return true;
 	}
 	if (is_type_union(th)) {
+		TEMPORARY_ALLOCATOR_GUARD();
+
 		Type *union_type = base_type(th);
 		auto operands = array_make<Operand>(temporary_allocator(), 0, union_type->Union.variants.count);
 
@@ -7329,6 +7337,8 @@ gb_internal void add_constant_switch_case(CheckerContext *ctx, SeenMap *seen, Op
 	uintptr key = hash_exact_value(operand.value);
 	TypeAndToken *found = map_get(seen, key);
 	if (found != nullptr) {
+		TEMPORARY_ALLOCATOR_GUARD();
+
 		isize count = multi_map_count(seen, key);
 		TypeAndToken *taps = gb_alloc_array(temporary_allocator(), TypeAndToken, count);
 
@@ -7895,6 +7905,8 @@ gb_internal ExprKind check_compound_literal(CheckerContext *c, Operand *o, Ast *
 		}
 
 		if (cl->elems[0]->kind == Ast_FieldValue) {
+			TEMPORARY_ALLOCATOR_GUARD();
+
 			bool *fields_visited = gb_alloc_array(temporary_allocator(), bool, field_count);
 
 			for (Ast *elem : cl->elems) {
@@ -8423,6 +8435,8 @@ gb_internal ExprKind check_compound_literal(CheckerContext *c, Operand *o, Ast *
 
 		// NOTE(bill): Check for missing cases when `#partial literal` is not present
 		if (cl->elems.count > 0 && !was_error && !is_partial) {
+			TEMPORARY_ALLOCATOR_GUARD();
+
 			Type *et = base_type(index_type);
 			GB_ASSERT(et->kind == Type_Enum);
 			auto fields = et->Enum.fields;
@@ -10469,6 +10483,9 @@ gb_internal gbString write_expr_to_string(gbString str, Ast *node, bool shorthan
 gb_internal gbString expr_to_string(Ast *expression) {
 	return write_expr_to_string(gb_string_make(heap_allocator(), ""), expression, false);
 }
+gb_internal gbString expr_to_string(Ast *expression, gbAllocator allocator) {
+	return write_expr_to_string(gb_string_make(allocator, ""), expression, false);
+}
 gb_internal gbString expr_to_string_shorthand(Ast *expression) {
 	return write_expr_to_string(gb_string_make(heap_allocator(), ""), expression, true);
 }

+ 7 - 0
src/check_stmt.cpp

@@ -1048,6 +1048,8 @@ gb_internal void check_switch_stmt(CheckerContext *ctx, Ast *node, u32 mod_flags
 	}
 
 	if (!is_partial && is_type_enum(x.type)) {
+		TEMPORARY_ALLOCATOR_GUARD();
+
 		Type *et = base_type(x.type);
 		GB_ASSERT(is_type_enum(et));
 		auto fields = et->Enum.fields;
@@ -1280,6 +1282,8 @@ gb_internal void check_type_switch_stmt(CheckerContext *ctx, Ast *node, u32 mod_
 	}
 
 	if (!is_partial && is_type_union(type_deref(x.type))) {
+		TEMPORARY_ALLOCATOR_GUARD();
+
 		Type *ut = base_type(type_deref(x.type));
 		GB_ASSERT(is_type_union(ut));
 		auto variants = ut->Union.variants;
@@ -1523,6 +1527,7 @@ gb_internal void check_stmt_internal(CheckerContext *ctx, Ast *node, u32 flags)
 				return;
 			}
 
+			TEMPORARY_ALLOCATOR_GUARD();
 
 			// NOTE(bill): If there is a bad syntax error, rhs > lhs which would mean there would need to be
 			// an extra allocation
@@ -1743,6 +1748,8 @@ gb_internal void check_stmt_internal(CheckerContext *ctx, Ast *node, u32 flags)
 
 
 	case_ast_node(rs, RangeStmt, node);
+		TEMPORARY_ALLOCATOR_GUARD();
+
 		u32 new_flags = mod_flags | Stmt_BreakAllowed | Stmt_ContinueAllowed;
 
 		check_open_scope(ctx, node);

+ 4 - 0
src/common.cpp

@@ -716,6 +716,8 @@ gb_internal LoadedFileError load_file_32(char const *fullpath, LoadedFile *memor
 	
 	if (!copy_file_contents) {
 	#if defined(GB_SYSTEM_WINDOWS)
+		TEMPORARY_ALLOCATOR_GUARD();
+
 		isize w_len = 0;
 		wchar_t *w_str = gb__alloc_utf8_to_ucs2(temporary_allocator(), fullpath, &w_len);
 		if (w_str == nullptr) {
@@ -817,6 +819,8 @@ gb_internal LoadedFileError load_file_32(char const *fullpath, LoadedFile *memor
 #define USE_DAMERAU_LEVENSHTEIN 1
 
 gb_internal isize levenstein_distance_case_insensitive(String const &a, String const &b) {
+	TEMPORARY_ALLOCATOR_GUARD();
+
 	isize w = b.len+1;
 	isize h = a.len+1;
 	isize *matrix = gb_alloc_array(temporary_allocator(), isize, w*h);

+ 64 - 2
src/common_memory.cpp

@@ -48,6 +48,7 @@ struct Arena {
 	MemoryBlock * curr_block;
 	isize         minimum_block_size;
 	BlockingMutex mutex;
+	isize         temp_count;
 };
 
 enum { DEFAULT_MINIMUM_BLOCK_SIZE = 8ll*1024ll*1024ll };
@@ -245,7 +246,66 @@ gb_internal void virtual_memory_dealloc(MemoryBlock *block_to_free) {
 	}
 }
 
+struct ArenaTemp {
+	Arena *      arena;
+	MemoryBlock *block;
+	isize        used;
+};
+
+ArenaTemp arena_temp_begin(Arena *arena) {
+	GB_ASSERT(arena);
+	ArenaTemp temp = {};
+	temp.arena = arena;
+	temp.block = arena->curr_block;
+	if (arena->curr_block != nullptr) {
+		temp.used = arena->curr_block->used;
+	}
+	arena->temp_count += 1;
+	return temp;
+}
 
+void arena_temp_end(ArenaTemp const &temp) {
+	GB_ASSERT(temp.arena);
+	Arena *arena = temp.arena;
+	bool memory_block_found = false;
+	for (MemoryBlock *block = arena->curr_block; block != nullptr; block = block->prev) {
+		if (block == temp.block) {
+			memory_block_found = true;
+			break;
+		}
+	}
+	GB_ASSERT_MSG(memory_block_found, "memory block stored within ArenaTemp not owned by Arena");
+
+	while (arena->curr_block != temp.block) {
+		MemoryBlock *free_block = arena->curr_block;
+		if (free_block != nullptr) {
+			arena->curr_block = free_block->prev;
+			virtual_memory_dealloc(free_block);
+		}
+	}
+
+	MemoryBlock *block = arena->curr_block;
+	if (block) {
+		GB_ASSERT_MSG(block->used >= temp.used, "out of order use of arena_temp_end");
+		isize amount_to_zero = gb_min(block->used - temp.used, block->size - block->used);
+		gb_zero_size(block->base + temp.used, amount_to_zero);
+		block->used = temp.used;
+	}
+
+	GB_ASSERT_MSG(arena->temp_count > 0, "double-use of arena_temp_end");
+	arena->temp_count -= 1;
+}
+
+
+struct ArenaTempGuard {
+	ArenaTempGuard(Arena *arena) {
+		this->temp = arena_temp_begin(arena);
+	}
+	~ArenaTempGuard() {
+		arena_temp_end(this->temp);
+	}
+	ArenaTemp temp;
+};
 
 
 gb_internal GB_ALLOCATOR_PROC(arena_allocator_proc);
@@ -294,11 +354,13 @@ gb_internal gbAllocator permanent_allocator() {
 	return arena_allocator(&permanent_arena);
 }
 
+gb_global gb_thread_local Arena temporary_arena = {nullptr, DEFAULT_MINIMUM_BLOCK_SIZE};
 gb_internal gbAllocator temporary_allocator() {
-	return permanent_allocator();
+	return arena_allocator(&temporary_arena);
 }
 
-
+#define TEMPORARY_ALLOCATOR_GUARD() ArenaTempGuard GB_DEFER_3(_arena_guard_){&temporary_arena}
+#define PERMANENT_ALLOCATOR_GUARD() ArenaTempGuard GB_DEFER_3(_arena_guard_){&permanent_arena}