Selaa lähdekoodia

Add more asan support to the odin runtime and begin sanitizing
allocators

This adds various bindings to the asan runtime which can be used
to poison/unpoison memory handed out by various allocators. This
means we can catch use after free memory bugs when using operations
such as free_all during runtime.

Asan poisoning is added for the following allocators in mem:
Arena (including temporary arenas)
Scratch
Stack
Small_Stack

Additionally, a bug in the stack allocator was fixed to disallow freeing
in the middle of the stack (caught by asan!).

I plan on adding support for all the allocators in core. This is just
a good starting point, and these were some of the easiest ones to
implement asan for.

Lucas Perlind 4 kuukautta sitten
vanhempi
commit
ab0b26e876
2 muutettua tiedostoa jossa 353 lisäystä ja 10 poistoa
  1. 311 0
      base/runtime/asan.odin
  2. 42 10
      core/mem/allocators.odin

+ 311 - 0
base/runtime/asan.odin

@@ -0,0 +1,311 @@
+#+no-instrumentation
+package runtime
+
+// Signature of the callback the sanitizer runtime invokes just before the
+// process terminates on a detected error; registered via asan_set_death_callback.
+Asan_Death_Callback :: #type proc "c" (pc: rawptr, bp: rawptr, sp: rawptr, addr: rawptr, is_write: i32, access_size: uint)
+
+// True when this build was compiled with the address sanitizer enabled.
+// Every wrapper below compiles to a no-op (or a neutral value) when false.
+@(private="file")
+ASAN_ENABLED :: .Address in ODIN_SANITIZER_FLAGS
+
+// Raw bindings into the asan runtime interface (compiler-rt's
+// asan_interface.h / common_interface_defs.h). Only referenced from the
+// wrappers in this file when ASAN_ENABLED is true.
+@(private="file")
+@(default_calling_convention="system")
+foreign {
+	__asan_poison_memory_region      :: proc(address: rawptr, size: uint) ---
+	__asan_unpoison_memory_region    :: proc(address: rawptr, size: uint) ---
+	__sanitizer_set_death_callback   :: proc(callback: Asan_Death_Callback) ---
+	__asan_region_is_poisoned        :: proc(begin: rawptr, size: uint) -> rawptr ---
+	__asan_address_is_poisoned       :: proc(addr: rawptr) -> i32 ---
+	__asan_describe_address          :: proc(addr: rawptr) ---
+	__asan_report_present            :: proc() -> i32 ---
+	__asan_get_report_pc             :: proc() -> rawptr ---
+	__asan_get_report_bp             :: proc() -> rawptr ---
+	__asan_get_report_sp             :: proc() -> rawptr ---
+	__asan_get_report_address        :: proc() -> rawptr ---
+	__asan_get_report_access_type    :: proc() -> i32 ---
+	__asan_get_report_access_size    :: proc() -> uint ---
+	__asan_get_report_description    :: proc() -> cstring ---
+	__asan_locate_address            :: proc(addr: rawptr, name: rawptr, name_size: uint, region_address: ^rawptr, region_size: ^uint) -> cstring ---
+	__asan_get_alloc_stack           :: proc(addr: rawptr, trace: rawptr, size: uint, thread_id: ^i32) -> uint ---
+	__asan_get_free_stack            :: proc(addr: rawptr, trace: rawptr, size: uint, thread_id: ^i32) -> uint ---
+	__asan_get_shadow_mapping        :: proc(shadow_scale: ^uint, shadow_offset: ^uint) ---
+	__asan_print_accumulated_stats   :: proc() ---
+	__asan_get_current_fake_stack    :: proc() -> rawptr ---
+	__asan_addr_is_in_fake_stack     :: proc(fake_stack: rawptr, addr: rawptr, beg: ^rawptr, end: ^rawptr) -> rawptr ---
+	__asan_handle_no_return          :: proc() ---
+	__asan_update_allocation_context :: proc(addr: rawptr) -> i32 ---
+}
+
+// Kind of memory access recorded in the current asan report.
+// .none is only produced when asan is not enabled (see asan_get_report_access_type).
+Asan_Access_Type :: enum {
+	none,
+	read,
+	write,
+}
+
+// Result strings from asan_locate_address. `category` comes from the asan
+// runtime itself; `name` references the buffer allocated by asan_locate_address
+// from the caller-supplied allocator.
+Asan_Located_Address_String :: struct {
+	category: string,
+	name: string,
+}
+
+// Shadow-memory mapping parameters reported by the asan runtime
+// (shadow address = (addr >> scale) + offset).
+Asan_Shadow_Mapping :: struct {
+	scale, offset: uint
+}
+
+// Poisons the memory backing the slice `region`: any subsequent access to it
+// is reported as an error by asan. No-op when asan is not enabled.
+asan_poison_slice :: proc(region: $T/[]$E) {
+	when ASAN_ENABLED {
+		__asan_poison_memory_region(raw_data(region), size_of(E) * len(region))
+	}
+}
+
+// Reverses asan_poison_slice: marks the slice's backing memory addressable
+// again. No-op when asan is not enabled.
+asan_unpoison_slice :: proc(region: $T/[]$E) {
+	when ASAN_ENABLED {
+		__asan_unpoison_memory_region(raw_data(region), size_of(E) * len(region))
+	}
+}
+
+// Poisons the size_of(T) bytes of the object pointed to by `ptr`.
+// No-op when asan is not enabled.
+asan_poison_ptr :: proc(ptr: ^$T) {
+	when ASAN_ENABLED {
+		__asan_poison_memory_region(ptr, size_of(T))
+	}
+}
+
+// Unpoisons the size_of(T) bytes of the object pointed to by `ptr`.
+// No-op when asan is not enabled.
+asan_unpoison_ptr :: proc(ptr: ^$T) {
+	when ASAN_ENABLED {
+		__asan_unpoison_memory_region(ptr, size_of(T))
+	}
+}
+
+// Poisons `len` bytes starting at `ptr`. Asserts len is non-negative before
+// the narrowing int -> uint conversion. No-op when asan is not enabled.
+asan_poison_rawptr :: proc(ptr: rawptr, len: int) {
+	when ASAN_ENABLED {
+		assert(len >= 0)
+		__asan_poison_memory_region(ptr, uint(len))
+	}
+}
+
+// Unpoisons `len` bytes starting at `ptr`. Asserts len is non-negative before
+// the narrowing int -> uint conversion. No-op when asan is not enabled.
+asan_unpoison_rawptr :: proc(ptr: rawptr, len: int) {
+	when ASAN_ENABLED {
+		assert(len >= 0)
+		__asan_unpoison_memory_region(ptr, uint(len))
+	}
+}
+
+// Overload group: poison memory given a slice, a typed pointer, or a
+// rawptr plus byte length.
+asan_poison :: proc {
+	asan_poison_slice,
+	asan_poison_ptr,
+	asan_poison_rawptr,
+}
+
+// Overload group: unpoison memory given a slice, a typed pointer, or a
+// rawptr plus byte length.
+asan_unpoison :: proc {
+	asan_unpoison_slice,
+	asan_unpoison_ptr,
+	asan_unpoison_rawptr,
+}
+
+// Registers a callback the sanitizer runtime invokes right before it
+// terminates the process on a detected error. No-op when asan is not enabled.
+asan_set_death_callback :: proc(callback: Asan_Death_Callback) {
+	when ASAN_ENABLED {
+		__sanitizer_set_death_callback(callback)
+	}
+}
+
+// Returns the address of the first poisoned byte in the memory backing
+// `region`, or nil if no byte in the region is poisoned (or asan is disabled).
+asan_region_is_poisoned_slice :: proc(region: $T/[]$E) -> rawptr {
+	when ASAN_ENABLED {
+		return __asan_region_is_poisoned(raw_data(region), size_of(E) * len(region))
+	} else {
+		return nil
+	}
+}
+
+// Returns the address of the first poisoned byte within the size_of(T) bytes
+// of the object pointed to by `ptr`, or nil if none (or asan is disabled).
+asan_region_is_poisoned_ptr :: proc(ptr: ^$T) -> rawptr {
+	when ASAN_ENABLED {
+		return __asan_region_is_poisoned(ptr, size_of(T))
+	} else {
+		return nil
+	}
+}
+
+// Returns the address of the first poisoned byte in the `len` bytes starting
+// at `region`, or nil if none (or asan is disabled). Asserts len >= 0.
+asan_region_is_poisoned_rawptr :: proc(region: rawptr, len: int) -> rawptr {
+	when ASAN_ENABLED {
+		assert(len >= 0)
+		return __asan_region_is_poisoned(region, uint(len))
+	} else {
+		return nil
+	}
+}
+
+// Overload group over the slice, typed-pointer, and rawptr+length variants.
+asan_region_is_poisoned :: proc {
+	asan_region_is_poisoned_slice,
+	asan_region_is_poisoned_ptr,
+	asan_region_is_poisoned_rawptr,
+}
+
+// Reports whether the single byte at `address` is currently poisoned.
+// Always false when asan is not enabled.
+asan_address_is_poisoned :: proc(address: rawptr) -> bool {
+	when ASAN_ENABLED {
+		return __asan_address_is_poisoned(address) != 0
+	} else {
+		return false
+	}
+}
+
+// Prints the asan runtime's description of `address` (shadow state,
+// allocation/free stacks where known) to asan's report stream.
+// No-op when asan is not enabled.
+asan_describe_address :: proc(address: rawptr) {
+	when ASAN_ENABLED {
+		__asan_describe_address(address)
+	}
+}
+
+// True if the asan runtime has generated an error report in this process.
+// Always false when asan is not enabled.
+asan_report_present :: proc() -> bool {
+	when ASAN_ENABLED {
+		return __asan_report_present() != 0
+	} else {
+		return false
+	}
+}
+
+// Program counter recorded in the current asan report; nil when asan is not
+// enabled (and presumably when no report exists — see asan_report_present).
+asan_get_report_pc :: proc() -> rawptr {
+	when ASAN_ENABLED {
+		return __asan_get_report_pc()
+	} else {
+		return nil
+	}
+}
+
+// Base pointer recorded in the current asan report; nil when asan is not enabled.
+asan_get_report_bp :: proc() -> rawptr {
+	when ASAN_ENABLED {
+		return __asan_get_report_bp()
+	} else {
+		return nil
+	}
+}
+
+// Stack pointer recorded in the current asan report; nil when asan is not enabled.
+asan_get_report_sp :: proc() -> rawptr {
+	when ASAN_ENABLED {
+		return __asan_get_report_sp()
+	} else {
+		return nil
+	}
+}
+
+// Faulting address recorded in the current asan report; nil when asan is not enabled.
+asan_get_report_address :: proc() -> rawptr {
+	when ASAN_ENABLED {
+		return __asan_get_report_address()
+	} else {
+		return nil
+	}
+}
+
+// Access type of the current asan report. The runtime's value 0 maps to
+// .read and any non-zero value to .write; .none is returned only when asan
+// is not enabled.
+asan_get_report_access_type :: proc() -> Asan_Access_Type {
+	when ASAN_ENABLED {
+		return __asan_get_report_access_type() == 0 ? .read : .write
+	} else {
+		return .none
+	}
+}
+
+// Size in bytes of the access in the current asan report; 0 when asan is
+// not enabled.
+asan_get_report_access_size :: proc() -> uint {
+	when ASAN_ENABLED {
+		return __asan_get_report_access_size()
+	} else {
+		return 0
+	}
+}
+
+// Human-readable description (error kind) of the current asan report.
+// The string is owned by the asan runtime; returns "unknown" when asan is
+// not enabled.
+asan_get_report_description :: proc() -> string {
+	when ASAN_ENABLED {
+		return string(__asan_get_report_description())
+	} else {
+		return "unknown"
+	}
+}
+
+// Asks the asan runtime what `addr` points into. A `string_alloc_size`-byte
+// buffer is allocated from `allocator` to receive the symbol name; the
+// returned `name` string references that buffer. Returns the category/name
+// strings, the containing memory region as a byte slice, and any allocation
+// error (in which case the other results are empty).
+// NOTE(review): the allocated buffer is not returned directly — the caller
+// can only reclaim it via `raw_data(name)` or by using an arena-style
+// allocator; confirm that this is the intended ownership model.
+asan_locate_address :: proc(addr: rawptr, allocator: Allocator, string_alloc_size := 64) -> (Asan_Located_Address_String, []byte, Allocator_Error) {
+	when ASAN_ENABLED {
+		data, err := make([]byte, string_alloc_size, allocator)
+		if err != nil {
+			return { "", "" }, {}, err
+		}
+		out_addr: rawptr
+		out_size: uint
+		str := __asan_locate_address(addr, raw_data(data), len(data), &out_addr, &out_size)
+		return { string(str), string(cstring(raw_data(data))) }, (cast([^]byte)out_addr)[:out_size], nil
+	} else {
+		return { "", "" }, {}, nil
+	}
+}
+
+// Fetches the allocation stack trace asan recorded for the heap address
+// `addr`. A `stack_alloc_size`-entry buffer is allocated from `allocator`.
+// Returns the trace buffer, the id of the allocating thread, and any
+// allocation error.
+// NOTE(review): the frame count returned by __asan_get_alloc_stack is
+// discarded, so the slice is always full length regardless of how many
+// leading entries are valid — confirm whether truncation was intended.
+asan_get_alloc_stack_trace :: proc(addr: rawptr, allocator: Allocator, stack_alloc_size := 32) -> ([]rawptr, int, Allocator_Error) {
+	when ASAN_ENABLED {
+		data, err := make([]rawptr, stack_alloc_size, allocator)
+		if err != nil {
+			return {}, 0, err
+		}
+		out_thread: i32
+		__asan_get_alloc_stack(addr, raw_data(data), len(data), &out_thread)
+		return data, int(out_thread), nil
+	} else {
+		return {}, 0, nil
+	}
+}
+
+// Same as asan_get_alloc_stack_trace but for the stack recorded when `addr`
+// was freed. The same review note about the discarded frame count applies.
+asan_get_free_stack_trace :: proc(addr: rawptr, allocator: Allocator, stack_alloc_size := 32) -> ([]rawptr, int, Allocator_Error) {
+	when ASAN_ENABLED {
+		data, err := make([]rawptr, stack_alloc_size, allocator)
+		if err != nil {
+			return {}, 0, err
+		}
+		out_thread: i32
+		__asan_get_free_stack(addr, raw_data(data), len(data), &out_thread)
+		return data, int(out_thread), nil
+	} else {
+		return {}, 0, nil
+	}
+}
+
+// Returns the asan shadow-memory scale and offset; a zeroed struct when asan
+// is not enabled.
+asan_get_shadow_mapping :: proc() -> Asan_Shadow_Mapping {
+	when ASAN_ENABLED {
+		result: Asan_Shadow_Mapping
+		__asan_get_shadow_mapping(&result.scale, &result.offset)
+		return result
+	} else {
+		return {}
+	}
+}
+
+// Asks the asan runtime to print its accumulated statistics to its report
+// stream. No-op when asan is not enabled.
+asan_print_accumulated_stats :: proc() {
+	when ASAN_ENABLED {
+		__asan_print_accumulated_stats()
+	}
+}
+
+// Returns an opaque handle to the current thread's fake stack (used by asan
+// for stack-use-after-return detection), or nil when asan is not enabled.
+asan_get_current_fake_stack :: proc() -> rawptr {
+	when ASAN_ENABLED {
+		return __asan_get_current_fake_stack()
+	} else {
+		return nil
+	}
+}
+
+// If `addr` lies inside a frame of the given fake stack, returns that frame's
+// memory as a byte slice and true; otherwise an empty slice and false.
+// Always ({}, false) when asan is not enabled.
+asan_is_in_fake_stack :: proc(fake_stack: rawptr, addr: rawptr) -> ([]byte, bool) {
+	when ASAN_ENABLED {
+		begin: rawptr
+		end: rawptr
+		// Shadows the parameter: non-nil result means `addr` was in a frame
+		// bounded by [begin, end).
+		addr := __asan_addr_is_in_fake_stack(fake_stack, addr, &begin, &end)
+		if addr == nil {
+			return {}, false
+		}
+		return ((cast([^]byte)begin)[:uintptr(end)-uintptr(begin)]), true
+	} else {
+		return {}, false
+	}
+}
+
+// Notifies asan that the current function will not return normally (e.g.
+// before a longjmp-style control transfer) so it can unpoison the abandoned
+// stack region — see the compiler-rt docs for exact semantics.
+// No-op when asan is not enabled.
+asan_handle_no_return :: proc() {
+	when ASAN_ENABLED {
+		__asan_handle_no_return()
+	}
+}
+
+// Presumably refreshes the allocation context (stack/thread) asan stores for
+// `addr` to the current one — confirm against the compiler-rt docs. Returns
+// the runtime's success flag; always false when asan is not enabled.
+asan_update_allocation_context :: proc(addr: rawptr) -> bool {
+	when ASAN_ENABLED {
+		return __asan_update_allocation_context(addr) != 0
+	} else {
+		return false
+	}
+}
+

+ 42 - 10
core/mem/allocators.odin

@@ -138,6 +138,7 @@ arena_init :: proc(a: ^Arena, data: []byte) {
 	a.offset     = 0
 	a.peak_used  = 0
 	a.temp_count = 0
+	runtime.asan_poison(a.data)
 }
 
 /*
@@ -224,7 +225,9 @@ arena_alloc_bytes_non_zeroed :: proc(
 	}
 	a.offset += total_size
 	a.peak_used = max(a.peak_used, a.offset)
-	return byte_slice(ptr, size), nil
+	result := byte_slice(ptr, size)
+	runtime.asan_unpoison(result)
+	return result, nil
 }
 
 /*
@@ -232,6 +235,7 @@ Free all memory to an arena.
 */
 arena_free_all :: proc(a: ^Arena) {
 	a.offset = 0
+	runtime.asan_poison(a.data)
 }
 
 arena_allocator_proc :: proc(
@@ -309,6 +313,7 @@ allocations *inside* the temporary memory region will be freed to the arena.
 end_arena_temp_memory :: proc(tmp: Arena_Temp_Memory) {
 	assert(tmp.arena.offset >= tmp.prev_offset)
 	assert(tmp.arena.temp_count > 0)
+	runtime.asan_poison(tmp.arena.data[tmp.prev_offset:tmp.arena.offset])
 	tmp.arena.offset = tmp.prev_offset
 	tmp.arena.temp_count -= 1
 }
@@ -363,6 +368,7 @@ scratch_init :: proc(s: ^Scratch, size: int, backup_allocator := context.allocat
 	s.prev_allocation = nil
 	s.backup_allocator = backup_allocator
 	s.leaked_allocations.allocator = backup_allocator
+	runtime.asan_poison(s.data)
 	return nil
 }
 
@@ -377,6 +383,7 @@ scratch_destroy :: proc(s: ^Scratch) {
 		free_bytes(ptr, s.backup_allocator)
 	}
 	delete(s.leaked_allocations)
+	runtime.asan_unpoison(s.data)
 	delete(s.data, s.backup_allocator)
 	s^ = {}
 }
@@ -472,7 +479,9 @@ scratch_alloc_bytes_non_zeroed :: proc(
 		ptr   := align_forward_uintptr(offset+start, uintptr(alignment))
 		s.prev_allocation = rawptr(ptr)
 		s.curr_offset = int(offset) + size
-		return byte_slice(rawptr(ptr), size), nil
+		result := byte_slice(rawptr(ptr), size)
+		runtime.asan_unpoison(result)
+		return result, nil
 	} else {
 		a := s.backup_allocator
 		if a.procedure == nil {
@@ -516,6 +525,7 @@ scratch_free :: proc(s: ^Scratch, ptr: rawptr, loc := #caller_location) -> Alloc
 	old_ptr := uintptr(ptr)
 	if s.prev_allocation == ptr {
 		s.curr_offset = int(uintptr(s.prev_allocation) - start)
+		runtime.asan_poison(s.data[s.curr_offset:])
 		s.prev_allocation = nil
 		return nil
 	}
@@ -546,6 +556,7 @@ scratch_free_all :: proc(s: ^Scratch, loc := #caller_location) {
 		free_bytes(ptr, s.backup_allocator, loc)
 	}
 	clear(&s.leaked_allocations)
+	runtime.asan_poison(s.data)
 }
 
 /*
@@ -675,7 +686,9 @@ scratch_resize_bytes_non_zeroed :: proc(
 	old_ptr := uintptr(old_memory)
 	if begin <= old_ptr && old_ptr < end && old_ptr+uintptr(size) < end {
 		s.curr_offset = int(old_ptr-begin)+size
-		return byte_slice(old_memory, size), nil
+		result := byte_slice(old_memory, size)
+		runtime.asan_unpoison(result)
+		return result, nil
 	}
 	data, err := scratch_alloc_bytes_non_zeroed(s, size, alignment, loc)
 	if err != nil {
@@ -776,6 +789,7 @@ stack_init :: proc(s: ^Stack, data: []byte) {
 	s.prev_offset = 0
 	s.curr_offset = 0
 	s.peak_used   = 0
+	runtime.asan_poison(data)
 }
 
 /*
@@ -861,15 +875,19 @@ stack_alloc_bytes_non_zeroed :: proc(
 	if s.curr_offset + padding + size > len(s.data) {
 		return nil, .Out_Of_Memory
 	}
+	old_offset := s.prev_offset
 	s.prev_offset = s.curr_offset
 	s.curr_offset += padding
 	next_addr := curr_addr + uintptr(padding)
 	header := (^Stack_Allocation_Header)(next_addr - size_of(Stack_Allocation_Header))
+	runtime.asan_unpoison(header)
 	header.padding = padding
-	header.prev_offset = s.prev_offset
+	header.prev_offset = old_offset
 	s.curr_offset += size
 	s.peak_used = max(s.peak_used, s.curr_offset)
-	return byte_slice(rawptr(next_addr), size), nil
+	result := byte_slice(rawptr(next_addr), size)
+	runtime.asan_unpoison(result)
+	return result, nil
 }
 
 /*
@@ -902,12 +920,15 @@ stack_free :: proc(
 	}
 	header := (^Stack_Allocation_Header)(curr_addr - size_of(Stack_Allocation_Header))
 	old_offset := int(curr_addr - uintptr(header.padding) - uintptr(raw_data(s.data)))
-	if old_offset != header.prev_offset {
+	if old_offset != s.prev_offset {
 		// panic("Out of order stack allocator free");
 		return .Invalid_Pointer
 	}
-	s.curr_offset = old_offset
+
 	s.prev_offset = header.prev_offset
+	runtime.asan_poison(s.data[old_offset:s.curr_offset])
+	s.curr_offset = old_offset
+
 	return nil
 }
 
@@ -917,6 +938,7 @@ Free all allocations to the stack.
 stack_free_all :: proc(s: ^Stack, loc := #caller_location) {
 	s.prev_offset = 0
 	s.curr_offset = 0
+	runtime.asan_poison(s.data)
 }
 
 /*
@@ -1076,7 +1098,9 @@ stack_resize_bytes_non_zeroed :: proc(
 	if diff > 0 {
 		zero(rawptr(curr_addr + uintptr(diff)), diff)
 	}
-	return byte_slice(old_memory, size), nil
+	result := byte_slice(old_memory, size)
+	runtime.asan_unpoison(result)
+	return result, nil
 }
 
 stack_allocator_proc :: proc(
@@ -1144,6 +1168,7 @@ small_stack_init :: proc(s: ^Small_Stack, data: []byte) {
 	s.data      = data
 	s.offset    = 0
 	s.peak_used = 0
+	runtime.asan_poison(data)
 }
 
 /*
@@ -1252,10 +1277,13 @@ small_stack_alloc_bytes_non_zeroed :: proc(
 	s.offset += padding
 	next_addr := curr_addr + uintptr(padding)
 	header := (^Small_Stack_Allocation_Header)(next_addr - size_of(Small_Stack_Allocation_Header))
+	runtime.asan_unpoison(header)
 	header.padding = auto_cast padding
 	s.offset += size
 	s.peak_used = max(s.peak_used, s.offset)
-	return byte_slice(rawptr(next_addr), size), nil
+	result := byte_slice(rawptr(next_addr), size)
+	runtime.asan_unpoison(result)
+	return result, nil
 }
 
 /*
@@ -1289,6 +1317,7 @@ small_stack_free :: proc(
 	}
 	header := (^Small_Stack_Allocation_Header)(curr_addr - size_of(Small_Stack_Allocation_Header))
 	old_offset := int(curr_addr - uintptr(header.padding) - uintptr(raw_data(s.data)))
+	runtime.asan_poison(s.data[old_offset:s.offset])
 	s.offset = old_offset
 	return nil
 }
@@ -1298,6 +1327,7 @@ Free all memory to small stack.
 */
 small_stack_free_all :: proc(s: ^Small_Stack) {
 	s.offset = 0
+	runtime.asan_poison(s.data)
 }
 
 /*
@@ -1442,7 +1472,9 @@ small_stack_resize_bytes_non_zeroed :: proc(
 		return nil, nil
 	}
 	if old_size == size {
-		return byte_slice(old_memory, size), nil
+		result := byte_slice(old_memory, size)
+		runtime.asan_unpoison(result)
+		return result, nil
 	}
 	data, err := small_stack_alloc_bytes_non_zeroed(s, size, alignment, loc)
 	if err == nil {