Improve the `Allocator` interface to support returning `Allocator_Error` to allow for safer calls

Virtually all code (except for user-written custom allocators) should continue to work as before. Extra language features will need to be added so that the current procedures can expose the `Allocator_Error` return value transparently (akin to `#optional_ok`).
gingerBill committed 4 years ago (commit f98c4d6837)
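
For illustration, a minimal caller-side sketch (not part of the commit, assuming the `core:mem` wrappers added below): the byte-slice variants surface the error, while the pointer-based wrappers keep their old shape and drop it.

package example

import "core:fmt"
import "core:mem"

main :: proc() {
	// New-style call: failure is reported through Allocator_Error rather
	// than only through a nil result.
	data, err := mem.alloc_bytes(1024);
	if err != nil {
		fmt.println("allocation failed:", err);
		return;
	}
	defer mem.free_bytes(data);

	// Legacy wrapper: same call shape as before; the error is discarded internally.
	ptr := mem.alloc(128);
	defer mem.free(ptr);
}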

+ 93 - 13
core/mem/alloc.odin

@@ -28,11 +28,19 @@ Allocator_Query_Info :: struct {
 }
 */
 
+Allocator_Error :: runtime.Allocator_Error;
+/*
+Allocator_Error :: enum byte {
+	None            = 0,
+	Out_Of_Memory   = 1,
+	Invalid_Pointer = 2,
+}
+*/
 Allocator_Proc :: runtime.Allocator_Proc;
 /*
 Allocator_Proc :: #type proc(allocator_data: rawptr, mode: Allocator_Mode,
                              size, alignment: int,
-                             old_memory: rawptr, old_size: int, flags: u64 = 0, location := #caller_location) -> rawptr;
+                             old_memory: rawptr, old_size: int, location: Source_Code_Location = #caller_location) -> ([]byte, Allocator_Error);
 */
 
 Allocator :: runtime.Allocator;
@@ -52,23 +60,49 @@ alloc :: proc(size: int, alignment: int = DEFAULT_ALIGNMENT, allocator := contex
 	if allocator.procedure == nil {
 		return nil;
 	}
-	return allocator.procedure(allocator.data, Allocator_Mode.Alloc, size, alignment, nil, 0, 0, loc);
+	data, err := allocator.procedure(allocator.data, Allocator_Mode.Alloc, size, alignment, nil, 0, loc);
+	_ = err;
+	return raw_data(data);
+}
+
+alloc_bytes :: proc(size: int, alignment: int = DEFAULT_ALIGNMENT, allocator := context.allocator, loc := #caller_location) -> ([]byte, Allocator_Error) {
+	if size == 0 {
+		return nil, nil;
+	}
+	if allocator.procedure == nil {
+		return nil, nil;
+	}
+	return allocator.procedure(allocator.data, Allocator_Mode.Alloc, size, alignment, nil, 0, loc);
 }
 
-free :: proc(ptr: rawptr, allocator := context.allocator, loc := #caller_location) {
+free :: proc(ptr: rawptr, allocator := context.allocator, loc := #caller_location) -> Allocator_Error {
 	if ptr == nil {
-		return;
+		return nil;
 	}
 	if allocator.procedure == nil {
-		return;
+		return nil;
+	}
+	_, err := allocator.procedure(allocator.data, Allocator_Mode.Free, 0, 0, ptr, 0, loc);
+	return err;
+}
+
+free_bytes :: proc(bytes: []byte, allocator := context.allocator, loc := #caller_location) -> Allocator_Error {
+	if bytes == nil {
+		return nil;
 	}
-	allocator.procedure(allocator.data, Allocator_Mode.Free, 0, 0, ptr, 0, 0, loc);
+	if allocator.procedure == nil {
+		return nil;
+	}
+	_, err := allocator.procedure(allocator.data, Allocator_Mode.Free, 0, 0, raw_data(bytes), len(bytes), loc);
+	return err;
 }
 
-free_all :: proc(allocator := context.allocator, loc := #caller_location) {
+free_all :: proc(allocator := context.allocator, loc := #caller_location) -> Allocator_Error {
 	if allocator.procedure != nil {
-		allocator.procedure(allocator.data, Allocator_Mode.Free_All, 0, 0, nil, 0, 0, loc);
+		_, err := allocator.procedure(allocator.data, Allocator_Mode.Free_All, 0, 0, nil, 0, loc);
+		return err;
 	}
+	return nil;
 }
 
 resize :: proc(ptr: rawptr, old_size, new_size: int, alignment: int = DEFAULT_ALIGNMENT, allocator := context.allocator, loc := #caller_location) -> rawptr {
@@ -77,18 +111,40 @@ resize :: proc(ptr: rawptr, old_size, new_size: int, alignment: int = DEFAULT_AL
 	}
 	if new_size == 0 {
 		if ptr != nil {
-			allocator.procedure(allocator.data, Allocator_Mode.Free, 0, 0, ptr, 0, 0, loc);
+			allocator.procedure(allocator.data, Allocator_Mode.Free, 0, 0, ptr, old_size, loc);
 		}
 		return nil;
 	} else if ptr == nil {
-		return allocator.procedure(allocator.data, Allocator_Mode.Alloc, new_size, alignment, nil, 0, 0, loc);
+		data, err := allocator.procedure(allocator.data, Allocator_Mode.Alloc, new_size, alignment, nil, 0, loc);
+		_ = err;
+		return raw_data(data);
+	}
+	data, err := allocator.procedure(allocator.data, Allocator_Mode.Resize, new_size, alignment, ptr, old_size, loc);
+	_ = err;
+	return raw_data(data);
+}
+
+resize_bytes :: proc(old_data: []byte, new_size: int, alignment: int = DEFAULT_ALIGNMENT, allocator := context.allocator, loc := #caller_location) -> ([]byte, Allocator_Error) {
+	if allocator.procedure == nil {
+		return nil, nil;
+	}
+	ptr := raw_data(old_data);
+	old_size := len(old_data);
+	if new_size == 0 {
+		if ptr != nil {
+			_, err := allocator.procedure(allocator.data, Allocator_Mode.Free, 0, 0, ptr, old_size, loc);
+			return nil, err;
+		}
+		return nil, nil;
+	} else if ptr == nil {
+		return allocator.procedure(allocator.data, Allocator_Mode.Alloc, new_size, alignment, nil, 0, loc);
 	}
-	return allocator.procedure(allocator.data, Allocator_Mode.Resize, new_size, alignment, ptr, old_size, 0, loc);
+	return allocator.procedure(allocator.data, Allocator_Mode.Resize, new_size, alignment, ptr, old_size, loc);
 }
 
 query_features :: proc(allocator: Allocator, loc := #caller_location) -> (set: Allocator_Mode_Set) {
 	if allocator.procedure != nil {
-		allocator.procedure(allocator.data, Allocator_Mode.Query_Features, 0, 0, &set, 0, 0, loc);
+		allocator.procedure(allocator.data, Allocator_Mode.Query_Features, 0, 0, &set, 0, loc);
 		return set;
 	}
 	return nil;
@@ -97,7 +153,7 @@ query_features :: proc(allocator: Allocator, loc := #caller_location) -> (set: A
 query_info :: proc(pointer: rawptr, allocator: Allocator, loc := #caller_location) -> (props: Allocator_Query_Info) {
 	props.pointer = pointer;
 	if allocator.procedure != nil {
-		allocator.procedure(allocator.data, Allocator_Mode.Query_Info, 0, 0, &props, 0, 0, loc);
+		allocator.procedure(allocator.data, Allocator_Mode.Query_Info, 0, 0, &props, 0, loc);
 	}
 	return;
 }
@@ -218,4 +274,28 @@ default_resize_align :: proc(old_memory: rawptr, old_size, new_size, alignment:
 	free(old_memory, allocator, loc);
 	return new_memory;
 }
+default_resize_bytes_align :: proc(old_data: []byte, new_size, alignment: int, allocator := context.allocator, loc := #caller_location) -> ([]byte, Allocator_Error) {
+	old_memory := raw_data(old_data);
+	old_size := len(old_data);
+	if old_memory == nil {
+		return alloc_bytes(new_size, alignment, allocator, loc);
+	}
+
+	if new_size == 0 {
+		err := free_bytes(old_data, allocator, loc);
+		return nil, err;
+	}
+
+	if new_size == old_size {
+		return old_data, .None;
+	}
 
+	new_memory, err := alloc_bytes(new_size, alignment, allocator, loc);
+	if new_memory == nil || err != nil {
+		return nil, err;
+	}
+
+	runtime.copy(new_memory, old_data);
+	free_bytes(old_data, allocator, loc);
+	return new_memory, err;
+}
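
As an illustrative sketch (not from the diff; `grow` is a hypothetical helper), the slice-based procedures above compose naturally, since resize_bytes already covers the nil-slice (fresh allocation) and new_size == 0 (free) cases:

package example

import "core:mem"

grow :: proc(buf: []byte, new_size: int) -> ([]byte, mem.Allocator_Error) {
	// One call covers grow, shrink, first allocation, and release.
	return mem.resize_bytes(buf, new_size);
}

main :: proc() {
	buf, err := mem.alloc_bytes(64);
	if err != nil {
		return;
	}
	buf2, err2 := grow(buf, 256);
	if err2 != nil {
		mem.free_bytes(buf);
		return;
	}
	mem.free_bytes(buf2);
}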

+ 154 - 174
core/mem/allocators.odin

@@ -5,8 +5,8 @@ import "core:runtime"
 
 nil_allocator_proc :: proc(allocator_data: rawptr, mode: Allocator_Mode,
                            size, alignment: int,
-                           old_memory: rawptr, old_size: int, flags: u64 = 0, loc := #caller_location) -> rawptr {
-	return nil;
+                           old_memory: rawptr, old_size: int, loc := #caller_location) -> ([]byte, Allocator_Error) {
+	return nil, nil;
 }
 
 nil_allocator :: proc() -> Allocator {
@@ -47,7 +47,7 @@ arena_allocator :: proc(arena: ^Arena) -> Allocator {
 
 arena_allocator_proc :: proc(allocator_data: rawptr, mode: Allocator_Mode,
                              size, alignment: int,
-                             old_memory: rawptr, old_size: int, flags: u64, location := #caller_location) -> rawptr {
+                             old_memory: rawptr, old_size: int, location := #caller_location) -> ([]byte, Allocator_Error)  {
 	arena := cast(^Arena)allocator_data;
 
 	switch mode {
@@ -55,7 +55,7 @@ arena_allocator_proc :: proc(allocator_data: rawptr, mode: Allocator_Mode,
 		total_size := size + alignment;
 
 		if arena.offset + total_size > len(arena.data) {
-			return nil;
+			return nil, .Out_Of_Memory;
 		}
 
 		#no_bounds_check end := &arena.data[arena.offset];
@@ -63,7 +63,8 @@ arena_allocator_proc :: proc(allocator_data: rawptr, mode: Allocator_Mode,
 		ptr := align_forward(end, uintptr(alignment));
 		arena.offset += total_size;
 		arena.peak_used = max(arena.peak_used, arena.offset);
-		return zero(ptr, size);
+		zero(ptr, size);
+		return byte_slice(ptr, size), nil;
 
 	case .Free:
 		// NOTE(bill): Free all at once
@@ -73,20 +74,20 @@ arena_allocator_proc :: proc(allocator_data: rawptr, mode: Allocator_Mode,
 		arena.offset = 0;
 
 	case .Resize:
-		return default_resize_align(old_memory, old_size, size, alignment, arena_allocator(arena));
+		return default_resize_bytes_align(byte_slice(old_memory, old_size), size, alignment, arena_allocator(arena));
 
 	case .Query_Features:
 		set := (^Allocator_Mode_Set)(old_memory);
 		if set != nil {
 			set^ = {.Alloc, .Free_All, .Resize, .Query_Features};
 		}
-		return set;
+		return nil, nil;
 
 	case .Query_Info:
-		return nil;
+		return nil, nil;
 	}
 
-	return nil;
+	return nil, nil;
 }
 
 begin_arena_temp_memory :: proc(a: ^Arena) -> Arena_Temp_Memory {
@@ -109,9 +110,9 @@ end_arena_temp_memory :: proc(using tmp: Arena_Temp_Memory) {
 Scratch_Allocator :: struct {
 	data:               []byte,
 	curr_offset:        int,
-	prev_allocation:   rawptr,
+	prev_allocation:    rawptr,
 	backup_allocator:   Allocator,
-	leaked_allocations: [dynamic]rawptr,
+	leaked_allocations: [dynamic][]byte,
 }
 
 scratch_allocator_init :: proc(s: ^Scratch_Allocator, size: int, backup_allocator := context.allocator) {
@@ -127,7 +128,7 @@ scratch_allocator_destroy :: proc(s: ^Scratch_Allocator) {
 		return;
 	}
 	for ptr in s.leaked_allocations {
-		free(ptr, s.backup_allocator);
+		free_bytes(ptr, s.backup_allocator);
 	}
 	delete(s.leaked_allocations);
 	delete(s.data, s.backup_allocator);
@@ -136,7 +137,7 @@ scratch_allocator_destroy :: proc(s: ^Scratch_Allocator) {
 
 scratch_allocator_proc :: proc(allocator_data: rawptr, mode: Allocator_Mode,
                                size, alignment: int,
-                               old_memory: rawptr, old_size: int, flags: u64 = 0, loc := #caller_location) -> rawptr {
+                               old_memory: rawptr, old_size: int, loc := #caller_location) -> ([]byte, Allocator_Error) {
 
 	s := (^Scratch_Allocator)(allocator_data);
 
@@ -165,7 +166,7 @@ scratch_allocator_proc :: proc(allocator_data: rawptr, mode: Allocator_Mode,
 			s.prev_allocation = rawptr(ptr);
 			offset := int(ptr - start);
 			s.curr_offset = offset + size;
-			return rawptr(ptr);
+			return byte_slice(rawptr(ptr), size), nil;
 
 		case size <= len(s.data):
 			start := uintptr(raw_data(s.data));
@@ -175,7 +176,7 @@ scratch_allocator_proc :: proc(allocator_data: rawptr, mode: Allocator_Mode,
 			s.prev_allocation = rawptr(ptr);
 			offset := int(ptr - start);
 			s.curr_offset = offset + size;
-			return rawptr(ptr);
+			return byte_slice(rawptr(ptr), size), nil;
 		}
 		a := s.backup_allocator;
 		if a.procedure == nil {
@@ -183,9 +184,12 @@ scratch_allocator_proc :: proc(allocator_data: rawptr, mode: Allocator_Mode,
 			s.backup_allocator = a;
 		}
 
-		ptr := alloc(size, alignment, a, loc);
+		ptr, err := alloc_bytes(size, alignment, a, loc);
+		if err != nil {
+			return ptr, err;
+		}
 		if s.leaked_allocations == nil {
-			s.leaked_allocations = make([dynamic]rawptr, a);
+			s.leaked_allocations = make([dynamic][]byte, a);
 		}
 		append(&s.leaked_allocations, ptr);
 
@@ -195,7 +199,7 @@ scratch_allocator_proc :: proc(allocator_data: rawptr, mode: Allocator_Mode,
 			}
 		}
 
-		return ptr;
+		return ptr, err;
 
 	case .Free:
 		start := uintptr(raw_data(s.data));
@@ -205,30 +209,32 @@ scratch_allocator_proc :: proc(allocator_data: rawptr, mode: Allocator_Mode,
 		if s.prev_allocation == old_memory {
 			s.curr_offset = int(uintptr(s.prev_allocation) - start);
 			s.prev_allocation = nil;
-			return nil;
+			return nil, nil;
 		}
 
 		if start <= old_ptr && old_ptr < end {
 			// NOTE(bill): Cannot free this pointer but it is valid
-			return nil;
+			return nil, nil;
 		}
 
 		if len(s.leaked_allocations) != 0 {
-			for ptr, i in s.leaked_allocations {
+			for data, i in s.leaked_allocations {
+				ptr := raw_data(data);
 				if ptr == old_memory {
-					free(ptr, s.backup_allocator);
+					free_bytes(data, s.backup_allocator);
 					ordered_remove(&s.leaked_allocations, i);
-					return nil;
+					return nil, nil;
 				}
 			}
 		}
-		panic("invalid pointer passed to default_temp_allocator");
+		return nil, .Invalid_Pointer;
+		// panic("invalid pointer passed to default_temp_allocator");
 
 	case .Free_All:
 		s.curr_offset = 0;
 		s.prev_allocation = nil;
 		for ptr in s.leaked_allocations {
-			free(ptr, s.backup_allocator);
+			free_bytes(ptr, s.backup_allocator);
 		}
 		clear(&s.leaked_allocations);
 
@@ -238,26 +244,28 @@ scratch_allocator_proc :: proc(allocator_data: rawptr, mode: Allocator_Mode,
 		old_ptr := uintptr(old_memory);
 		if begin <= old_ptr && old_ptr < end && old_ptr+uintptr(size) < end {
 			s.curr_offset = int(old_ptr-begin)+size;
-			return old_memory;
+			return byte_slice(old_memory, size), nil;
 		}
-		ptr := scratch_allocator_proc(allocator_data, .Alloc, size, alignment, old_memory, old_size, flags, loc);
-		copy(ptr, old_memory, old_size);
-		scratch_allocator_proc(allocator_data, .Free, 0, alignment, old_memory, old_size, flags, loc);
-		return ptr;
+		data, err := scratch_allocator_proc(allocator_data, .Alloc, size, alignment, old_memory, old_size, loc);
+		if err != nil {
+			return data, err;
+		}
+		runtime.copy(data, byte_slice(old_memory, old_size));
+		_, err = scratch_allocator_proc(allocator_data, .Free, 0, alignment, old_memory, old_size, loc);
+		return data, err;
 
 	case .Query_Features:
 		set := (^Allocator_Mode_Set)(old_memory);
 		if set != nil {
 			set^ = {.Alloc, .Free, .Free_All, .Resize, .Query_Features};
 		}
-		return set;
+		return nil, nil;
 
 	case .Query_Info:
-		return nil;
+		return nil, nil;
 	}
 
-
-	return nil;
+	return nil, nil;
 }
 
 scratch_allocator :: proc(allocator: ^Scratch_Allocator) -> Allocator {
@@ -301,18 +309,18 @@ stack_allocator :: proc(stack: ^Stack) -> Allocator {
 
 stack_allocator_proc :: proc(allocator_data: rawptr, mode: Allocator_Mode,
                              size, alignment: int,
-                             old_memory: rawptr, old_size: int, flags: u64, location := #caller_location) -> rawptr {
+                             old_memory: rawptr, old_size: int, location := #caller_location) -> ([]byte, Allocator_Error) {
 	s := cast(^Stack)allocator_data;
 
 	if s.data == nil {
-		return nil;
+		return nil, .Out_Of_Memory;
 	}
 
-	raw_alloc :: proc(s: ^Stack, size, alignment: int) -> rawptr {
+	raw_alloc :: proc(s: ^Stack, size, alignment: int) -> ([]byte, Allocator_Error) {
 		curr_addr := uintptr(raw_data(s.data)) + uintptr(s.curr_offset);
 		padding := calc_padding_with_header(curr_addr, uintptr(alignment), size_of(Stack_Allocation_Header));
 		if s.curr_offset + padding + size > len(s.data) {
-			return nil;
+			return nil, .Out_Of_Memory;
 		}
 		s.prev_offset = s.curr_offset;
 		s.curr_offset += padding;
@@ -326,7 +334,8 @@ stack_allocator_proc :: proc(allocator_data: rawptr, mode: Allocator_Mode,
 
 		s.peak_used = max(s.peak_used, s.curr_offset);
 
-		return zero(rawptr(next_addr), size);
+		zero(rawptr(next_addr), size);
+		return byte_slice(rawptr(next_addr), size), nil;
 	}
 
 	switch mode {
@@ -334,7 +343,7 @@ stack_allocator_proc :: proc(allocator_data: rawptr, mode: Allocator_Mode,
 		return raw_alloc(s, size, alignment);
 	case .Free:
 		if old_memory == nil {
-			return nil;
+			return nil, nil;
 		}
 		start := uintptr(raw_data(s.data));
 		end := start + uintptr(len(s.data));
@@ -346,20 +355,20 @@ stack_allocator_proc :: proc(allocator_data: rawptr, mode: Allocator_Mode,
 
 		if curr_addr >= start+uintptr(s.curr_offset) {
 			// NOTE(bill): Allow double frees
-			return nil;
+			return nil, nil;
 		}
 
 		header := (^Stack_Allocation_Header)(curr_addr - size_of(Stack_Allocation_Header));
 		old_offset := int(curr_addr - uintptr(header.padding) - uintptr(raw_data(s.data)));
 
 		if old_offset != header.prev_offset {
-			panic("Out of order stack allocator free");
+			// panic("Out of order stack allocator free");
+			return nil, .Invalid_Pointer;
 		}
 
 		s.curr_offset = old_offset;
 		s.prev_offset = header.prev_offset;
 
-
 	case .Free_All:
 		s.prev_offset = 0;
 		s.curr_offset = 0;
@@ -369,7 +378,7 @@ stack_allocator_proc :: proc(allocator_data: rawptr, mode: Allocator_Mode,
 			return raw_alloc(s, size, alignment);
 		}
 		if size == 0 {
-			return nil;
+			return nil, nil;
 		}
 
 		start := uintptr(raw_data(s.data));
@@ -381,20 +390,22 @@ stack_allocator_proc :: proc(allocator_data: rawptr, mode: Allocator_Mode,
 
 		if curr_addr >= start+uintptr(s.curr_offset) {
 			// NOTE(bill): Allow double frees
-			return nil;
+			return nil, nil;
 		}
 
 		if old_size == size {
-			return old_memory;
+			return byte_slice(old_memory, size), nil;
 		}
 
 		header := (^Stack_Allocation_Header)(curr_addr - size_of(Stack_Allocation_Header));
 		old_offset := int(curr_addr - uintptr(header.padding) - uintptr(raw_data(s.data)));
 
 		if old_offset != header.prev_offset {
-			ptr := raw_alloc(s, size, alignment);
-			copy(ptr, old_memory, min(old_size, size));
-			return ptr;
+			data, err := raw_alloc(s, size, alignment);
+			if err == nil {
+				runtime.copy(data, byte_slice(old_memory, old_size));
+			}
+			return data, err;
 		}
 
 		old_memory_size := uintptr(s.curr_offset) - (curr_addr - start);
@@ -406,19 +417,19 @@ stack_allocator_proc :: proc(allocator_data: rawptr, mode: Allocator_Mode,
 			zero(rawptr(curr_addr + uintptr(diff)), diff);
 		}
 
-		return old_memory;
+		return byte_slice(old_memory, size), nil;
 
 	case .Query_Features:
 		set := (^Allocator_Mode_Set)(old_memory);
 		if set != nil {
 			set^ = {.Alloc, .Free, .Free_All, .Resize, .Query_Features};
 		}
-		return set;
+		return nil, nil;
 	case .Query_Info:
-		return nil;
+		return nil, nil;
 	}
 
-	return nil;
+	return nil, nil;
 }
 
 
@@ -453,20 +464,20 @@ small_stack_allocator :: proc(stack: ^Small_Stack) -> Allocator {
 
 small_stack_allocator_proc :: proc(allocator_data: rawptr, mode: Allocator_Mode,
                                    size, alignment: int,
-                                   old_memory: rawptr, old_size: int, flags: u64, location := #caller_location) -> rawptr {
+                                   old_memory: rawptr, old_size: int, location := #caller_location) -> ([]byte, Allocator_Error) {
 	s := cast(^Small_Stack)allocator_data;
 
 	if s.data == nil {
-		return nil;
+		return nil, .Out_Of_Memory;
 	}
 
 	align := clamp(alignment, 1, 8*size_of(Stack_Allocation_Header{}.padding)/2);
 
-	raw_alloc :: proc(s: ^Small_Stack, size, alignment: int) -> rawptr {
+	raw_alloc :: proc(s: ^Small_Stack, size, alignment: int) -> ([]byte, Allocator_Error) {
 		curr_addr := uintptr(raw_data(s.data)) + uintptr(s.offset);
 		padding := calc_padding_with_header(curr_addr, uintptr(alignment), size_of(Small_Stack_Allocation_Header));
 		if s.offset + padding + size > len(s.data) {
-			return nil;
+			return nil, .Out_Of_Memory;
 		}
 		s.offset += padding;
 
@@ -478,7 +489,8 @@ small_stack_allocator_proc :: proc(allocator_data: rawptr, mode: Allocator_Mode,
 
 		s.peak_used = max(s.peak_used, s.offset);
 
-		return zero(rawptr(next_addr), size);
+		zero(rawptr(next_addr), size);
+		return byte_slice(rawptr(next_addr), size), nil;
 	}
 
 	switch mode {
@@ -486,19 +498,20 @@ small_stack_allocator_proc :: proc(allocator_data: rawptr, mode: Allocator_Mode,
 		return raw_alloc(s, size, align);
 	case .Free:
 		if old_memory == nil {
-			return nil;
+			return nil, nil;
 		}
 		start := uintptr(raw_data(s.data));
 		end := start + uintptr(len(s.data));
 		curr_addr := uintptr(old_memory);
 
 		if !(start <= curr_addr && curr_addr < end) {
-			panic("Out of bounds memory address passed to stack allocator (free)");
+			// panic("Out of bounds memory address passed to stack allocator (free)");
+			return nil, .Invalid_Pointer;
 		}
 
 		if curr_addr >= start+uintptr(s.offset) {
 			// NOTE(bill): Allow double frees
-			return nil;
+			return nil, nil;
 		}
 
 		header := (^Small_Stack_Allocation_Header)(curr_addr - size_of(Small_Stack_Allocation_Header));
@@ -514,41 +527,44 @@ small_stack_allocator_proc :: proc(allocator_data: rawptr, mode: Allocator_Mode,
 			return raw_alloc(s, size, align);
 		}
 		if size == 0 {
-			return nil;
+			return nil, nil;
 		}
 
 		start := uintptr(raw_data(s.data));
 		end := start + uintptr(len(s.data));
 		curr_addr := uintptr(old_memory);
 		if !(start <= curr_addr && curr_addr < end) {
-			panic("Out of bounds memory address passed to stack allocator (resize)");
+			// panic("Out of bounds memory address passed to stack allocator (resize)");
+			return nil, .Invalid_Pointer;
 		}
 
 		if curr_addr >= start+uintptr(s.offset) {
 			// NOTE(bill): Treat as a double free
-			return nil;
+			return nil, nil;
 		}
 
 		if old_size == size {
-			return old_memory;
+			return byte_slice(old_memory, size), nil;
 		}
 
-		ptr := raw_alloc(s, size, align);
-		copy(ptr, old_memory, min(old_size, size));
-		return ptr;
+		data, err := raw_alloc(s, size, align);
+		if err == nil {
+			runtime.copy(data, byte_slice(old_memory, old_size));
+		}
+		return data, err;
 
 	case .Query_Features:
 		set := (^Allocator_Mode_Set)(old_memory);
 		if set != nil {
 			set^ = {.Alloc, .Free, .Free_All, .Resize, .Query_Features};
 		}
-		return set;
+		return nil, nil;
 
 	case .Query_Info:
-		return nil;
+		return nil, nil;
 	}
 
-	return nil;
+	return nil, nil;
 }
 
 
@@ -579,42 +595,44 @@ DYNAMIC_POOL_OUT_OF_BAND_SIZE_DEFAULT :: 6554;
 
 dynamic_pool_allocator_proc :: proc(allocator_data: rawptr, mode: Allocator_Mode,
                                     size, alignment: int,
-                                    old_memory: rawptr, old_size: int,
-                                    flags: u64 = 0, loc := #caller_location) -> rawptr {
+                                    old_memory: rawptr, old_size: int, loc := #caller_location) -> ([]byte, Allocator_Error) {
 	pool := (^Dynamic_Pool)(allocator_data);
 
 	switch mode {
 	case .Alloc:
-		return dynamic_pool_alloc(pool, size);
+		return dynamic_pool_alloc_bytes(pool, size);
 	case .Free:
-		//
+		return nil, nil;
 	case .Free_All:
 		dynamic_pool_free_all(pool);
+		return nil, nil;
 	case .Resize:
 		if old_size >= size {
-			return old_memory;
+			return byte_slice(old_memory, size), nil;
+		}
+		data, err := dynamic_pool_alloc_bytes(pool, size);
+		if err == nil {
+			runtime.copy(data, byte_slice(old_memory, old_size));
 		}
-		ptr := dynamic_pool_alloc(pool, size);
-		copy(ptr, old_memory, old_size);
-		return ptr;
+		return data, err;
 
 	case .Query_Features:
 		set := (^Allocator_Mode_Set)(old_memory);
 		if set != nil {
 			set^ = {.Alloc, .Free_All, .Resize, .Query_Features, .Query_Info};
 		}
-		return set;
+		return nil, nil;
 
 	case .Query_Info:
 		info := (^Allocator_Query_Info)(old_memory);
 		if info != nil && info.pointer != nil {
 			info.size = pool.block_size;
 			info.alignment = pool.alignment;
-			return info;
+			return byte_slice(info, size_of(info^)), nil;
 		}
-		return nil;
+		return nil, nil;
 	}
-	return nil;
+	return nil, nil;
 }
 
 
@@ -649,8 +667,14 @@ dynamic_pool_destroy :: proc(using pool: ^Dynamic_Pool) {
 }
 
 
-dynamic_pool_alloc :: proc(using pool: ^Dynamic_Pool, bytes: int) -> rawptr {
-	cycle_new_block :: proc(using pool: ^Dynamic_Pool) {
+dynamic_pool_alloc :: proc(pool: ^Dynamic_Pool, bytes: int) -> rawptr {
+	data, err := dynamic_pool_alloc_bytes(pool, bytes);
+	assert(err == nil);
+	return raw_data(data);
+}
+
+dynamic_pool_alloc_bytes :: proc(using pool: ^Dynamic_Pool, bytes: int) -> ([]byte, Allocator_Error) {
+	cycle_new_block :: proc(using pool: ^Dynamic_Pool) -> (err: Allocator_Error) {
 		if block_allocator.procedure == nil {
 			panic("You must call pool_init on a Pool before using it");
 		}
@@ -663,14 +687,17 @@ dynamic_pool_alloc :: proc(using pool: ^Dynamic_Pool, bytes: int) -> rawptr {
 		if len(unused_blocks) > 0 {
 			new_block = pop(&unused_blocks);
 		} else {
-			new_block = block_allocator.procedure(block_allocator.data, Allocator_Mode.Alloc,
-			                                      block_size, alignment,
-			                                      nil, 0);
+			data: []byte;
+			data, err = block_allocator.procedure(block_allocator.data, Allocator_Mode.Alloc,
+			                                           block_size, alignment,
+			                                           nil, 0);
+			new_block = raw_data(data);
 		}
 
 		bytes_left = block_size;
 		current_pos = new_block;
 		current_block = new_block;
+		return;
 	}
 
 	n := bytes;
@@ -678,26 +705,29 @@ dynamic_pool_alloc :: proc(using pool: ^Dynamic_Pool, bytes: int) -> rawptr {
 	n += extra;
 	if n >= out_band_size {
 		assert(block_allocator.procedure != nil);
-		memory := block_allocator.procedure(block_allocator.data, Allocator_Mode.Alloc,
+		memory, err := block_allocator.procedure(block_allocator.data, Allocator_Mode.Alloc,
 			                                block_size, alignment,
 			                                nil, 0);
 		if memory != nil {
-			append(&out_band_allocations, (^byte)(memory));
+			append(&out_band_allocations, raw_data(memory));
 		}
-		return memory;
+		return memory, err;
 	}
 
 	if bytes_left < n {
-		cycle_new_block(pool);
+		err := cycle_new_block(pool);
+		if err != nil {
+			return nil, err;
+		}
 		if current_block == nil {
-			return nil;
+			return nil, .Out_Of_Memory;
 		}
 	}
 
 	memory := current_pos;
 	current_pos = ptr_offset((^byte)(current_pos), n);
 	bytes_left -= n;
-	return memory;
+	return byte_slice(memory, bytes), nil;
 }
 
 
@@ -730,7 +760,7 @@ dynamic_pool_free_all :: proc(using pool: ^Dynamic_Pool) {
 
 panic_allocator_proc :: proc(allocator_data: rawptr, mode: Allocator_Mode,
                              size, alignment: int,
-                             old_memory: rawptr, old_size: int, flags: u64 = 0, loc := #caller_location) -> rawptr {
+                             old_memory: rawptr, old_size: int, loc := #caller_location) -> ([]byte, Allocator_Error) {
 
 	switch mode {
 	case .Alloc:
@@ -753,13 +783,13 @@ panic_allocator_proc :: proc(allocator_data: rawptr, mode: Allocator_Mode,
 		if set != nil {
 			set^ = {.Query_Features};
 		}
-		return set;
+		return nil, nil;
 
 	case .Query_Info:
-		return nil;
+		return nil, nil;
 	}
 
-	return nil;
+	return nil, nil;
 }
 
 panic_allocator :: proc() -> Allocator {
@@ -770,70 +800,12 @@ panic_allocator :: proc() -> Allocator {
 }
 
 
-alloca_allocator_proc :: proc(allocator_data: rawptr, mode: Allocator_Mode,
-                              size, alignment: int,
-                              old_memory: rawptr, old_size: int, flags: u64 = 0, loc := #caller_location) -> rawptr {
-	switch mode {
-	case .Alloc:
-		switch alignment {
-		case:   return intrinsics.alloca(size, 2*align_of(uintptr));
-		case 0: return intrinsics.alloca(size, 0);
-
-		case 1:     return intrinsics.alloca(size, 1);
-		case 2:     return intrinsics.alloca(size, 2);
-		case 4:     return intrinsics.alloca(size, 4);
-		case 8:     return intrinsics.alloca(size, 8);
-		case 16:    return intrinsics.alloca(size, 16);
-		case 32:    return intrinsics.alloca(size, 32);
-		case 64:    return intrinsics.alloca(size, 64);
-		case 128:   return intrinsics.alloca(size, 128);
-		case 256:   return intrinsics.alloca(size, 256);
-		case 512:   return intrinsics.alloca(size, 512);
-		case 1024:  return intrinsics.alloca(size, 1024);
-		case 2048:  return intrinsics.alloca(size, 2048);
-		case 4096:  return intrinsics.alloca(size, 4096);
-		case 8192:  return intrinsics.alloca(size, 8192);
-		case 16384: return intrinsics.alloca(size, 16384);
-		case 32768: return intrinsics.alloca(size, 32768);
-		case 65536: return intrinsics.alloca(size, 65536);
-		}
-	case .Resize:
-		return default_resize_align(old_memory, old_size, size, alignment, alloca_allocator());
-
-	case .Free:
-		// Do nothing
-	case .Free_All:
-		// Do nothing
-
-	case .Query_Features:
-		set := (^Allocator_Mode_Set)(old_memory);
-		if set != nil {
-			set^ = {.Alloc, .Resize, .Query_Features};
-		}
-		return set;
-
-	case .Query_Info:
-		return nil;
-	}
-	return nil;
-}
-
-alloca_allocator :: proc() -> Allocator {
-	return Allocator{
-		procedure = alloca_allocator_proc,
-		data = nil,
-	};
-}
-
-
-
-
-
 Tracking_Allocator_Entry :: struct {
 	memory:    rawptr,
 	size:      int,
 	alignment: int,
-	location: runtime.Source_Code_Location,
+	err:       Allocator_Error,
+	location:  runtime.Source_Code_Location,
 }
 Tracking_Allocator_Bad_Free_Entry :: struct {
 	memory:   rawptr,
@@ -864,7 +836,9 @@ tracking_allocator :: proc(data: ^Tracking_Allocator) -> Allocator {
 	};
 }
 
-tracking_allocator_proc :: proc(allocator_data: rawptr, mode: Allocator_Mode, size, alignment: int, old_memory: rawptr, old_size: int, flags: u64 = 0, loc := #caller_location) -> rawptr {
+tracking_allocator_proc :: proc(allocator_data: rawptr, mode: Allocator_Mode,
+                                size, alignment: int,
+                                old_memory: rawptr, old_size: int, loc := #caller_location) -> ([]byte, Allocator_Error) {
 	data := (^Tracking_Allocator)(allocator_data);
 	if mode == .Query_Info {
 		info := (^Allocator_Query_Info)(old_memory);
@@ -872,23 +846,27 @@ tracking_allocator_proc :: proc(allocator_data: rawptr, mode: Allocator_Mode, si
 			if entry, ok := data.allocation_map[info.pointer]; ok {
 				info.size = entry.size;
 				info.alignment = entry.alignment;
-				return info;
 			}
 			info.pointer = nil;
 		}
 
-		return nil;
+		return nil, nil;
 	}
 
-	result: rawptr;
+	result: []byte;
+	err: Allocator_Error;
 	if mode == .Free && old_memory not_in data.allocation_map {
 		append(&data.bad_free_array, Tracking_Allocator_Bad_Free_Entry{
 			memory = old_memory,
 			location = loc,
 		});
 	} else {
-		result = data.backing.procedure(data.backing.data, mode, size, alignment, old_memory, old_size, flags, loc);
+		result, err = data.backing.procedure(data.backing.data, mode, size, alignment, old_memory, old_size, loc);
+		if err != nil {
+			return result, err;
+		}
 	}
+	result_ptr := raw_data(result);
 
 	if data.allocation_map.allocator.procedure == nil {
 		data.allocation_map.allocator = context.allocator;
@@ -896,22 +874,24 @@ tracking_allocator_proc :: proc(allocator_data: rawptr, mode: Allocator_Mode, si
 
 	switch mode {
 	case .Alloc:
-		data.allocation_map[result] = Tracking_Allocator_Entry{
-			memory = result,
+		data.allocation_map[result_ptr] = Tracking_Allocator_Entry{
+			memory = result_ptr,
 			size = size,
 			alignment = alignment,
+			err = err,
 			location = loc,
 		};
 	case .Free:
 		delete_key(&data.allocation_map, old_memory);
 	case .Resize:
-		if old_memory != result {
+		if old_memory != result_ptr {
 			delete_key(&data.allocation_map, old_memory);
 		}
-		data.allocation_map[result] = Tracking_Allocator_Entry{
-			memory = result,
+		data.allocation_map[result_ptr] = Tracking_Allocator_Entry{
+			memory = result_ptr,
 			size = size,
 			alignment = alignment,
+			err = err,
 			location = loc,
 		};
 
@@ -925,13 +905,13 @@ tracking_allocator_proc :: proc(allocator_data: rawptr, mode: Allocator_Mode, si
 		if set != nil {
 			set^ = {.Alloc, .Free, .Free_All, .Resize, .Query_Features, .Query_Info};
 		}
-		return set;
+		return nil, nil;
 
 	case .Query_Info:
-		unreachable();
+		return nil, nil;
 	}
 
-	return result;
+	return result, err;
 }
 
 
@@ -1021,13 +1001,13 @@ small_allocator :: proc(s: ^$S/Small_Allocator, backing := context.allocator) ->
 
 
 		case .Query_Features:
-			return nil;
+			return nil, nil;
 
 		case .Query_Info:
-			return nil;
+			return nil, nil;
 		}
 
-		return nil;
+		return nil, nil;
 	};
 	return a;
 }
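
Since user-written custom allocators are the one place that needs updating, here is a rough sketch of the new shape (illustrative only; `Bump`, `bump_allocator_proc`, and `bump_allocator` are made-up names, and alignment is assumed to be a power of two):

package example

import "core:mem"

Bump :: struct {
	data:   []byte,
	offset: int,
}

bump_allocator_proc :: proc(allocator_data: rawptr, mode: mem.Allocator_Mode,
                            size, alignment: int,
                            old_memory: rawptr, old_size: int, loc := #caller_location) -> ([]byte, mem.Allocator_Error) {
	b := (^Bump)(allocator_data);
	switch mode {
	case .Alloc:
		mask  := alignment - 1;
		start := (b.offset + mask) &~ mask;  // round the offset up to `alignment`
		if start + size > len(b.data) {
			return nil, .Out_Of_Memory;
		}
		b.offset = start + size;
		result := b.data[start : start+size];
		mem.zero(raw_data(result), size);
		return result, nil;

	case .Free:
		// Individual frees are a no-op for a bump allocator.
		return nil, nil;

	case .Free_All:
		b.offset = 0;

	case .Resize:
		a := mem.Allocator{procedure = bump_allocator_proc, data = allocator_data};
		return mem.default_resize_bytes_align(mem.byte_slice(old_memory, old_size), size, alignment, a, loc);

	case .Query_Features, .Query_Info:
		return nil, nil;
	}
	return nil, nil;
}

bump_allocator :: proc(b: ^Bump) -> mem.Allocator {
	return mem.Allocator{procedure = bump_allocator_proc, data = b};
}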

+ 5 - 3
core/mem/mem.odin

@@ -139,9 +139,11 @@ slice_ptr :: proc(ptr: ^$T, len: int) -> []T {
 	return transmute([]T)Raw_Slice{data = ptr, len = len};
 }
 
-slice_ptr_to_bytes :: proc(ptr: rawptr, len: int) -> []byte {
-	assert(len >= 0);
-	return transmute([]byte)Raw_Slice{data = ptr, len = len};
+byte_slice :: slice_ptr_to_bytes;
+slice_ptr_to_bytes :: #force_inline proc "contextless" (data: rawptr, len: int) -> (res: []byte) {
+	r := (^Raw_Slice)(&res);
+	r.data, r.len = data, max(len, 0);
+	return;
 }
 
 slice_to_bytes :: proc(slice: $E/[]$T) -> []byte {
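
Illustrative use of the renamed helper (sketch only): byte_slice wraps an existing pointer and length as a []byte without copying, and clamps a negative length to zero.

package example

import "core:mem"

main :: proc() {
	buf: [16]byte;
	s := mem.byte_slice(&buf[0], len(buf));  // views the array's storage, no copy
	assert(len(s) == 16);
}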

+ 14 - 9
core/os/os.odin

@@ -133,7 +133,12 @@ read_ptr :: proc(fd: Handle, data: rawptr, len: int) -> (int, Errno) {
 
 heap_allocator_proc :: proc(allocator_data: rawptr, mode: mem.Allocator_Mode,
                             size, alignment: int,
-                            old_memory: rawptr, old_size: int, flags: u64 = 0, loc := #caller_location) -> rawptr {
+                            old_memory: rawptr, old_size: int, loc := #caller_location) -> ([]byte, mem.Allocator_Error) {
+	byte_slice :: #force_inline proc "contextless" (data: rawptr, len: int) -> (res: []byte) {
+		r := (^mem.Raw_Slice)(&res);
+		r.data, r.len = data, len;
+		return;
+	}
 
 	//
 	// NOTE(tetra, 2020-01-14): The heap doesn't respect alignment.
@@ -142,7 +147,7 @@ heap_allocator_proc :: proc(allocator_data: rawptr, mode: mem.Allocator_Mode,
 	// the pointer we return to the user.
 	//
 
-	aligned_alloc :: proc(size, alignment: int, old_ptr: rawptr = nil) -> rawptr {
+	aligned_alloc :: proc(size, alignment: int, old_ptr: rawptr = nil) -> ([]byte, mem.Allocator_Error) {
 		a := max(alignment, align_of(rawptr));
 		space := size + a - 1;
 
@@ -159,13 +164,13 @@ heap_allocator_proc :: proc(allocator_data: rawptr, mode: mem.Allocator_Mode,
 		aligned_ptr := (ptr - 1 + uintptr(a)) & -uintptr(a);
 		diff := int(aligned_ptr - ptr);
 		if (size + diff) > space {
-			return nil;
+			return nil, .Out_Of_Memory;
 		}
 
 		aligned_mem = rawptr(aligned_ptr);
 		mem.ptr_offset((^rawptr)(aligned_mem), -1)^ = allocated_mem;
 
-		return aligned_mem;
+		return byte_slice(aligned_mem, size), .None;
 	}
 
 	aligned_free :: proc(p: rawptr) {
@@ -174,9 +179,9 @@ heap_allocator_proc :: proc(allocator_data: rawptr, mode: mem.Allocator_Mode,
 		}
 	}
 
-	aligned_resize :: proc(p: rawptr, old_size: int, new_size: int, new_alignment: int) -> rawptr {
+	aligned_resize :: proc(p: rawptr, old_size: int, new_size: int, new_alignment: int) -> ([]byte, mem.Allocator_Error) {
 		if p == nil {
-			return nil;
+			return nil, nil;
 		}
 		return aligned_alloc(new_size, new_alignment, p);
 	}
@@ -202,13 +207,13 @@ heap_allocator_proc :: proc(allocator_data: rawptr, mode: mem.Allocator_Mode,
 		if set != nil {
 			set^ = {.Alloc, .Free, .Resize, .Query_Features};
 		}
-		return set;
+		return byte_slice(set, size_of(set^)), .None;
 
 	case .Query_Info:
-		return nil;
+		return nil, nil;
 	}
 
-	return nil;
+	return nil, nil;
 }
 
 heap_allocator :: proc() -> mem.Allocator {
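
Aside (not part of the diff): the NOTE above boils down to over-allocating and rounding the address up to the requested alignment; the rounding step is plain bit arithmetic, sketched here on dummy numbers.

package example

import "core:fmt"

main :: proc() {
	a := uintptr(64);                             // requested alignment, assumed a power of two
	allocated := uintptr(0x1003);                 // pretend address returned by the heap
	ptr := allocated + uintptr(size_of(rawptr));  // leave room to stash the original pointer
	aligned := (ptr - 1 + a) & -a;                // round up to the next multiple of `a`
	fmt.println(aligned % a == 0, aligned >= ptr);
}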

+ 8 - 2
core/runtime/core.odin

@@ -252,7 +252,6 @@ Source_Code_Location :: struct {
 
 Assertion_Failure_Proc :: #type proc(prefix, message: string, loc: Source_Code_Location);
 
-
 // Allocation Stuff
 Allocator_Mode :: enum byte {
 	Alloc,
@@ -271,9 +270,16 @@ Allocator_Query_Info :: struct {
 	alignment: Maybe(int),
 }
 
+Allocator_Error :: enum byte {
+	None            = 0,
+	Out_Of_Memory   = 1,
+	Invalid_Pointer = 2,
+}
+
 Allocator_Proc :: #type proc(allocator_data: rawptr, mode: Allocator_Mode,
                              size, alignment: int,
-                             old_memory: rawptr, old_size: int, flags: u64 = 0, location: Source_Code_Location = #caller_location) -> rawptr;
+                             old_memory: rawptr, old_size: int,
+                             location: Source_Code_Location = #caller_location) -> ([]byte, Allocator_Error);
 Allocator :: struct {
 	procedure: Allocator_Proc,
 	data:      rawptr,
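
For reference, a sketch (not from the commit) showing that the error is an ordinary enum and can be switched over exhaustively at call sites:

package example

import "core:fmt"
import "core:runtime"

report :: proc(err: runtime.Allocator_Error) {
	switch err {
	case .None:
		// success, nothing to report
	case .Out_Of_Memory:
		fmt.println("allocator is out of memory");
	case .Invalid_Pointer:
		fmt.println("pointer was not allocated by this allocator");
	}
}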

+ 14 - 12
core/runtime/core_builtin.odin

@@ -180,12 +180,14 @@ DEFAULT_RESERVE_CAPACITY :: 16;
 
 make_aligned :: proc($T: typeid/[]$E, auto_cast len: int, alignment: int, allocator := context.allocator, loc := #caller_location) -> T {
 	make_slice_error_loc(loc, len);
-	data := mem_alloc(size_of(E)*len, alignment, allocator, loc);
-	if data == nil && size_of(E) != 0 {
+	data, err := mem_alloc_bytes(size_of(E)*len, alignment, allocator, loc);
+	switch {
+	case err != nil:
+		return nil;
+	case data == nil && size_of(E) != 0:
 		return nil;
 	}
-	// mem_zero(data, size_of(E)*len);
-	s := Raw_Slice{data, len};
+	s := Raw_Slice{raw_data(data), len};
 	return transmute(T)s;
 }
 
@@ -449,15 +451,15 @@ reserve_dynamic_array :: proc(array: ^$T/[dynamic]$E, capacity: int, loc := #cal
 	new_size  := capacity * size_of(E);
 	allocator := a.allocator;
 
-	new_data := allocator.procedure(
+	new_data, err := allocator.procedure(
 		allocator.data, .Resize, new_size, align_of(E),
-		a.data, old_size, 0, loc,
+		a.data, old_size, loc,
 	);
-	if new_data == nil {
+	if new_data == nil || err != nil {
 		return false;
 	}
 
-	a.data = new_data;
+	a.data = raw_data(new_data);
 	a.cap = capacity;
 	return true;
 }
@@ -483,15 +485,15 @@ resize_dynamic_array :: proc(array: ^$T/[dynamic]$E, length: int, loc := #caller
 	new_size  := length * size_of(E);
 	allocator := a.allocator;
 
-	new_data := allocator.procedure(
+	new_data, err := allocator.procedure(
 		allocator.data, .Resize, new_size, align_of(E),
-		a.data, old_size, 0, loc,
+		a.data, old_size, loc,
 	);
-	if new_data == nil {
+	if new_data == nil || err != nil {
 		return false;
 	}
 
-	a.data = new_data;
+	a.data = raw_data(new_data);
 	a.len = length;
 	a.cap = length;
 	return true;

+ 6 - 5
core/runtime/core_builtin_soa.odin

@@ -226,13 +226,14 @@ reserve_soa :: proc(array: ^$T/#soa[dynamic]$E, capacity: int, loc := #caller_lo
 
 	old_data := (^rawptr)(array)^;
 
-	new_data := array.allocator.procedure(
+	new_bytes, err := array.allocator.procedure(
 		array.allocator.data, .Alloc, new_size, max_align,
-		nil, old_size, 0, loc,
+		nil, old_size, loc,
 	);
-	if new_data == nil {
+	if new_bytes == nil || err != nil {
 		return false;
 	}
+	new_data := raw_data(new_bytes);
 
 
 	footer.cap = capacity;
@@ -256,9 +257,9 @@ reserve_soa :: proc(array: ^$T/#soa[dynamic]$E, capacity: int, loc := #caller_lo
 		new_offset += type.size * capacity;
 	}
 
-	array.allocator.procedure(
+	_, err = array.allocator.procedure(
 		array.allocator.data, .Free, 0, max_align,
-		old_data, old_size, 0, loc,
+		old_data, old_size, loc,
 	);
 
 	return true;

+ 41 - 27
core/runtime/default_allocators.odin

@@ -5,8 +5,8 @@ when ODIN_DEFAULT_TO_NIL_ALLOCATOR || ODIN_OS == "freestanding" {
 
 	default_allocator_proc :: proc(allocator_data: rawptr, mode: mem.Allocator_Mode,
 	                               size, alignment: int,
-	                               old_memory: rawptr, old_size: int, flags: u64 = 0, loc := #caller_location) -> rawptr {
-		return nil;
+	                               old_memory: rawptr, old_size: int, loc := #caller_location) -> ([]byte, Allocator_Error) {
+		return nil, .None;
 	}
 
 	default_allocator :: proc() -> Allocator {
@@ -26,6 +26,13 @@ when ODIN_DEFAULT_TO_NIL_ALLOCATOR || ODIN_OS == "freestanding" {
 	}
 }
 
+@(private)
+byte_slice :: #force_inline proc "contextless" (data: rawptr, len: int) -> (res: []byte) {
+	r := (^Raw_Slice)(&res);
+	r.data, r.len = data, len;
+	return;
+}
+
 
 DEFAULT_TEMP_ALLOCATOR_BACKING_SIZE: int : #config(DEFAULT_TEMP_ALLOCATOR_BACKING_SIZE, 1<<22);
 
@@ -35,7 +42,7 @@ Default_Temp_Allocator :: struct {
 	curr_offset:        int,
 	prev_allocation:    rawptr,
 	backup_allocator:   Allocator,
-	leaked_allocations: [dynamic]rawptr,
+	leaked_allocations: [dynamic][]byte,
 }
 
 default_temp_allocator_init :: proc(s: ^Default_Temp_Allocator, size: int, backup_allocator := context.allocator) {
@@ -51,7 +58,7 @@ default_temp_allocator_destroy :: proc(s: ^Default_Temp_Allocator) {
 		return;
 	}
 	for ptr in s.leaked_allocations {
-		free(ptr, s.backup_allocator);
+		free(raw_data(ptr), s.backup_allocator);
 	}
 	delete(s.leaked_allocations);
 	delete(s.data, s.backup_allocator);
@@ -60,7 +67,7 @@ default_temp_allocator_destroy :: proc(s: ^Default_Temp_Allocator) {
 
 default_temp_allocator_proc :: proc(allocator_data: rawptr, mode: Allocator_Mode,
                                     size, alignment: int,
-                                    old_memory: rawptr, old_size: int, flags: u64 = 0, loc := #caller_location) -> rawptr {
+                                    old_memory: rawptr, old_size: int, loc := #caller_location) -> ([]byte, Allocator_Error) {
 
 	s := (^Default_Temp_Allocator)(allocator_data);
 
@@ -84,7 +91,7 @@ default_temp_allocator_proc :: proc(allocator_data: rawptr, mode: Allocator_Mode
 			s.prev_allocation = rawptr(ptr);
 			offset := int(ptr - start);
 			s.curr_offset = offset + size;
-			return rawptr(ptr);
+			return byte_slice(rawptr(ptr), size), .None;
 
 		case size <= len(s.data):
 			start := uintptr(raw_data(s.data));
@@ -94,7 +101,7 @@ default_temp_allocator_proc :: proc(allocator_data: rawptr, mode: Allocator_Mode
 			s.prev_allocation = rawptr(ptr);
 			offset := int(ptr - start);
 			s.curr_offset = offset + size;
-			return rawptr(ptr);
+			return byte_slice(rawptr(ptr), size), .None;
 		}
 		a := s.backup_allocator;
 		if a.procedure == nil {
@@ -102,11 +109,14 @@ default_temp_allocator_proc :: proc(allocator_data: rawptr, mode: Allocator_Mode
 			s.backup_allocator = a;
 		}
 
-		ptr := mem_alloc(size, alignment, a, loc);
+		data, err := mem_alloc_bytes(size, alignment, a, loc);
+		if err != nil {
+			return data, err;
+		}
 		if s.leaked_allocations == nil {
-			s.leaked_allocations = make([dynamic]rawptr, a);
+			s.leaked_allocations = make([dynamic][]byte, a);
 		}
-		append(&s.leaked_allocations, ptr);
+		append(&s.leaked_allocations, data);
 
 		// TODO(bill): Should leaks be notified about?
 		if logger := context.logger; logger.lowest_level <= .Warning {
@@ -115,11 +125,11 @@ default_temp_allocator_proc :: proc(allocator_data: rawptr, mode: Allocator_Mode
 			}
 		}
 
-		return ptr;
+		return data, .None;
 
 	case .Free:
 		if old_memory == nil {
-			return nil;
+			return nil, .None;
 		}
 
 		start := uintptr(raw_data(s.data));
@@ -129,30 +139,32 @@ default_temp_allocator_proc :: proc(allocator_data: rawptr, mode: Allocator_Mode
 		if s.prev_allocation == old_memory {
 			s.curr_offset = int(uintptr(s.prev_allocation) - start);
 			s.prev_allocation = nil;
-			return nil;
+			return nil, .None;
 		}
 
 		if start <= old_ptr && old_ptr < end {
 			// NOTE(bill): Cannot free this pointer but it is valid
-			return nil;
+			return nil, .None;
 		}
 
 		if len(s.leaked_allocations) != 0 {
-			for ptr, i in s.leaked_allocations {
+			for data, i in s.leaked_allocations {
+				ptr := raw_data(data);
 				if ptr == old_memory {
 					free(ptr, s.backup_allocator);
 					ordered_remove(&s.leaked_allocations, i);
-					return nil;
+					return nil, .None;
 				}
 			}
 		}
-		panic("invalid pointer passed to default_temp_allocator");
+		return nil, .Invalid_Pointer;
+		// panic("invalid pointer passed to default_temp_allocator");
 
 	case .Free_All:
 		s.curr_offset = 0;
 		s.prev_allocation = nil;
-		for ptr in s.leaked_allocations {
-			free(ptr, s.backup_allocator);
+		for data in s.leaked_allocations {
+			free(raw_data(data), s.backup_allocator);
 		}
 		clear(&s.leaked_allocations);
 
@@ -163,26 +175,28 @@ default_temp_allocator_proc :: proc(allocator_data: rawptr, mode: Allocator_Mode
 		if old_memory == s.prev_allocation && old_ptr & uintptr(alignment)-1 == 0 {
 			if old_ptr+uintptr(size) < end {
 				s.curr_offset = int(old_ptr-begin)+size;
-				return old_memory;
+				return byte_slice(old_memory, size), .None;
 			}
 		}
-		ptr := default_temp_allocator_proc(allocator_data, .Alloc, size, alignment, old_memory, old_size, flags, loc);
-		mem_copy(ptr, old_memory, old_size);
-		default_temp_allocator_proc(allocator_data, .Free, 0, alignment, old_memory, old_size, flags, loc);
-		return ptr;
+		ptr, err := default_temp_allocator_proc(allocator_data, .Alloc, size, alignment, old_memory, old_size, loc);
+		if err == .None {
+			copy(ptr, byte_slice(old_memory, old_size));
+			_, err = default_temp_allocator_proc(allocator_data, .Free, 0, alignment, old_memory, old_size, loc);
+		}
+		return ptr, err;
 
 	case .Query_Features:
 		set := (^Allocator_Mode_Set)(old_memory);
 		if set != nil {
 			set^ = {.Alloc, .Free, .Free_All, .Resize, .Query_Features};
 		}
-		return set;
+		return nil, nil;
 
 	case .Query_Info:
-		return nil;
+		return nil, .None;
 	}
 
-	return nil;
+	return nil, .None;
 }
 
 default_temp_allocator :: proc(allocator: ^Default_Temp_Allocator) -> Allocator {

+ 6 - 3
core/runtime/dynamic_array_internal.odin

@@ -29,10 +29,13 @@ __dynamic_array_reserve :: proc(array_: rawptr, elem_size, elem_align: int, cap:
 	new_size  := cap * elem_size;
 	allocator := array.allocator;
 
-	new_data := allocator.procedure(allocator.data, .Resize, new_size, elem_align, array.data, old_size, 0, loc);
+	new_data, err := allocator.procedure(allocator.data, .Resize, new_size, elem_align, array.data, old_size, loc);
+	if err != nil {
+		return false;
+	}
 	if new_data != nil || elem_size == 0 {
-		array.data = new_data;
-		array.cap = cap;
+		array.data = raw_data(new_data);
+		array.cap = cap;
+		if elem_size > 0 {
+			array.cap = min(cap, len(new_data)/elem_size);
+		}
 		return true;
 	}
 	return false;

+ 2 - 2
core/runtime/dynamic_map_internal.odin

@@ -173,8 +173,8 @@ __slice_resize :: proc(array_: ^$T/[]$E, new_count: int, allocator: Allocator, l
 	old_size := array.len*size_of(T);
 	new_size := new_count*size_of(T);
 
-	new_data := mem_resize(array.data, old_size, new_size, align_of(T), allocator, loc);
-	if new_data == nil {
+	new_data, err := mem_resize(array.data, old_size, new_size, align_of(T), allocator, loc);
+	if new_data == nil || err != nil {
 		return false;
 	}
 	array.data = new_data;

+ 30 - 13
core/runtime/internal.odin

@@ -159,6 +159,16 @@ mem_copy_non_overlapping :: proc "contextless" (dst, src: rawptr, len: int) -> r
 
 DEFAULT_ALIGNMENT :: 2*align_of(rawptr);
 
+mem_alloc_bytes :: #force_inline proc(size: int, alignment: int = DEFAULT_ALIGNMENT, allocator := context.allocator, loc := #caller_location) -> ([]byte, Allocator_Error) {
+	if size == 0 {
+		return nil, nil;
+	}
+	if allocator.procedure == nil {
+		return nil, nil;
+	}
+	return allocator.procedure(allocator.data, .Alloc, size, alignment, nil, 0, loc);
+}
+
 mem_alloc :: #force_inline proc(size: int, alignment: int = DEFAULT_ALIGNMENT, allocator := context.allocator, loc := #caller_location) -> rawptr {
 	if size == 0 {
 		return nil;
@@ -166,36 +176,43 @@ mem_alloc :: #force_inline proc(size: int, alignment: int = DEFAULT_ALIGNMENT, a
 	if allocator.procedure == nil {
 		return nil;
 	}
-	return allocator.procedure(allocator.data, .Alloc, size, alignment, nil, 0, 0, loc);
+	data, err := allocator.procedure(allocator.data, .Alloc, size, alignment, nil, 0, loc);
+	_ = err;
+	return raw_data(data);
 }
 
-mem_free :: #force_inline proc(ptr: rawptr, allocator := context.allocator, loc := #caller_location) {
+mem_free :: #force_inline proc(ptr: rawptr, allocator := context.allocator, loc := #caller_location) -> Allocator_Error {
 	if ptr == nil {
-		return;
+		return .None;
 	}
 	if allocator.procedure == nil {
-		return;
+		return .None;
 	}
-	allocator.procedure(allocator.data, .Free, 0, 0, ptr, 0, 0, loc);
+	_, err := allocator.procedure(allocator.data, .Free, 0, 0, ptr, 0, loc);
+	return err;
 }
 
-mem_free_all :: #force_inline proc(allocator := context.allocator, loc := #caller_location) {
+mem_free_all :: #force_inline proc(allocator := context.allocator, loc := #caller_location) -> (err: Allocator_Error) {
 	if allocator.procedure != nil {
-		allocator.procedure(allocator.data, .Free_All, 0, 0, nil, 0, 0, loc);
+		_, err = allocator.procedure(allocator.data, .Free_All, 0, 0, nil, 0, loc);
 	}
+	return;
 }
 
-mem_resize :: #force_inline proc(ptr: rawptr, old_size, new_size: int, alignment: int = DEFAULT_ALIGNMENT, allocator := context.allocator, loc := #caller_location) -> rawptr {
+mem_resize :: #force_inline proc(ptr: rawptr, old_size, new_size: int, alignment: int = DEFAULT_ALIGNMENT, allocator := context.allocator, loc := #caller_location) -> (new_ptr: rawptr, err: Allocator_Error) {
+	new_data: []byte;
 	switch {
 	case allocator.procedure == nil:
-		return nil;
+		return;
 	case new_size == 0:
-		allocator.procedure(allocator.data, .Free, 0, 0, ptr, 0, 0, loc);
-		return nil;
+		new_data, err = allocator.procedure(allocator.data, .Free, 0, 0, ptr, 0, loc);
 	case ptr == nil:
-		return allocator.procedure(allocator.data, .Alloc, new_size, alignment, nil, 0, 0, loc);
+		new_data, err = allocator.procedure(allocator.data, .Alloc, new_size, alignment, nil, 0, loc);
+	case:
+		new_data, err = allocator.procedure(allocator.data, .Resize, new_size, alignment, ptr, old_size, loc);
 	}
-	return allocator.procedure(allocator.data, .Resize, new_size, alignment, ptr, old_size, 0, loc);
+	new_ptr = raw_data(new_data);
+	return;
 }
 memory_equal :: proc "contextless" (a, b: rawptr, n: int) -> bool {
 	return memory_compare(a, b, n) == 0;
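
Illustrative only: the pointer-level runtime helpers keep their old shape, but mem_resize now also surfaces the error, so callers unpack a pair; passing a nil old pointer takes the Alloc path, as in the switch above.

package example

import "core:runtime"

main :: proc() {
	p := runtime.mem_alloc(32);
	defer runtime.mem_free(p);

	// A nil old pointer behaves like a fresh allocation.
	q, err := runtime.mem_resize(nil, 0, 64);
	if err == nil {
		runtime.mem_free(q);
	}
}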

+ 9 - 9
core/runtime/os_specific_windows.odin

@@ -88,7 +88,7 @@ heap_free :: proc "contextless" (ptr: rawptr) {
 
 default_allocator_proc :: proc(allocator_data: rawptr, mode: Allocator_Mode,
                                size, alignment: int,
-                               old_memory: rawptr, old_size: int, flags: u64 = 0, loc := #caller_location) -> rawptr {
+                               old_memory: rawptr, old_size: int, loc := #caller_location) -> ([]byte, Allocator_Error) {
 
 	//
 	// NOTE(tetra, 2020-01-14): The heap doesn't respect alignment.
@@ -97,7 +97,7 @@ default_allocator_proc :: proc(allocator_data: rawptr, mode: Allocator_Mode,
 	// the pointer we return to the user.
 	//
 
-	aligned_alloc :: proc "contextless" (size, alignment: int, old_ptr: rawptr = nil) -> rawptr {
+	aligned_alloc :: proc "contextless" (size, alignment: int, old_ptr: rawptr = nil) -> ([]byte, Allocator_Error) {
 		a := max(alignment, align_of(rawptr));
 		space := size + a - 1;
 
@@ -114,13 +114,13 @@ default_allocator_proc :: proc(allocator_data: rawptr, mode: Allocator_Mode,
 		aligned_ptr := (ptr - 1 + uintptr(a)) & -uintptr(a);
 		diff := int(aligned_ptr - ptr);
 		if (size + diff) > space {
-			return nil;
+			return nil, .Out_Of_Memory;
 		}
 
 		aligned_mem = rawptr(aligned_ptr);
 		ptr_offset((^rawptr)(aligned_mem), -1)^ = allocated_mem;
 
-		return aligned_mem;
+		return byte_slice(aligned_mem, size), nil;
 	}
 
 	aligned_free :: proc "contextless" (p: rawptr) {
@@ -129,9 +129,9 @@ default_allocator_proc :: proc(allocator_data: rawptr, mode: Allocator_Mode,
 		}
 	}
 
-	aligned_resize :: proc "contextless" (p: rawptr, old_size: int, new_size: int, new_alignment: int) -> rawptr {
+	aligned_resize :: proc "contextless" (p: rawptr, old_size: int, new_size: int, new_alignment: int) -> ([]byte, Allocator_Error) {
 		if p == nil {
-			return nil;
+			return nil, nil;
 		}
 		return aligned_alloc(new_size, new_alignment, p);
 	}
@@ -157,13 +157,13 @@ default_allocator_proc :: proc(allocator_data: rawptr, mode: Allocator_Mode,
 		if set != nil {
 			set^ = {.Alloc, .Free, .Resize, .Query_Features};
 		}
-		return set;
+		return nil, nil;
 
 	case .Query_Info:
-		return nil;
+		return nil, nil;
 	}
 
-	return nil;
+	return nil, nil;
 }
 
 default_allocator :: proc() -> Allocator {

+ 1 - 1
src/build_settings.cpp

@@ -838,7 +838,7 @@ void init_build_context(TargetMetrics *cross_target) {
 			bc->link_flags = str_lit("-arch arm64 ");
 			break;
 		}
-		if (!bc->use_llvm_api) {
+		if ((bc->command_kind & Command__does_build) != 0 && !bc->use_llvm_api) {
 			gb_printf_err("The arm64 architecture is only supported with -llvm-api\n");;
 			gb_exit(1);
 		}