// heap_allocator.odin
package runtime

import "base:intrinsics"
  3. heap_allocator :: proc() -> Allocator {
  4. return Allocator{
  5. procedure = heap_allocator_proc,
  6. data = nil,
  7. }
  8. }
  9. heap_allocator_proc :: proc(allocator_data: rawptr, mode: Allocator_Mode,
  10. size, alignment: int,
  11. old_memory: rawptr, old_size: int, loc := #caller_location) -> ([]byte, Allocator_Error) {
  12. //
  13. // NOTE(tetra, 2020-01-14): The heap doesn't respect alignment.
  14. // Instead, we overallocate by `alignment + size_of(rawptr) - 1`, and insert
  15. // padding. We also store the original pointer returned by heap_alloc right before
  16. // the pointer we return to the user.
  17. //
  18. aligned_alloc :: proc(size, alignment: int, old_ptr: rawptr = nil, zero_memory := true) -> ([]byte, Allocator_Error) {
  19. a := max(alignment, align_of(rawptr))
  20. space := size + a - 1
  21. allocated_mem: rawptr
  22. if old_ptr != nil {
  23. original_old_ptr := ([^]rawptr)(old_ptr)[-1]
  24. allocated_mem = heap_resize(original_old_ptr, space+size_of(rawptr))
  25. } else {
  26. allocated_mem = heap_alloc(space+size_of(rawptr), zero_memory)
  27. }
  28. aligned_mem := rawptr(([^]u8)(allocated_mem)[size_of(rawptr):])
  29. ptr := uintptr(aligned_mem)
  30. aligned_ptr := (ptr - 1 + uintptr(a)) & -uintptr(a)
  31. diff := int(aligned_ptr - ptr)
  32. if (size + diff) > space || allocated_mem == nil {
  33. return nil, .Out_Of_Memory
  34. }
  35. aligned_mem = rawptr(aligned_ptr)
  36. ([^]rawptr)(aligned_mem)[-1] = allocated_mem
  37. return byte_slice(aligned_mem, size), nil
  38. }
  39. aligned_free :: proc(p: rawptr) {
  40. if p != nil {
  41. heap_free(([^]rawptr)(p)[-1])
  42. }
  43. }
  44. aligned_resize :: proc(p: rawptr, old_size: int, new_size: int, new_alignment: int, zero_memory := true) -> (new_memory: []byte, err: Allocator_Error) {
  45. if p == nil {
  46. return nil, nil
  47. }
  48. new_memory = aligned_alloc(new_size, new_alignment, p, zero_memory) or_return
  49. // NOTE: heap_resize does not zero the new memory, so we do it
  50. if zero_memory && new_size > old_size {
  51. new_region := raw_data(new_memory[old_size:])
  52. intrinsics.mem_zero(new_region, new_size - old_size)
  53. }
  54. return
  55. }
  56. switch mode {
  57. case .Alloc, .Alloc_Non_Zeroed:
  58. return aligned_alloc(size, alignment, nil, mode == .Alloc)
  59. case .Free:
  60. aligned_free(old_memory)
  61. case .Free_All:
  62. return nil, .Mode_Not_Implemented
  63. case .Resize, .Resize_Non_Zeroed:
  64. if old_memory == nil {
  65. return aligned_alloc(size, alignment, nil, mode == .Resize)
  66. }
  67. return aligned_resize(old_memory, old_size, size, alignment, mode == .Resize)
  68. case .Query_Features:
  69. set := (^Allocator_Mode_Set)(old_memory)
  70. if set != nil {
  71. set^ = {.Alloc, .Alloc_Non_Zeroed, .Free, .Resize, .Resize_Non_Zeroed, .Query_Features}
  72. }
  73. return nil, nil
  74. case .Query_Info:
  75. return nil, .Mode_Not_Implemented
  76. }
  77. return nil, nil
  78. }
// Allocates `size` bytes from the process heap by forwarding to the
// platform-specific _heap_alloc. `zero_memory` is passed through unchanged;
// presumably it selects zeroed vs. uninitialized memory — confirm against
// the platform implementation.
heap_alloc :: proc(size: int, zero_memory := true) -> rawptr {
	return _heap_alloc(size, zero_memory)
}
// Resizes a heap allocation by forwarding to the platform-specific
// _heap_resize. NOTE(review): per the comment in heap_allocator_proc, the
// resized region's newly grown bytes are NOT zeroed; callers must zero them
// if required.
heap_resize :: proc(ptr: rawptr, new_size: int) -> rawptr {
	return _heap_resize(ptr, new_size)
}
// Returns a heap allocation to the process heap by forwarding to the
// platform-specific _heap_free. `ptr` must be a pointer previously obtained
// from heap_alloc/heap_resize (the raw pointer, not an aligned user pointer).
heap_free :: proc(ptr: rawptr) {
	_heap_free(ptr)
}