// default_temp_allocator_arena.odin
  1. package runtime
  2. import "base:intrinsics"
  3. // import "base:sanitizer"
// Minimum size of a newly grown arena block when the user has not chosen one;
// tied to the default temp allocator's backing size.
DEFAULT_ARENA_GROWING_MINIMUM_BLOCK_SIZE :: uint(DEFAULT_TEMP_ALLOCATOR_BACKING_SIZE)

// `Memory_Block` is the header of one contiguous allocation owned by an `Arena`.
// The usable bytes live in the same allocation, directly after this header
// (see `memory_block_alloc`).
Memory_Block :: struct {
	prev:      ^Memory_Block, // next-older block; the arena keeps a singly linked list, newest first
	allocator: Allocator,     // allocator that produced this block; used again to free it
	base:      [^]byte,       // start of the usable region (after the header, aligned)
	used:      uint,          // bytes of `base` handed out so far
	capacity:  uint,          // total usable bytes reachable from `base`
}
// NOTE: This is a growing arena that is only used for the default temp allocator.
// For your own growing arena needs, prefer `Arena` from `core:mem/virtual`.
Arena :: struct {
	backing_allocator:  Allocator,     // source of new blocks; lazily set to `default_allocator()` by `arena_alloc`
	curr_block:         ^Memory_Block, // newest block; older blocks reachable via `prev`
	total_used:         uint,          // bytes allocated across all blocks (alignment padding included)
	total_capacity:     uint,          // sum of every owned block's `capacity`
	minimum_block_size: uint,          // lower bound for newly grown blocks; 0 means "not yet chosen"
	temp_count:         uint,          // outstanding `arena_temp_begin` checkpoints
}
  22. @(private, require_results)
  23. safe_add :: #force_inline proc "contextless" (x, y: uint) -> (uint, bool) {
  24. z, did_overflow := intrinsics.overflow_add(x, y)
  25. return z, !did_overflow
  26. }
  27. @(require_results)
  28. memory_block_alloc :: proc(allocator: Allocator, capacity: uint, alignment: uint, loc := #caller_location) -> (block: ^Memory_Block, err: Allocator_Error) {
  29. total_size := uint(capacity + max(alignment, size_of(Memory_Block)))
  30. base_offset := uintptr(max(alignment, size_of(Memory_Block)))
  31. min_alignment: int = max(16, align_of(Memory_Block), int(alignment))
  32. data := mem_alloc(int(total_size), min_alignment, allocator, loc) or_return
  33. block = (^Memory_Block)(raw_data(data))
  34. end := uintptr(raw_data(data)[len(data):])
  35. block.allocator = allocator
  36. block.base = ([^]byte)(uintptr(block) + base_offset)
  37. block.capacity = uint(end - uintptr(block.base))
  38. // sanitizer.address_poison(block.base, block.capacity)
  39. // Should be zeroed
  40. assert(block.used == 0)
  41. assert(block.prev == nil)
  42. return
  43. }
  44. memory_block_dealloc :: proc "contextless" (block_to_free: ^Memory_Block, loc := #caller_location) {
  45. if block_to_free != nil {
  46. allocator := block_to_free.allocator
  47. // sanitizer.address_unpoison(block_to_free.base, block_to_free.capacity)
  48. context = default_context()
  49. context.allocator = allocator
  50. mem_free(block_to_free, allocator, loc)
  51. }
  52. }
// `alloc_from_memory_block` sub-allocates `min_size` bytes, aligned to
// `alignment` (a power of two), from the tail of `block`. It never grows the
// block; growth is the caller's responsibility (see `arena_alloc`).
//
// Fails with `.Out_Of_Memory` when `block` is nil, when the size arithmetic
// would overflow, or when the remaining capacity is insufficient.
@(require_results)
alloc_from_memory_block :: proc(block: ^Memory_Block, min_size, alignment: uint) -> (data: []byte, err: Allocator_Error) {
	// Padding needed so the next allocation starts on an `alignment` boundary.
	// Relies on `alignment` being a power of two (mask trick).
	calc_alignment_offset :: proc "contextless" (block: ^Memory_Block, alignment: uintptr) -> uint {
		alignment_offset := uint(0)
		ptr := uintptr(block.base[block.used:])
		mask := alignment-1
		if ptr & mask != 0 {
			alignment_offset = uint(alignment - (ptr & mask))
		}
		return alignment_offset
	}
	if block == nil {
		return nil, .Out_Of_Memory
	}
	alignment_offset := calc_alignment_offset(block, uintptr(alignment))
	// Total consumption = requested size + alignment padding; guard overflow.
	size, size_ok := safe_add(min_size, alignment_offset)
	if !size_ok {
		err = .Out_Of_Memory
		return
	}
	// Also guard `used + size` against overflow before the capacity check.
	if to_be_used, ok := safe_add(block.used, size); !ok || to_be_used > block.capacity {
		err = .Out_Of_Memory
		return
	}
	// The returned slice starts after the padding and is exactly `min_size` long.
	data = block.base[block.used+alignment_offset:][:min_size]
	// sanitizer.address_unpoison(block.base[block.used:block.used+size])
	block.used += size
	return
}
// `arena_alloc` allocates `size` bytes, aligned to `alignment` (must be a
// power of two), from the arena. If the current block cannot satisfy the
// request, a new block is allocated from the backing allocator and pushed onto
// the block list. `size == 0` returns a nil slice with no error.
@(require_results)
arena_alloc :: proc(arena: ^Arena, size, alignment: uint, loc := #caller_location) -> (data: []byte, err: Allocator_Error) {
	// Rounds `ptr` up to the next multiple of `align` (power of two).
	align_forward_uint :: proc "contextless" (ptr, align: uint) -> uint {
		p := ptr
		modulo := p & (align-1)
		if modulo != 0 {
			p += align - modulo
		}
		return p
	}
	assert(alignment & (alignment-1) == 0, "non-power of two alignment", loc)
	size := size
	if size == 0 {
		return
	}
	// Remember the current block's usage so the delta (which includes any
	// alignment padding) can be folded into `total_used` at the end.
	prev_used := 0 if arena.curr_block == nil else arena.curr_block.used
	data, err = alloc_from_memory_block(arena.curr_block, size, alignment)
	if err == .Out_Of_Memory {
		// Lazily pick a minimum block size on first growth.
		if arena.minimum_block_size == 0 {
			arena.minimum_block_size = DEFAULT_ARENA_GROWING_MINIMUM_BLOCK_SIZE
		}
		// The new block must fit the aligned request even when that exceeds
		// the configured minimum.
		needed := align_forward_uint(size, alignment)
		block_size := max(needed, arena.minimum_block_size)
		// Lazily fall back to the default heap allocator.
		if arena.backing_allocator.procedure == nil {
			arena.backing_allocator = default_allocator()
		}
		new_block := memory_block_alloc(arena.backing_allocator, block_size, alignment, loc) or_return
		new_block.prev = arena.curr_block
		arena.curr_block = new_block
		arena.total_capacity += new_block.capacity
		prev_used = 0
		// Retry on the fresh block, which was sized to fit the request.
		data, err = alloc_from_memory_block(arena.curr_block, size, alignment)
	}
	arena.total_used += arena.curr_block.used - prev_used
	return
}
  118. // `arena_init` will initialize the arena with a usable block.
  119. // This procedure is not necessary to use the Arena as the default zero as `arena_alloc` will set things up if necessary
  120. @(require_results)
  121. arena_init :: proc(arena: ^Arena, size: uint, backing_allocator: Allocator, loc := #caller_location) -> Allocator_Error {
  122. arena^ = {}
  123. arena.backing_allocator = backing_allocator
  124. arena.minimum_block_size = max(size, 1<<12) // minimum block size of 4 KiB
  125. new_block := memory_block_alloc(arena.backing_allocator, arena.minimum_block_size, 0, loc) or_return
  126. arena.curr_block = new_block
  127. arena.total_capacity += new_block.capacity
  128. return nil
  129. }
  130. arena_free_last_memory_block :: proc(arena: ^Arena, loc := #caller_location) {
  131. if free_block := arena.curr_block; free_block != nil {
  132. arena.curr_block = free_block.prev
  133. arena.total_capacity -= free_block.capacity
  134. memory_block_dealloc(free_block, loc)
  135. }
  136. }
  137. // `arena_free_all` will free all but the first memory block, and then reset the memory block
  138. arena_free_all :: proc(arena: ^Arena, loc := #caller_location) {
  139. for arena.curr_block != nil && arena.curr_block.prev != nil {
  140. arena_free_last_memory_block(arena, loc)
  141. }
  142. if arena.curr_block != nil {
  143. intrinsics.mem_zero(arena.curr_block.base, arena.curr_block.used)
  144. arena.curr_block.used = 0
  145. // sanitizer.address_poison(arena.curr_block.base, arena.curr_block.capacity)
  146. }
  147. arena.total_used = 0
  148. }
  149. arena_destroy :: proc "contextless" (arena: ^Arena, loc := #caller_location) {
  150. for arena.curr_block != nil {
  151. free_block := arena.curr_block
  152. arena.curr_block = free_block.prev
  153. arena.total_capacity -= free_block.capacity
  154. memory_block_dealloc(free_block, loc)
  155. }
  156. arena.total_used = 0
  157. arena.total_capacity = 0
  158. }
  159. @(require_results)
  160. arena_allocator :: proc(arena: ^Arena) -> Allocator {
  161. return Allocator{arena_allocator_proc, arena}
  162. }
  163. arena_allocator_proc :: proc(allocator_data: rawptr, mode: Allocator_Mode,
  164. size, alignment: int,
  165. old_memory: rawptr, old_size: int,
  166. location := #caller_location) -> (data: []byte, err: Allocator_Error) {
  167. arena := (^Arena)(allocator_data)
  168. size, alignment := uint(size), uint(alignment)
  169. old_size := uint(old_size)
  170. switch mode {
  171. case .Alloc, .Alloc_Non_Zeroed:
  172. return arena_alloc(arena, size, alignment, location)
  173. case .Free:
  174. err = .Mode_Not_Implemented
  175. case .Free_All:
  176. arena_free_all(arena, location)
  177. case .Resize, .Resize_Non_Zeroed:
  178. old_data := ([^]byte)(old_memory)
  179. switch {
  180. case old_data == nil:
  181. return arena_alloc(arena, size, alignment, location)
  182. case size == old_size:
  183. // return old memory
  184. data = old_data[:size]
  185. return
  186. case size == 0:
  187. err = .Mode_Not_Implemented
  188. return
  189. case uintptr(old_data) & uintptr(alignment-1) == 0:
  190. if size < old_size {
  191. // shrink data in-place
  192. data = old_data[:size]
  193. return
  194. }
  195. if block := arena.curr_block; block != nil {
  196. start := uint(uintptr(old_memory)) - uint(uintptr(block.base))
  197. old_end := start + old_size
  198. new_end := start + size
  199. if start < old_end && old_end == block.used && new_end <= block.capacity {
  200. // grow data in-place, adjusting next allocation
  201. block.used = uint(new_end)
  202. data = block.base[start:new_end]
  203. // sanitizer.address_unpoison(data)
  204. return
  205. }
  206. }
  207. }
  208. new_memory := arena_alloc(arena, size, alignment, location) or_return
  209. if new_memory == nil {
  210. return
  211. }
  212. copy(new_memory, old_data[:old_size])
  213. return new_memory, nil
  214. case .Query_Features:
  215. set := (^Allocator_Mode_Set)(old_memory)
  216. if set != nil {
  217. set^ = {.Alloc, .Alloc_Non_Zeroed, .Free_All, .Resize, .Query_Features}
  218. }
  219. case .Query_Info:
  220. err = .Mode_Not_Implemented
  221. }
  222. return
  223. }
// `Arena_Temp` is a checkpoint of an `Arena`'s state, produced by
// `arena_temp_begin` and consumed by `arena_temp_end` (or `arena_temp_ignore`).
Arena_Temp :: struct {
	arena: ^Arena,        // arena the checkpoint belongs to
	block: ^Memory_Block, // block that was current when the checkpoint was taken
	used:  uint,          // that block's `used` count at checkpoint time
}
  229. @(require_results)
  230. arena_temp_begin :: proc(arena: ^Arena, loc := #caller_location) -> (temp: Arena_Temp) {
  231. assert(arena != nil, "nil arena", loc)
  232. temp.arena = arena
  233. temp.block = arena.curr_block
  234. if arena.curr_block != nil {
  235. temp.used = arena.curr_block.used
  236. }
  237. arena.temp_count += 1
  238. return
  239. }
  240. arena_temp_end :: proc(temp: Arena_Temp, loc := #caller_location) {
  241. if temp.arena == nil {
  242. assert(temp.block == nil)
  243. assert(temp.used == 0)
  244. return
  245. }
  246. arena := temp.arena
  247. if temp.block != nil {
  248. memory_block_found := false
  249. for block := arena.curr_block; block != nil; block = block.prev {
  250. if block == temp.block {
  251. memory_block_found = true
  252. break
  253. }
  254. }
  255. if !memory_block_found {
  256. assert(arena.curr_block == temp.block, "memory block stored within Arena_Temp not owned by Arena", loc)
  257. }
  258. for arena.curr_block != temp.block {
  259. arena_free_last_memory_block(arena)
  260. }
  261. if block := arena.curr_block; block != nil {
  262. assert(block.used >= temp.used, "out of order use of arena_temp_end", loc)
  263. amount_to_zero := block.used-temp.used
  264. intrinsics.mem_zero(block.base[temp.used:], amount_to_zero)
  265. // sanitizer.address_poison(block.base[temp.used:block.capacity])
  266. block.used = temp.used
  267. arena.total_used -= amount_to_zero
  268. }
  269. }
  270. assert(arena.temp_count > 0, "double-use of arena_temp_end", loc)
  271. arena.temp_count -= 1
  272. }
  273. // Ignore the use of a `arena_temp_begin` entirely
  274. arena_temp_ignore :: proc(temp: Arena_Temp, loc := #caller_location) {
  275. assert(temp.arena != nil, "nil arena", loc)
  276. arena := temp.arena
  277. assert(arena.temp_count > 0, "double-use of arena_temp_end", loc)
  278. arena.temp_count -= 1
  279. }
  280. arena_check_temp :: proc(arena: ^Arena, loc := #caller_location) {
  281. assert(arena.temp_count == 0, "Arena_Temp not been ended", loc)
  282. }