// alloc.odin — memory allocation utilities for the `mem` package.
  1. package mem
  2. import "core:runtime"
// Default allocation alignment: twice the pointer size.
DEFAULT_ALIGNMENT :: 2*align_of(rawptr);

// Operation requested from an allocator procedure.
Allocator_Mode :: enum byte {
	Alloc,    // allocate `size` bytes with `alignment`
	Free,     // release `old_memory`
	Free_All, // release everything owned by the allocator
	Resize,   // resize `old_memory` from `old_size` to `size`
}

// Signature every allocator implements. Parameters that are irrelevant to a
// given mode (e.g. `old_memory` for Alloc) are passed as zero values.
Allocator_Proc :: #type proc(allocator_data: rawptr, mode: Allocator_Mode,
                             size, alignment: int,
                             old_memory: rawptr, old_size: int, flags: u64 = 0, location := #caller_location) -> rawptr;

// An allocator: a procedure plus opaque per-allocator state passed back to it.
Allocator :: struct {
	procedure: Allocator_Proc,
	data:      rawptr,
}
  17. alloc :: inline proc(size: int, alignment: int = DEFAULT_ALIGNMENT, allocator := context.allocator, loc := #caller_location) -> rawptr {
  18. if size == 0 do return nil;
  19. if allocator.procedure == nil do return nil;
  20. return allocator.procedure(allocator.data, Allocator_Mode.Alloc, size, alignment, nil, 0, 0, loc);
  21. }
  22. free :: inline proc(ptr: rawptr, allocator := context.allocator, loc := #caller_location) {
  23. if ptr == nil do return;
  24. if allocator.procedure == nil do return;
  25. allocator.procedure(allocator.data, Allocator_Mode.Free, 0, 0, ptr, 0, 0, loc);
  26. }
  27. free_all :: inline proc(allocator := context.allocator, loc := #caller_location) {
  28. if allocator.procedure != nil {
  29. allocator.procedure(allocator.data, Allocator_Mode.Free_All, 0, 0, nil, 0, 0, loc);
  30. }
  31. }
  32. resize :: inline proc(ptr: rawptr, old_size, new_size: int, alignment: int = DEFAULT_ALIGNMENT, allocator := context.allocator, loc := #caller_location) -> rawptr {
  33. if allocator.procedure == nil {
  34. return nil;
  35. }
  36. if new_size == 0 {
  37. free(ptr, allocator, loc);
  38. return nil;
  39. } else if ptr == nil {
  40. return allocator.procedure(allocator.data, Allocator_Mode.Alloc, new_size, alignment, nil, 0, 0, loc);
  41. }
  42. return allocator.procedure(allocator.data, Allocator_Mode.Resize, new_size, alignment, ptr, old_size, 0, loc);
  43. }
  44. delete_string :: proc(str: string, allocator := context.allocator, loc := #caller_location) {
  45. free(raw_data(str), allocator, loc);
  46. }
  47. delete_cstring :: proc(str: cstring, allocator := context.allocator, loc := #caller_location) {
  48. free((^byte)(str), allocator, loc);
  49. }
  50. delete_dynamic_array :: proc(array: $T/[dynamic]$E, loc := #caller_location) {
  51. free(raw_data(array), array.allocator, loc);
  52. }
  53. delete_slice :: proc(array: $T/[]$E, allocator := context.allocator, loc := #caller_location) {
  54. free(raw_data(array), allocator, loc);
  55. }
// Free a map's backing storage: its hashes array and its entries array.
delete_map :: proc(m: $T/map[$K]$V, loc := #caller_location) {
	raw := transmute(Raw_Map)m;
	// NOTE(review): this frees the hashes with the default context.allocator
	// and does not forward `loc`; verify the hashes were actually allocated
	// from context.allocator, otherwise this frees with the wrong allocator.
	delete_slice(raw.hashes);
	// Entries carry their own allocator, so free them through it.
	free(raw.entries.data, raw.entries.allocator, loc);
}
// Overload group: `delete` dispatches on argument type to the matching
// container-specific deletion procedure.
delete :: proc[
	delete_string,
	delete_cstring,
	delete_dynamic_array,
	delete_slice,
	delete_map,
];
  68. new :: inline proc($T: typeid, allocator := context.allocator, loc := #caller_location) -> ^T {
  69. ptr := (^T)(alloc(size_of(T), align_of(T), allocator, loc));
  70. if ptr != nil do ptr^ = T{};
  71. return ptr;
  72. }
  73. new_clone :: inline proc(data: $T, allocator := context.allocator, loc := #caller_location) -> ^T {
  74. ptr := (^T)(alloc(size_of(T), align_of(T), allocator, loc));
  75. if ptr != nil do ptr^ = data;
  76. return ptr;
  77. }
  78. make_slice :: proc($T: typeid/[]$E, auto_cast len: int, allocator := context.allocator, loc := #caller_location) -> T {
  79. runtime.make_slice_error_loc(loc, len);
  80. data := alloc(size_of(E)*len, align_of(E), allocator, loc);
  81. s := Raw_Slice{data, len};
  82. return transmute(T)s;
  83. }
  84. make_dynamic_array :: proc($T: typeid/[dynamic]$E, allocator := context.allocator, loc := #caller_location) -> T {
  85. return make_dynamic_array_len_cap(T, 0, 16, allocator, loc);
  86. }
// Create a dynamic array of `len` elements; capacity equals length,
// so no spare room is reserved.
make_dynamic_array_len :: proc($T: typeid/[dynamic]$E, auto_cast len: int, allocator := context.allocator, loc := #caller_location) -> T {
	return make_dynamic_array_len_cap(T, len, len, allocator, loc);
}
  90. make_dynamic_array_len_cap :: proc($T: typeid/[dynamic]$E, auto_cast len: int, auto_cast cap: int, allocator := context.allocator, loc := #caller_location) -> T {
  91. runtime.make_dynamic_array_error_loc(loc, len, cap);
  92. data := alloc(size_of(E)*cap, align_of(E), allocator, loc);
  93. s := Raw_Dynamic_Array{data, len, cap, allocator};
  94. return transmute(T)s;
  95. }
  96. make_map :: proc($T: typeid/map[$K]$E, auto_cast cap: int = 16, allocator := context.allocator, loc := #caller_location) -> T {
  97. runtime.make_map_expr_error_loc(loc, cap);
  98. context.allocator = allocator;
  99. m: T;
  100. reserve_map(&m, cap);
  101. return m;
  102. }
// Overload group: `make` dispatches on argument types to the matching
// container-construction procedure.
make :: proc[
	make_slice,
	make_dynamic_array,
	make_dynamic_array_len,
	make_dynamic_array_len_cap,
	make_map,
];
  110. default_resize_align :: proc(old_memory: rawptr, old_size, new_size, alignment: int, allocator := context.allocator, loc := #caller_location) -> rawptr {
  111. if old_memory == nil do return alloc(new_size, alignment, allocator, loc);
  112. if new_size == 0 {
  113. free(old_memory, allocator, loc);
  114. return nil;
  115. }
  116. if new_size == old_size do return old_memory;
  117. new_memory := alloc(new_size, alignment, allocator, loc);
  118. if new_memory == nil do return nil;
  119. copy(new_memory, old_memory, min(old_size, new_size));;
  120. free(old_memory, allocator, loc);
  121. return new_memory;
  122. }
// Allocator procedure that ignores every request and returns nil:
// allocations fail, frees and resizes are no-ops.
nil_allocator_proc :: proc(allocator_data: rawptr, mode: Allocator_Mode,
                           size, alignment: int,
                           old_memory: rawptr, old_size: int, flags: u64 = 0, loc := #caller_location) -> rawptr {
	return nil;
}
  128. nil_allocator :: proc() -> Allocator {
  129. return Allocator{
  130. procedure = nil_allocator_proc,
  131. data = nil,
  132. };
  133. }
// Bump allocator over a fixed byte buffer. Requests that do not fit are
// forwarded to backup_allocator and recorded in leaked_allocations so
// Free_All can release them later.
Scratch_Allocator :: struct {
	data:               []byte,          // backing buffer
	curr_offset:        int,             // next free position in data
	prev_offset:        int,             // start of the most recent allocation
	backup_allocator:   Allocator,       // fallback for oversized requests
	leaked_allocations: [dynamic]rawptr, // backup allocations still outstanding
}
  141. scratch_allocator_init :: proc(scratch: ^Scratch_Allocator, data: []byte, backup_allocator := context.allocator) {
  142. scratch.data = data;
  143. scratch.curr_offset = 0;
  144. scratch.prev_offset = 0;
  145. scratch.backup_allocator = backup_allocator;
  146. }
  147. scratch_allocator_proc :: proc(allocator_data: rawptr, mode: Allocator_Mode,
  148. size, alignment: int,
  149. old_memory: rawptr, old_size: int, flags: u64 = 0, loc := #caller_location) -> rawptr {
  150. scratch := (^Scratch_Allocator)(allocator_data);
  151. if scratch.data == nil {
  152. DEFAULT_SCRATCH_BACKING_SIZE :: 1<<22;
  153. scratch_allocator_init(scratch, make([]byte, 1<<22));
  154. }
  155. switch mode {
  156. case Allocator_Mode.Alloc:
  157. switch {
  158. case scratch.curr_offset+size <= len(scratch.data):
  159. offset := align_forward_uintptr(uintptr(scratch.curr_offset), uintptr(alignment));
  160. ptr := &scratch.data[offset];
  161. zero(ptr, size);
  162. scratch.prev_offset = int(offset);
  163. scratch.curr_offset = int(offset) + size;
  164. return ptr;
  165. case size <= len(scratch.data):
  166. offset := align_forward_uintptr(uintptr(0), uintptr(alignment));
  167. ptr := &scratch.data[offset];
  168. zero(ptr, size);
  169. scratch.prev_offset = int(offset);
  170. scratch.curr_offset = int(offset) + size;
  171. return ptr;
  172. }
  173. // TODO(bill): Should leaks be notified about? Should probably use a logging system that is built into the context system
  174. a := scratch.backup_allocator;
  175. if a.procedure == nil {
  176. a = context.allocator;
  177. scratch.backup_allocator = a;
  178. }
  179. ptr := alloc(size, alignment, a, loc);
  180. if scratch.leaked_allocations == nil {
  181. scratch.leaked_allocations = make([dynamic]rawptr, a);
  182. }
  183. append(&scratch.leaked_allocations, ptr);
  184. return ptr;
  185. case Allocator_Mode.Free:
  186. last_ptr := rawptr(&scratch.data[scratch.prev_offset]);
  187. if old_memory == last_ptr {
  188. size := scratch.curr_offset - scratch.prev_offset;
  189. scratch.curr_offset = scratch.prev_offset;
  190. zero(last_ptr, size);
  191. return nil;
  192. }
  193. // NOTE(bill): It's scratch memory, don't worry about freeing
  194. case Allocator_Mode.Free_All:
  195. scratch.curr_offset = 0;
  196. scratch.prev_offset = 0;
  197. for ptr in scratch.leaked_allocations {
  198. free(ptr, scratch.backup_allocator);
  199. }
  200. clear(&scratch.leaked_allocations);
  201. case Allocator_Mode.Resize:
  202. last_ptr := rawptr(&scratch.data[scratch.prev_offset]);
  203. if old_memory == last_ptr && len(scratch.data)-scratch.prev_offset >= size {
  204. scratch.curr_offset = scratch.prev_offset+size;
  205. return old_memory;
  206. }
  207. return scratch_allocator_proc(allocator_data, Allocator_Mode.Alloc, size, alignment, old_memory, old_size, flags, loc);
  208. }
  209. return nil;
  210. }
  211. scratch_allocator :: proc(scratch: ^Scratch_Allocator) -> Allocator {
  212. return Allocator{
  213. procedure = scratch_allocator_proc,
  214. data = scratch,
  215. };
  216. }
// Block-based pool allocator. Memory is carved from fixed-size blocks;
// requests at or above out_band_size get their own "out-of-band" allocation.
// Individual frees are unsupported — reclaim via reset/free_all.
Pool :: struct {
	block_size:           int,             // size of each pool block in bytes
	out_band_size:        int,             // threshold for out-of-band allocations
	alignment:            int,             // alignment applied to every request
	unused_blocks:        [dynamic]rawptr, // blocks available for reuse
	used_blocks:          [dynamic]rawptr, // blocks already filled
	out_band_allocations: [dynamic]rawptr, // oversized allocations tracked separately
	current_block:        rawptr,          // block currently being carved
	current_pos:          rawptr,          // next free position in current_block
	bytes_left:           int,             // bytes remaining in current_block
	block_allocator:      Allocator,       // allocator used for blocks and out-of-band memory
}
POOL_BLOCK_SIZE_DEFAULT :: 65536;
POOL_OUT_OF_BAND_SIZE_DEFAULT :: 6554; // 6554 ≈ 65536/10: roughly a tenth of the default block size
  231. pool_allocator_proc :: proc(allocator_data: rawptr, mode: Allocator_Mode,
  232. size, alignment: int,
  233. old_memory: rawptr, old_size: int, flags: u64 = 0, loc := #caller_location) -> rawptr {
  234. pool := (^Pool)(allocator_data);
  235. switch mode {
  236. case Allocator_Mode.Alloc:
  237. return pool_alloc(pool, size);
  238. case Allocator_Mode.Free:
  239. panic("Allocator_Mode.Free is not supported for a pool");
  240. case Allocator_Mode.Free_All:
  241. pool_free_all(pool);
  242. case Allocator_Mode.Resize:
  243. panic("Allocator_Mode.Resize is not supported for a pool");
  244. if old_size >= size {
  245. return old_memory;
  246. }
  247. ptr := pool_alloc(pool, size);
  248. copy(ptr, old_memory, old_size);
  249. return ptr;
  250. }
  251. return nil;
  252. }
  253. pool_allocator :: proc(pool: ^Pool) -> Allocator {
  254. return Allocator{
  255. procedure = pool_allocator_proc,
  256. data = pool,
  257. };
  258. }
  259. pool_init :: proc(pool: ^Pool,
  260. block_allocator := Allocator{} , array_allocator := Allocator{},
  261. block_size := POOL_BLOCK_SIZE_DEFAULT, out_band_size := POOL_OUT_OF_BAND_SIZE_DEFAULT,
  262. alignment := 8) {
  263. pool.block_size = block_size;
  264. pool.out_band_size = out_band_size;
  265. pool.alignment = alignment;
  266. if block_allocator.procedure == nil {
  267. block_allocator = context.allocator;
  268. }
  269. if array_allocator.procedure == nil {
  270. array_allocator = context.allocator;
  271. }
  272. pool.block_allocator = block_allocator;
  273. pool.out_band_allocations.allocator = array_allocator;
  274. pool. unused_blocks.allocator = array_allocator;
  275. pool. used_blocks.allocator = array_allocator;
  276. }
  277. pool_destroy :: proc(using pool: ^Pool) {
  278. pool_free_all(pool);
  279. delete(unused_blocks);
  280. delete(used_blocks);
  281. zero(pool, size_of(pool^));
  282. }
  283. pool_alloc :: proc(using pool: ^Pool, bytes: int) -> rawptr {
  284. cycle_new_block :: proc(using pool: ^Pool) {
  285. if block_allocator.procedure == nil {
  286. panic("You must call pool_init on a Pool before using it");
  287. }
  288. if current_block != nil {
  289. append(&used_blocks, current_block);
  290. }
  291. new_block: rawptr;
  292. if len(unused_blocks) > 0 {
  293. new_block = pop(&unused_blocks);
  294. } else {
  295. new_block = block_allocator.procedure(block_allocator.data, Allocator_Mode.Alloc,
  296. block_size, alignment,
  297. nil, 0);
  298. }
  299. bytes_left = block_size;
  300. current_pos = new_block;
  301. current_block = new_block;
  302. }
  303. extra := alignment - (bytes % alignment);
  304. bytes += extra;
  305. if bytes >= out_band_size {
  306. assert(block_allocator.procedure != nil);
  307. memory := block_allocator.procedure(block_allocator.data, Allocator_Mode.Alloc,
  308. block_size, alignment,
  309. nil, 0);
  310. if memory != nil {
  311. append(&out_band_allocations, (^byte)(memory));
  312. }
  313. return memory;
  314. }
  315. if bytes_left < bytes {
  316. cycle_new_block(pool);
  317. if current_block == nil {
  318. return nil;
  319. }
  320. }
  321. memory := current_pos;
  322. current_pos = ptr_offset((^byte)(current_pos), bytes);
  323. bytes_left -= bytes;
  324. return memory;
  325. }
  326. pool_reset :: proc(using pool: ^Pool) {
  327. if current_block != nil {
  328. append(&unused_blocks, current_block);
  329. current_block = nil;
  330. }
  331. for block in used_blocks {
  332. append(&unused_blocks, block);
  333. }
  334. clear(&used_blocks);
  335. for a in out_band_allocations {
  336. free(a, block_allocator);
  337. }
  338. clear(&out_band_allocations);
  339. }
  340. pool_free_all :: proc(using pool: ^Pool) {
  341. pool_reset(pool);
  342. for block in unused_blocks {
  343. free(block, block_allocator);
  344. }
  345. clear(&unused_blocks);
  346. }