allocators.odin 68 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045104610471048104910501051105210531054105510561057105810591060106110621063106410651066106710681069107010711072107310741075107610771078107910801081108210831084108510861087108810891090109110921093109410951096109710981099110011011102110311041105110611071108110911101111111211131114111511161117111811191120112111221123112411251126112711281129113011311132113311341135113611371138113911401141114211431144114511461147114811491150115111521153115411551156115711581159116011611162116311641165116611671168116911701171117211731174117511761177117811791180118111821183118411851186118711881189119011911192119311941195119611971198119912001201120212031204120512061207120812091210121112121213121412151216121712181219122012211222122312241225122612271228122912301231123212331234123512361237123812391240124112421243124412451246124712481249125012511252125312541255125612571258125912601261126212631264126512661267126812691270127112721273127412751276127
71278127912801281128212831284128512861287128812891290129112921293129412951296129712981299130013011302130313041305130613071308130913101311131213131314131513161317131813191320132113221323132413251326132713281329133013311332133313341335133613371338133913401341134213431344134513461347134813491350135113521353135413551356135713581359136013611362136313641365136613671368136913701371137213731374137513761377137813791380138113821383138413851386138713881389139013911392139313941395139613971398139914001401140214031404140514061407140814091410141114121413141414151416141714181419142014211422142314241425142614271428142914301431143214331434143514361437143814391440144114421443144414451446144714481449145014511452145314541455145614571458145914601461146214631464146514661467146814691470147114721473147414751476147714781479148014811482148314841485148614871488148914901491149214931494149514961497149814991500150115021503150415051506150715081509151015111512151315141515151615171518151915201521152215231524152515261527152815291530153115321533153415351536153715381539154015411542154315441545154615471548154915501551155215531554155515561557155815591560156115621563156415651566156715681569157015711572157315741575157615771578157915801581158215831584158515861587158815891590159115921593159415951596159715981599160016011602160316041605160616071608160916101611161216131614161516161617161816191620162116221623162416251626162716281629163016311632163316341635163616371638163916401641164216431644164516461647164816491650165116521653165416551656165716581659166016611662166316641665166616671668166916701671167216731674167516761677167816791680168116821683168416851686168716881689169016911692169316941695169616971698169917001701170217031704170517061707170817091710171117121713171417151716171717181719172017211722172317241725172617271728172917301731173217331734173517361737173817391740174117421743174417451746174717481749175017511752175317541755175617571758175917601761176217631764176517661767176817691770177117721773177417751776177
71778177917801781178217831784178517861787178817891790179117921793179417951796179717981799180018011802180318041805180618071808180918101811181218131814181518161817181818191820182118221823182418251826182718281829183018311832183318341835183618371838183918401841184218431844184518461847184818491850185118521853185418551856185718581859186018611862186318641865186618671868186918701871187218731874187518761877187818791880188118821883188418851886188718881889189018911892189318941895189618971898189919001901190219031904190519061907190819091910191119121913191419151916191719181919192019211922192319241925192619271928192919301931193219331934193519361937193819391940194119421943194419451946194719481949195019511952195319541955195619571958195919601961196219631964196519661967196819691970197119721973197419751976197719781979198019811982198319841985198619871988198919901991199219931994199519961997199819992000200120022003200420052006200720082009201020112012201320142015201620172018201920202021202220232024202520262027202820292030203120322033203420352036203720382039204020412042204320442045204620472048204920502051205220532054205520562057205820592060206120622063206420652066206720682069207020712072207320742075207620772078207920802081208220832084208520862087208820892090209120922093209420952096209720982099210021012102210321042105210621072108210921102111211221132114211521162117211821192120212121222123212421252126212721282129213021312132213321342135213621372138213921402141214221432144214521462147214821492150215121522153215421552156215721582159216021612162216321642165216621672168216921702171217221732174217521762177217821792180218121822183218421852186218721882189219021912192219321942195219621972198219922002201220222032204220522062207220822092210221122122213221422152216221722182219222022212222222322242225222622272228222922302231223222332234223522362237223822392240224122422243224422452246224722482249225022512252225322542255225622572258225922602261226222632264226522662267226822692270227122722273227422752276227
7227822792280228122822283228422852286228722882289229022912292229322942295229622972298229923002301230223032304230523062307230823092310231123122313231423152316231723182319232023212322232323242325232623272328232923302331233223332334
  1. package mem
  2. import "base:intrinsics"
  3. import "base:runtime"
  4. /*
  5. Nil allocator.
  6. The `nil` allocator returns `nil` on every allocation attempt. This type of
  7. allocator can be used in scenarios where memory doesn't need to be allocated,
  8. but an attempt to allocate memory is not an error.
  9. */
  10. @(require_results)
  11. nil_allocator :: proc() -> Allocator {
  12. return Allocator{
  13. procedure = nil_allocator_proc,
  14. data = nil,
  15. }
  16. }
// Allocator procedure for the `nil` allocator.
//
// Every mode — alloc, resize, free, and queries — yields `nil` memory and a
// `nil` error, so callers observe a "successful" empty result rather than a
// failure. All parameters are accepted and ignored.
nil_allocator_proc :: proc(
	allocator_data: rawptr,
	mode: Allocator_Mode,
	size, alignment: int,
	old_memory: rawptr,
	old_size: int,
	loc := #caller_location,
) -> ([]byte, Allocator_Error) {
	return nil, nil
}
  27. /*
  28. Panic allocator.
  29. The panic allocator is a type of allocator that panics on any allocation
  30. attempt. This type of allocator can be used in scenarios where memory should
  31. not be allocated, and an attempt to allocate memory is an error.
  32. */
  33. @(require_results)
  34. panic_allocator :: proc() -> Allocator {
  35. return Allocator{
  36. procedure = panic_allocator_proc,
  37. data = nil,
  38. }
  39. }
// Allocator procedure for the panic allocator.
//
// Panics on any real request. Note the asymmetry: zero-sized allocations and
// `nil` frees are tolerated as no-ops, while `.Free_All` and `.Query_Info`
// always panic. `.Query_Features` reports only `.Query_Features` itself.
panic_allocator_proc :: proc(
	allocator_data: rawptr,
	mode: Allocator_Mode,
	size, alignment: int,
	old_memory: rawptr,
	old_size: int,
	loc := #caller_location,
) -> ([]byte, Allocator_Error) {
	switch mode {
	case .Alloc:
		// Only a non-zero request counts as a real allocation attempt.
		if size > 0 {
			panic("mem: panic allocator, .Alloc called", loc=loc)
		}
	case .Alloc_Non_Zeroed:
		if size > 0 {
			panic("mem: panic allocator, .Alloc_Non_Zeroed called", loc=loc)
		}
	case .Resize:
		if size > 0 {
			panic("mem: panic allocator, .Resize called", loc=loc)
		}
	case .Resize_Non_Zeroed:
		if size > 0 {
			panic("mem: panic allocator, .Resize_Non_Zeroed called", loc=loc)
		}
	case .Free:
		// Freeing `nil` is a no-op, as with most allocators.
		if old_memory != nil {
			panic("mem: panic allocator, .Free called", loc=loc)
		}
	case .Free_All:
		panic("mem: panic allocator, .Free_All called", loc=loc)
	case .Query_Features:
		// `old_memory` doubles as the out-pointer for the feature set.
		set := (^Allocator_Mode_Set)(old_memory)
		if set != nil {
			set^ = {.Query_Features}
		}
		return nil, nil
	case .Query_Info:
		panic("mem: panic allocator, .Query_Info called", loc=loc)
	}
	return nil, nil
}
/*
Arena allocator data.
*/
Arena :: struct {
	data:       []byte, // Backing buffer all allocations are carved from.
	offset:     int,    // Number of bytes of `data` currently in use.
	peak_used:  int,    // High-water mark of `offset`.
	temp_count: int,    // Number of open `Arena_Temp_Memory` regions.
}
  91. /*
  92. Arena allocator.
  93. The arena allocator (also known as a linear allocator, bump allocator,
  94. region allocator) is an allocator that uses a single backing buffer for
  95. allocations.
  96. The buffer is being used contiguously, from start by end. Each subsequent
  97. allocation occupies the next adjacent region of memory in the buffer. Since
  98. arena allocator does not keep track of any metadata associated with the
  99. allocations and their locations, it is impossible to free individual
  100. allocations.
  101. The arena allocator can be used for temporary allocations in frame-based memory
  102. management. Games are one example of such applications. A global arena can be
  103. used for any temporary memory allocations, and at the end of each frame all
  104. temporary allocations are freed. Since no temporary object is going to live
  105. longer than a frame, no lifetimes are violated.
  106. */
  107. @(require_results)
  108. arena_allocator :: proc(arena: ^Arena) -> Allocator {
  109. return Allocator{
  110. procedure = arena_allocator_proc,
  111. data = arena,
  112. }
  113. }
  114. /*
  115. Initialize an arena.
  116. This procedure initializes the arena `a` with memory region `data` as it's
  117. backing buffer.
  118. */
  119. arena_init :: proc(a: ^Arena, data: []byte) {
  120. a.data = data
  121. a.offset = 0
  122. a.peak_used = 0
  123. a.temp_count = 0
  124. }
  125. /*
  126. Allocate memory from an arena.
  127. This procedure allocates `size` bytes of memory aligned on a boundary specified
  128. by `alignment` from an arena `a`. The allocated memory is zero-initialized.
  129. This procedure returns a pointer to the newly allocated memory region.
  130. */
  131. @(require_results)
  132. arena_alloc :: proc(
  133. a: ^Arena,
  134. size: int,
  135. alignment := DEFAULT_ALIGNMENT,
  136. loc := #caller_location,
  137. ) -> (rawptr, Allocator_Error) {
  138. bytes, err := arena_alloc_bytes(a, size, alignment, loc)
  139. return raw_data(bytes), err
  140. }
  141. /*
  142. Allocate memory from an arena.
  143. This procedure allocates `size` bytes of memory aligned on a boundary specified
  144. by `alignment` from an arena `a`. The allocated memory is zero-initialized.
  145. This procedure returns a slice of the newly allocated memory region.
  146. */
  147. @(require_results)
  148. arena_alloc_bytes :: proc(
  149. a: ^Arena,
  150. size: int,
  151. alignment := DEFAULT_ALIGNMENT,
  152. loc := #caller_location,
  153. ) -> ([]byte, Allocator_Error) {
  154. bytes, err := arena_alloc_bytes_non_zeroed(a, size, alignment, loc)
  155. if bytes != nil {
  156. zero_slice(bytes)
  157. }
  158. return bytes, err
  159. }
  160. /*
  161. Allocate non-initialized memory from an arena.
  162. This procedure allocates `size` bytes of memory aligned on a boundary specified
  163. by `alignment` from an arena `a`. The allocated memory is not explicitly
  164. zero-initialized. This procedure returns a pointer to the newly allocated
  165. memory region.
  166. */
  167. @(require_results)
  168. arena_alloc_non_zeroed :: proc(
  169. a: ^Arena,
  170. size: int,
  171. alignment := DEFAULT_ALIGNMENT,
  172. loc := #caller_location,
  173. ) -> (rawptr, Allocator_Error) {
  174. bytes, err := arena_alloc_bytes_non_zeroed(a, size, alignment, loc)
  175. return raw_data(bytes), err
  176. }
/*
Allocate non-initialized memory from an arena.

This procedure allocates `size` bytes of memory aligned on a boundary
specified by `alignment` from an arena `a`. The allocated memory is not
explicitly zero-initialized. This procedure returns a slice of the newly
allocated memory region. Returns `.Out_Of_Memory` if the request (including
alignment padding) does not fit in the remaining backing buffer.
*/
@(require_results)
arena_alloc_bytes_non_zeroed :: proc(
	a: ^Arena,
	size: int,
	alignment := DEFAULT_ALIGNMENT,
	loc := #caller_location
) -> ([]byte, Allocator_Error) {
	if a.data == nil {
		panic("Arena is not initialized", loc)
	}
	// One-past-the-end of the used region; `#no_bounds_check` because
	// `a.offset` may equal `len(a.data)` when the arena is exactly full.
	#no_bounds_check end := &a.data[a.offset]
	// Align the cursor forward; the padding introduced by alignment is
	// charged to this allocation's total size.
	ptr := align_forward(end, uintptr(alignment))
	total_size := size + ptr_sub((^byte)(ptr), (^byte)(end))
	if a.offset + total_size > len(a.data) {
		return nil, .Out_Of_Memory
	}
	a.offset += total_size
	a.peak_used = max(a.peak_used, a.offset)
	return byte_slice(ptr, size), nil
}
/*
Free all memory to an arena.

Resets the bump cursor to the start of the backing buffer. Does not zero the
buffer, and intentionally leaves `peak_used` intact as a usage statistic.
*/
arena_free_all :: proc(a: ^Arena) {
	a.offset = 0
}
// Allocator procedure for the arena allocator.
//
// `.Free` of individual allocations is not supported (the arena keeps no
// per-allocation metadata); resize is delegated to the default
// alloc-copy helpers, which allocate a new region from this same arena.
arena_allocator_proc :: proc(
	allocator_data: rawptr,
	mode: Allocator_Mode,
	size: int,
	alignment: int,
	old_memory: rawptr,
	old_size: int,
	loc := #caller_location,
) -> ([]byte, Allocator_Error) {
	arena := cast(^Arena)allocator_data
	switch mode {
	case .Alloc:
		return arena_alloc_bytes(arena, size, alignment, loc)
	case .Alloc_Non_Zeroed:
		return arena_alloc_bytes_non_zeroed(arena, size, alignment, loc)
	case .Free:
		// Individual frees are impossible for a bump allocator.
		return nil, .Mode_Not_Implemented
	case .Free_All:
		arena_free_all(arena)
	case .Resize:
		// Generic resize: allocate-new + copy within this arena.
		return default_resize_bytes_align(byte_slice(old_memory, old_size), size, alignment, arena_allocator(arena), loc)
	case .Resize_Non_Zeroed:
		return default_resize_bytes_align_non_zeroed(byte_slice(old_memory, old_size), size, alignment, arena_allocator(arena), loc)
	case .Query_Features:
		// `old_memory` doubles as the out-pointer for the feature set.
		set := (^Allocator_Mode_Set)(old_memory)
		if set != nil {
			set^ = {.Alloc, .Alloc_Non_Zeroed, .Free_All, .Resize, .Resize_Non_Zeroed, .Query_Features}
		}
		return nil, nil
	case .Query_Info:
		return nil, .Mode_Not_Implemented
	}
	return nil, nil
}
/*
Temporary memory region of arena.

Temporary memory regions of an arena act as "savepoints". When one is
created, subsequent allocations are done inside the temporary memory region.
When `end_arena_temp_memory` is called, the arena is rolled back, and all of
the memory that was allocated from the arena inside the region is freed.
Multiple temporary memory regions can exist at the same time for an arena.
*/
Arena_Temp_Memory :: struct {
	arena:       ^Arena, // Arena this savepoint belongs to.
	prev_offset: int,    // Arena offset captured when the region began.
}
  256. /*
  257. Start a temporary memory region.
  258. This procedure creates a temporary memory region. After a temporary memory
  259. region is created, all allocations are said to be *inside* the temporary memory
  260. region, until `end_arena_temp_memory` is called.
  261. */
  262. @(require_results)
  263. begin_arena_temp_memory :: proc(a: ^Arena) -> Arena_Temp_Memory {
  264. tmp: Arena_Temp_Memory
  265. tmp.arena = a
  266. tmp.prev_offset = a.offset
  267. a.temp_count += 1
  268. return tmp
  269. }
  270. /*
  271. End a temporary memory region.
  272. This procedure ends the temporary memory region for an arena. All of the
  273. allocations *inside* the temporary memory region will be freed to the arena.
  274. */
  275. end_arena_temp_memory :: proc(tmp: Arena_Temp_Memory) {
  276. assert(tmp.arena.offset >= tmp.prev_offset)
  277. assert(tmp.arena.temp_count > 0)
  278. tmp.arena.offset = tmp.prev_offset
  279. tmp.arena.temp_count -= 1
  280. }
/* Preserved for compatibility */
// Old names kept as aliases so existing code using the former
// `Scratch_Allocator` API continues to compile.
Scratch_Allocator :: Scratch
scratch_allocator_init :: scratch_init
scratch_allocator_destroy :: scratch_destroy
/*
Scratch allocator data.
*/
Scratch :: struct {
	data:               []byte,          // Backing buffer.
	curr_offset:        int,             // Bump cursor into `data`.
	prev_allocation:    rawptr,          // Most recent allocation, so it can be freed/rolled back.
	backup_allocator:   Allocator,       // Used when a request does not fit in `data`.
	leaked_allocations: [dynamic][]byte, // Oversized allocations served by the backup allocator.
}
  295. /*
  296. Scratch allocator.
  297. The scratch allocator works in a similar way to the `Arena` allocator. The
  298. scratch allocator has a backing buffer, that is being allocated in
  299. contiguous regions, from start to end.
  300. Each subsequent allocation will be the next adjacent region of memory in the
  301. backing buffer. If the allocation doesn't fit into the remaining space of the
  302. backing buffer, this allocation is put at the start of the buffer, and all
  303. previous allocations will become invalidated. If the allocation doesn't fit
  304. into the backing buffer as a whole, it will be allocated using a backing
  305. allocator, and pointer to the allocated memory region will be put into the
  306. `leaked_allocations` array.
  307. The `leaked_allocations` array is managed by the `context` allocator.
  308. */
  309. @(require_results)
  310. scratch_allocator :: proc(allocator: ^Scratch) -> Allocator {
  311. return Allocator{
  312. procedure = scratch_allocator_proc,
  313. data = allocator,
  314. }
  315. }
  316. /*
  317. Initialize scratch allocator.
  318. */
  319. scratch_init :: proc(s: ^Scratch, size: int, backup_allocator := context.allocator) -> Allocator_Error {
  320. s.data = make_aligned([]byte, size, 2*align_of(rawptr), backup_allocator) or_return
  321. s.curr_offset = 0
  322. s.prev_allocation = nil
  323. s.backup_allocator = backup_allocator
  324. s.leaked_allocations.allocator = backup_allocator
  325. return nil
  326. }
  327. /*
  328. Free all data associated with a scratch allocator.
  329. */
  330. scratch_destroy :: proc(s: ^Scratch) {
  331. if s == nil {
  332. return
  333. }
  334. for ptr in s.leaked_allocations {
  335. free_bytes(ptr, s.backup_allocator)
  336. }
  337. delete(s.leaked_allocations)
  338. delete(s.data, s.backup_allocator)
  339. s^ = {}
  340. }
  341. /*
  342. Allocate memory from scratch allocator.
  343. This procedure allocates `size` bytes of memory aligned on a boundary specified
  344. by `alignment`. The allocated memory region is zero-initialized. This procedure
  345. returns a pointer to the allocated memory region.
  346. */
  347. @(require_results)
  348. scratch_alloc :: proc(
  349. s: ^Scratch,
  350. size: int,
  351. alignment := DEFAULT_ALIGNMENT,
  352. loc := #caller_location,
  353. ) -> (rawptr, Allocator_Error) {
  354. bytes, err := scratch_alloc_bytes(s, size, alignment, loc)
  355. return raw_data(bytes), err
  356. }
  357. /*
  358. Allocate memory from scratch allocator.
  359. This procedure allocates `size` bytes of memory aligned on a boundary specified
  360. by `alignment`. The allocated memory region is zero-initialized. This procedure
  361. returns a slice of the allocated memory region.
  362. */
  363. @(require_results)
  364. scratch_alloc_bytes :: proc(
  365. s: ^Scratch,
  366. size: int,
  367. alignment := DEFAULT_ALIGNMENT,
  368. loc := #caller_location,
  369. ) -> ([]byte, Allocator_Error) {
  370. bytes, err := scratch_alloc_bytes_non_zeroed(s, size, alignment, loc)
  371. if bytes != nil {
  372. zero_slice(bytes)
  373. }
  374. return bytes, err
  375. }
  376. /*
  377. Allocate non-initialized memory from scratch allocator.
  378. This procedure allocates `size` bytes of memory aligned on a boundary specified
  379. by `alignment`. The allocated memory region is not explicitly zero-initialized.
  380. This procedure returns a pointer to the allocated memory region.
  381. */
  382. @(require_results)
  383. scratch_alloc_non_zeroed :: proc(
  384. s: ^Scratch,
  385. size: int,
  386. alignment := DEFAULT_ALIGNMENT,
  387. loc := #caller_location,
  388. ) -> (rawptr, Allocator_Error) {
  389. bytes, err := scratch_alloc_bytes_non_zeroed(s, size, alignment, loc)
  390. return raw_data(bytes), err
  391. }
  392. /*
  393. Allocate non-initialized memory from scratch allocator.
  394. This procedure allocates `size` bytes of memory aligned on a boundary specified
  395. by `alignment`. The allocated memory region is not explicitly zero-initialized.
  396. This procedure returns a slice of the allocated memory region.
  397. */
  398. @(require_results)
  399. scratch_alloc_bytes_non_zeroed :: proc(
  400. s: ^Scratch,
  401. size: int,
  402. alignment := DEFAULT_ALIGNMENT,
  403. loc := #caller_location,
  404. ) -> ([]byte, Allocator_Error) {
  405. if s.data == nil {
  406. DEFAULT_BACKING_SIZE :: 4 * Megabyte
  407. if !(context.allocator.procedure != scratch_allocator_proc && context.allocator.data != s) {
  408. panic("cyclic initialization of the scratch allocator with itself", loc)
  409. }
  410. scratch_init(s, DEFAULT_BACKING_SIZE)
  411. }
  412. size := size
  413. size = align_forward_int(size, alignment)
  414. if size <= len(s.data) {
  415. offset := uintptr(0)
  416. if s.curr_offset+size <= len(s.data) {
  417. offset = uintptr(s.curr_offset)
  418. } else {
  419. offset = 0
  420. }
  421. start := uintptr(raw_data(s.data))
  422. ptr := align_forward_uintptr(offset+start, uintptr(alignment))
  423. s.prev_allocation = rawptr(ptr)
  424. s.curr_offset = int(offset) + size
  425. return byte_slice(rawptr(ptr), size), nil
  426. } else {
  427. a := s.backup_allocator
  428. if a.procedure == nil {
  429. a = context.allocator
  430. s.backup_allocator = a
  431. }
  432. ptr, err := alloc_bytes_non_zeroed(size, alignment, a, loc)
  433. if err != nil {
  434. return ptr, err
  435. }
  436. if s.leaked_allocations == nil {
  437. s.leaked_allocations, err = make([dynamic][]byte, a)
  438. }
  439. append(&s.leaked_allocations, ptr)
  440. if logger := context.logger; logger.lowest_level <= .Warning {
  441. if logger.procedure != nil {
  442. logger.procedure(logger.data, .Warning, "mem.Scratch resorted to backup_allocator" , logger.options, loc)
  443. }
  444. }
  445. return ptr, err
  446. }
  447. }
  448. /*
  449. Free memory to the scratch allocator.
  450. This procedure frees the memory region allocated at pointer `ptr`.
  451. If `ptr` is not the latest allocation and is not a leaked allocation, this
  452. operation is a no-op.
  453. */
  454. scratch_free :: proc(s: ^Scratch, ptr: rawptr, loc := #caller_location) -> Allocator_Error {
  455. if s.data == nil {
  456. panic("Free on an uninitialized scratch allocator", loc)
  457. }
  458. if ptr == nil {
  459. return nil
  460. }
  461. start := uintptr(raw_data(s.data))
  462. end := start + uintptr(len(s.data))
  463. old_ptr := uintptr(ptr)
  464. if s.prev_allocation == ptr {
  465. s.curr_offset = int(uintptr(s.prev_allocation) - start)
  466. s.prev_allocation = nil
  467. return nil
  468. }
  469. if start <= old_ptr && old_ptr < end {
  470. // NOTE(bill): Cannot free this pointer but it is valid
  471. return nil
  472. }
  473. if len(s.leaked_allocations) != 0 {
  474. for data, i in s.leaked_allocations {
  475. ptr := raw_data(data)
  476. if ptr == ptr {
  477. free_bytes(data, s.backup_allocator, loc)
  478. ordered_remove(&s.leaked_allocations, i, loc)
  479. return nil
  480. }
  481. }
  482. }
  483. return .Invalid_Pointer
  484. }
  485. /*
  486. Free all memory to the scratch allocator.
  487. */
  488. scratch_free_all :: proc(s: ^Scratch, loc := #caller_location) {
  489. s.curr_offset = 0
  490. s.prev_allocation = nil
  491. for ptr in s.leaked_allocations {
  492. free_bytes(ptr, s.backup_allocator, loc)
  493. }
  494. clear(&s.leaked_allocations)
  495. }
  496. /*
  497. Resize an allocation.
  498. This procedure resizes a memory region, defined by its location, `old_memory`,
  499. and its size, `old_size` to have a size `size` and alignment `alignment`. The
  500. newly allocated memory, if any is zero-initialized.
  501. If `old_memory` is `nil`, this procedure acts just like `scratch_alloc()`,
  502. allocating a memory region `size` bytes in size, aligned on a boundary specified
  503. by `alignment`.
  504. If `size` is 0, this procedure acts just like `scratch_free()`, freeing the
  505. memory region located at an address specified by `old_memory`.
  506. This procedure returns the pointer to the resized memory region.
  507. */
  508. @(require_results)
  509. scratch_resize :: proc(
  510. s: ^Scratch,
  511. old_memory: rawptr,
  512. old_size: int,
  513. size: int,
  514. alignment := DEFAULT_ALIGNMENT,
  515. loc := #caller_location
  516. ) -> (rawptr, Allocator_Error) {
  517. bytes, err := scratch_resize_bytes(s, byte_slice(old_memory, old_size), size, alignment, loc)
  518. return raw_data(bytes), err
  519. }
  520. /*
  521. Resize an allocation.
  522. This procedure resizes a memory region, specified by `old_data`, to have a size
  523. `size` and alignment `alignment`. The newly allocated memory, if any is
  524. zero-initialized.
  525. If `old_memory` is `nil`, this procedure acts just like `scratch_alloc()`,
  526. allocating a memory region `size` bytes in size, aligned on a boundary specified
  527. by `alignment`.
  528. If `size` is 0, this procedure acts just like `scratch_free()`, freeing the
  529. memory region located at an address specified by `old_memory`.
  530. This procedure returns the slice of the resized memory region.
  531. */
  532. @(require_results)
  533. scratch_resize_bytes :: proc(
  534. s: ^Scratch,
  535. old_data: []byte,
  536. size: int,
  537. alignment := DEFAULT_ALIGNMENT,
  538. loc := #caller_location
  539. ) -> ([]byte, Allocator_Error) {
  540. bytes, err := scratch_resize_bytes_non_zeroed(s, old_data, size, alignment, loc)
  541. if bytes != nil && size > len(old_data) {
  542. zero_slice(bytes[size:])
  543. }
  544. return bytes, err
  545. }
  546. /*
  547. Resize an allocation without zero-initialization.
  548. This procedure resizes a memory region, defined by its location, `old_memory`,
  549. and its size, `old_size` to have a size `size` and alignment `alignment`. The
  550. newly allocated memory, if any is not explicitly zero-initialized.
  551. If `old_memory` is `nil`, this procedure acts just like `scratch_alloc()`,
  552. allocating a memory region `size` bytes in size, aligned on a boundary specified
  553. by `alignment`.
  554. If `size` is 0, this procedure acts just like `scratch_free()`, freeing the
  555. memory region located at an address specified by `old_memory`.
  556. This procedure returns the pointer to the resized memory region.
  557. */
  558. @(require_results)
  559. scratch_resize_non_zeroed :: proc(
  560. s: ^Scratch,
  561. old_memory: rawptr,
  562. old_size: int,
  563. size: int,
  564. alignment := DEFAULT_ALIGNMENT,
  565. loc := #caller_location
  566. ) -> (rawptr, Allocator_Error) {
  567. bytes, err := scratch_resize_bytes_non_zeroed(s, byte_slice(old_memory, old_size), size, alignment, loc)
  568. return raw_data(bytes), err
  569. }
/*
Resize an allocation.

This procedure resizes a memory region, specified by `old_data`, to have a
size `size` and alignment `alignment`. The newly allocated memory, if any,
is not explicitly zero-initialized.

If `old_data` is empty, this procedure acts just like `scratch_alloc()`,
allocating a memory region `size` bytes in size, aligned on a boundary
specified by `alignment`. If `size` is 0, this procedure acts just like
`scratch_free()`. This procedure returns a slice of the resized memory
region.
*/
@(require_results)
scratch_resize_bytes_non_zeroed :: proc(
	s: ^Scratch,
	old_data: []byte,
	size: int,
	alignment := DEFAULT_ALIGNMENT,
	loc := #caller_location
) -> ([]byte, Allocator_Error) {
	old_memory := raw_data(old_data)
	old_size := len(old_data)
	if s.data == nil {
		DEFAULT_BACKING_SIZE :: 4 * Megabyte
		// Refuse to lazily initialize the scratch allocator from itself.
		if !(context.allocator.procedure != scratch_allocator_proc && context.allocator.data != s) {
			panic("cyclic initialization of the scratch allocator with itself", loc)
		}
		// NOTE(review): the error from `scratch_init` is discarded here;
		// on failure `s.data` remains nil — confirm this is intended.
		scratch_init(s, DEFAULT_BACKING_SIZE)
	}
	begin := uintptr(raw_data(s.data))
	end := begin + uintptr(len(s.data))
	old_ptr := uintptr(old_memory)
	// In-place fast path: the old block lives inside the backing buffer and
	// the new size still fits before the buffer's end; just move the cursor.
	if begin <= old_ptr && old_ptr < end && old_ptr+uintptr(size) < end {
		s.curr_offset = int(old_ptr-begin)+size
		return byte_slice(old_memory, size), nil
	}
	// Otherwise allocate a fresh block, copy the old contents over, and free
	// the old block (a no-op unless it was the latest or a leaked allocation).
	data, err := scratch_alloc_bytes_non_zeroed(s, size, alignment, loc)
	if err != nil {
		return data, err
	}
	runtime.copy(data, byte_slice(old_memory, old_size))
	err = scratch_free(s, old_memory, loc)
	return data, err
}
// Allocator procedure for the scratch allocator.
//
// Dispatches each `Allocator_Mode` to the corresponding `scratch_*` helper.
scratch_allocator_proc :: proc(
	allocator_data: rawptr,
	mode: Allocator_Mode,
	size, alignment: int,
	old_memory: rawptr,
	old_size: int,
	loc := #caller_location,
) -> ([]byte, Allocator_Error) {
	s := (^Scratch)(allocator_data)
	size := size
	switch mode {
	case .Alloc:
		return scratch_alloc_bytes(s, size, alignment, loc)
	case .Alloc_Non_Zeroed:
		return scratch_alloc_bytes_non_zeroed(s, size, alignment, loc)
	case .Free:
		return nil, scratch_free(s, old_memory, loc)
	case .Free_All:
		scratch_free_all(s, loc)
	case .Resize:
		return scratch_resize_bytes(s, byte_slice(old_memory, old_size), size, alignment, loc)
	case .Resize_Non_Zeroed:
		return scratch_resize_bytes_non_zeroed(s, byte_slice(old_memory, old_size), size, alignment, loc)
	case .Query_Features:
		// `old_memory` doubles as the out-pointer for the feature set.
		set := (^Allocator_Mode_Set)(old_memory)
		if set != nil {
			set^ = {.Alloc, .Alloc_Non_Zeroed, .Free, .Free_All, .Resize, .Resize_Non_Zeroed, .Query_Features}
		}
		return nil, nil
	case .Query_Info:
		return nil, .Mode_Not_Implemented
	}
	return nil, nil
}
  648. /*
  649. Stack allocator data.
  650. */
  651. Stack :: struct {
  652. data: []byte,
  653. prev_offset: int,
  654. curr_offset: int,
  655. peak_used: int,
  656. }
  657. /*
  658. Header of a stack allocation.
  659. */
  660. Stack_Allocation_Header :: struct {
  661. prev_offset: int,
  662. padding: int,
  663. }
  664. /*
  665. Stack allocator.
  666. The stack allocator is an allocator that allocates data in the backing buffer
  667. linearly, from start to end. Each subsequent allocation will get the next
  668. adjacent memory region.
  669. Unlike arena allocator, the stack allocator saves allocation metadata and has
  670. a strict freeing order. Only the last allocated element can be freed. After the
  671. last allocated element is freed, the next previous allocated element becomes
  672. available for freeing.
  673. The metadata is stored in the allocation headers, that are located before the
  674. start of each allocated memory region. Each header points to the start of the
  675. previous allocation header.
  676. */
  677. @(require_results)
  678. stack_allocator :: proc(stack: ^Stack) -> Allocator {
  679. return Allocator{
  680. procedure = stack_allocator_proc,
  681. data = stack,
  682. }
  683. }
  684. /*
  685. Initialize the stack allocator.
  686. This procedure initializes the stack allocator with a backing buffer specified
  687. by `data` parameter.
  688. */
  689. stack_init :: proc(s: ^Stack, data: []byte) {
  690. s.data = data
  691. s.prev_offset = 0
  692. s.curr_offset = 0
  693. s.peak_used = 0
  694. }
  695. /*
  696. Allocate memory from stack.
  697. This procedure allocates `size` bytes of memory, aligned to the boundary
  698. specified by `alignment`. The allocated memory is zero-initialized. This
  699. procedure returns the pointer to the allocated memory.
  700. */
  701. @(require_results)
  702. stack_alloc :: proc(
  703. s: ^Stack,
  704. size: int,
  705. alignment := DEFAULT_ALIGNMENT,
  706. loc := #caller_location
  707. ) -> (rawptr, Allocator_Error) {
  708. bytes, err := stack_alloc_bytes(s, size, alignment, loc)
  709. return raw_data(bytes), err
  710. }
  711. /*
  712. Allocate memory from stack.
  713. This procedure allocates `size` bytes of memory, aligned to the boundary
  714. specified by `alignment`. The allocated memory is zero-initialized. This
  715. procedure returns the slice of the allocated memory.
  716. */
  717. @(require_results)
  718. stack_alloc_bytes :: proc(
  719. s: ^Stack,
  720. size: int,
  721. alignment := DEFAULT_ALIGNMENT,
  722. loc := #caller_location
  723. ) -> ([]byte, Allocator_Error) {
  724. bytes, err := stack_alloc_bytes_non_zeroed(s, size, alignment, loc)
  725. if bytes != nil {
  726. zero_slice(bytes)
  727. }
  728. return bytes, err
  729. }
  730. /*
  731. Allocate memory from stack.
  732. This procedure allocates `size` bytes of memory, aligned to the boundary
  733. specified by `alignment`. The allocated memory is not explicitly
  734. zero-initialized. This procedure returns the pointer to the allocated memory.
  735. */
  736. @(require_results)
  737. stack_alloc_non_zeroed :: proc(
  738. s: ^Stack,
  739. size: int,
  740. alignment := DEFAULT_ALIGNMENT,
  741. loc := #caller_location
  742. ) -> (rawptr, Allocator_Error) {
  743. bytes, err := stack_alloc_bytes_non_zeroed(s, size, alignment, loc)
  744. return raw_data(bytes), err
  745. }
  746. /*
  747. Allocate memory from stack.
  748. This procedure allocates `size` bytes of memory, aligned to the boundary
  749. specified by `alignment`. The allocated memory is not explicitly
  750. zero-initialized. This procedure returns the slice of the allocated memory.
  751. */
  752. @(require_results)
  753. stack_alloc_bytes_non_zeroed :: proc(
  754. s: ^Stack,
  755. size: int,
  756. alignment := DEFAULT_ALIGNMENT,
  757. loc := #caller_location
  758. ) -> ([]byte, Allocator_Error) {
  759. if s.data == nil {
  760. panic("Stack allocation on an uninitialized stack allocator", loc)
  761. }
  762. curr_addr := uintptr(raw_data(s.data)) + uintptr(s.curr_offset)
  763. padding := calc_padding_with_header(
  764. curr_addr,
  765. uintptr(alignment),
  766. size_of(Stack_Allocation_Header),
  767. )
  768. if s.curr_offset + padding + size > len(s.data) {
  769. return nil, .Out_Of_Memory
  770. }
  771. s.prev_offset = s.curr_offset
  772. s.curr_offset += padding
  773. next_addr := curr_addr + uintptr(padding)
  774. header := (^Stack_Allocation_Header)(next_addr - size_of(Stack_Allocation_Header))
  775. header.padding = padding
  776. header.prev_offset = s.prev_offset
  777. s.curr_offset += size
  778. s.peak_used = max(s.peak_used, s.curr_offset)
  779. return byte_slice(rawptr(next_addr), size), nil
  780. }
  781. /*
  782. Free memory to the stack.
  783. This procedure frees the memory region starting at `old_memory` to the stack.
  784. If the freeing does is an out of order freeing, the `.Invalid_Pointer` error
  785. is returned.
  786. */
  787. stack_free :: proc(
  788. s: ^Stack,
  789. old_memory: rawptr,
  790. loc := #caller_location,
  791. ) -> (Allocator_Error) {
  792. if s.data == nil {
  793. panic("Stack free on an uninitialized stack allocator", loc)
  794. }
  795. if old_memory == nil {
  796. return nil
  797. }
  798. start := uintptr(raw_data(s.data))
  799. end := start + uintptr(len(s.data))
  800. curr_addr := uintptr(old_memory)
  801. if !(start <= curr_addr && curr_addr < end) {
  802. panic("Out of bounds memory address passed to stack allocator (free)", loc)
  803. }
  804. if curr_addr >= start+uintptr(s.curr_offset) {
  805. // NOTE(bill): Allow double frees
  806. return nil
  807. }
  808. header := (^Stack_Allocation_Header)(curr_addr - size_of(Stack_Allocation_Header))
  809. old_offset := int(curr_addr - uintptr(header.padding) - uintptr(raw_data(s.data)))
  810. if old_offset != header.prev_offset {
  811. // panic("Out of order stack allocator free");
  812. return .Invalid_Pointer
  813. }
  814. s.curr_offset = old_offset
  815. s.prev_offset = header.prev_offset
  816. return nil
  817. }
  818. /*
  819. Free all allocations to the stack.
  820. */
  821. stack_free_all :: proc(s: ^Stack, loc := #caller_location) {
  822. s.prev_offset = 0
  823. s.curr_offset = 0
  824. }
  825. /*
  826. Resize an allocation.
  827. This procedure resizes a memory region, defined by its location, `old_memory`,
  828. and its size, `old_size` to have a size `size` and alignment `alignment`. The
  829. newly allocated memory, if any is zero-initialized.
  830. If `old_memory` is `nil`, this procedure acts just like `stack_alloc()`,
  831. allocating a memory region `size` bytes in size, aligned on a boundary specified
  832. by `alignment`.
  833. If `size` is 0, this procedure acts just like `stack_free()`, freeing the
  834. memory region located at an address specified by `old_memory`.
  835. This procedure returns the pointer to the resized memory region.
  836. */
  837. @(require_results)
  838. stack_resize :: proc(
  839. s: ^Stack,
  840. old_memory: rawptr,
  841. old_size: int,
  842. size: int,
  843. alignment := DEFAULT_ALIGNMENT,
  844. loc := #caller_location,
  845. ) -> (rawptr, Allocator_Error) {
  846. bytes, err := stack_resize_bytes(s, byte_slice(old_memory, old_size), size, alignment)
  847. return raw_data(bytes), err
  848. }
  849. /*
  850. Resize an allocation.
  851. This procedure resizes a memory region, specified by the `old_data` parameter
  852. to have a size `size` and alignment `alignment`. The newly allocated memory,
  853. if any is zero-initialized.
  854. If `old_memory` is `nil`, this procedure acts just like `stack_alloc()`,
  855. allocating a memory region `size` bytes in size, aligned on a boundary specified
  856. by `alignment`.
  857. If `size` is 0, this procedure acts just like `stack_free()`, freeing the
  858. memory region located at an address specified by `old_memory`.
  859. This procedure returns the slice of the resized memory region.
  860. */
  861. @(require_results)
  862. stack_resize_bytes :: proc(
  863. s: ^Stack,
  864. old_data: []byte,
  865. size: int,
  866. alignment := DEFAULT_ALIGNMENT,
  867. loc := #caller_location,
  868. ) -> ([]byte, Allocator_Error) {
  869. bytes, err := stack_alloc_bytes_non_zeroed(s, size, alignment, loc)
  870. if bytes != nil {
  871. if old_data == nil {
  872. zero_slice(bytes)
  873. } else if size > len(old_data) {
  874. zero_slice(bytes[len(old_data):])
  875. }
  876. }
  877. return bytes, err
  878. }
  879. /*
  880. Resize an allocation without zero-initialization.
  881. This procedure resizes a memory region, defined by its location, `old_memory`,
  882. and its size, `old_size` to have a size `size` and alignment `alignment`. The
  883. newly allocated memory, if any is not explicitly zero-initialized.
  884. If `old_memory` is `nil`, this procedure acts just like `stack_alloc()`,
  885. allocating a memory region `size` bytes in size, aligned on a boundary specified
  886. by `alignment`.
  887. If `size` is 0, this procedure acts just like `stack_free()`, freeing the
  888. memory region located at an address specified by `old_memory`.
  889. This procedure returns the pointer to the resized memory region.
  890. */
  891. @(require_results)
  892. stack_resize_non_zeroed :: proc(
  893. s: ^Stack,
  894. old_memory: rawptr,
  895. old_size: int,
  896. size: int,
  897. alignment := DEFAULT_ALIGNMENT,
  898. loc := #caller_location,
  899. ) -> (rawptr, Allocator_Error) {
  900. bytes, err := stack_resize_bytes_non_zeroed(s, byte_slice(old_memory, old_size), size, alignment)
  901. return raw_data(bytes), err
  902. }
  903. /*
  904. Resize an allocation without zero-initialization.
  905. This procedure resizes a memory region, specified by the `old_data` parameter
  906. to have a size `size` and alignment `alignment`. The newly allocated memory,
  907. if any is not explicitly zero-initialized.
  908. If `old_memory` is `nil`, this procedure acts just like `stack_alloc()`,
  909. allocating a memory region `size` bytes in size, aligned on a boundary specified
  910. by `alignment`.
  911. If `size` is 0, this procedure acts just like `stack_free()`, freeing the
  912. memory region located at an address specified by `old_memory`.
  913. This procedure returns the slice of the resized memory region.
  914. */
  915. @(require_results)
  916. stack_resize_bytes_non_zeroed :: proc(
  917. s: ^Stack,
  918. old_data: []byte,
  919. size: int,
  920. alignment := DEFAULT_ALIGNMENT,
  921. loc := #caller_location,
  922. ) -> ([]byte, Allocator_Error) {
  923. old_memory := raw_data(old_data)
  924. old_size := len(old_data)
  925. if s.data == nil {
  926. panic("Stack free all on an uninitialized stack allocator", loc)
  927. }
  928. if old_memory == nil {
  929. return stack_alloc_bytes_non_zeroed(s, size, alignment, loc)
  930. }
  931. if size == 0 {
  932. return nil, nil
  933. }
  934. start := uintptr(raw_data(s.data))
  935. end := start + uintptr(len(s.data))
  936. curr_addr := uintptr(old_memory)
  937. if !(start <= curr_addr && curr_addr < end) {
  938. panic("Out of bounds memory address passed to stack allocator (resize)")
  939. }
  940. if curr_addr >= start+uintptr(s.curr_offset) {
  941. // NOTE(bill): Allow double frees
  942. return nil, nil
  943. }
  944. if old_size == size {
  945. return byte_slice(old_memory, size), nil
  946. }
  947. header := (^Stack_Allocation_Header)(curr_addr - size_of(Stack_Allocation_Header))
  948. old_offset := int(curr_addr - uintptr(header.padding) - uintptr(raw_data(s.data)))
  949. if old_offset != header.prev_offset {
  950. data, err := stack_alloc_bytes_non_zeroed(s, size, alignment, loc)
  951. if err == nil {
  952. runtime.copy(data, byte_slice(old_memory, old_size))
  953. }
  954. return data, err
  955. }
  956. old_memory_size := uintptr(s.curr_offset) - (curr_addr - start)
  957. assert(old_memory_size == uintptr(old_size))
  958. diff := size - old_size
  959. s.curr_offset += diff // works for smaller sizes too
  960. if diff > 0 {
  961. zero(rawptr(curr_addr + uintptr(diff)), diff)
  962. }
  963. return byte_slice(old_memory, size), nil
  964. }
  965. stack_allocator_proc :: proc(
  966. allocator_data: rawptr,
  967. mode: Allocator_Mode,
  968. size: int,
  969. alignment: int,
  970. old_memory: rawptr,
  971. old_size: int,
  972. loc := #caller_location,
  973. ) -> ([]byte, Allocator_Error) {
  974. s := cast(^Stack)allocator_data
  975. if s.data == nil {
  976. return nil, .Invalid_Argument
  977. }
  978. switch mode {
  979. case .Alloc:
  980. return stack_alloc_bytes(s, size, alignment, loc)
  981. case .Alloc_Non_Zeroed:
  982. return stack_alloc_bytes_non_zeroed(s, size, alignment, loc)
  983. case .Free:
  984. return nil, stack_free(s, old_memory, loc)
  985. case .Free_All:
  986. stack_free_all(s, loc)
  987. case .Resize:
  988. return stack_resize_bytes(s, byte_slice(old_memory, old_size), size, alignment, loc)
  989. case .Resize_Non_Zeroed:
  990. return stack_resize_bytes_non_zeroed(s, byte_slice(old_memory, old_size), size, alignment, loc)
  991. case .Query_Features:
  992. set := (^Allocator_Mode_Set)(old_memory)
  993. if set != nil {
  994. set^ = {.Alloc, .Alloc_Non_Zeroed, .Free, .Free_All, .Resize, .Resize_Non_Zeroed, .Query_Features}
  995. }
  996. return nil, nil
  997. case .Query_Info:
  998. return nil, .Mode_Not_Implemented
  999. }
  1000. return nil, nil
  1001. }
  1002. /*
  1003. Allocation header of the small stack allocator.
  1004. */
  1005. Small_Stack_Allocation_Header :: struct {
  1006. padding: u8,
  1007. }
  1008. /*
  1009. Small stack allocator data.
  1010. */
  1011. Small_Stack :: struct {
  1012. data: []byte,
  1013. offset: int,
  1014. peak_used: int,
  1015. }
  1016. /*
  1017. Initialize small stack.
  1018. This procedure initializes the small stack allocator with `data` as its backing
  1019. buffer.
  1020. */
  1021. small_stack_init :: proc(s: ^Small_Stack, data: []byte) {
  1022. s.data = data
  1023. s.offset = 0
  1024. s.peak_used = 0
  1025. }
  1026. /*
  1027. Small stack allocator.
  1028. The small stack allocator is just like a stack allocator, with the only
  1029. difference being an extremely small header size. Unlike the stack allocator,
  1030. small stack allows out-of order freeing of memory.
  1031. The memory is allocated in the backing buffer linearly, from start to end.
  1032. Each subsequent allocation will get the next adjacent memory region.
  1033. The metadata is stored in the allocation headers, that are located before the
  1034. start of each allocated memory region. Each header contains the amount of
  1035. padding bytes between that header and end of the previous allocation.
  1036. */
  1037. @(require_results)
  1038. small_stack_allocator :: proc(stack: ^Small_Stack) -> Allocator {
  1039. return Allocator{
  1040. procedure = small_stack_allocator_proc,
  1041. data = stack,
  1042. }
  1043. }
  1044. /*
  1045. Allocate memory from small stack.
  1046. This procedure allocates `size` bytes of memory aligned to a boundary specified
  1047. by `alignment`. The allocated memory is zero-initialized. This procedure
  1048. returns a pointer to the allocated memory region.
  1049. */
  1050. @(require_results)
  1051. small_stack_alloc :: proc(
  1052. s: ^Small_Stack,
  1053. size: int,
  1054. alignment := DEFAULT_ALIGNMENT,
  1055. loc := #caller_location,
  1056. ) -> (rawptr, Allocator_Error) {
  1057. bytes, err := small_stack_alloc_bytes(s, size, alignment, loc)
  1058. return raw_data(bytes), err
  1059. }
  1060. /*
  1061. Allocate memory from small stack.
  1062. This procedure allocates `size` bytes of memory aligned to a boundary specified
  1063. by `alignment`. The allocated memory is zero-initialized. This procedure
  1064. returns a slice of the allocated memory region.
  1065. */
  1066. @(require_results)
  1067. small_stack_alloc_bytes :: proc(
  1068. s: ^Small_Stack,
  1069. size: int,
  1070. alignment := DEFAULT_ALIGNMENT,
  1071. loc := #caller_location,
  1072. ) -> ([]byte, Allocator_Error) {
  1073. bytes, err := small_stack_alloc_bytes_non_zeroed(s, size, alignment, loc)
  1074. if bytes != nil {
  1075. zero_slice(bytes)
  1076. }
  1077. return bytes, err
  1078. }
  1079. /*
  1080. Allocate memory from small stack.
  1081. This procedure allocates `size` bytes of memory aligned to a boundary specified
  1082. by `alignment`. The allocated memory is not explicitly zero-initialized. This
  1083. procedure returns a pointer to the allocated memory region.
  1084. */
  1085. @(require_results)
  1086. small_stack_alloc_non_zeroed :: proc(
  1087. s: ^Small_Stack,
  1088. size: int,
  1089. alignment := DEFAULT_ALIGNMENT,
  1090. loc := #caller_location,
  1091. ) -> (rawptr, Allocator_Error) {
  1092. bytes, err := small_stack_alloc_bytes_non_zeroed(s, size, alignment, loc)
  1093. return raw_data(bytes), err
  1094. }
  1095. /*
  1096. Allocate memory from small stack.
  1097. This procedure allocates `size` bytes of memory aligned to a boundary specified
  1098. by `alignment`. The allocated memory is not explicitly zero-initialized. This
  1099. procedure returns a slice of the allocated memory region.
  1100. */
  1101. @(require_results)
  1102. small_stack_alloc_bytes_non_zeroed :: proc(
  1103. s: ^Small_Stack,
  1104. size: int,
  1105. alignment := DEFAULT_ALIGNMENT,
  1106. loc := #caller_location,
  1107. ) -> ([]byte, Allocator_Error) {
  1108. if s.data == nil {
  1109. panic("Small stack is not initialized", loc)
  1110. }
  1111. alignment := alignment
  1112. alignment = clamp(alignment, 1, 8*size_of(Stack_Allocation_Header{}.padding)/2)
  1113. curr_addr := uintptr(raw_data(s.data)) + uintptr(s.offset)
  1114. padding := calc_padding_with_header(curr_addr, uintptr(alignment), size_of(Small_Stack_Allocation_Header))
  1115. if s.offset + padding + size > len(s.data) {
  1116. return nil, .Out_Of_Memory
  1117. }
  1118. s.offset += padding
  1119. next_addr := curr_addr + uintptr(padding)
  1120. header := (^Small_Stack_Allocation_Header)(next_addr - size_of(Small_Stack_Allocation_Header))
  1121. header.padding = auto_cast padding
  1122. s.offset += size
  1123. s.peak_used = max(s.peak_used, s.offset)
  1124. return byte_slice(rawptr(next_addr), size), nil
  1125. }
  1126. /*
  1127. Allocate memory from small stack.
  1128. This procedure allocates `size` bytes of memory aligned to a boundary specified
  1129. by `alignment`. The allocated memory is not explicitly zero-initialized. This
  1130. procedure returns a slice of the allocated memory region.
  1131. */
  1132. small_stack_free :: proc(
  1133. s: ^Small_Stack,
  1134. old_memory: rawptr,
  1135. loc := #caller_location,
  1136. ) -> Allocator_Error {
  1137. if s.data == nil {
  1138. panic("Small stack is not initialized", loc)
  1139. }
  1140. if old_memory == nil {
  1141. return nil
  1142. }
  1143. start := uintptr(raw_data(s.data))
  1144. end := start + uintptr(len(s.data))
  1145. curr_addr := uintptr(old_memory)
  1146. if !(start <= curr_addr && curr_addr < end) {
  1147. // panic("Out of bounds memory address passed to stack allocator (free)");
  1148. return .Invalid_Pointer
  1149. }
  1150. if curr_addr >= start+uintptr(s.offset) {
  1151. // NOTE(bill): Allow double frees
  1152. return nil
  1153. }
  1154. header := (^Small_Stack_Allocation_Header)(curr_addr - size_of(Small_Stack_Allocation_Header))
  1155. old_offset := int(curr_addr - uintptr(header.padding) - uintptr(raw_data(s.data)))
  1156. s.offset = old_offset
  1157. return nil
  1158. }
  1159. /*
  1160. Free all memory to small stack.
  1161. */
  1162. small_stack_free_all :: proc(s: ^Small_Stack) {
  1163. s.offset = 0
  1164. }
  1165. /*
  1166. Resize an allocation.
  1167. This procedure resizes a memory region, defined by its location, `old_memory`,
  1168. and its size, `old_size` to have a size `size` and alignment `alignment`. The
  1169. newly allocated memory, if any is zero-initialized.
  1170. If `old_memory` is `nil`, this procedure acts just like `small_stack_alloc()`,
  1171. allocating a memory region `size` bytes in size, aligned on a boundary specified
  1172. by `alignment`.
  1173. If `size` is 0, this procedure acts just like `small_stack_free()`, freeing the
  1174. memory region located at an address specified by `old_memory`.
  1175. This procedure returns the pointer to the resized memory region.
  1176. */
  1177. @(require_results)
  1178. small_stack_resize :: proc(
  1179. s: ^Small_Stack,
  1180. old_memory: rawptr,
  1181. old_size: int,
  1182. size: int,
  1183. alignment := DEFAULT_ALIGNMENT,
  1184. loc := #caller_location,
  1185. ) -> (rawptr, Allocator_Error) {
  1186. bytes, err := small_stack_resize_bytes(s, byte_slice(old_memory, old_size), size, alignment, loc)
  1187. return raw_data(bytes), err
  1188. }
  1189. /*
  1190. Resize an allocation.
  1191. This procedure resizes a memory region, specified by the `old_data` parameter
  1192. to have a size `size` and alignment `alignment`. The newly allocated memory,
  1193. if any is zero-initialized.
  1194. If `old_memory` is `nil`, this procedure acts just like `small_stack_alloc()`,
  1195. allocating a memory region `size` bytes in size, aligned on a boundary specified
  1196. by `alignment`.
  1197. If `size` is 0, this procedure acts just like `small_stack_free()`, freeing the
  1198. memory region located at an address specified by `old_memory`.
  1199. This procedure returns the slice of the resized memory region.
  1200. */
  1201. @(require_results)
  1202. small_stack_resize_bytes :: proc(
  1203. s: ^Small_Stack,
  1204. old_data: []byte,
  1205. size: int,
  1206. alignment := DEFAULT_ALIGNMENT,
  1207. loc := #caller_location,
  1208. ) -> ([]byte, Allocator_Error) {
  1209. bytes, err := small_stack_resize_bytes_non_zeroed(s, old_data, size, alignment, loc)
  1210. if bytes != nil {
  1211. if old_data == nil {
  1212. zero_slice(bytes)
  1213. } else if size > len(old_data) {
  1214. zero_slice(bytes[len(old_data):])
  1215. }
  1216. }
  1217. return bytes, err
  1218. }
  1219. /*
  1220. Resize an allocation without zero-initialization.
  1221. This procedure resizes a memory region, defined by its location, `old_memory`,
  1222. and its size, `old_size` to have a size `size` and alignment `alignment`. The
  1223. newly allocated memory, if any is not explicitly zero-initialized.
  1224. If `old_memory` is `nil`, this procedure acts just like `small_stack_alloc()`,
  1225. allocating a memory region `size` bytes in size, aligned on a boundary specified
  1226. by `alignment`.
  1227. If `size` is 0, this procedure acts just like `small_stack_free()`, freeing the
  1228. memory region located at an address specified by `old_memory`.
  1229. This procedure returns the pointer to the resized memory region.
  1230. */
  1231. @(require_results)
  1232. small_stack_resize_non_zeroed :: proc(
  1233. s: ^Small_Stack,
  1234. old_memory: rawptr,
  1235. old_size: int,
  1236. size: int,
  1237. alignment := DEFAULT_ALIGNMENT,
  1238. loc := #caller_location,
  1239. ) -> (rawptr, Allocator_Error) {
  1240. bytes, err := small_stack_resize_bytes_non_zeroed(s, byte_slice(old_memory, old_size), size, alignment, loc)
  1241. return raw_data(bytes), err
  1242. }
  1243. /*
  1244. Resize an allocation without zero-initialization.
  1245. This procedure resizes a memory region, specified by the `old_data` parameter
  1246. to have a size `size` and alignment `alignment`. The newly allocated memory,
  1247. if any is not explicitly zero-initialized.
  1248. If `old_memory` is `nil`, this procedure acts just like `small_stack_alloc()`,
  1249. allocating a memory region `size` bytes in size, aligned on a boundary specified
  1250. by `alignment`.
  1251. If `size` is 0, this procedure acts just like `small_stack_free()`, freeing the
  1252. memory region located at an address specified by `old_memory`.
  1253. This procedure returns the slice of the resized memory region.
  1254. */
  1255. @(require_results)
  1256. small_stack_resize_bytes_non_zeroed :: proc(
  1257. s: ^Small_Stack,
  1258. old_data: []byte,
  1259. size: int,
  1260. alignment := DEFAULT_ALIGNMENT,
  1261. loc := #caller_location,
  1262. ) -> ([]byte, Allocator_Error) {
  1263. if s.data == nil {
  1264. panic("Small stack is not initialized", loc)
  1265. }
  1266. old_memory := raw_data(old_data)
  1267. old_size := len(old_data)
  1268. alignment := alignment
  1269. alignment = clamp(alignment, 1, 8*size_of(Stack_Allocation_Header{}.padding)/2)
  1270. if old_memory == nil {
  1271. return small_stack_alloc_bytes_non_zeroed(s, size, alignment, loc)
  1272. }
  1273. if size == 0 {
  1274. return nil, nil
  1275. }
  1276. start := uintptr(raw_data(s.data))
  1277. end := start + uintptr(len(s.data))
  1278. curr_addr := uintptr(old_memory)
  1279. if !(start <= curr_addr && curr_addr < end) {
  1280. // panic("Out of bounds memory address passed to stack allocator (resize)");
  1281. return nil, .Invalid_Pointer
  1282. }
  1283. if curr_addr >= start+uintptr(s.offset) {
  1284. // NOTE(bill): Treat as a double free
  1285. return nil, nil
  1286. }
  1287. if old_size == size {
  1288. return byte_slice(old_memory, size), nil
  1289. }
  1290. data, err := small_stack_alloc_bytes_non_zeroed(s, size, alignment, loc)
  1291. if err == nil {
  1292. runtime.copy(data, byte_slice(old_memory, old_size))
  1293. }
  1294. return data, err
  1295. }
  1296. small_stack_allocator_proc :: proc(
  1297. allocator_data: rawptr,
  1298. mode: Allocator_Mode,
  1299. size, alignment: int,
  1300. old_memory: rawptr,
  1301. old_size: int,
  1302. loc := #caller_location,
  1303. ) -> ([]byte, Allocator_Error) {
  1304. s := cast(^Small_Stack)allocator_data
  1305. if s.data == nil {
  1306. return nil, .Invalid_Argument
  1307. }
  1308. switch mode {
  1309. case .Alloc:
  1310. return small_stack_alloc_bytes(s, size, alignment, loc)
  1311. case .Alloc_Non_Zeroed:
  1312. return small_stack_alloc_bytes_non_zeroed(s, size, alignment, loc)
  1313. case .Free:
  1314. return nil, small_stack_free(s, old_memory, loc)
  1315. case .Free_All:
  1316. small_stack_free_all(s)
  1317. case .Resize:
  1318. return small_stack_resize_bytes(s, byte_slice(old_memory, old_size), size, alignment, loc)
  1319. case .Resize_Non_Zeroed:
  1320. return small_stack_resize_bytes_non_zeroed(s, byte_slice(old_memory, old_size), size, alignment, loc)
  1321. case .Query_Features:
  1322. set := (^Allocator_Mode_Set)(old_memory)
  1323. if set != nil {
  1324. set^ = {.Alloc, .Alloc_Non_Zeroed, .Free, .Free_All, .Resize, .Resize_Non_Zeroed, .Query_Features}
  1325. }
  1326. return nil, nil
  1327. case .Query_Info:
  1328. return nil, .Mode_Not_Implemented
  1329. }
  1330. return nil, nil
  1331. }
/* Preserved for compatibility: the old "dynamic pool" API is an alias of the dynamic arena. */
Dynamic_Pool :: Dynamic_Arena
DYNAMIC_POOL_BLOCK_SIZE_DEFAULT :: DYNAMIC_ARENA_BLOCK_SIZE_DEFAULT
DYNAMIC_POOL_OUT_OF_BAND_SIZE_DEFAULT :: DYNAMIC_ARENA_OUT_OF_BAND_SIZE_DEFAULT
dynamic_pool_allocator_proc :: dynamic_arena_allocator_proc
dynamic_pool_free_all :: dynamic_arena_free_all
dynamic_pool_reset :: dynamic_arena_reset
dynamic_pool_alloc_bytes :: dynamic_arena_alloc_bytes
dynamic_pool_alloc :: dynamic_arena_alloc
dynamic_pool_init :: dynamic_arena_init
dynamic_pool_allocator :: dynamic_arena_allocator
dynamic_pool_destroy :: dynamic_arena_destroy
  1344. /*
  1345. Default block size for dynamic arena.
  1346. */
  1347. DYNAMIC_ARENA_BLOCK_SIZE_DEFAULT :: 65536
  1348. /*
  1349. Default out-band size of the dynamic arena.
  1350. */
  1351. DYNAMIC_ARENA_OUT_OF_BAND_SIZE_DEFAULT :: 6554
  1352. /*
  1353. Dynamic arena allocator data.
  1354. */
  1355. Dynamic_Arena :: struct {
  1356. block_size: int,
  1357. out_band_size: int,
  1358. alignment: int,
  1359. unused_blocks: [dynamic]rawptr,
  1360. used_blocks: [dynamic]rawptr,
  1361. out_band_allocations: [dynamic]rawptr,
  1362. current_block: rawptr,
  1363. current_pos: rawptr,
  1364. bytes_left: int,
  1365. block_allocator: Allocator,
  1366. }
  1367. /*
  1368. Initialize a dynamic arena.
  1369. This procedure initializes a dynamic arena. The specified `block_allocator`
  1370. will be used to allocate arena blocks, and `array_allocator` to allocate
  1371. arrays of blocks and out-band blocks. The blocks have the default size of
  1372. `block_size` and out-band threshold will be `out_band_size`. All allocations
  1373. will be aligned to a boundary specified by `alignment`.
  1374. */
  1375. dynamic_arena_init :: proc(
  1376. pool: ^Dynamic_Arena,
  1377. block_allocator := context.allocator,
  1378. array_allocator := context.allocator,
  1379. block_size := DYNAMIC_ARENA_BLOCK_SIZE_DEFAULT,
  1380. out_band_size := DYNAMIC_ARENA_OUT_OF_BAND_SIZE_DEFAULT,
  1381. alignment := DEFAULT_ALIGNMENT,
  1382. ) {
  1383. pool.block_size = block_size
  1384. pool.out_band_size = out_band_size
  1385. pool.alignment = alignment
  1386. pool.block_allocator = block_allocator
  1387. pool.out_band_allocations.allocator = array_allocator
  1388. pool.unused_blocks.allocator = array_allocator
  1389. pool.used_blocks.allocator = array_allocator
  1390. }
  1391. /*
  1392. Dynamic arena allocator.
  1393. The dynamic arena allocator uses blocks of a specific size, allocated on-demand
  1394. using the block allocator. This allocator acts similarly to arena. All
  1395. allocations in a block happen contiguously, from start to end. If an allocation
  1396. does not fit into the remaining space of the block, and its size is smaller
  1397. than the specified out-band size, a new block is allocated using the
  1398. `block_allocator` and the allocation is performed from a newly-allocated block.
  1399. If an allocation has bigger size than the specified out-band size, a new block
  1400. is allocated such that the allocation fits into this new block. This is referred
  1401. to as an *out-band allocation*. The out-band blocks are kept separately from
  1402. normal blocks.
  1403. Just like arena, the dynamic arena does not support freeing of individual
  1404. objects.
  1405. */
  1406. @(require_results)
  1407. dynamic_arena_allocator :: proc(a: ^Dynamic_Arena) -> Allocator {
  1408. return Allocator{
  1409. procedure = dynamic_arena_allocator_proc,
  1410. data = a,
  1411. }
  1412. }
  1413. /*
  1414. Destroy a dynamic arena.
  1415. This procedure frees all allocations, made on a dynamic arena, including the
  1416. unused blocks, as well as the arrays for storing blocks.
  1417. */
  1418. dynamic_arena_destroy :: proc(a: ^Dynamic_Arena) {
  1419. dynamic_arena_free_all(a)
  1420. delete(a.unused_blocks)
  1421. delete(a.used_blocks)
  1422. delete(a.out_band_allocations)
  1423. zero(a, size_of(a^))
  1424. }
  1425. @(private="file")
  1426. _dynamic_arena_cycle_new_block :: proc(a: ^Dynamic_Arena, loc := #caller_location) -> (err: Allocator_Error) {
  1427. if a.block_allocator.procedure == nil {
  1428. panic("You must call arena_init on a Pool before using it", loc)
  1429. }
  1430. if a.current_block != nil {
  1431. append(&a.used_blocks, a.current_block, loc=loc)
  1432. }
  1433. new_block: rawptr
  1434. if len(a.unused_blocks) > 0 {
  1435. new_block = pop(&a.unused_blocks)
  1436. } else {
  1437. data: []byte
  1438. data, err = a.block_allocator.procedure(
  1439. a.block_allocator.data,
  1440. Allocator_Mode.Alloc,
  1441. a.block_size,
  1442. a.alignment,
  1443. nil,
  1444. 0,
  1445. )
  1446. new_block = raw_data(data)
  1447. }
  1448. a.bytes_left = a.block_size
  1449. a.current_pos = new_block
  1450. a.current_block = new_block
  1451. return
  1452. }
  1453. /*
  1454. Allocate memory from a dynamic arena.
  1455. This procedure allocates `size` bytes of memory aligned on a boundary specified
  1456. by `alignment` from a dynamic arena `a`. The allocated memory is
  1457. zero-initialized. This procedure returns a pointer to the newly allocated memory
  1458. region.
  1459. */
  1460. @(private, require_results)
  1461. dynamic_arena_alloc :: proc(a: ^Dynamic_Arena, size: int, loc := #caller_location) -> (rawptr, Allocator_Error) {
  1462. data, err := dynamic_arena_alloc_bytes(a, size, loc)
  1463. return raw_data(data), err
  1464. }
  1465. /*
  1466. Allocate memory from a dynamic arena.
  1467. This procedure allocates `size` bytes of memory aligned on a boundary specified
  1468. by `alignment` from a dynamic arena `a`. The allocated memory is
  1469. zero-initialized. This procedure returns a slice of the newly allocated memory
  1470. region.
  1471. */
  1472. @(require_results)
  1473. dynamic_arena_alloc_bytes :: proc(a: ^Dynamic_Arena, size: int, loc := #caller_location) -> ([]byte, Allocator_Error) {
  1474. bytes, err := dynamic_arena_alloc_bytes_non_zeroed(a, size, loc)
  1475. if bytes != nil {
  1476. zero_slice(bytes)
  1477. }
  1478. return bytes, err
  1479. }
  1480. /*
  1481. Allocate non-initialized memory from a dynamic arena.
  1482. This procedure allocates `size` bytes of memory aligned on a boundary specified
  1483. by `alignment` from a dynamic arena `a`. The allocated memory is not explicitly
  1484. zero-initialized. This procedure returns a pointer to the newly allocated
  1485. memory region.
  1486. */
  1487. @(require_results)
  1488. dynamic_arena_alloc_non_zeroed :: proc(a: ^Dynamic_Arena, size: int, loc := #caller_location) -> (rawptr, Allocator_Error) {
  1489. data, err := dynamic_arena_alloc_bytes_non_zeroed(a, size, loc)
  1490. return raw_data(data), err
  1491. }
  1492. /*
  1493. Allocate non-initialized memory from a dynamic arena.
  1494. This procedure allocates `size` bytes of memory aligned on a boundary specified
  1495. by `alignment` from a dynamic arena `a`. The allocated memory is not explicitly
  1496. zero-initialized. This procedure returns a slice of the newly allocated
  1497. memory region.
  1498. */
  1499. @(require_results)
  1500. dynamic_arena_alloc_bytes_non_zeroed :: proc(a: ^Dynamic_Arena, size: int, loc := #caller_location) -> ([]byte, Allocator_Error) {
  1501. n := align_formula(size, a.alignment)
  1502. if n > a.block_size {
  1503. return nil, .Invalid_Argument
  1504. }
  1505. if n >= a.out_band_size {
  1506. assert(a.block_allocator.procedure != nil, "Backing block allocator must be initialized", loc=loc)
  1507. memory, err := alloc_bytes_non_zeroed(a.block_size, a.alignment, a.block_allocator, loc)
  1508. if memory != nil {
  1509. append(&a.out_band_allocations, raw_data(memory), loc = loc)
  1510. }
  1511. return memory, err
  1512. }
  1513. if a.bytes_left < n {
  1514. err := _dynamic_arena_cycle_new_block(a, loc)
  1515. if err != nil {
  1516. return nil, err
  1517. }
  1518. if a.current_block == nil {
  1519. return nil, .Out_Of_Memory
  1520. }
  1521. }
  1522. memory := a.current_pos
  1523. a.current_pos = ([^]byte)(a.current_pos)[n:]
  1524. a.bytes_left -= n
  1525. return ([^]byte)(memory)[:size], nil
  1526. }
  1527. /*
  1528. Reset the dynamic arena.
  1529. This procedure frees all the allocations, owned by the dynamic arena, excluding
  1530. the unused blocks.
  1531. */
  1532. dynamic_arena_reset :: proc(a: ^Dynamic_Arena, loc := #caller_location) {
  1533. if a.current_block != nil {
  1534. append(&a.unused_blocks, a.current_block, loc=loc)
  1535. a.current_block = nil
  1536. }
  1537. for block in a.used_blocks {
  1538. append(&a.unused_blocks, block, loc=loc)
  1539. }
  1540. clear(&a.used_blocks)
  1541. for allocation in a.out_band_allocations {
  1542. free(allocation, a.block_allocator, loc=loc)
  1543. }
  1544. clear(&a.out_band_allocations)
  1545. a.bytes_left = 0 // Make new allocations call `_dynamic_arena_cycle_new_block` again.
  1546. }
  1547. /*
  1548. Free all memory from a dynamic arena.
  1549. This procedure frees all the allocations, owned by the dynamic arena, including
  1550. the unused blocks.
  1551. */
  1552. dynamic_arena_free_all :: proc(a: ^Dynamic_Arena, loc := #caller_location) {
  1553. dynamic_arena_reset(a)
  1554. for block in a.unused_blocks {
  1555. free(block, a.block_allocator, loc)
  1556. }
  1557. clear(&a.unused_blocks)
  1558. }
  1559. /*
  1560. Resize an allocation.
  1561. This procedure resizes a memory region, defined by its location, `old_memory`,
  1562. and its size, `old_size` to have a size `size` and alignment `alignment`. The
  1563. newly allocated memory, if any is zero-initialized.
  1564. If `old_memory` is `nil`, this procedure acts just like `dynamic_arena_alloc()`,
  1565. allocating a memory region `size` bytes in size, aligned on a boundary specified
  1566. by `alignment`.
  1567. If `size` is 0, this procedure acts just like `dynamic_arena_free()`, freeing
  1568. the memory region located at an address specified by `old_memory`.
  1569. This procedure returns the pointer to the resized memory region.
  1570. */
  1571. @(require_results)
  1572. dynamic_arena_resize :: proc(
  1573. a: ^Dynamic_Arena,
  1574. old_memory: rawptr,
  1575. old_size: int,
  1576. size: int,
  1577. loc := #caller_location,
  1578. ) -> (rawptr, Allocator_Error) {
  1579. bytes, err := dynamic_arena_resize_bytes(a, byte_slice(old_memory, old_size), size, loc)
  1580. return raw_data(bytes), err
  1581. }
  1582. /*
  1583. Resize an allocation.
  1584. This procedure resizes a memory region, specified by `old_data`, to have a size
  1585. `size` and alignment `alignment`. The newly allocated memory, if any is
  1586. zero-initialized.
  1587. If `old_memory` is `nil`, this procedure acts just like `dynamic_arena_alloc()`,
  1588. allocating a memory region `size` bytes in size, aligned on a boundary specified
  1589. by `alignment`.
  1590. If `size` is 0, this procedure acts just like `dynamic_arena_free()`, freeing the
  1591. memory region located at an address specified by `old_memory`.
  1592. This procedure returns the slice of the resized memory region.
  1593. */
  1594. @(require_results)
  1595. dynamic_arena_resize_bytes :: proc(
  1596. a: ^Dynamic_Arena,
  1597. old_data: []byte,
  1598. size: int,
  1599. loc := #caller_location,
  1600. ) -> ([]byte, Allocator_Error) {
  1601. bytes, err := dynamic_arena_resize_bytes_non_zeroed(a, old_data, size, loc)
  1602. if bytes != nil {
  1603. if old_data == nil {
  1604. zero_slice(bytes)
  1605. } else if size > len(old_data) {
  1606. zero_slice(bytes[len(old_data):])
  1607. }
  1608. }
  1609. return bytes, err
  1610. }
  1611. /*
  1612. Resize an allocation without zero-initialization.
  1613. This procedure resizes a memory region, defined by its location, `old_memory`,
  1614. and its size, `old_size` to have a size `size` and alignment `alignment`. The
  1615. newly allocated memory, if any is not explicitly zero-initialized.
  1616. If `old_memory` is `nil`, this procedure acts just like `dynamic_arena_alloc()`,
  1617. allocating a memory region `size` bytes in size, aligned on a boundary specified
  1618. by `alignment`.
  1619. If `size` is 0, this procedure acts just like `dynamic_arena_free()`, freeing the
  1620. memory region located at an address specified by `old_memory`.
  1621. This procedure returns the pointer to the resized memory region.
  1622. */
  1623. @(require_results)
  1624. dynamic_arena_resize_non_zeroed :: proc(
  1625. a: ^Dynamic_Arena,
  1626. old_memory: rawptr,
  1627. old_size: int,
  1628. size: int,
  1629. loc := #caller_location,
  1630. ) -> (rawptr, Allocator_Error) {
  1631. bytes, err := dynamic_arena_resize_bytes_non_zeroed(a, byte_slice(old_memory, old_size), size, loc)
  1632. return raw_data(bytes), err
  1633. }
  1634. /*
  1635. Resize an allocation.
  1636. This procedure resizes a memory region, specified by `old_data`, to have a size
  1637. `size` and alignment `alignment`. The newly allocated memory, if any is not
  1638. explicitly zero-initialized.
  1639. If `old_memory` is `nil`, this procedure acts just like `dynamic_arena_alloc()`,
  1640. allocating a memory region `size` bytes in size, aligned on a boundary specified
  1641. by `alignment`.
  1642. If `size` is 0, this procedure acts just like `dynamic_arena_free()`, freeing
  1643. the memory region located at an address specified by `old_memory`.
  1644. This procedure returns the slice of the resized memory region.
  1645. */
  1646. @(require_results)
  1647. dynamic_arena_resize_bytes_non_zeroed :: proc(
  1648. a: ^Dynamic_Arena,
  1649. old_data: []byte,
  1650. size: int,
  1651. loc := #caller_location,
  1652. ) -> ([]byte, Allocator_Error) {
  1653. old_memory := raw_data(old_data)
  1654. old_size := len(old_data)
  1655. if old_size >= size {
  1656. return byte_slice(old_memory, size), nil
  1657. }
  1658. data, err := dynamic_arena_alloc_bytes_non_zeroed(a, size, loc)
  1659. if err == nil {
  1660. runtime.copy(data, byte_slice(old_memory, old_size))
  1661. }
  1662. return data, err
  1663. }
// Allocator procedure for `Dynamic_Arena`; dispatches each `Allocator_Mode`
// to the corresponding `dynamic_arena_*` procedure.
dynamic_arena_allocator_proc :: proc(
	allocator_data: rawptr,
	mode: Allocator_Mode,
	size: int,
	alignment: int,
	old_memory: rawptr,
	old_size: int,
	loc := #caller_location,
) -> ([]byte, Allocator_Error) {
	arena := (^Dynamic_Arena)(allocator_data)
	switch mode {
	case .Alloc:
		return dynamic_arena_alloc_bytes(arena, size, loc)
	case .Alloc_Non_Zeroed:
		return dynamic_arena_alloc_bytes_non_zeroed(arena, size, loc)
	case .Free:
		// Arena-style allocators do not support freeing individual objects.
		return nil, .Mode_Not_Implemented
	case .Free_All:
		dynamic_arena_free_all(arena, loc)
	case .Resize:
		return dynamic_arena_resize_bytes(arena, byte_slice(old_memory, old_size), size, loc)
	case .Resize_Non_Zeroed:
		return dynamic_arena_resize_bytes_non_zeroed(arena, byte_slice(old_memory, old_size), size, loc)
	case .Query_Features:
		set := (^Allocator_Mode_Set)(old_memory)
		if set != nil {
			set^ = {.Alloc, .Alloc_Non_Zeroed, .Free_All, .Resize, .Resize_Non_Zeroed, .Query_Features, .Query_Info}
		}
		return nil, nil
	case .Query_Info:
		info := (^Allocator_Query_Info)(old_memory)
		if info != nil && info.pointer != nil {
			// NOTE(review): reports the arena-wide block size/alignment, not
			// the queried allocation's own size — per-allocation sizes are
			// not tracked by this arena.
			info.size = arena.block_size
			info.alignment = arena.alignment
			return byte_slice(info, size_of(info^)), nil
		}
		return nil, nil
	}
	return nil, nil
}
  1704. /*
  1705. Header of the buddy block.
  1706. */
  1707. Buddy_Block :: struct #align(align_of(uint)) {
  1708. size: uint,
  1709. is_free: bool,
  1710. }
  1711. /*
  1712. Obtain the next buddy block.
  1713. */
  1714. @(require_results)
  1715. buddy_block_next :: proc(block: ^Buddy_Block) -> ^Buddy_Block {
  1716. return (^Buddy_Block)(([^]byte)(block)[block.size:])
  1717. }
  1718. /*
  1719. Split the block into two, by truncating the given block to a given size.
  1720. */
  1721. @(require_results)
  1722. buddy_block_split :: proc(block: ^Buddy_Block, size: uint) -> ^Buddy_Block {
  1723. block := block
  1724. if block != nil && size != 0 {
  1725. // Recursive Split
  1726. for size < block.size {
  1727. sz := block.size >> 1
  1728. block.size = sz
  1729. block = buddy_block_next(block)
  1730. block.size = sz
  1731. block.is_free = true
  1732. }
  1733. if size <= block.size {
  1734. return block
  1735. }
  1736. }
  1737. // Block cannot fit the requested allocation size
  1738. return nil
  1739. }
  1740. /*
  1741. Coalesce contiguous blocks in a range of blocks into one.
  1742. */
  1743. buddy_block_coalescence :: proc(head, tail: ^Buddy_Block) {
  1744. for {
  1745. // Keep looping until there are no more buddies to coalesce
  1746. block := head
  1747. buddy := buddy_block_next(block)
  1748. no_coalescence := true
  1749. for block < tail && buddy < tail { // make sure the buddies are within the range
  1750. if block.is_free && buddy.is_free && block.size == buddy.size {
  1751. // Coalesce buddies into one
  1752. block.size <<= 1
  1753. block = buddy_block_next(block)
  1754. if block < tail {
  1755. buddy = buddy_block_next(block)
  1756. no_coalescence = false
  1757. }
  1758. } else if block.size < buddy.size {
  1759. // The buddy block is split into smaller blocks
  1760. block = buddy
  1761. buddy = buddy_block_next(buddy)
  1762. } else {
  1763. block = buddy_block_next(buddy)
  1764. if block < tail {
  1765. // Leave the buddy block for the next iteration
  1766. buddy = buddy_block_next(block)
  1767. }
  1768. }
  1769. }
  1770. if no_coalescence {
  1771. return
  1772. }
  1773. }
  1774. }
  1775. /*
  1776. Find the best block for storing a given size in a range of blocks.
  1777. */
  1778. @(require_results)
  1779. buddy_block_find_best :: proc(head, tail: ^Buddy_Block, size: uint) -> ^Buddy_Block {
  1780. assert(size != 0)
  1781. best_block: ^Buddy_Block
  1782. block := head // left
  1783. buddy := buddy_block_next(block) // right
  1784. // The entire memory section between head and tail is free,
  1785. // just call 'buddy_block_split' to get the allocation
  1786. if buddy == tail && block.is_free {
  1787. return buddy_block_split(block, size)
  1788. }
  1789. // Find the block which is the 'best_block' to requested allocation sized
  1790. for block < tail && buddy < tail { // make sure the buddies are within the range
  1791. // If both buddies are free, coalesce them together
  1792. // NOTE: this is an optimization to reduce fragmentation
  1793. // this could be completely ignored
  1794. if block.is_free && buddy.is_free && block.size == buddy.size {
  1795. block.size <<= 1
  1796. if size <= block.size && (best_block == nil || block.size <= best_block.size) {
  1797. best_block = block
  1798. }
  1799. block = buddy_block_next(buddy)
  1800. if block < tail {
  1801. // Delay the buddy block for the next iteration
  1802. buddy = buddy_block_next(block)
  1803. }
  1804. continue
  1805. }
  1806. if block.is_free && size <= block.size &&
  1807. (best_block == nil || block.size <= best_block.size) {
  1808. best_block = block
  1809. }
  1810. if buddy.is_free && size <= buddy.size &&
  1811. (best_block == nil || buddy.size < best_block.size) {
  1812. // If each buddy are the same size, then it makes more sense
  1813. // to pick the buddy as it "bounces around" less
  1814. best_block = buddy
  1815. }
  1816. if block.size <= buddy.size {
  1817. block = buddy_block_next(buddy)
  1818. if (block < tail) {
  1819. // Delay the buddy block for the next iteration
  1820. buddy = buddy_block_next(block)
  1821. }
  1822. } else {
  1823. // Buddy was split into smaller blocks
  1824. block = buddy
  1825. buddy = buddy_block_next(buddy)
  1826. }
  1827. }
  1828. if best_block != nil {
  1829. // This will handle the case if the 'best_block' is also the perfect fit
  1830. return buddy_block_split(best_block, size)
  1831. }
  1832. // Maybe out of memory
  1833. return nil
  1834. }
  1835. /*
  1836. The buddy allocator data.
  1837. */
  1838. Buddy_Allocator :: struct {
  1839. head: ^Buddy_Block,
  1840. tail: ^Buddy_Block,
  1841. alignment: uint,
  1842. }
  1843. /*
  1844. Buddy allocator.
  1845. The buddy allocator is a type of allocator that splits the backing buffer into
  1846. multiple regions called buddy blocks. Initially, the allocator only has one
  1847. block with the size of the backing buffer. Upon each allocation, the allocator
  1848. finds the smallest block that can fit the size of requested memory region, and
  1849. splits the block according to the allocation size. If no block can be found,
  1850. the contiguous free blocks are coalesced and the search is performed again.
  1851. */
  1852. @(require_results)
  1853. buddy_allocator :: proc(b: ^Buddy_Allocator) -> Allocator {
  1854. return Allocator{
  1855. procedure = buddy_allocator_proc,
  1856. data = b,
  1857. }
  1858. }
  1859. /*
  1860. Initialize the buddy allocator.
  1861. This procedure initializes the buddy allocator `b` with a backing buffer `data`
  1862. and block alignment specified by `alignment`.
  1863. */
  1864. buddy_allocator_init :: proc(b: ^Buddy_Allocator, data: []byte, alignment: uint, loc := #caller_location) {
  1865. assert(data != nil)
  1866. assert(is_power_of_two(uintptr(len(data))), "Size of the backing buffer must be power of two", loc)
  1867. assert(is_power_of_two(uintptr(alignment)), "Alignment must be a power of two", loc)
  1868. alignment := alignment
  1869. if alignment < size_of(Buddy_Block) {
  1870. alignment = size_of(Buddy_Block)
  1871. }
  1872. ptr := raw_data(data)
  1873. assert(uintptr(ptr) % uintptr(alignment) == 0, "data is not aligned to minimum alignment", loc)
  1874. b.head = (^Buddy_Block)(ptr)
  1875. b.head.size = len(data)
  1876. b.head.is_free = true
  1877. b.tail = buddy_block_next(b.head)
  1878. b.alignment = alignment
  1879. }
  1880. /*
  1881. Get required block size to fit in the allocation as well as the alignment padding.
  1882. */
  1883. @(require_results)
  1884. buddy_block_size_required :: proc(b: ^Buddy_Allocator, size: uint) -> uint {
  1885. size := size
  1886. actual_size := b.alignment
  1887. size += size_of(Buddy_Block)
  1888. size = align_forward_uint(size, b.alignment)
  1889. for size > actual_size {
  1890. actual_size <<= 1
  1891. }
  1892. return actual_size
  1893. }
  1894. /*
  1895. Allocate memory from a buddy allocator.
  1896. This procedure allocates `size` bytes of memory aligned on a boundary specified
  1897. by `alignment`. The allocated memory region is zero-initialized. This procedure
  1898. returns a pointer to the allocated memory region.
  1899. */
  1900. @(require_results)
  1901. buddy_allocator_alloc :: proc(b: ^Buddy_Allocator, size: uint) -> (rawptr, Allocator_Error) {
  1902. bytes, err := buddy_allocator_alloc_bytes(b, size)
  1903. return raw_data(bytes), err
  1904. }
  1905. /*
  1906. Allocate memory from a buddy allocator.
  1907. This procedure allocates `size` bytes of memory aligned on a boundary specified
  1908. by `alignment`. The allocated memory region is zero-initialized. This procedure
  1909. returns a slice of the allocated memory region.
  1910. */
  1911. @(require_results)
  1912. buddy_allocator_alloc_bytes :: proc(b: ^Buddy_Allocator, size: uint) -> ([]byte, Allocator_Error) {
  1913. bytes, err := buddy_allocator_alloc_bytes_non_zeroed(b, size)
  1914. if bytes != nil {
  1915. zero_slice(bytes)
  1916. }
  1917. return bytes, err
  1918. }
  1919. /*
  1920. Allocate non-initialized memory from a buddy allocator.
  1921. This procedure allocates `size` bytes of memory aligned on a boundary specified
  1922. by `alignment`. The allocated memory region is not explicitly zero-initialized.
  1923. This procedure returns a pointer to the allocated memory region.
  1924. */
  1925. @(require_results)
  1926. buddy_allocator_alloc_non_zeroed :: proc(b: ^Buddy_Allocator, size: uint) -> (rawptr, Allocator_Error) {
  1927. bytes, err := buddy_allocator_alloc_bytes_non_zeroed(b, size)
  1928. return raw_data(bytes), err
  1929. }
  1930. /*
  1931. Allocate non-initialized memory from a buddy allocator.
  1932. This procedure allocates `size` bytes of memory aligned on a boundary specified
  1933. by `alignment`. The allocated memory region is not explicitly zero-initialized.
  1934. This procedure returns a slice of the allocated memory region.
  1935. */
  1936. @(require_results)
  1937. buddy_allocator_alloc_bytes_non_zeroed :: proc(b: ^Buddy_Allocator, size: uint) -> ([]byte, Allocator_Error) {
  1938. if size != 0 {
  1939. actual_size := buddy_block_size_required(b, size)
  1940. found := buddy_block_find_best(b.head, b.tail, actual_size)
  1941. if found != nil {
  1942. // Try to coalesce all the free buddy blocks and then search again
  1943. buddy_block_coalescence(b.head, b.tail)
  1944. found = buddy_block_find_best(b.head, b.tail, actual_size)
  1945. }
  1946. if found == nil {
  1947. return nil, .Out_Of_Memory
  1948. }
  1949. found.is_free = false
  1950. data := ([^]byte)(found)[b.alignment:][:size]
  1951. return data, nil
  1952. }
  1953. return nil, nil
  1954. }
  1955. /*
  1956. Free memory to the buddy allocator.
  1957. This procedure frees the memory region allocated at pointer `ptr`.
  1958. If `ptr` is not the latest allocation and is not a leaked allocation, this
  1959. operation is a no-op.
  1960. */
  1961. buddy_allocator_free :: proc(b: ^Buddy_Allocator, ptr: rawptr) -> Allocator_Error {
  1962. if ptr != nil {
  1963. if !(b.head <= ptr && ptr <= b.tail) {
  1964. return .Invalid_Pointer
  1965. }
  1966. block := (^Buddy_Block)(([^]byte)(ptr)[-b.alignment:])
  1967. block.is_free = true
  1968. buddy_block_coalescence(b.head, b.tail)
  1969. }
  1970. return nil
  1971. }
  1972. /*
  1973. Free all memory to the buddy allocator.
  1974. */
  1975. buddy_allocator_free_all :: proc(b: ^Buddy_Allocator) {
  1976. alignment := b.alignment
  1977. head := ([^]byte)(b.head)
  1978. tail := ([^]byte)(b.tail)
  1979. data := head[:ptr_sub(tail, head)]
  1980. buddy_allocator_init(b, data, alignment)
  1981. }
// Allocator procedure for `Buddy_Allocator`; dispatches each `Allocator_Mode`
// to the corresponding `buddy_allocator_*` procedure.
buddy_allocator_proc :: proc(
	allocator_data: rawptr,
	mode: Allocator_Mode,
	size, alignment: int,
	old_memory: rawptr,
	old_size: int,
	loc := #caller_location,
) -> ([]byte, Allocator_Error) {
	b := (^Buddy_Allocator)(allocator_data)
	switch mode {
	case .Alloc:
		return buddy_allocator_alloc_bytes(b, uint(size))
	case .Alloc_Non_Zeroed:
		return buddy_allocator_alloc_bytes_non_zeroed(b, uint(size))
	case .Resize:
		// No in-place resize support: fall back to alloc + copy + free.
		return default_resize_bytes_align(byte_slice(old_memory, old_size), size, alignment, buddy_allocator(b), loc)
	case .Resize_Non_Zeroed:
		return default_resize_bytes_align_non_zeroed(byte_slice(old_memory, old_size), size, alignment, buddy_allocator(b), loc)
	case .Free:
		return nil, buddy_allocator_free(b, old_memory)
	case .Free_All:
		buddy_allocator_free_all(b)
	case .Query_Features:
		set := (^Allocator_Mode_Set)(old_memory)
		if set != nil {
			set^ = {.Query_Features, .Alloc, .Alloc_Non_Zeroed, .Resize, .Resize_Non_Zeroed, .Free, .Free_All, .Query_Info}
		}
		return nil, nil
	case .Query_Info:
		info := (^Allocator_Query_Info)(old_memory)
		if info != nil && info.pointer != nil {
			ptr := info.pointer
			// Validate that the queried pointer lies in the backing buffer.
			if !(b.head <= ptr && ptr <= b.tail) {
				return nil, .Invalid_Pointer
			}
			// Read the block header stored `alignment` bytes before the pointer.
			block := (^Buddy_Block)(([^]byte)(ptr)[-b.alignment:])
			info.size = int(block.size)
			info.alignment = int(b.alignment)
			return byte_slice(info, size_of(info^)), nil
		}
		return nil, nil
	}
	return nil, nil
}
// An allocator that keeps track of allocation sizes and passes it along to resizes.
// This is useful if you are using a library that needs an equivalent of `realloc` but want to use
// the Odin allocator interface.
//
// You want to wrap your allocator into this one if you are trying to use any allocator that relies
// on the old size to work.
//
// The overhead of this allocator is an extra max(alignment, size_of(Header)) bytes allocated for each allocation, these bytes are
// used to store the size and original pointer.
Compat_Allocator :: struct {
	parent: Allocator, // The wrapped allocator that actually provides the memory.
}
// Initialize `rra` to wrap `allocator` (defaults to `context.allocator`).
compat_allocator_init :: proc(rra: ^Compat_Allocator, allocator := context.allocator) {
	rra.parent = allocator
}
  2041. compat_allocator :: proc(rra: ^Compat_Allocator) -> Allocator {
  2042. return Allocator{
  2043. data = rra,
  2044. procedure = compat_allocator_proc,
  2045. }
  2046. }
// Allocator procedure for `Compat_Allocator`.
// Prepends a `Header` (padded size + the parent's original pointer) to every
// allocation so `.Free` and `.Resize` can hand the parent allocator the exact
// pointer and size it originally returned — emulating `realloc`-style parents
// that require the old size.
compat_allocator_proc :: proc(allocator_data: rawptr, mode: Allocator_Mode,
	size, alignment: int,
	old_memory: rawptr, old_size: int,
	location := #caller_location) -> (data: []byte, err: Allocator_Error) {
	size, old_size := size, old_size
	// Bookkeeping stored immediately before the pointer handed to the caller.
	Header :: struct {
		size: int,
		ptr: rawptr,
	}
	rra := (^Compat_Allocator)(allocator_data)
	switch mode {
	case .Alloc, .Alloc_Non_Zeroed:
		// Reserve `a` front bytes: enough for the header while keeping the
		// user pointer on an `alignment` boundary.
		a := max(alignment, size_of(Header))
		size += a
		assert(size >= 0, "overflow")
		allocation := rra.parent.procedure(rra.parent.data, mode, size, alignment, old_memory, old_size, location) or_return
		#no_bounds_check data = allocation[a:]
		// Stash the padded size and the parent's pointer just before `data`.
		([^]Header)(raw_data(data))[-1] = {
			size = size,
			ptr = raw_data(allocation),
		}
		return
	case .Free:
		// Recover the parent's original pointer/size from the hidden header.
		header := ([^]Header)(old_memory)[-1]
		return rra.parent.procedure(rra.parent.data, mode, size, alignment, header.ptr, header.size, location)
	case .Resize, .Resize_Non_Zeroed:
		header := ([^]Header)(old_memory)[-1]
		a := max(alignment, size_of(header))
		size += a
		assert(size >= 0, "overflow")
		// Resize via the parent's original pointer/size, then re-stash the header.
		allocation := rra.parent.procedure(rra.parent.data, mode, size, alignment, header.ptr, header.size, location) or_return
		#no_bounds_check data = allocation[a:]
		([^]Header)(raw_data(data))[-1] = {
			size = size,
			ptr = raw_data(allocation),
		}
		return
	case .Free_All, .Query_Info, .Query_Features:
		// These modes need no header translation; pass straight through.
		return rra.parent.procedure(rra.parent.data, mode, size, alignment, old_memory, old_size, location)
	case: unreachable()
	}
}