package mem

import "base:intrinsics"
import "base:runtime"

/*
Nil allocator.

The `nil` allocator returns `nil` on every allocation attempt. This type of
allocator can be used in scenarios where memory doesn't need to be allocated,
but an attempt to allocate memory is not an error.
*/
@(require_results)
nil_allocator :: proc() -> Allocator {
	return Allocator{
		procedure = nil_allocator_proc,
		data = nil,
	}
}

nil_allocator_proc :: proc(
	allocator_data: rawptr,
	mode: Allocator_Mode,
	size, alignment: int,
	old_memory: rawptr,
	old_size: int,
	loc := #caller_location,
) -> ([]byte, Allocator_Error) {
	return nil, nil
}
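/*
Example: a minimal sketch (an assumption, not part of the package) showing that
every allocation through the nil allocator yields `nil` without an error:

	import "core:mem"

	example_nil_allocator :: proc() {
		context.allocator = mem.nil_allocator()
		ptr, err := mem.alloc(64)
		assert(ptr == nil && err == nil)
	}
*/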
/*
Panic allocator.

The panic allocator is a type of allocator that panics on any allocation
attempt. This type of allocator can be used in scenarios where memory should
not be allocated, and an attempt to allocate memory is an error.
*/
@(require_results)
panic_allocator :: proc() -> Allocator {
	return Allocator{
		procedure = panic_allocator_proc,
		data = nil,
	}
}

panic_allocator_proc :: proc(
	allocator_data: rawptr,
	mode: Allocator_Mode,
	size, alignment: int,
	old_memory: rawptr,
	old_size: int,
	loc := #caller_location,
) -> ([]byte, Allocator_Error) {
	switch mode {
	case .Alloc:
		if size > 0 {
			panic("mem: panic allocator, .Alloc called", loc=loc)
		}
	case .Alloc_Non_Zeroed:
		if size > 0 {
			panic("mem: panic allocator, .Alloc_Non_Zeroed called", loc=loc)
		}
	case .Resize:
		if size > 0 {
			panic("mem: panic allocator, .Resize called", loc=loc)
		}
	case .Resize_Non_Zeroed:
		if size > 0 {
			panic("mem: panic allocator, .Resize_Non_Zeroed called", loc=loc)
		}
	case .Free:
		if old_memory != nil {
			panic("mem: panic allocator, .Free called", loc=loc)
		}
	case .Free_All:
		panic("mem: panic allocator, .Free_All called", loc=loc)
	case .Query_Features:
		set := (^Allocator_Mode_Set)(old_memory)
		if set != nil {
			set^ = {.Query_Features}
		}
		return nil, nil
	case .Query_Info:
		panic("mem: panic allocator, .Query_Info called", loc=loc)
	}
	return nil, nil
}
/*
Arena allocator data.
*/
Arena :: struct {
	data:       []byte,
	offset:     int,
	peak_used:  int,
	temp_count: int,
}

/*
Arena allocator.

The arena allocator (also known as a linear, bump, or region allocator) is an
allocator that uses a single backing buffer for all of its allocations.

The buffer is used contiguously, from start to end. Each subsequent allocation
occupies the next adjacent region of memory in the buffer. Since the arena
allocator does not keep track of any metadata associated with the allocations
and their locations, it is impossible to free individual allocations.

The arena allocator can be used for temporary allocations in frame-based memory
management, games being one example of such an application. A global arena can
be used for any temporary memory allocations, and at the end of each frame all
temporary allocations are freed. Since no temporary object lives longer than a
frame, no lifetimes are violated.
*/
@(require_results)
arena_allocator :: proc(arena: ^Arena) -> Allocator {
	return Allocator{
		procedure = arena_allocator_proc,
		data = arena,
	}
}
/*
Initialize an arena.

This procedure initializes the arena `a` with the memory region `data` as its
backing buffer.
*/
arena_init :: proc(a: ^Arena, data: []byte) {
	a.data = data
	a.offset = 0
	a.peak_used = 0
	a.temp_count = 0
}

@(deprecated="prefer 'mem.arena_init'")
init_arena :: proc(a: ^Arena, data: []byte) {
	a.data = data
	a.offset = 0
	a.peak_used = 0
	a.temp_count = 0
}
/*
Allocate memory from an arena.

This procedure allocates `size` bytes of memory aligned on a boundary specified
by `alignment` from the arena `a`. The allocated memory is zero-initialized.
This procedure returns a pointer to the newly allocated memory region.
*/
@(require_results)
arena_alloc :: proc(
	a: ^Arena,
	size: int,
	alignment := DEFAULT_ALIGNMENT,
	loc := #caller_location,
) -> (rawptr, Allocator_Error) {
	bytes, err := arena_alloc_bytes(a, size, alignment, loc)
	return raw_data(bytes), err
}

/*
Allocate memory from an arena.

This procedure allocates `size` bytes of memory aligned on a boundary specified
by `alignment` from the arena `a`. The allocated memory is zero-initialized.
This procedure returns a slice of the newly allocated memory region.
*/
@(require_results)
arena_alloc_bytes :: proc(
	a: ^Arena,
	size: int,
	alignment := DEFAULT_ALIGNMENT,
	loc := #caller_location,
) -> ([]byte, Allocator_Error) {
	bytes, err := arena_alloc_bytes_non_zeroed(a, size, alignment, loc)
	if bytes != nil {
		zero_slice(bytes)
	}
	return bytes, err
}

/*
Allocate non-initialized memory from an arena.

This procedure allocates `size` bytes of memory aligned on a boundary specified
by `alignment` from the arena `a`. The allocated memory is not explicitly
zero-initialized. This procedure returns a pointer to the newly allocated
memory region.
*/
@(require_results)
arena_alloc_non_zeroed :: proc(
	a: ^Arena,
	size: int,
	alignment := DEFAULT_ALIGNMENT,
	loc := #caller_location,
) -> (rawptr, Allocator_Error) {
	bytes, err := arena_alloc_bytes_non_zeroed(a, size, alignment, loc)
	return raw_data(bytes), err
}

/*
Allocate non-initialized memory from an arena.

This procedure allocates `size` bytes of memory aligned on a boundary specified
by `alignment` from the arena `a`. The allocated memory is not explicitly
zero-initialized. This procedure returns a slice of the newly allocated
memory region.
*/
@(require_results)
arena_alloc_bytes_non_zeroed :: proc(
	a: ^Arena,
	size: int,
	alignment := DEFAULT_ALIGNMENT,
	loc := #caller_location,
) -> ([]byte, Allocator_Error) {
	if a.data == nil {
		panic("Arena is not initialized", loc)
	}
	#no_bounds_check end := &a.data[a.offset]
	ptr := align_forward(end, uintptr(alignment))
	total_size := size + ptr_sub((^byte)(ptr), (^byte)(end))
	if a.offset + total_size > len(a.data) {
		return nil, .Out_Of_Memory
	}
	a.offset += total_size
	a.peak_used = max(a.peak_used, a.offset)
	return byte_slice(ptr, size), nil
}
/*
Free all memory to an arena.
*/
arena_free_all :: proc(a: ^Arena) {
	a.offset = 0
}
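/*
Example: a minimal lifecycle sketch (an assumption, not part of the package)
showing an arena backed by a fixed buffer and reset once per frame:

	import "core:mem"

	example_arena :: proc() {
		buf: [1024]byte
		arena: mem.Arena
		mem.arena_init(&arena, buf[:])
		for _ in 0..<3 {
			data, err := mem.arena_alloc(&arena, 128)
			assert(err == nil && data != nil)
			// ... use `data` for the duration of the frame ...
			mem.arena_free_all(&arena)
		}
	}
*/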
arena_allocator_proc :: proc(
	allocator_data: rawptr,
	mode: Allocator_Mode,
	size: int,
	alignment: int,
	old_memory: rawptr,
	old_size: int,
	loc := #caller_location,
) -> ([]byte, Allocator_Error) {
	arena := cast(^Arena)allocator_data
	switch mode {
	case .Alloc:
		return arena_alloc_bytes(arena, size, alignment, loc)
	case .Alloc_Non_Zeroed:
		return arena_alloc_bytes_non_zeroed(arena, size, alignment, loc)
	case .Free:
		return nil, .Mode_Not_Implemented
	case .Free_All:
		arena_free_all(arena)
	case .Resize:
		return default_resize_bytes_align(byte_slice(old_memory, old_size), size, alignment, arena_allocator(arena), loc)
	case .Resize_Non_Zeroed:
		return default_resize_bytes_align_non_zeroed(byte_slice(old_memory, old_size), size, alignment, arena_allocator(arena), loc)
	case .Query_Features:
		set := (^Allocator_Mode_Set)(old_memory)
		if set != nil {
			set^ = {.Alloc, .Alloc_Non_Zeroed, .Free_All, .Resize, .Resize_Non_Zeroed, .Query_Features}
		}
		return nil, nil
	case .Query_Info:
		return nil, .Mode_Not_Implemented
	}
	return nil, nil
}
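/*
Example: a sketch (an assumption, not part of the package) of querying the
modes an allocator supports through its `.Query_Features` mode; it assumes the
`mem.query_features` helper exists alongside this file:

	import "core:mem"

	example_query_features :: proc() {
		buf: [64]byte
		arena: mem.Arena
		mem.arena_init(&arena, buf[:])
		feats := mem.query_features(mem.arena_allocator(&arena))
		assert(.Alloc in feats && .Free not_in feats)
	}
*/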
/*
Temporary memory region of an arena.

Temporary memory regions of an arena act as "savepoints" for the arena. When
one is created, subsequent allocations are done inside the temporary memory
region. When `end_arena_temp_memory` is called, the arena is rolled back, and
all of the memory that was allocated inside the region is freed.

Multiple temporary memory regions can exist at the same time for an arena.
*/
Arena_Temp_Memory :: struct {
	arena:       ^Arena,
	prev_offset: int,
}

/*
Start a temporary memory region.

This procedure creates a temporary memory region. After a temporary memory
region is created, all allocations are said to be *inside* the temporary memory
region, until `end_arena_temp_memory` is called.
*/
@(require_results)
begin_arena_temp_memory :: proc(a: ^Arena) -> Arena_Temp_Memory {
	tmp: Arena_Temp_Memory
	tmp.arena = a
	tmp.prev_offset = a.offset
	a.temp_count += 1
	return tmp
}

/*
End a temporary memory region.

This procedure ends the temporary memory region for an arena. All of the
allocations *inside* the temporary memory region will be freed to the arena.
*/
end_arena_temp_memory :: proc(tmp: Arena_Temp_Memory) {
	assert(tmp.arena.offset >= tmp.prev_offset)
	assert(tmp.arena.temp_count > 0)
	tmp.arena.offset = tmp.prev_offset
	tmp.arena.temp_count -= 1
}
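/*
Example: a sketch (an assumption, not part of the package) of using a temporary
memory region as a savepoint that is rolled back on scope exit:

	import "core:mem"

	example_temp_memory :: proc(arena: ^mem.Arena) {
		tmp := mem.begin_arena_temp_memory(arena)
		defer mem.end_arena_temp_memory(tmp)
		// Everything allocated here is freed when `tmp` ends.
		_, _ = mem.arena_alloc(arena, 256)
	}
*/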
/* Preserved for compatibility */
Scratch_Allocator :: Scratch
scratch_allocator_init :: scratch_init
scratch_allocator_destroy :: scratch_destroy

/*
Scratch allocator data.
*/
Scratch :: struct {
	data:               []byte,
	curr_offset:        int,
	prev_allocation:    rawptr,
	backup_allocator:   Allocator,
	leaked_allocations: [dynamic][]byte,
}

/*
Scratch allocator.

The scratch allocator works in a similar way to the `Arena` allocator. The
scratch allocator has a backing buffer that is allocated from contiguously,
from start to end.

Each subsequent allocation occupies the next adjacent region of memory in the
backing buffer. If an allocation doesn't fit into the remaining space of the
backing buffer, it is placed at the start of the buffer and all previous
allocations become invalidated. If the allocation doesn't fit into the backing
buffer as a whole, it is allocated using the backup allocator, and a pointer to
the allocated memory region is appended to the `leaked_allocations` array.

The `leaked_allocations` array is allocated using the backup allocator, which
defaults to the `context` allocator.
*/
@(require_results)
scratch_allocator :: proc(allocator: ^Scratch) -> Allocator {
	return Allocator{
		procedure = scratch_allocator_proc,
		data = allocator,
	}
}
/*
Initialize the scratch allocator.
*/
scratch_init :: proc(s: ^Scratch, size: int, backup_allocator := context.allocator) -> Allocator_Error {
	s.data = make_aligned([]byte, size, 2*align_of(rawptr), backup_allocator) or_return
	s.curr_offset = 0
	s.prev_allocation = nil
	s.backup_allocator = backup_allocator
	s.leaked_allocations.allocator = backup_allocator
	return nil
}

/*
Free all data associated with a scratch allocator.
*/
scratch_destroy :: proc(s: ^Scratch) {
	if s == nil {
		return
	}
	for ptr in s.leaked_allocations {
		free_bytes(ptr, s.backup_allocator)
	}
	delete(s.leaked_allocations)
	delete(s.data, s.backup_allocator)
	s^ = {}
}
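/*
Example: a minimal lifecycle sketch (an assumption, not part of the package)
for the scratch allocator:

	import "core:mem"

	example_scratch :: proc() {
		s: mem.Scratch
		_ = mem.scratch_init(&s, 4096)
		defer mem.scratch_destroy(&s)
		ptr, err := mem.scratch_alloc(&s, 128)
		assert(err == nil && ptr != nil)
		mem.scratch_free_all(&s)
	}
*/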
/*
Allocate memory from the scratch allocator.

This procedure allocates `size` bytes of memory aligned on a boundary specified
by `alignment`. The allocated memory region is zero-initialized. This procedure
returns a pointer to the allocated memory region.
*/
@(require_results)
scratch_alloc :: proc(
	s: ^Scratch,
	size: int,
	alignment := DEFAULT_ALIGNMENT,
	loc := #caller_location,
) -> (rawptr, Allocator_Error) {
	bytes, err := scratch_alloc_bytes(s, size, alignment, loc)
	return raw_data(bytes), err
}

/*
Allocate memory from the scratch allocator.

This procedure allocates `size` bytes of memory aligned on a boundary specified
by `alignment`. The allocated memory region is zero-initialized. This procedure
returns a slice of the allocated memory region.
*/
@(require_results)
scratch_alloc_bytes :: proc(
	s: ^Scratch,
	size: int,
	alignment := DEFAULT_ALIGNMENT,
	loc := #caller_location,
) -> ([]byte, Allocator_Error) {
	bytes, err := scratch_alloc_bytes_non_zeroed(s, size, alignment, loc)
	if bytes != nil {
		zero_slice(bytes)
	}
	return bytes, err
}

/*
Allocate non-initialized memory from the scratch allocator.

This procedure allocates `size` bytes of memory aligned on a boundary specified
by `alignment`. The allocated memory region is not explicitly zero-initialized.
This procedure returns a pointer to the allocated memory region.
*/
@(require_results)
scratch_alloc_non_zeroed :: proc(
	s: ^Scratch,
	size: int,
	alignment := DEFAULT_ALIGNMENT,
	loc := #caller_location,
) -> (rawptr, Allocator_Error) {
	bytes, err := scratch_alloc_bytes_non_zeroed(s, size, alignment, loc)
	return raw_data(bytes), err
}

/*
Allocate non-initialized memory from the scratch allocator.

This procedure allocates `size` bytes of memory aligned on a boundary specified
by `alignment`. The allocated memory region is not explicitly zero-initialized.
This procedure returns a slice of the allocated memory region.
*/
@(require_results)
scratch_alloc_bytes_non_zeroed :: proc(
	s: ^Scratch,
	size: int,
	alignment := DEFAULT_ALIGNMENT,
	loc := #caller_location,
) -> ([]byte, Allocator_Error) {
	if s.data == nil {
		DEFAULT_BACKING_SIZE :: 4 * Megabyte
		if !(context.allocator.procedure != scratch_allocator_proc && context.allocator.data != s) {
			panic("cyclic initialization of the scratch allocator with itself", loc)
		}
		scratch_init(s, DEFAULT_BACKING_SIZE)
	}
	size := size
	size = align_forward_int(size, alignment)
	if size <= len(s.data) {
		offset := uintptr(0)
		if s.curr_offset+size <= len(s.data) {
			offset = uintptr(s.curr_offset)
		} else {
			offset = 0
		}
		start := uintptr(raw_data(s.data))
		ptr := align_forward_uintptr(offset+start, uintptr(alignment))
		s.prev_allocation = rawptr(ptr)
		s.curr_offset = int(offset) + size
		return byte_slice(rawptr(ptr), size), nil
	} else {
		a := s.backup_allocator
		if a.procedure == nil {
			a = context.allocator
			s.backup_allocator = a
		}
		ptr, err := alloc_bytes_non_zeroed(size, alignment, a, loc)
		if err != nil {
			return ptr, err
		}
		if s.leaked_allocations == nil {
			s.leaked_allocations, err = make([dynamic][]byte, a)
		}
		append(&s.leaked_allocations, ptr)
		if logger := context.logger; logger.lowest_level <= .Warning {
			if logger.procedure != nil {
				logger.procedure(logger.data, .Warning, "mem.Scratch resorted to backup_allocator", logger.options, loc)
			}
		}
		return ptr, err
	}
}
/*
Free memory to the scratch allocator.

This procedure frees the memory region allocated at the pointer `ptr`.
If `ptr` is not the latest allocation and not a leaked allocation, this
operation is a no-op.
*/
scratch_free :: proc(s: ^Scratch, ptr: rawptr, loc := #caller_location) -> Allocator_Error {
	if s.data == nil {
		panic("Free on an uninitialized scratch allocator", loc)
	}
	if ptr == nil {
		return nil
	}
	start := uintptr(raw_data(s.data))
	end := start + uintptr(len(s.data))
	old_ptr := uintptr(ptr)
	if s.prev_allocation == ptr {
		s.curr_offset = int(uintptr(s.prev_allocation) - start)
		s.prev_allocation = nil
		return nil
	}
	if start <= old_ptr && old_ptr < end {
		// NOTE(bill): Cannot free this pointer but it is valid
		return nil
	}
	if len(s.leaked_allocations) != 0 {
		for data, i in s.leaked_allocations {
			// Compare against the freed address; the original compared a
			// shadowed variable to itself, which always matched.
			if uintptr(raw_data(data)) == old_ptr {
				free_bytes(data, s.backup_allocator, loc)
				ordered_remove(&s.leaked_allocations, i, loc)
				return nil
			}
		}
	}
	return .Invalid_Pointer
}
/*
Free all memory to the scratch allocator.
*/
scratch_free_all :: proc(s: ^Scratch, loc := #caller_location) {
	s.curr_offset = 0
	s.prev_allocation = nil
	for ptr in s.leaked_allocations {
		free_bytes(ptr, s.backup_allocator, loc)
	}
	clear(&s.leaked_allocations)
}
/*
Resize an allocation.

This procedure resizes a memory region, defined by its location, `old_memory`,
and its size, `old_size`, to have a size `size` and alignment `alignment`. The
newly allocated memory, if any, is zero-initialized.

If `old_memory` is `nil`, this procedure acts just like `scratch_alloc()`,
allocating a memory region `size` bytes in size, aligned on a boundary specified
by `alignment`.

If `size` is 0, this procedure acts just like `scratch_free()`, freeing the
memory region located at the address specified by `old_memory`.

This procedure returns the pointer to the resized memory region.
*/
@(require_results)
scratch_resize :: proc(
	s: ^Scratch,
	old_memory: rawptr,
	old_size: int,
	size: int,
	alignment := DEFAULT_ALIGNMENT,
	loc := #caller_location,
) -> (rawptr, Allocator_Error) {
	bytes, err := scratch_resize_bytes(s, byte_slice(old_memory, old_size), size, alignment, loc)
	return raw_data(bytes), err
}

/*
Resize an allocation.

This procedure resizes a memory region, specified by `old_data`, to have a size
`size` and alignment `alignment`. The newly allocated memory, if any, is
zero-initialized.

If `old_data` is `nil`, this procedure acts just like `scratch_alloc()`,
allocating a memory region `size` bytes in size, aligned on a boundary specified
by `alignment`.

If `size` is 0, this procedure acts just like `scratch_free()`, freeing the
memory region located at the address specified by `old_data`.

This procedure returns the slice of the resized memory region.
*/
@(require_results)
scratch_resize_bytes :: proc(
	s: ^Scratch,
	old_data: []byte,
	size: int,
	alignment := DEFAULT_ALIGNMENT,
	loc := #caller_location,
) -> ([]byte, Allocator_Error) {
	bytes, err := scratch_resize_bytes_non_zeroed(s, old_data, size, alignment, loc)
	if bytes != nil && size > len(old_data) {
		// Zero only the newly grown tail; `bytes[size:]` was always empty.
		zero_slice(bytes[len(old_data):])
	}
	return bytes, err
}
/*
Resize an allocation without zero-initialization.

This procedure resizes a memory region, defined by its location, `old_memory`,
and its size, `old_size`, to have a size `size` and alignment `alignment`. The
newly allocated memory, if any, is not explicitly zero-initialized.

If `old_memory` is `nil`, this procedure acts just like `scratch_alloc()`,
allocating a memory region `size` bytes in size, aligned on a boundary specified
by `alignment`.

If `size` is 0, this procedure acts just like `scratch_free()`, freeing the
memory region located at the address specified by `old_memory`.

This procedure returns the pointer to the resized memory region.
*/
@(require_results)
scratch_resize_non_zeroed :: proc(
	s: ^Scratch,
	old_memory: rawptr,
	old_size: int,
	size: int,
	alignment := DEFAULT_ALIGNMENT,
	loc := #caller_location,
) -> (rawptr, Allocator_Error) {
	bytes, err := scratch_resize_bytes_non_zeroed(s, byte_slice(old_memory, old_size), size, alignment, loc)
	return raw_data(bytes), err
}

/*
Resize an allocation without zero-initialization.

This procedure resizes a memory region, specified by `old_data`, to have a size
`size` and alignment `alignment`. The newly allocated memory, if any, is not
explicitly zero-initialized.

If `old_data` is `nil`, this procedure acts just like `scratch_alloc()`,
allocating a memory region `size` bytes in size, aligned on a boundary specified
by `alignment`.

If `size` is 0, this procedure acts just like `scratch_free()`, freeing the
memory region located at the address specified by `old_data`.

This procedure returns the slice of the resized memory region.
*/
@(require_results)
scratch_resize_bytes_non_zeroed :: proc(
	s: ^Scratch,
	old_data: []byte,
	size: int,
	alignment := DEFAULT_ALIGNMENT,
	loc := #caller_location,
) -> ([]byte, Allocator_Error) {
	old_memory := raw_data(old_data)
	old_size := len(old_data)
	if s.data == nil {
		DEFAULT_BACKING_SIZE :: 4 * Megabyte
		if !(context.allocator.procedure != scratch_allocator_proc && context.allocator.data != s) {
			panic("cyclic initialization of the scratch allocator with itself", loc)
		}
		scratch_init(s, DEFAULT_BACKING_SIZE)
	}
	begin := uintptr(raw_data(s.data))
	end := begin + uintptr(len(s.data))
	old_ptr := uintptr(old_memory)
	if begin <= old_ptr && old_ptr < end && old_ptr+uintptr(size) < end {
		s.curr_offset = int(old_ptr-begin)+size
		return byte_slice(old_memory, size), nil
	}
	data, err := scratch_alloc_bytes_non_zeroed(s, size, alignment, loc)
	if err != nil {
		return data, err
	}
	runtime.copy(data, byte_slice(old_memory, old_size))
	err = scratch_free(s, old_memory, loc)
	return data, err
}
scratch_allocator_proc :: proc(
	allocator_data: rawptr,
	mode: Allocator_Mode,
	size, alignment: int,
	old_memory: rawptr,
	old_size: int,
	loc := #caller_location,
) -> ([]byte, Allocator_Error) {
	s := (^Scratch)(allocator_data)
	size := size
	switch mode {
	case .Alloc:
		return scratch_alloc_bytes(s, size, alignment, loc)
	case .Alloc_Non_Zeroed:
		return scratch_alloc_bytes_non_zeroed(s, size, alignment, loc)
	case .Free:
		return nil, scratch_free(s, old_memory, loc)
	case .Free_All:
		scratch_free_all(s, loc)
	case .Resize:
		return scratch_resize_bytes(s, byte_slice(old_memory, old_size), size, alignment, loc)
	case .Resize_Non_Zeroed:
		return scratch_resize_bytes_non_zeroed(s, byte_slice(old_memory, old_size), size, alignment, loc)
	case .Query_Features:
		set := (^Allocator_Mode_Set)(old_memory)
		if set != nil {
			set^ = {.Alloc, .Alloc_Non_Zeroed, .Free, .Free_All, .Resize, .Resize_Non_Zeroed, .Query_Features}
		}
		return nil, nil
	case .Query_Info:
		return nil, .Mode_Not_Implemented
	}
	return nil, nil
}
/*
Stack allocator data.
*/
Stack :: struct {
	data:        []byte,
	prev_offset: int,
	curr_offset: int,
	peak_used:   int,
}

/*
Header of a stack allocation.
*/
Stack_Allocation_Header :: struct {
	prev_offset: int,
	padding:     int,
}

/*
Stack allocator.

The stack allocator is an allocator that allocates data in the backing buffer
linearly, from start to end. Each subsequent allocation will get the next
adjacent memory region.

Unlike the arena allocator, the stack allocator saves allocation metadata and
enforces a strict freeing order. Only the last allocated element can be freed.
After the last allocated element is freed, the previously allocated element
becomes available for freeing.

The metadata is stored in allocation headers, which are located before the
start of each allocated memory region. Each header records the padding in
front of its allocation and the stack offset at which the allocation begins.
*/
@(require_results)
stack_allocator :: proc(stack: ^Stack) -> Allocator {
	return Allocator{
		procedure = stack_allocator_proc,
		data = stack,
	}
}
/*
Initialize the stack allocator.

This procedure initializes the stack allocator with a backing buffer specified
by the `data` parameter.
*/
stack_init :: proc(s: ^Stack, data: []byte) {
	s.data = data
	s.prev_offset = 0
	s.curr_offset = 0
	s.peak_used = 0
}

@(deprecated="prefer 'mem.stack_init'")
init_stack :: proc(s: ^Stack, data: []byte) {
	s.data = data
	s.prev_offset = 0
	s.curr_offset = 0
	s.peak_used = 0
}
/*
Allocate memory from the stack.

This procedure allocates `size` bytes of memory, aligned to the boundary
specified by `alignment`. The allocated memory is zero-initialized. This
procedure returns the pointer to the allocated memory.
*/
@(require_results)
stack_alloc :: proc(
	s: ^Stack,
	size: int,
	alignment := DEFAULT_ALIGNMENT,
	loc := #caller_location,
) -> (rawptr, Allocator_Error) {
	bytes, err := stack_alloc_bytes(s, size, alignment, loc)
	return raw_data(bytes), err
}

/*
Allocate memory from the stack.

This procedure allocates `size` bytes of memory, aligned to the boundary
specified by `alignment`. The allocated memory is zero-initialized. This
procedure returns the slice of the allocated memory.
*/
@(require_results)
stack_alloc_bytes :: proc(
	s: ^Stack,
	size: int,
	alignment := DEFAULT_ALIGNMENT,
	loc := #caller_location,
) -> ([]byte, Allocator_Error) {
	bytes, err := stack_alloc_bytes_non_zeroed(s, size, alignment, loc)
	if bytes != nil {
		zero_slice(bytes)
	}
	return bytes, err
}

/*
Allocate memory from the stack.

This procedure allocates `size` bytes of memory, aligned to the boundary
specified by `alignment`. The allocated memory is not explicitly
zero-initialized. This procedure returns the pointer to the allocated memory.
*/
@(require_results)
stack_alloc_non_zeroed :: proc(
	s: ^Stack,
	size: int,
	alignment := DEFAULT_ALIGNMENT,
	loc := #caller_location,
) -> (rawptr, Allocator_Error) {
	bytes, err := stack_alloc_bytes_non_zeroed(s, size, alignment, loc)
	return raw_data(bytes), err
}

/*
Allocate memory from the stack.

This procedure allocates `size` bytes of memory, aligned to the boundary
specified by `alignment`. The allocated memory is not explicitly
zero-initialized. This procedure returns the slice of the allocated memory.
*/
@(require_results)
stack_alloc_bytes_non_zeroed :: proc(
	s: ^Stack,
	size: int,
	alignment := DEFAULT_ALIGNMENT,
	loc := #caller_location,
) -> ([]byte, Allocator_Error) {
	if s.data == nil {
		panic("Stack allocation on an uninitialized stack allocator", loc)
	}
	curr_addr := uintptr(raw_data(s.data)) + uintptr(s.curr_offset)
	padding := calc_padding_with_header(
		curr_addr,
		uintptr(alignment),
		size_of(Stack_Allocation_Header),
	)
	if s.curr_offset + padding + size > len(s.data) {
		return nil, .Out_Of_Memory
	}
	s.prev_offset = s.curr_offset
	s.curr_offset += padding
	next_addr := curr_addr + uintptr(padding)
	header := (^Stack_Allocation_Header)(next_addr - size_of(Stack_Allocation_Header))
	header.padding = padding
	header.prev_offset = s.prev_offset
	s.curr_offset += size
	s.peak_used = max(s.peak_used, s.curr_offset)
	return byte_slice(rawptr(next_addr), size), nil
}
/*
Free memory to the stack.

This procedure frees the memory region starting at `old_memory` to the stack.
If the free is out of order, the `.Invalid_Pointer` error is returned.
*/
stack_free :: proc(
	s: ^Stack,
	old_memory: rawptr,
	loc := #caller_location,
) -> Allocator_Error {
	if s.data == nil {
		panic("Stack free on an uninitialized stack allocator", loc)
	}
	if old_memory == nil {
		return nil
	}
	start := uintptr(raw_data(s.data))
	end := start + uintptr(len(s.data))
	curr_addr := uintptr(old_memory)
	if !(start <= curr_addr && curr_addr < end) {
		panic("Out of bounds memory address passed to stack allocator (free)", loc)
	}
	if curr_addr >= start+uintptr(s.curr_offset) {
		// NOTE(bill): Allow double frees
		return nil
	}
	header := (^Stack_Allocation_Header)(curr_addr - size_of(Stack_Allocation_Header))
	old_offset := int(curr_addr - uintptr(header.padding) - uintptr(raw_data(s.data)))
	if old_offset != header.prev_offset {
		// panic("Out of order stack allocator free");
		return .Invalid_Pointer
	}
	s.curr_offset = old_offset
	s.prev_offset = header.prev_offset
	return nil
}
/*
Free all allocations to the stack.
*/
stack_free_all :: proc(s: ^Stack, loc := #caller_location) {
	s.prev_offset = 0
	s.curr_offset = 0
}
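/*
Example: a sketch (an assumption, not part of the package) of the stack
allocator's LIFO discipline; only the most recent allocation can be freed, and
an out-of-order free returns `.Invalid_Pointer`:

	import "core:mem"

	example_stack :: proc() {
		buf: [1024]byte
		s: mem.Stack
		mem.stack_init(&s, buf[:])
		a, _ := mem.stack_alloc(&s, 64)
		b, _ := mem.stack_alloc(&s, 64)
		assert(mem.stack_free(&s, b) == nil) // in order: ok
		assert(mem.stack_free(&s, a) == nil) // `a` is now on top
	}
*/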
/*
Resize an allocation.

This procedure resizes a memory region, defined by its location, `old_memory`,
and its size, `old_size`, to have a size `size` and alignment `alignment`. The
newly allocated memory, if any, is zero-initialized.

If `old_memory` is `nil`, this procedure acts just like `stack_alloc()`,
allocating a memory region `size` bytes in size, aligned on a boundary specified
by `alignment`.

If `size` is 0, this procedure acts just like `stack_free()`, freeing the
memory region located at the address specified by `old_memory`.

This procedure returns the pointer to the resized memory region.
*/
@(require_results)
stack_resize :: proc(
	s: ^Stack,
	old_memory: rawptr,
	old_size: int,
	size: int,
	alignment := DEFAULT_ALIGNMENT,
	loc := #caller_location,
) -> (rawptr, Allocator_Error) {
	bytes, err := stack_resize_bytes(s, byte_slice(old_memory, old_size), size, alignment, loc)
	return raw_data(bytes), err
}

/*
Resize an allocation.

This procedure resizes a memory region, specified by the `old_data` parameter,
to have a size `size` and alignment `alignment`. The newly allocated memory,
if any, is zero-initialized.

If `old_data` is `nil`, this procedure acts just like `stack_alloc()`,
allocating a memory region `size` bytes in size, aligned on a boundary specified
by `alignment`.

If `size` is 0, this procedure acts just like `stack_free()`, freeing the
memory region located at the address specified by `old_data`.

This procedure returns the slice of the resized memory region.
*/
@(require_results)
stack_resize_bytes :: proc(
	s: ^Stack,
	old_data: []byte,
	size: int,
	alignment := DEFAULT_ALIGNMENT,
	loc := #caller_location,
) -> ([]byte, Allocator_Error) {
	// Resize (not allocate) so the old contents are preserved; the original
	// called the alloc procedure here and discarded the old data.
	bytes, err := stack_resize_bytes_non_zeroed(s, old_data, size, alignment, loc)
	if bytes != nil {
		if old_data == nil {
			zero_slice(bytes)
		} else if size > len(old_data) {
			zero_slice(bytes[len(old_data):])
		}
	}
	return bytes, err
}
/*
Resize an allocation without zero-initialization.

This procedure resizes a memory region, defined by its location, `old_memory`,
and its size, `old_size`, to have a size `size` and alignment `alignment`. The
newly allocated memory, if any, is not explicitly zero-initialized.

If `old_memory` is `nil`, this procedure acts just like `stack_alloc()`,
allocating a memory region `size` bytes in size, aligned on a boundary specified
by `alignment`.

If `size` is 0, this procedure acts just like `stack_free()`, freeing the
memory region located at the address specified by `old_memory`.

This procedure returns the pointer to the resized memory region.
*/
@(require_results)
stack_resize_non_zeroed :: proc(
	s: ^Stack,
	old_memory: rawptr,
	old_size: int,
	size: int,
	alignment := DEFAULT_ALIGNMENT,
	loc := #caller_location,
) -> (rawptr, Allocator_Error) {
	bytes, err := stack_resize_bytes_non_zeroed(s, byte_slice(old_memory, old_size), size, alignment, loc)
	return raw_data(bytes), err
}

/*
Resize an allocation without zero-initialization.

This procedure resizes a memory region, specified by the `old_data` parameter,
to have a size `size` and alignment `alignment`. The newly allocated memory,
if any, is not explicitly zero-initialized.

If `old_data` is `nil`, this procedure acts just like `stack_alloc()`,
allocating a memory region `size` bytes in size, aligned on a boundary specified
by `alignment`.

If `size` is 0, this procedure acts just like `stack_free()`, freeing the
memory region located at the address specified by `old_data`.

This procedure returns the slice of the resized memory region.
*/
@(require_results)
stack_resize_bytes_non_zeroed :: proc(
	s: ^Stack,
	old_data: []byte,
	size: int,
	alignment := DEFAULT_ALIGNMENT,
	loc := #caller_location,
) -> ([]byte, Allocator_Error) {
	old_memory := raw_data(old_data)
	old_size := len(old_data)
	if s.data == nil {
		panic("Stack resize on an uninitialized stack allocator", loc)
	}
	if old_memory == nil {
		return stack_alloc_bytes_non_zeroed(s, size, alignment, loc)
	}
	if size == 0 {
		return nil, nil
	}
	start := uintptr(raw_data(s.data))
	end := start + uintptr(len(s.data))
	curr_addr := uintptr(old_memory)
	if !(start <= curr_addr && curr_addr < end) {
		panic("Out of bounds memory address passed to stack allocator (resize)", loc)
	}
	if curr_addr >= start+uintptr(s.curr_offset) {
		// NOTE(bill): Allow double frees
		return nil, nil
	}
	if old_size == size {
		return byte_slice(old_memory, size), nil
	}
	header := (^Stack_Allocation_Header)(curr_addr - size_of(Stack_Allocation_Header))
	old_offset := int(curr_addr - uintptr(header.padding) - uintptr(raw_data(s.data)))
	if old_offset != header.prev_offset {
		// Cannot resize in place: allocate a new region and copy the old contents over.
		data, err := stack_alloc_bytes_non_zeroed(s, size, alignment, loc)
		if err == nil {
			runtime.copy(data, byte_slice(old_memory, old_size))
		}
		return data, err
	}
	old_memory_size := uintptr(s.curr_offset) - (curr_addr - start)
	assert(old_memory_size == uintptr(old_size))
	diff := size - old_size
	s.curr_offset += diff // works for smaller sizes too
	if diff > 0 {
		// Zero the newly revealed bytes past the end of the old allocation;
		// the original zeroed from `curr_addr + diff`, the wrong offset.
		zero(rawptr(curr_addr + uintptr(old_size)), diff)
	}
	return byte_slice(old_memory, size), nil
}
stack_allocator_proc :: proc(
	allocator_data: rawptr,
	mode: Allocator_Mode,
	size: int,
	alignment: int,
	old_memory: rawptr,
	old_size: int,
	loc := #caller_location,
) -> ([]byte, Allocator_Error) {
	s := cast(^Stack)allocator_data
	if s.data == nil {
		return nil, .Invalid_Argument
	}
	switch mode {
	case .Alloc:
		return stack_alloc_bytes(s, size, alignment, loc)
	case .Alloc_Non_Zeroed:
		return stack_alloc_bytes_non_zeroed(s, size, alignment, loc)
	case .Free:
		return nil, stack_free(s, old_memory, loc)
	case .Free_All:
		stack_free_all(s, loc)
	case .Resize:
		return stack_resize_bytes(s, byte_slice(old_memory, old_size), size, alignment, loc)
	case .Resize_Non_Zeroed:
		return stack_resize_bytes_non_zeroed(s, byte_slice(old_memory, old_size), size, alignment, loc)
	case .Query_Features:
		set := (^Allocator_Mode_Set)(old_memory)
		if set != nil {
			set^ = {.Alloc, .Alloc_Non_Zeroed, .Free, .Free_All, .Resize, .Resize_Non_Zeroed, .Query_Features}
		}
		return nil, nil
	case .Query_Info:
		return nil, .Mode_Not_Implemented
	}
	return nil, nil
}
/*
Allocation header of the small stack allocator.
*/
Small_Stack_Allocation_Header :: struct {
	padding: u8,
}

/*
Small stack allocator data.
*/
Small_Stack :: struct {
	data:      []byte,
	offset:    int,
	peak_used: int,
}

/*
Initialize the small stack.

This procedure initializes the small stack allocator with `data` as its backing
buffer.
*/
small_stack_init :: proc(s: ^Small_Stack, data: []byte) {
	s.data = data
	s.offset = 0
	s.peak_used = 0
}

@(deprecated="prefer 'small_stack_init'")
init_small_stack :: proc(s: ^Small_Stack, data: []byte) {
	s.data = data
	s.offset = 0
	s.peak_used = 0
}
/*
Small stack allocator.

The small stack allocator works just like the stack allocator, except that it
has an extremely small allocation header and, unlike the stack allocator,
allows out-of-order freeing of memory.

The memory is allocated in the backing buffer linearly, from start to end.
Each subsequent allocation will get the next adjacent memory region.

The metadata is stored in allocation headers, which are located before the
start of each allocated memory region. Each header contains the amount of
padding bytes between that header and the end of the previous allocation.
*/
@(require_results)
small_stack_allocator :: proc(stack: ^Small_Stack) -> Allocator {
	return Allocator{
		procedure = small_stack_allocator_proc,
		data = stack,
	}
}
/*
Allocate memory from the small stack.

This procedure allocates `size` bytes of memory aligned to a boundary specified
by `alignment`. The allocated memory is zero-initialized. This procedure
returns a pointer to the allocated memory region.
*/
@(require_results)
small_stack_alloc :: proc(
	s: ^Small_Stack,
	size: int,
	alignment := DEFAULT_ALIGNMENT,
	loc := #caller_location,
) -> (rawptr, Allocator_Error) {
	bytes, err := small_stack_alloc_bytes(s, size, alignment, loc)
	return raw_data(bytes), err
}

/*
Allocate memory from the small stack.

This procedure allocates `size` bytes of memory aligned to a boundary specified
by `alignment`. The allocated memory is zero-initialized. This procedure
returns a slice of the allocated memory region.
*/
@(require_results)
small_stack_alloc_bytes :: proc(
	s: ^Small_Stack,
	size: int,
	alignment := DEFAULT_ALIGNMENT,
	loc := #caller_location,
) -> ([]byte, Allocator_Error) {
	bytes, err := small_stack_alloc_bytes_non_zeroed(s, size, alignment, loc)
	if bytes != nil {
		zero_slice(bytes)
	}
	return bytes, err
}

/*
Allocate memory from the small stack.

This procedure allocates `size` bytes of memory aligned to a boundary specified
by `alignment`. The allocated memory is not explicitly zero-initialized. This
procedure returns a pointer to the allocated memory region.
*/
@(require_results)
small_stack_alloc_non_zeroed :: proc(
	s: ^Small_Stack,
	size: int,
	alignment := DEFAULT_ALIGNMENT,
	loc := #caller_location,
) -> (rawptr, Allocator_Error) {
	bytes, err := small_stack_alloc_bytes_non_zeroed(s, size, alignment, loc)
	return raw_data(bytes), err
}

/*
Allocate memory from the small stack.

This procedure allocates `size` bytes of memory aligned to a boundary specified
by `alignment`. The allocated memory is not explicitly zero-initialized. This
procedure returns a slice of the allocated memory region.
*/
@(require_results)
small_stack_alloc_bytes_non_zeroed :: proc(
	s: ^Small_Stack,
	size: int,
	alignment := DEFAULT_ALIGNMENT,
	loc := #caller_location,
) -> ([]byte, Allocator_Error) {
	if s.data == nil {
		panic("Small stack is not initialized", loc)
	}
	alignment := alignment
	alignment = clamp(alignment, 1, 8*size_of(Stack_Allocation_Header{}.padding)/2)
	curr_addr := uintptr(raw_data(s.data)) + uintptr(s.offset)
	padding := calc_padding_with_header(curr_addr, uintptr(alignment), size_of(Small_Stack_Allocation_Header))
	if s.offset + padding + size > len(s.data) {
		return nil, .Out_Of_Memory
	}
	s.offset += padding
	next_addr := curr_addr + uintptr(padding)
	header := (^Small_Stack_Allocation_Header)(next_addr - size_of(Small_Stack_Allocation_Header))
	header.padding = auto_cast padding
	s.offset += size
	s.peak_used = max(s.peak_used, s.offset)
	return byte_slice(rawptr(next_addr), size), nil
}
/*
Free memory to the small stack.

This procedure frees the memory region starting at `old_memory`, rolling the
stack offset back to the start of that allocation.
*/
small_stack_free :: proc(
	s: ^Small_Stack,
	old_memory: rawptr,
	loc := #caller_location,
) -> Allocator_Error {
	if s.data == nil {
		panic("Small stack is not initialized", loc)
	}
	if old_memory == nil {
		return nil
	}
	start := uintptr(raw_data(s.data))
	end := start + uintptr(len(s.data))
	curr_addr := uintptr(old_memory)
	if !(start <= curr_addr && curr_addr < end) {
		// panic("Out of bounds memory address passed to stack allocator (free)");
		return .Invalid_Pointer
	}
	if curr_addr >= start+uintptr(s.offset) {
		// NOTE(bill): Allow double frees
		return nil
	}
	header := (^Small_Stack_Allocation_Header)(curr_addr - size_of(Small_Stack_Allocation_Header))
	old_offset := int(curr_addr - uintptr(header.padding) - uintptr(raw_data(s.data)))
	s.offset = old_offset
	return nil
}
/*
Free all memory to the small stack.
*/
small_stack_free_all :: proc(s: ^Small_Stack) {
	s.offset = 0
}
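/*
Example: a sketch (an assumption, not part of the package); note that freeing a
non-top allocation rolls the small stack back, discarding everything allocated
after it:

	import "core:mem"

	example_small_stack :: proc() {
		buf: [512]byte
		s: mem.Small_Stack
		mem.small_stack_init(&s, buf[:])
		a, _ := mem.small_stack_alloc(&s, 64)
		_, _ = mem.small_stack_alloc(&s, 64)
		_ = mem.small_stack_free(&s, a) // also discards the later allocation
	}
*/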
/*
Resize an allocation.

This procedure resizes a memory region, defined by its location, `old_memory`,
and its size, `old_size`, to have a size `size` and alignment `alignment`. The
newly allocated memory, if any, is zero-initialized.

If `old_memory` is `nil`, this procedure acts just like `small_stack_alloc()`,
allocating a memory region `size` bytes in size, aligned on a boundary specified
by `alignment`.

If `size` is 0, this procedure acts just like `small_stack_free()`, freeing the
memory region located at the address specified by `old_memory`.

This procedure returns the pointer to the resized memory region.
*/
@(require_results)
small_stack_resize :: proc(
	s: ^Small_Stack,
	old_memory: rawptr,
	old_size: int,
	size: int,
	alignment := DEFAULT_ALIGNMENT,
	loc := #caller_location,
) -> (rawptr, Allocator_Error) {
	bytes, err := small_stack_resize_bytes(s, byte_slice(old_memory, old_size), size, alignment, loc)
	return raw_data(bytes), err
}

/*
Resize an allocation.

This procedure resizes a memory region, specified by the `old_data` parameter,
to have a size `size` and alignment `alignment`. The newly allocated memory,
if any, is zero-initialized.

If `old_data` is `nil`, this procedure acts just like `small_stack_alloc()`,
allocating a memory region `size` bytes in size, aligned on a boundary specified
by `alignment`.

If `size` is 0, this procedure acts just like `small_stack_free()`, freeing the
memory region located at the address specified by `old_data`.

This procedure returns the slice of the resized memory region.
*/
@(require_results)
small_stack_resize_bytes :: proc(
	s: ^Small_Stack,
	old_data: []byte,
	size: int,
	alignment := DEFAULT_ALIGNMENT,
	loc := #caller_location,
) -> ([]byte, Allocator_Error) {
	bytes, err := small_stack_resize_bytes_non_zeroed(s, old_data, size, alignment, loc)
	if bytes != nil {
		if old_data == nil {
			zero_slice(bytes)
		} else if size > len(old_data) {
			zero_slice(bytes[len(old_data):])
		}
	}
	return bytes, err
}

/*
Resize an allocation without zero-initialization.

This procedure resizes a memory region, defined by its location, `old_memory`,
and its size, `old_size`, to have a size `size` and alignment `alignment`. The
newly allocated memory, if any, is not explicitly zero-initialized.

If `old_memory` is `nil`, this procedure acts just like `small_stack_alloc()`,
allocating a memory region `size` bytes in size, aligned on a boundary specified
by `alignment`.

If `size` is 0, this procedure acts just like `small_stack_free()`, freeing the
memory region located at the address specified by `old_memory`.

This procedure returns the pointer to the resized memory region.
*/
@(require_results)
small_stack_resize_non_zeroed :: proc(
	s: ^Small_Stack,
	old_memory: rawptr,
	old_size: int,
	size: int,
	alignment := DEFAULT_ALIGNMENT,
	loc := #caller_location,
) -> (rawptr, Allocator_Error) {
	bytes, err := small_stack_resize_bytes_non_zeroed(s, byte_slice(old_memory, old_size), size, alignment, loc)
	return raw_data(bytes), err
}
/*
Resize an allocation without zero-initialization.

This procedure resizes a memory region, specified by the `old_data` parameter,
to have a size `size` and alignment `alignment`. The newly allocated memory,
if any, is not explicitly zero-initialized.

If `old_data` is `nil`, this procedure acts just like `small_stack_alloc()`,
allocating a memory region `size` bytes in size, aligned on a boundary specified
by `alignment`.

If `size` is 0, this procedure acts just like `small_stack_free()`, freeing the
memory region located at the address specified by `old_data`.

This procedure returns the slice of the resized memory region.
*/
@(require_results)
small_stack_resize_bytes_non_zeroed :: proc(
	s: ^Small_Stack,
	old_data: []byte,
	size: int,
	alignment := DEFAULT_ALIGNMENT,
	loc := #caller_location,
) -> ([]byte, Allocator_Error) {
	if s.data == nil {
		panic("Small stack is not initialized", loc)
	}
	old_memory := raw_data(old_data)
	old_size := len(old_data)
	alignment := alignment
	alignment = clamp(alignment, 1, 8*size_of(Stack_Allocation_Header{}.padding)/2)
	if old_memory == nil {
		return small_stack_alloc_bytes_non_zeroed(s, size, alignment, loc)
	}
	if size == 0 {
		return nil, nil
	}
	start := uintptr(raw_data(s.data))
	end := start + uintptr(len(s.data))
	curr_addr := uintptr(old_memory)
	if !(start <= curr_addr && curr_addr < end) {
		// panic("Out of bounds memory address passed to stack allocator (resize)");
		return nil, .Invalid_Pointer
	}
	if curr_addr >= start+uintptr(s.offset) {
		// NOTE(bill): Treat as a double free
		return nil, nil
	}
	if old_size == size {
		return byte_slice(old_memory, size), nil
	}
	data, err := small_stack_alloc_bytes_non_zeroed(s, size, alignment, loc)
	if err == nil {
		runtime.copy(data, byte_slice(old_memory, old_size))
	}
	return data, err
}

small_stack_allocator_proc :: proc(
	allocator_data: rawptr,
	mode: Allocator_Mode,
	size, alignment: int,
	old_memory: rawptr,
	old_size: int,
	loc := #caller_location,
) -> ([]byte, Allocator_Error) {
	s := cast(^Small_Stack)allocator_data
	if s.data == nil {
		return nil, .Invalid_Argument
	}
	switch mode {
	case .Alloc:
		return small_stack_alloc_bytes(s, size, alignment, loc)
	case .Alloc_Non_Zeroed:
		return small_stack_alloc_bytes_non_zeroed(s, size, alignment, loc)
	case .Free:
		return nil, small_stack_free(s, old_memory, loc)
	case .Free_All:
		small_stack_free_all(s)
	case .Resize:
		return small_stack_resize_bytes(s, byte_slice(old_memory, old_size), size, alignment, loc)
	case .Resize_Non_Zeroed:
		return small_stack_resize_bytes_non_zeroed(s, byte_slice(old_memory, old_size), size, alignment, loc)
	case .Query_Features:
		set := (^Allocator_Mode_Set)(old_memory)
		if set != nil {
			set^ = {.Alloc, .Alloc_Non_Zeroed, .Free, .Free_All, .Resize, .Resize_Non_Zeroed, .Query_Features}
		}
		return nil, nil
	case .Query_Info:
		return nil, .Mode_Not_Implemented
	}
	return nil, nil
}
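
/*
A sketch of driving the small stack through the generic `Allocator` interface
(illustrative; assumes this package is imported as `mem`). Constructing the
`Allocator` value directly avoids assuming any particular helper exists:

Example:
	stack: mem.Small_Stack
	buffer: [4096]byte
	mem.small_stack_init(&stack, buffer[:])

	context.allocator = mem.Allocator{
		procedure = mem.small_stack_allocator_proc,
		data      = &stack,
	}
	numbers := make([]int, 32) // allocated from the small stack
*/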

/* Preserved for compatibility */
Dynamic_Pool :: Dynamic_Arena
DYNAMIC_POOL_BLOCK_SIZE_DEFAULT :: DYNAMIC_ARENA_BLOCK_SIZE_DEFAULT
DYNAMIC_POOL_OUT_OF_BAND_SIZE_DEFAULT :: DYNAMIC_ARENA_OUT_OF_BAND_SIZE_DEFAULT
dynamic_pool_allocator_proc :: dynamic_arena_allocator_proc
dynamic_pool_free_all :: dynamic_arena_free_all
dynamic_pool_reset :: dynamic_arena_reset
dynamic_pool_alloc_bytes :: dynamic_arena_alloc_bytes
dynamic_pool_alloc :: dynamic_arena_alloc
dynamic_pool_init :: dynamic_arena_init
dynamic_pool_allocator :: dynamic_arena_allocator
dynamic_pool_destroy :: dynamic_arena_destroy

/*
Default block size for the dynamic arena.
*/
DYNAMIC_ARENA_BLOCK_SIZE_DEFAULT :: 65536

/*
Default out-of-band size for the dynamic arena.
*/
DYNAMIC_ARENA_OUT_OF_BAND_SIZE_DEFAULT :: 6554

/*
Dynamic arena allocator data.
*/
Dynamic_Arena :: struct {
	block_size:           int,
	out_band_size:        int,
	alignment:            int,
	unused_blocks:        [dynamic]rawptr,
	used_blocks:          [dynamic]rawptr,
	out_band_allocations: [dynamic]rawptr,
	current_block:        rawptr,
	current_pos:          rawptr,
	bytes_left:           int,
	block_allocator:      Allocator,
}

/*
Initialize a dynamic arena.

This procedure initializes a dynamic arena. The specified `block_allocator`
is used to allocate arena blocks, and `array_allocator` is used to allocate
the internal arrays that track blocks and out-of-band allocations. Blocks are
`block_size` bytes in size, and the out-of-band threshold is `out_band_size`.
All allocations are aligned to a boundary specified by `alignment`.
*/
dynamic_arena_init :: proc(
	pool: ^Dynamic_Arena,
	block_allocator := context.allocator,
	array_allocator := context.allocator,
	block_size := DYNAMIC_ARENA_BLOCK_SIZE_DEFAULT,
	out_band_size := DYNAMIC_ARENA_OUT_OF_BAND_SIZE_DEFAULT,
	alignment := DEFAULT_ALIGNMENT,
) {
	pool.block_size = block_size
	pool.out_band_size = out_band_size
	pool.alignment = alignment
	pool.block_allocator = block_allocator
	pool.out_band_allocations.allocator = array_allocator
	pool.unused_blocks.allocator = array_allocator
	pool.used_blocks.allocator = array_allocator
}
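
/*
A minimal initialization sketch (illustrative; assumes this package is
imported as `mem` and that `context.allocator` is a valid general-purpose
allocator for both the blocks and the bookkeeping arrays):

Example:
	arena: mem.Dynamic_Arena
	mem.dynamic_arena_init(&arena) // default block size, out-of-band size, and alignment
	defer mem.dynamic_arena_destroy(&arena)

	data, err := mem.dynamic_arena_alloc_bytes(&arena, 256)
*/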

/*
Dynamic arena allocator.

The dynamic arena allocator uses blocks of a fixed size, allocated on demand
by the block allocator. This allocator behaves much like the arena. All
allocations in a block happen contiguously, from start to end. If an allocation
does not fit into the remaining space of the current block, and its size is
smaller than the specified out-of-band size, a new block is allocated using the
`block_allocator` and the allocation is performed from that new block. If an
allocation is larger than the specified out-of-band size, a dedicated block is
allocated so that the allocation fits into it. This is referred to as an
*out-of-band allocation*. The out-of-band blocks are kept separately from
normal blocks.

Just like the arena, the dynamic arena does not support freeing individual
objects.
*/
@(require_results)
dynamic_arena_allocator :: proc(a: ^Dynamic_Arena) -> Allocator {
	return Allocator{
		procedure = dynamic_arena_allocator_proc,
		data = a,
	}
}
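
/*
A sketch of installing the dynamic arena as the context allocator
(illustrative; assumes this package is imported as `mem`):

Example:
	arena: mem.Dynamic_Arena
	mem.dynamic_arena_init(&arena)
	defer mem.dynamic_arena_destroy(&arena)

	context.allocator = mem.dynamic_arena_allocator(&arena)
	names := make([dynamic]string) // backed by the arena, released by `dynamic_arena_destroy`
*/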

/*
Destroy a dynamic arena.

This procedure frees all allocations made from a dynamic arena, including the
unused blocks, as well as the internal arrays used for tracking blocks.
*/
dynamic_arena_destroy :: proc(a: ^Dynamic_Arena) {
	dynamic_arena_free_all(a)
	delete(a.unused_blocks)
	delete(a.used_blocks)
	delete(a.out_band_allocations)
	zero(a, size_of(a^))
}

@(private="file")
_dynamic_arena_cycle_new_block :: proc(a: ^Dynamic_Arena, loc := #caller_location) -> (err: Allocator_Error) {
	if a.block_allocator.procedure == nil {
		panic("You must call dynamic_arena_init on a Dynamic_Arena before using it", loc)
	}
	if a.current_block != nil {
		append(&a.used_blocks, a.current_block, loc = loc)
	}
	new_block: rawptr
	if len(a.unused_blocks) > 0 {
		new_block = pop(&a.unused_blocks)
	} else {
		data: []byte
		data, err = a.block_allocator.procedure(
			a.block_allocator.data,
			Allocator_Mode.Alloc,
			a.block_size,
			a.alignment,
			nil,
			0,
		)
		new_block = raw_data(data)
	}
	a.bytes_left = a.block_size
	a.current_pos = new_block
	a.current_block = new_block
	return
}

/*
Allocate memory from a dynamic arena.

This procedure allocates `size` bytes of memory, aligned to the arena's
`alignment`, from the dynamic arena `a`. The allocated memory is
zero-initialized. This procedure returns a pointer to the newly allocated
memory region.
*/
@(private, require_results)
dynamic_arena_alloc :: proc(a: ^Dynamic_Arena, size: int, loc := #caller_location) -> (rawptr, Allocator_Error) {
	data, err := dynamic_arena_alloc_bytes(a, size, loc)
	return raw_data(data), err
}

/*
Allocate memory from a dynamic arena.

This procedure allocates `size` bytes of memory, aligned to the arena's
`alignment`, from the dynamic arena `a`. The allocated memory is
zero-initialized. This procedure returns a slice of the newly allocated memory
region.
*/
@(require_results)
dynamic_arena_alloc_bytes :: proc(a: ^Dynamic_Arena, size: int, loc := #caller_location) -> ([]byte, Allocator_Error) {
	bytes, err := dynamic_arena_alloc_bytes_non_zeroed(a, size, loc)
	if bytes != nil {
		zero_slice(bytes)
	}
	return bytes, err
}

/*
Allocate non-initialized memory from a dynamic arena.

This procedure allocates `size` bytes of memory, aligned to the arena's
`alignment`, from the dynamic arena `a`. The allocated memory is not explicitly
zero-initialized. This procedure returns a pointer to the newly allocated
memory region.
*/
@(require_results)
dynamic_arena_alloc_non_zeroed :: proc(a: ^Dynamic_Arena, size: int, loc := #caller_location) -> (rawptr, Allocator_Error) {
	data, err := dynamic_arena_alloc_bytes_non_zeroed(a, size, loc)
	return raw_data(data), err
}

/*
Allocate non-initialized memory from a dynamic arena.

This procedure allocates `size` bytes of memory, aligned to the arena's
`alignment`, from the dynamic arena `a`. The allocated memory is not explicitly
zero-initialized. This procedure returns a slice of the newly allocated
memory region.
*/
@(require_results)
dynamic_arena_alloc_bytes_non_zeroed :: proc(a: ^Dynamic_Arena, size: int, loc := #caller_location) -> ([]byte, Allocator_Error) {
	n := align_formula(size, a.alignment)
	if n > a.block_size {
		return nil, .Invalid_Argument
	}
	if n >= a.out_band_size {
		assert(a.block_allocator.procedure != nil, "Backing block allocator must be initialized", loc = loc)
		memory, err := alloc_bytes_non_zeroed(a.block_size, a.alignment, a.block_allocator, loc)
		if memory != nil {
			append(&a.out_band_allocations, raw_data(memory), loc = loc)
		}
		return memory, err
	}
	if a.bytes_left < n {
		err := _dynamic_arena_cycle_new_block(a, loc)
		if err != nil {
			return nil, err
		}
		if a.current_block == nil {
			return nil, .Out_Of_Memory
		}
	}
	memory := a.current_pos
	a.current_pos = ([^]byte)(a.current_pos)[n:]
	a.bytes_left -= n
	return ([^]byte)(memory)[:size], nil
}
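
/*
A worked example of the size classes above (illustrative, assuming the default
`block_size = 65536`, `out_band_size = 6554`, and a `DEFAULT_ALIGNMENT` of 16
on a 64-bit target):

- a 256-byte request aligns to 256 and is carved from the current block;
- a 10000-byte request (already a multiple of 16) satisfies 10000 >= 6554 and
  receives its own out-of-band block;
- a 70000-byte request exceeds `block_size` and fails with `.Invalid_Argument`.
*/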

/*
Reset the dynamic arena.

This procedure frees all the allocations owned by the dynamic arena, but keeps
the blocks themselves around (as unused blocks) for reuse by future
allocations. Out-of-band allocations are returned to the block allocator.
*/
dynamic_arena_reset :: proc(a: ^Dynamic_Arena, loc := #caller_location) {
	if a.current_block != nil {
		append(&a.unused_blocks, a.current_block, loc = loc)
		a.current_block = nil
	}
	for block in a.used_blocks {
		append(&a.unused_blocks, block, loc = loc)
	}
	clear(&a.used_blocks)
	for allocation in a.out_band_allocations {
		free(allocation, a.block_allocator, loc = loc)
	}
	clear(&a.out_band_allocations)
	a.bytes_left = 0 // Make new allocations call `_dynamic_arena_cycle_new_block` again.
}
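
/*
A sketch contrasting `dynamic_arena_reset` with `dynamic_arena_free_all`
(illustrative; assumes this package is imported as `mem`):

Example:
	arena: mem.Dynamic_Arena
	mem.dynamic_arena_init(&arena)

	_, _ = mem.dynamic_arena_alloc_bytes(&arena, 128)
	mem.dynamic_arena_reset(&arena)                   // keeps blocks for reuse
	_, _ = mem.dynamic_arena_alloc_bytes(&arena, 128) // reuses a retained block
	mem.dynamic_arena_free_all(&arena)                // returns every block to the block allocator
*/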

/*
Free all memory from a dynamic arena.

This procedure frees all the allocations owned by the dynamic arena, including
the unused blocks.
*/
dynamic_arena_free_all :: proc(a: ^Dynamic_Arena, loc := #caller_location) {
	dynamic_arena_reset(a)
	for block in a.unused_blocks {
		free(block, a.block_allocator, loc)
	}
	clear(&a.unused_blocks)
}

/*
Resize an allocation.

This procedure resizes a memory region, defined by its location, `old_memory`,
and its size, `old_size`, to have a size `size`. The newly allocated memory,
if any, is zero-initialized.

If `old_memory` is `nil`, this procedure acts just like `dynamic_arena_alloc()`,
allocating a memory region `size` bytes in size, aligned to the arena's
`alignment`.

Note that the dynamic arena does not free individual allocations: shrinking an
allocation truncates it in place, and growing it allocates a new region and
copies the old contents over.

This procedure returns a pointer to the resized memory region.
*/
@(require_results)
dynamic_arena_resize :: proc(
	a: ^Dynamic_Arena,
	old_memory: rawptr,
	old_size: int,
	size: int,
	loc := #caller_location,
) -> (rawptr, Allocator_Error) {
	bytes, err := dynamic_arena_resize_bytes(a, byte_slice(old_memory, old_size), size, loc)
	return raw_data(bytes), err
}

/*
Resize an allocation.

This procedure resizes a memory region, specified by `old_data`, to have a size
`size`. The newly allocated memory, if any, is zero-initialized.

If `old_data` is `nil`, this procedure acts just like `dynamic_arena_alloc()`,
allocating a memory region `size` bytes in size, aligned to the arena's
`alignment`.

Note that the dynamic arena does not free individual allocations: shrinking an
allocation truncates it in place, and growing it allocates a new region and
copies the old contents over.

This procedure returns a slice of the resized memory region.
*/
@(require_results)
dynamic_arena_resize_bytes :: proc(
	a: ^Dynamic_Arena,
	old_data: []byte,
	size: int,
	loc := #caller_location,
) -> ([]byte, Allocator_Error) {
	bytes, err := dynamic_arena_resize_bytes_non_zeroed(a, old_data, size, loc)
	if bytes != nil {
		if old_data == nil {
			zero_slice(bytes)
		} else if size > len(old_data) {
			zero_slice(bytes[len(old_data):])
		}
	}
	return bytes, err
}

/*
Resize an allocation without zero-initialization.

This procedure resizes a memory region, defined by its location, `old_memory`,
and its size, `old_size`, to have a size `size`. The newly allocated memory,
if any, is not explicitly zero-initialized.

If `old_memory` is `nil`, this procedure acts just like `dynamic_arena_alloc()`,
allocating a memory region `size` bytes in size, aligned to the arena's
`alignment`.

Note that the dynamic arena does not free individual allocations: shrinking an
allocation truncates it in place, and growing it allocates a new region and
copies the old contents over.

This procedure returns a pointer to the resized memory region.
*/
@(require_results)
dynamic_arena_resize_non_zeroed :: proc(
	a: ^Dynamic_Arena,
	old_memory: rawptr,
	old_size: int,
	size: int,
	loc := #caller_location,
) -> (rawptr, Allocator_Error) {
	bytes, err := dynamic_arena_resize_bytes_non_zeroed(a, byte_slice(old_memory, old_size), size, loc)
	return raw_data(bytes), err
}

/*
Resize an allocation without zero-initialization.

This procedure resizes a memory region, specified by `old_data`, to have a size
`size`. The newly allocated memory, if any, is not explicitly zero-initialized.

If `old_data` is `nil`, this procedure acts just like `dynamic_arena_alloc()`,
allocating a memory region `size` bytes in size, aligned to the arena's
`alignment`.

Note that the dynamic arena does not free individual allocations: shrinking an
allocation truncates it in place, and growing it allocates a new region and
copies the old contents over.

This procedure returns a slice of the resized memory region.
*/
@(require_results)
dynamic_arena_resize_bytes_non_zeroed :: proc(
	a: ^Dynamic_Arena,
	old_data: []byte,
	size: int,
	loc := #caller_location,
) -> ([]byte, Allocator_Error) {
	old_memory := raw_data(old_data)
	old_size := len(old_data)
	if old_size >= size {
		return byte_slice(old_memory, size), nil
	}
	data, err := dynamic_arena_alloc_bytes_non_zeroed(a, size, loc)
	if err == nil {
		runtime.copy(data, byte_slice(old_memory, old_size))
	}
	return data, err
}
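
/*
A sketch of the resize paths (illustrative; assumes this package is imported
as `mem`): growing never frees the old region, it allocates anew and copies.

Example:
	arena: mem.Dynamic_Arena
	mem.dynamic_arena_init(&arena)

	data, _ := mem.dynamic_arena_alloc_bytes(&arena, 64)
	data, _  = mem.dynamic_arena_resize_bytes(&arena, data, 256) // copies, zeroes the new tail
	data, _  = mem.dynamic_arena_resize_bytes(&arena, data, 32)  // truncates in place
*/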

dynamic_arena_allocator_proc :: proc(
	allocator_data: rawptr,
	mode: Allocator_Mode,
	size: int,
	alignment: int,
	old_memory: rawptr,
	old_size: int,
	loc := #caller_location,
) -> ([]byte, Allocator_Error) {
	arena := (^Dynamic_Arena)(allocator_data)
	switch mode {
	case .Alloc:
		return dynamic_arena_alloc_bytes(arena, size, loc)
	case .Alloc_Non_Zeroed:
		return dynamic_arena_alloc_bytes_non_zeroed(arena, size, loc)
	case .Free:
		return nil, .Mode_Not_Implemented
	case .Free_All:
		dynamic_arena_free_all(arena, loc)
	case .Resize:
		return dynamic_arena_resize_bytes(arena, byte_slice(old_memory, old_size), size, loc)
	case .Resize_Non_Zeroed:
		return dynamic_arena_resize_bytes_non_zeroed(arena, byte_slice(old_memory, old_size), size, loc)
	case .Query_Features:
		set := (^Allocator_Mode_Set)(old_memory)
		if set != nil {
			set^ = {.Alloc, .Alloc_Non_Zeroed, .Free_All, .Resize, .Resize_Non_Zeroed, .Query_Features, .Query_Info}
		}
		return nil, nil
	case .Query_Info:
		info := (^Allocator_Query_Info)(old_memory)
		if info != nil && info.pointer != nil {
			info.size = arena.block_size
			info.alignment = arena.alignment
			return byte_slice(info, size_of(info^)), nil
		}
		return nil, nil
	}
	return nil, nil
}

/*
Header of a buddy block.
*/
Buddy_Block :: struct #align(align_of(uint)) {
	size:    uint,
	is_free: bool,
}

/*
Obtain the next buddy block.
*/
@(require_results)
buddy_block_next :: proc(block: ^Buddy_Block) -> ^Buddy_Block {
	return (^Buddy_Block)(([^]byte)(block)[block.size:])
}

/*
Split a block repeatedly in half until it is the smallest block that can hold
`size` bytes, and return that block. Returns `nil` if the block cannot fit the
requested size.
*/
@(require_results)
buddy_block_split :: proc(block: ^Buddy_Block, size: uint) -> ^Buddy_Block {
	block := block
	if block != nil && size != 0 {
		// Recursive split
		for size < block.size {
			sz := block.size >> 1
			block.size = sz
			block = buddy_block_next(block)
			block.size = sz
			block.is_free = true
		}
		if size <= block.size {
			return block
		}
	}
	// Block cannot fit the requested allocation size
	return nil
}
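
/*
A worked example of the split above (illustrative): starting from a single
free 1024-byte block and requesting a 256-byte chunk, the loop halves twice,
1024 -> 512 -> 256, leaving a free 512-byte buddy and a free 256-byte buddy
beside the 256-byte block that is returned to the caller.
*/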

/*
Coalesce contiguous free buddy blocks in a range of blocks.
*/
buddy_block_coalescence :: proc(head, tail: ^Buddy_Block) {
	for {
		// Keep looping until there are no more buddies to coalesce
		block := head
		buddy := buddy_block_next(block)
		no_coalescence := true
		for block < tail && buddy < tail { // make sure the buddies are within the range
			if block.is_free && buddy.is_free && block.size == buddy.size {
				// Coalesce buddies into one
				block.size <<= 1
				block = buddy_block_next(block)
				if block < tail {
					buddy = buddy_block_next(block)
					no_coalescence = false
				}
			} else if block.size < buddy.size {
				// The buddy block is split into smaller blocks
				block = buddy
				buddy = buddy_block_next(buddy)
			} else {
				block = buddy_block_next(buddy)
				if block < tail {
					// Leave the buddy block for the next iteration
					buddy = buddy_block_next(block)
				}
			}
		}
		if no_coalescence {
			return
		}
	}
}

/*
Find the best-fitting free block for a given size in a range of blocks.
*/
@(require_results)
buddy_block_find_best :: proc(head, tail: ^Buddy_Block, size: uint) -> ^Buddy_Block {
	assert(size != 0)
	best_block: ^Buddy_Block
	block := head                    // left
	buddy := buddy_block_next(block) // right
	// The entire memory section between head and tail is free,
	// just call `buddy_block_split` to get the allocation
	if buddy == tail && block.is_free {
		return buddy_block_split(block, size)
	}
	// Find the block which best fits the requested allocation size
	for block < tail && buddy < tail { // make sure the buddies are within the range
		// If both buddies are free and of equal size, coalesce them together
		// NOTE: this is an optimization to reduce fragmentation
		//       and could be omitted entirely
		if block.is_free && buddy.is_free && block.size == buddy.size {
			block.size <<= 1
			if size <= block.size && (best_block == nil || block.size <= best_block.size) {
				best_block = block
			}
			block = buddy_block_next(buddy)
			if block < tail {
				// Delay the buddy block for the next iteration
				buddy = buddy_block_next(block)
			}
			continue
		}
		if block.is_free && size <= block.size &&
		   (best_block == nil || block.size <= best_block.size) {
			best_block = block
		}
		if buddy.is_free && size <= buddy.size &&
		   (best_block == nil || buddy.size < best_block.size) {
			// If both buddies are the same size, it makes more sense
			// to pick the buddy, as it "bounces around" less
			best_block = buddy
		}
		if block.size <= buddy.size {
			block = buddy_block_next(buddy)
			if block < tail {
				// Delay the buddy block for the next iteration
				buddy = buddy_block_next(block)
			}
		} else {
			// The buddy was split into smaller blocks
			block = buddy
			buddy = buddy_block_next(buddy)
		}
	}
	if best_block != nil {
		// This also handles the case where `best_block` is a perfect fit
		return buddy_block_split(best_block, size)
	}
	// Possibly out of memory
	return nil
}

/*
The buddy allocator data.
*/
Buddy_Allocator :: struct {
	head:      ^Buddy_Block,
	tail:      ^Buddy_Block,
	alignment: uint,
}

/*
Buddy allocator.

The buddy allocator is a type of allocator that splits the backing buffer into
multiple regions called buddy blocks. Initially, the allocator has a single
block spanning the whole backing buffer. Upon each allocation, the allocator
finds the smallest block that can fit the size of the requested memory region,
splitting blocks as needed. If no suitable block can be found, the contiguous
free blocks are coalesced and the search is performed again.
*/
@(require_results)
buddy_allocator :: proc(b: ^Buddy_Allocator) -> Allocator {
	return Allocator{
		procedure = buddy_allocator_proc,
		data = b,
	}
}

/*
Initialize the buddy allocator.

This procedure initializes the buddy allocator `b` with a backing buffer `data`
and block alignment specified by `alignment`. The length of the backing buffer
must be a power of two, and the buffer itself must be aligned to `alignment`.
*/
buddy_allocator_init :: proc(b: ^Buddy_Allocator, data: []byte, alignment: uint, loc := #caller_location) {
	assert(data != nil)
	assert(is_power_of_two(uintptr(len(data))), "Size of the backing buffer must be power of two", loc)
	assert(is_power_of_two(uintptr(alignment)), "Alignment must be a power of two", loc)
	alignment := alignment
	if alignment < size_of(Buddy_Block) {
		alignment = size_of(Buddy_Block)
	}
	ptr := raw_data(data)
	assert(uintptr(ptr) % uintptr(alignment) == 0, "data is not aligned to minimum alignment", loc)
	b.head = (^Buddy_Block)(ptr)
	b.head.size = uint(len(data))
	b.head.is_free = true
	b.tail = buddy_block_next(b.head)
	b.alignment = alignment
}

/*
Compute the block size required to fit an allocation of `size` bytes, together
with its block header and alignment padding.
*/
@(require_results)
buddy_block_size_required :: proc(b: ^Buddy_Allocator, size: uint) -> uint {
	size := size
	actual_size := b.alignment
	size += size_of(Buddy_Block)
	size = align_forward_uint(size, b.alignment)
	for size > actual_size {
		actual_size <<= 1
	}
	return actual_size
}
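
/*
A worked example of the size computation above (illustrative, assuming a
64-bit target where `size_of(Buddy_Block)` is 16 after padding): with
`b.alignment = 16` and a request of `size = 40`, the header brings the total
to 56, which aligns forward to 64; doubling `actual_size` from 16 gives
16 -> 32 -> 64, so a 64-byte block is required.
*/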

/*
Allocate memory from a buddy allocator.

This procedure allocates `size` bytes of memory, aligned to the allocator's
block alignment. The allocated memory region is zero-initialized. This
procedure returns a pointer to the allocated memory region.
*/
@(require_results)
buddy_allocator_alloc :: proc(b: ^Buddy_Allocator, size: uint) -> (rawptr, Allocator_Error) {
	bytes, err := buddy_allocator_alloc_bytes(b, size)
	return raw_data(bytes), err
}

/*
Allocate memory from a buddy allocator.

This procedure allocates `size` bytes of memory, aligned to the allocator's
block alignment. The allocated memory region is zero-initialized. This
procedure returns a slice of the allocated memory region.
*/
@(require_results)
buddy_allocator_alloc_bytes :: proc(b: ^Buddy_Allocator, size: uint) -> ([]byte, Allocator_Error) {
	bytes, err := buddy_allocator_alloc_bytes_non_zeroed(b, size)
	if bytes != nil {
		zero_slice(bytes)
	}
	return bytes, err
}

/*
Allocate non-initialized memory from a buddy allocator.

This procedure allocates `size` bytes of memory, aligned to the allocator's
block alignment. The allocated memory region is not explicitly
zero-initialized. This procedure returns a pointer to the allocated memory
region.
*/
@(require_results)
buddy_allocator_alloc_non_zeroed :: proc(b: ^Buddy_Allocator, size: uint) -> (rawptr, Allocator_Error) {
	bytes, err := buddy_allocator_alloc_bytes_non_zeroed(b, size)
	return raw_data(bytes), err
}

/*
Allocate non-initialized memory from a buddy allocator.

This procedure allocates `size` bytes of memory, aligned to the allocator's
block alignment. The allocated memory region is not explicitly
zero-initialized. This procedure returns a slice of the allocated memory
region.
*/
@(require_results)
buddy_allocator_alloc_bytes_non_zeroed :: proc(b: ^Buddy_Allocator, size: uint) -> ([]byte, Allocator_Error) {
	if size != 0 {
		actual_size := buddy_block_size_required(b, size)
		found := buddy_block_find_best(b.head, b.tail, actual_size)
		if found == nil {
			// Try to coalesce all the free buddy blocks and then search again
			buddy_block_coalescence(b.head, b.tail)
			found = buddy_block_find_best(b.head, b.tail, actual_size)
		}
		if found == nil {
			return nil, .Out_Of_Memory
		}
		found.is_free = false
		data := ([^]byte)(found)[b.alignment:][:size]
		return data, nil
	}
	return nil, nil
}
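
/*
A minimal usage sketch (illustrative; assumes this package is imported as
`mem`, a power-of-two-sized and suitably aligned backing buffer, and a 64-bit
target):

Example:
	buddy: mem.Buddy_Allocator
	backing: [4096]byte // power-of-two length
	mem.buddy_allocator_init(&buddy, backing[:], 16)

	ptr, _ := mem.buddy_allocator_alloc(&buddy, 100) // served from a 128-byte block
	_ = mem.buddy_allocator_free(&buddy, ptr)        // marks the block free and coalesces
*/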

/*
Free memory to the buddy allocator.

This procedure frees the memory region allocated at pointer `ptr`, marking its
block free and coalescing neighboring free buddies. If `ptr` is `nil`, this
operation is a no-op. If `ptr` lies outside the allocator's backing buffer,
`.Invalid_Pointer` is returned.
*/
buddy_allocator_free :: proc(b: ^Buddy_Allocator, ptr: rawptr) -> Allocator_Error {
	if ptr != nil {
		if !(b.head <= ptr && ptr <= b.tail) {
			return .Invalid_Pointer
		}
		block := (^Buddy_Block)(([^]byte)(ptr)[-b.alignment:])
		block.is_free = true
		buddy_block_coalescence(b.head, b.tail)
	}
	return nil
}

/*
Free all memory to the buddy allocator.
*/
buddy_allocator_free_all :: proc(b: ^Buddy_Allocator) {
	alignment := b.alignment
	head := ([^]byte)(b.head)
	tail := ([^]byte)(b.tail)
	data := head[:ptr_sub(tail, head)]
	buddy_allocator_init(b, data, alignment)
}

buddy_allocator_proc :: proc(
	allocator_data: rawptr,
	mode: Allocator_Mode,
	size, alignment: int,
	old_memory: rawptr,
	old_size: int,
	loc := #caller_location,
) -> ([]byte, Allocator_Error) {
	b := (^Buddy_Allocator)(allocator_data)
	switch mode {
	case .Alloc:
		return buddy_allocator_alloc_bytes(b, uint(size))
	case .Alloc_Non_Zeroed:
		return buddy_allocator_alloc_bytes_non_zeroed(b, uint(size))
	case .Resize:
		return default_resize_bytes_align(byte_slice(old_memory, old_size), size, alignment, buddy_allocator(b), loc)
	case .Resize_Non_Zeroed:
		return default_resize_bytes_align_non_zeroed(byte_slice(old_memory, old_size), size, alignment, buddy_allocator(b), loc)
	case .Free:
		return nil, buddy_allocator_free(b, old_memory)
	case .Free_All:
		buddy_allocator_free_all(b)
	case .Query_Features:
		set := (^Allocator_Mode_Set)(old_memory)
		if set != nil {
			set^ = {.Query_Features, .Alloc, .Alloc_Non_Zeroed, .Resize, .Resize_Non_Zeroed, .Free, .Free_All, .Query_Info}
		}
		return nil, nil
	case .Query_Info:
		info := (^Allocator_Query_Info)(old_memory)
		if info != nil && info.pointer != nil {
			ptr := info.pointer
			if !(b.head <= ptr && ptr <= b.tail) {
				return nil, .Invalid_Pointer
			}
			block := (^Buddy_Block)(([^]byte)(ptr)[-b.alignment:])
			info.size = int(block.size)
			info.alignment = int(b.alignment)
			return byte_slice(info, size_of(info^)), nil
		}
		return nil, nil
	}
	return nil, nil
}
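
/*
A sketch of using the buddy allocator through the generic `Allocator`
interface (illustrative; assumes this package is imported as `mem` and the
same backing-buffer caveats as above):

Example:
	buddy: mem.Buddy_Allocator
	backing: [8192]byte
	mem.buddy_allocator_init(&buddy, backing[:], 16)

	context.allocator = mem.buddy_allocator(&buddy)
	values := make([]f32, 64) // allocated from the buddy-managed buffer
	delete(values)            // routed to `buddy_allocator_free`
*/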

// An allocator that keeps track of allocation sizes and passes them along to resizes.
// This is useful if you are using a library that needs an equivalent of `realloc` but you want to
// use the Odin allocator interface.
//
// You want to wrap your allocator in this one if you are trying to use any allocator that relies
// on the old size to work.
//
// The overhead of this allocator is an extra max(alignment, size_of(Header)) bytes per allocation;
// these bytes are used to store the allocation's size and original pointer.
Compat_Allocator :: struct {
	parent: Allocator,
}

compat_allocator_init :: proc(rra: ^Compat_Allocator, allocator := context.allocator) {
	rra.parent = allocator
}

compat_allocator :: proc(rra: ^Compat_Allocator) -> Allocator {
	return Allocator{
		data = rra,
		procedure = compat_allocator_proc,
	}
}
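
/*
A minimal usage sketch (illustrative; assumes this package is imported as
`mem`): wrapping the default allocator so that frees and resizes always see a
correct old size, recovered from the hidden header, even when the caller
passes `old_size = 0`.

Example:
	compat: mem.Compat_Allocator
	mem.compat_allocator_init(&compat, context.allocator)

	context.allocator = mem.compat_allocator(&compat)
	// realloc-style code that does not track old sizes now works, because
	// the wrapper reads the stored size from the header it placed before
	// each allocation.
*/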

compat_allocator_proc :: proc(allocator_data: rawptr, mode: Allocator_Mode,
                              size, alignment: int,
                              old_memory: rawptr, old_size: int,
                              location := #caller_location) -> (data: []byte, err: Allocator_Error) {
	size, old_size := size, old_size
	Header :: struct {
		size: int,
		ptr:  rawptr,
	}
	rra := (^Compat_Allocator)(allocator_data)
	switch mode {
	case .Alloc, .Alloc_Non_Zeroed:
		a := max(alignment, size_of(Header))
		size += a
		assert(size >= 0, "overflow")
		allocation := rra.parent.procedure(rra.parent.data, mode, size, alignment, old_memory, old_size, location) or_return
		#no_bounds_check data = allocation[a:]
		([^]Header)(raw_data(data))[-1] = {
			size = size,
			ptr  = raw_data(allocation),
		}
		return
	case .Free:
		header := ([^]Header)(old_memory)[-1]
		return rra.parent.procedure(rra.parent.data, mode, size, alignment, header.ptr, header.size, location)
	case .Resize, .Resize_Non_Zeroed:
		header := ([^]Header)(old_memory)[-1]
		a := max(alignment, size_of(Header))
		size += a
		assert(size >= 0, "overflow")
		allocation := rra.parent.procedure(rra.parent.data, mode, size, alignment, header.ptr, header.size, location) or_return
		#no_bounds_check data = allocation[a:]
		([^]Header)(raw_data(data))[-1] = {
			size = size,
			ptr  = raw_data(allocation),
		}
		return
	case .Free_All, .Query_Info, .Query_Features:
		return rra.parent.procedure(rra.parent.data, mode, size, alignment, old_memory, old_size, location)
	case: unreachable()
	}
}