// Copyright (C) 2014, Panagiotis Christopoulos Charitos.
// All rights reserved.
// Code licensed under the BSD License.
// http://www.anki3d.org/LICENSE

#include "anki/util/Memory.h"
#include "anki/util/Exception.h"
#include "anki/util/Functions.h"
#include "anki/util/Assert.h"
#include "anki/util/NonCopyable.h"
#include "anki/util/Thread.h"
#include "anki/util/Vector.h"
#include <cstdlib>
#include <cstring>
#include <atomic>    // std::atomic
#include <algorithm> // std::min, std::max
#include <new>       // placement new

namespace anki {

//==============================================================================
// Other                                                                       =
//==============================================================================

//==============================================================================
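/// Allocate aligned memory using the native facility of each platform:
/// posix_memalign() on POSIX systems, memalign() on Android and
/// _aligned_malloc() on Windows. Returns nullptr on failure.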
void* mallocAligned(PtrSize size, PtrSize alignmentBytes) throw()
{
#if ANKI_POSIX
#    if ANKI_OS != ANKI_OS_ANDROID
    void* out;
    int err = posix_memalign(
        &out, getAlignedRoundUp(alignmentBytes, sizeof(void*)), size);

    if(!err)
    {
        // Make sure it's aligned
        ANKI_ASSERT(isAligned(alignmentBytes, out));
        return out;
    }
    else
    {
        ANKI_ASSERT(0 && "mallocAligned() failed");
        return nullptr;
    }
#    else
    void* out = memalign(
        getAlignedRoundUp(alignmentBytes, sizeof(void*)), size);

    if(out)
    {
        // Make sure it's aligned
        ANKI_ASSERT(isAligned(alignmentBytes, out));
        return out;
    }
    else
    {
        ANKI_ASSERT(0 && "mallocAligned() failed");
        return nullptr;
    }
#    endif
#elif ANKI_OS == ANKI_OS_WINDOWS
    void* out = _aligned_malloc(size, alignmentBytes);

    if(out)
    {
        // Make sure it's aligned
        ANKI_ASSERT(isAligned(alignmentBytes, out));
    }

    return out;
#else
#    error "Unimplemented"
#endif
}

//==============================================================================
void freeAligned(void* ptr) throw()
{
#if ANKI_POSIX
    ::free(ptr);
#elif ANKI_OS == ANKI_OS_WINDOWS
    _aligned_free(ptr);
#else
#    error "Unimplemented"
#endif
}

//==============================================================================
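/// Default allocation callback: a single entry point that both allocates and
/// frees. When ptr is nullptr it allocates size bytes with the given
/// alignment; otherwise it frees ptr (size and alignment are expected to be
/// zero in that case).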
void* allocAligned(
    void* userData, void* ptr, PtrSize size, PtrSize alignment) throw()
{
    (void)userData;
    void* out;

    if(ptr == nullptr)
    {
        // Allocate
        ANKI_ASSERT(size > 0);
        out = mallocAligned(size, alignment);
    }
    else
    {
        // Deallocate
        ANKI_ASSERT(size == 0);
        ANKI_ASSERT(alignment == 0);

        freeAligned(ptr);
        out = nullptr;
    }

    return out;
}

//==============================================================================
// HeapMemoryPool                                                              =
//==============================================================================

//==============================================================================
/// The hidden implementation of HeapMemoryPool
class HeapMemoryPool::Implementation: public NonCopyable
{
public:
    std::atomic<U32> m_refcount;
    std::atomic<U32> m_allocationsCount;
    AllocAlignedCallback m_alloc;
    void* m_allocUserData;
};

//==============================================================================
HeapMemoryPool::HeapMemoryPool(
    AllocAlignedCallback alloc, void* allocUserData)
{
    ANKI_ASSERT(alloc != nullptr);

    m_impl = (Implementation*)alloc(allocUserData, nullptr,
        sizeof(Implementation), alignof(Implementation));

    m_impl->m_refcount = 1;
    m_impl->m_allocationsCount = 0;
    m_impl->m_alloc = alloc;
    m_impl->m_allocUserData = allocUserData;
}

//==============================================================================
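// Note: Copies of the pool share the same refcounted Implementation.
// Assignment first drops the current reference (clear()) and then bumps the
// refcount of the other pool's Implementation; the last owner releases it
// through the stored allocation callback.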
HeapMemoryPool& HeapMemoryPool::operator=(const HeapMemoryPool& other)
{
    clear();

    if(other.m_impl)
    {
        m_impl = other.m_impl;
        ++m_impl->m_refcount;
    }

    return *this;
}

//==============================================================================
void HeapMemoryPool::clear()
{
    if(m_impl)
    {
        U32 refcount = --m_impl->m_refcount;

        if(refcount == 0)
        {
            auto alloc = m_impl->m_alloc;
            auto ud = m_impl->m_allocUserData;
            ANKI_ASSERT(alloc);
            alloc(ud, m_impl, 0, 0);
        }

        m_impl = nullptr;
    }
}

//==============================================================================
void* HeapMemoryPool::allocate(PtrSize size, PtrSize alignment) throw()
{
    ANKI_ASSERT(m_impl != nullptr);

    ++m_impl->m_allocationsCount;
    return m_impl->m_alloc(m_impl->m_allocUserData, nullptr, size, alignment);
}

//==============================================================================
Bool HeapMemoryPool::free(void* ptr) throw()
{
    ANKI_ASSERT(m_impl != nullptr);

    --m_impl->m_allocationsCount;
    m_impl->m_alloc(m_impl->m_allocUserData, ptr, 0, 0);

    return true;
}

//==============================================================================
U32 HeapMemoryPool::getAllocationsCount() const
{
    ANKI_ASSERT(m_impl != nullptr);
    return m_impl->m_allocationsCount.load();
}

//==============================================================================
// StackMemoryPool                                                             =
//==============================================================================

//==============================================================================
/// The hidden implementation of StackMemoryPool
class StackMemoryPool::Implementation: public NonCopyable
{
public:
    /// The header of each allocation
    class MemoryBlockHeader
    {
    public:
        U8 m_size[sizeof(U32)]; ///< It's U8 to allow whatever alignment
    };

    static_assert(alignof(MemoryBlockHeader) == 1, "Alignment error");
    static_assert(sizeof(MemoryBlockHeader) == sizeof(U32), "Size error");

    /// Refcount
    std::atomic<U32> m_refcount = {1};

    /// User allocation function
    AllocAlignedCallback m_alloc;

    /// User allocation function data
    void* m_allocUserData;

    /// Alignment of allocations
    PtrSize m_alignmentBytes;

    /// Aligned size of MemoryBlockHeader
    PtrSize m_headerSize;

    /// Pre-allocated memory chunk
    U8* m_memory = nullptr;

    /// Size of the pre-allocated memory chunk
    PtrSize m_memsize = 0;

    /// Points to the memory and more specifically to the top of the stack
    std::atomic<U8*> m_top = {nullptr};

    // Construct
    Implementation(AllocAlignedCallback alloc, void* allocUserData,
        PtrSize size, PtrSize alignmentBytes)
        : m_alloc(alloc),
          m_allocUserData(allocUserData),
          m_alignmentBytes(alignmentBytes),
          m_memsize(getAlignedRoundUp(alignmentBytes, size))
    {
        ANKI_ASSERT(m_alloc);
        ANKI_ASSERT(m_memsize > 0);
        ANKI_ASSERT(m_alignmentBytes > 0);

        m_memory = (U8*)m_alloc(
            m_allocUserData, nullptr, m_memsize, m_alignmentBytes);

        if(m_memory != nullptr)
        {
#if ANKI_DEBUG
            // Invalidate the memory
            memset(m_memory, 0xCC, m_memsize);
#endif

            // Align allocated memory
            m_top = m_memory;

            // Calc header size
            m_headerSize =
                getAlignedRoundUp(m_alignmentBytes, sizeof(MemoryBlockHeader));
        }
        else
        {
            throw ANKI_EXCEPTION("Failed to allocate memory");
        }
    }

    // Destroy
    ~Implementation()
    {
        if(m_memory != nullptr)
        {
            m_alloc(m_allocUserData, m_memory, 0, 0);
        }
    }

    PtrSize getTotalSize() const
    {
        return m_memsize;
    }

    PtrSize getAllocatedSize() const
    {
        ANKI_ASSERT(m_memory != nullptr);
        return m_top.load() - m_memory;
    }

    const void* getBaseAddress() const
    {
        ANKI_ASSERT(m_memory != nullptr);
        return m_memory;
    }
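
    // Note: Allocation is a lock-free bump of m_top (fetch_add). Each block is
    // laid out as [MemoryBlockHeader | user memory], with the header size
    // rounded up to m_alignmentBytes so the returned pointer stays aligned.
    // The header stores the block's total size so that free() can try to pop
    // the block off the top of the stack.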

    /// Allocate
    void* allocate(PtrSize size, PtrSize alignment) throw()
    {
        ANKI_ASSERT(m_memory != nullptr);
        ANKI_ASSERT(alignment <= m_alignmentBytes);
        (void)alignment;

        size = getAlignedRoundUp(m_alignmentBytes, size + m_headerSize);
        ANKI_ASSERT(size < MAX_U32 && "Too big allocation");

        U8* out = m_top.fetch_add(size);

        if(out + size <= m_memory + m_memsize)
        {
#if ANKI_DEBUG
            // Invalidate the block
            memset(out, 0xCC, size);
#endif

            // Write the block header
            MemoryBlockHeader* header = (MemoryBlockHeader*)out;
            U32 size32 = size;
            memcpy(&header->m_size[0], &size32, sizeof(U32));

            // Set the correct output
            out += m_headerSize;

            // Check alignment
            ANKI_ASSERT(isAligned(m_alignmentBytes, out));
        }
        else
        {
            // Error
            out = nullptr;
        }

        return out;
    }
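
    // Note: Only the most recently allocated block can actually be released.
    // The compare_exchange below succeeds (and moves m_top back) only when ptr
    // is the block at the top of the stack; otherwise the memory stays in use
    // until reset() and the function returns false.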

    /// Free
    Bool free(void* ptr) throw()
    {
        // ptr shouldn't be null or not aligned. If not aligned it was not
        // allocated by this class
        ANKI_ASSERT(ptr != nullptr && isAligned(m_alignmentBytes, ptr));

        // memory is nullptr if moved
        ANKI_ASSERT(m_memory != nullptr);

        // Correct the p
        U8* realptr = (U8*)ptr - m_headerSize;

        // realptr should be inside the pool's preallocated memory
        ANKI_ASSERT(realptr >= m_memory);

        // Get block size
        MemoryBlockHeader* header = (MemoryBlockHeader*)realptr;
        U32 size;
        memcpy(&size, &header->m_size[0], sizeof(U32));

        // Check if the size is within limits
        ANKI_ASSERT(realptr + size <= m_memory + m_memsize);

        // Atomic stuff
        U8* expected = realptr + size;
        U8* desired = realptr;

        // if(top == expected) {
        //     top = desired;
        //     exchange = true;
        // } else {
        //     expected = top;
        //     exchange = false;
        // }
        Bool exchange = m_top.compare_exchange_strong(expected, desired);

        return exchange;
    }

    /// Reset
    void reset()
    {
        // memory is nullptr if moved
        ANKI_ASSERT(m_memory != nullptr);

#if ANKI_DEBUG
        // Invalidate the memory
        memset(m_memory, 0xCC, m_memsize);
#endif

        m_top = m_memory;
    }
};

//==============================================================================
StackMemoryPool::StackMemoryPool(
    AllocAlignedCallback alloc, void* allocUserData,
    PtrSize size, PtrSize alignmentBytes)
{
    m_impl = (Implementation*)alloc(allocUserData, nullptr,
        sizeof(Implementation), alignof(Implementation));

    ::new((void*)m_impl) Implementation(
        alloc, allocUserData, size, alignmentBytes);
}

//==============================================================================
StackMemoryPool& StackMemoryPool::operator=(const StackMemoryPool& other)
{
    clear();

    if(other.m_impl)
    {
        m_impl = other.m_impl;
        ++m_impl->m_refcount;
    }

    return *this;
}

//==============================================================================
void StackMemoryPool::clear()
{
    if(m_impl)
    {
        U32 refcount = --m_impl->m_refcount;

        if(refcount == 0)
        {
            auto alloc = m_impl->m_alloc;
            auto ud = m_impl->m_allocUserData;
            ANKI_ASSERT(alloc);

            m_impl->~Implementation();
            alloc(ud, m_impl, 0, 0);
        }

        m_impl = nullptr;
    }
}

//==============================================================================
PtrSize StackMemoryPool::getTotalSize() const
{
    ANKI_ASSERT(m_impl != nullptr);
    return m_impl->getTotalSize();
}

//==============================================================================
PtrSize StackMemoryPool::getAllocatedSize() const
{
    ANKI_ASSERT(m_impl != nullptr);
    return m_impl->getAllocatedSize();
}

//==============================================================================
void* StackMemoryPool::allocate(PtrSize size, PtrSize alignment) throw()
{
    ANKI_ASSERT(m_impl != nullptr);
    return m_impl->allocate(size, alignment);
}

//==============================================================================
Bool StackMemoryPool::free(void* ptr) throw()
{
    ANKI_ASSERT(m_impl != nullptr);
    return m_impl->free(ptr);
}

//==============================================================================
void StackMemoryPool::reset()
{
    ANKI_ASSERT(m_impl != nullptr);
    m_impl->reset();
}

//==============================================================================
U32 StackMemoryPool::getUsersCount() const
{
    ANKI_ASSERT(m_impl != nullptr);
    return m_impl->m_refcount.load();
}

//==============================================================================
// ChainMemoryPool                                                             =
//==============================================================================

//==============================================================================
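// Note: The chain pool keeps a singly linked list of chunks, each one a
// private StackMemoryPool::Implementation. Allocations are served from the
// tail chunk; when it has no room a new chunk is created, sized according to
// the configured step method (FIXED, ADD or MULTIPLY). A spin lock serializes
// allocate() and free().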
/// The hidden implementation of ChainMemoryPool
class ChainMemoryPool::Implementation: public NonCopyable
{
public:
    /// A chunk of memory
    class Chunk
    {
    public:
        StackMemoryPool::Implementation m_pool;

        /// Used to identify if the chunk can be deleted
        U32 m_allocationsCount = 0;

        /// Next chunk in the list
        Chunk* m_next = nullptr;

        Chunk(AllocAlignedCallback alloc, void* allocUserData,
            PtrSize size, PtrSize alignmentBytes)
            : m_pool(alloc, allocUserData, size, alignmentBytes)
        {}
    };

    /// Refcount
    std::atomic<U32> m_refcount = {1};

    /// User allocation function
    AllocAlignedCallback m_alloc;

    /// User allocation function data
    void* m_allocUserData;

    /// Alignment of allocations
    PtrSize m_alignmentBytes;

    /// The first chunk
    Chunk* m_headChunk = nullptr;

    /// Current chunk to allocate from
    Chunk* m_tailChunk = nullptr;

    /// Fast thread locking
    SpinLock m_lock;

    /// Size of the first chunk
    PtrSize m_initSize;

    /// Chunk max size
    PtrSize m_maxSize;

    /// Chunk allocation method value
    U32 m_step;

    /// Chunk allocation method
    U8 m_method;

    /// Construct
    Implementation(
        AllocAlignedCallback alloc,
        void* allocUserData,
        PtrSize initialChunkSize,
        PtrSize maxChunkSize,
        ChunkAllocationStepMethod chunkAllocStepMethod,
        PtrSize chunkAllocStep,
        PtrSize alignmentBytes)
        : m_alloc(alloc),
          m_allocUserData(allocUserData),
          m_alignmentBytes(alignmentBytes),
          m_initSize(initialChunkSize),
          m_maxSize(maxChunkSize),
          m_step((U32)chunkAllocStep),
          m_method(chunkAllocStepMethod)
    {
        ANKI_ASSERT(m_alloc);

        // Initial size should be > 0
        ANKI_ASSERT(m_initSize > 0);

        // On FIXED the step should be 0
        if(m_method == FIXED)
        {
            ANKI_ASSERT(m_step == 0);
        }

        // On FIXED the initial size is the same as the max
        if(m_method == FIXED)
        {
            ANKI_ASSERT(m_initSize == m_maxSize);
        }

        // On ADD and MULTIPLY the max size should be greater than the initial
        if(m_method == ADD || m_method == MULTIPLY)
        {
            ANKI_ASSERT(m_initSize < m_maxSize);
        }
    }

    /// Destroy
    ~Implementation()
    {
        Chunk* ch = m_headChunk;
        while(ch)
        {
            Chunk* next = ch->m_next;

            ch->~Chunk();
            m_alloc(m_allocUserData, ch, 0, 0);

            ch = next;
        }
    }
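
    // Note: The size of a new chunk starts from the previous chunk's size and
    // grows by m_step (added or multiplied, depending on the method), clamped
    // to m_maxSize; extra room is reserved for the stack pool's block header
    // and alignment padding.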

    /// Create a new chunk
    Chunk* createNewChunk(PtrSize size) throw()
    {
        //
        // Calculate the preferred size
        //

        // Get the size of the next chunk
        PtrSize crntMaxSize;
        if(m_method == FIXED)
        {
            crntMaxSize = m_initSize;
        }
        else
        {
            // Get the size of the previous max chunk
            if(m_tailChunk != nullptr)
            {
                // Get the size of previous
                crntMaxSize = m_tailChunk->m_pool.getTotalSize();

                // Increase it
                if(m_method == MULTIPLY)
                {
                    crntMaxSize *= m_step;
                }
                else
                {
                    ANKI_ASSERT(m_method == ADD);
                    crntMaxSize += m_step;
                }
            }
            else
            {
                // No chunks. Choose initial size
                ANKI_ASSERT(m_headChunk == nullptr);
                crntMaxSize = m_initSize;
            }

            ANKI_ASSERT(crntMaxSize > 0);

            // Fix the size
            crntMaxSize = std::min(crntMaxSize, (PtrSize)m_maxSize);
        }

        size = std::max(crntMaxSize, size)
            + sizeof(StackMemoryPool::Implementation::MemoryBlockHeader)
            + m_alignmentBytes;

        ANKI_ASSERT(size <= m_maxSize && "Too big chunk");

        //
        // Create the chunk
        //
        Chunk* chunk = (Chunk*)m_alloc(
            m_allocUserData, nullptr, sizeof(Chunk), alignof(Chunk));

        if(chunk)
        {
            // Construct it
            ::new((void*)chunk) Chunk(
                m_alloc, m_allocUserData, size, m_alignmentBytes);

            // Register it
            if(m_tailChunk)
            {
                m_tailChunk->m_next = chunk;
                m_tailChunk = chunk;
            }
            else
            {
                ANKI_ASSERT(m_headChunk == nullptr);
                m_headChunk = m_tailChunk = chunk;
            }
        }

        return chunk;
    }

    /// Allocate from chunk
    void* allocateFromChunk(Chunk* ch, PtrSize size, PtrSize alignment) throw()
    {
        ANKI_ASSERT(ch);
        ANKI_ASSERT(size <= m_maxSize);

        void* mem = ch->m_pool.allocate(size, alignment);

        if(mem)
        {
            ++ch->m_allocationsCount;
        }

        return mem;
    }

    /// Allocate memory
    void* allocate(PtrSize size, PtrSize alignment) throw()
    {
        Chunk* ch;
        void* mem = nullptr;

        m_lock.lock();

        // Get chunk
        ch = m_tailChunk;

        // Create new chunk if needed
        if(ch == nullptr
            || (mem = allocateFromChunk(ch, size, alignment)) == nullptr)
        {
            // Create new chunk
            ch = createNewChunk(size);

            // Chunk creation failed
            if(ch == nullptr)
            {
                m_lock.unlock();
                return mem;
            }
        }

        if(mem == nullptr)
        {
            mem = allocateFromChunk(ch, size, alignment);
            ANKI_ASSERT(mem != nullptr && "The chunk should have space");
        }

        m_lock.unlock();
        return mem;
    }
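
    // Note: free() first finds the chunk whose address range contains ptr and
    // then decrements that chunk's allocation count. When the count reaches
    // zero the whole chunk is unlinked and released; individual blocks inside
    // a chunk are never returned on their own.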

    /// Free memory
    Bool free(void* ptr) throw()
    {
        m_lock.lock();

        // Get the chunk that ptr belongs to
        Chunk* chunk = m_headChunk;
        Chunk* prevChunk = nullptr;
        while(chunk)
        {
            const U8* from = (const U8*)chunk->m_pool.getBaseAddress();
            const U8* to = from + chunk->m_pool.getTotalSize();
            const U8* cptr = (const U8*)ptr;

            if(cptr >= from && cptr < to)
            {
                break;
            }

            prevChunk = chunk;
            chunk = chunk->m_next;
        }

        ANKI_ASSERT(chunk != nullptr
            && "Not initialized or ptr is incorrect");

        // Decrease the allocations count and if it's zero delete the chunk
        ANKI_ASSERT(chunk->m_allocationsCount > 0);
        if(--chunk->m_allocationsCount == 0)
        {
            // Chunk is empty. Delete it
            if(prevChunk != nullptr)
            {
                ANKI_ASSERT(m_headChunk != chunk);
                prevChunk->m_next = chunk->m_next;
            }

            if(chunk == m_headChunk)
            {
                ANKI_ASSERT(prevChunk == nullptr);
                m_headChunk = chunk->m_next;
            }

            if(chunk == m_tailChunk)
            {
                m_tailChunk = prevChunk;
            }

            // Finally delete it
            chunk->~Chunk();
            m_alloc(m_allocUserData, chunk, 0, 0);
        }

        m_lock.unlock();
        return true;
    }

    PtrSize getAllocatedSize() const
    {
        PtrSize sum = 0;
        Chunk* ch = m_headChunk;
        while(ch)
        {
            sum += ch->m_pool.getAllocatedSize();
            ch = ch->m_next;
        }

        return sum;
    }

    PtrSize getChunksCount() const
    {
        PtrSize count = 0;
        Chunk* ch = m_headChunk;
        while(ch)
        {
            ++count;
            ch = ch->m_next;
        }

        return count;
    }
};

//==============================================================================
ChainMemoryPool::ChainMemoryPool(
    AllocAlignedCallback alloc,
    void* allocUserData,
    PtrSize initialChunkSize,
    PtrSize maxChunkSize,
    ChunkAllocationStepMethod chunkAllocStepMethod,
    PtrSize chunkAllocStep,
    PtrSize alignmentBytes)
{
    m_impl = (Implementation*)alloc(allocUserData, nullptr,
        sizeof(Implementation), alignof(Implementation));

    ::new((void*)m_impl) Implementation(
        alloc, allocUserData,
        initialChunkSize, maxChunkSize, chunkAllocStepMethod, chunkAllocStep,
        alignmentBytes);
}

//==============================================================================
ChainMemoryPool& ChainMemoryPool::operator=(const ChainMemoryPool& other)
{
    clear();

    if(other.m_impl)
    {
        m_impl = other.m_impl;
        ++m_impl->m_refcount;
    }

    return *this;
}

//==============================================================================
void ChainMemoryPool::clear()
{
    if(m_impl)
    {
        U32 refcount = --m_impl->m_refcount;

        if(refcount == 0)
        {
            auto alloc = m_impl->m_alloc;
            auto ud = m_impl->m_allocUserData;
            ANKI_ASSERT(alloc);

            // Destroy the implementation first so that its chunks get
            // released, then free its memory through the stored callback
            m_impl->~Implementation();
            alloc(ud, m_impl, 0, 0);
        }

        m_impl = nullptr;
    }
}

//==============================================================================
void* ChainMemoryPool::allocate(PtrSize size, PtrSize alignment) throw()
{
    ANKI_ASSERT(m_impl != nullptr);
    return m_impl->allocate(size, alignment);
}

//==============================================================================
Bool ChainMemoryPool::free(void* ptr) throw()
{
    ANKI_ASSERT(m_impl != nullptr);
    return m_impl->free(ptr);
}

//==============================================================================
PtrSize ChainMemoryPool::getChunksCount() const
{
    ANKI_ASSERT(m_impl != nullptr);
    return m_impl->getChunksCount();
}

//==============================================================================
PtrSize ChainMemoryPool::getAllocatedSize() const
{
    ANKI_ASSERT(m_impl != nullptr);
    return m_impl->getAllocatedSize();
}

//==============================================================================
U32 ChainMemoryPool::getUsersCount() const
{
    ANKI_ASSERT(m_impl != nullptr);
    return m_impl->m_refcount.load();
}
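
// Illustrative usage sketch (not part of the original file). It assumes the
// pool classes and the default allocAligned() callback are declared in
// anki/util/Memory.h as they are used above; the variable names below are
// hypothetical.
//
//   HeapMemoryPool heapPool(allocAligned, nullptr);
//   void* p = heapPool.allocate(256, 16);
//   heapPool.free(p);
//
//   // 1MB stack pool with 16-byte alignment; reset() releases everything
//   StackMemoryPool stackPool(allocAligned, nullptr, 1024 * 1024, 16);
//   void* q = stackPool.allocate(128, 16);
//   stackPool.reset();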

} // end namespace anki