// Allocator.cpp
// Copyright (c) 2008-2023 the Urho3D project
// License: MIT

#include "../Precompiled.h"

#include "../DebugNew.h"

namespace Urho3D
{
  7. static AllocatorBlock* AllocatorReserveBlock(AllocatorBlock* allocator, i32 nodeSize, i32 capacity)
  8. {
  9. assert(nodeSize > 0 && capacity > 0);
  10. u8* blockPtr = new u8[sizeof(AllocatorBlock) + capacity * (sizeof(AllocatorNode) + nodeSize)];
  11. AllocatorBlock* newBlock = reinterpret_cast<AllocatorBlock*>(blockPtr);
  12. newBlock->nodeSize_ = nodeSize;
  13. newBlock->capacity_ = capacity;
  14. newBlock->free_ = nullptr;
  15. newBlock->next_ = nullptr;
  16. if (!allocator)
  17. {
  18. allocator = newBlock;
  19. }
  20. else
  21. {
  22. newBlock->next_ = allocator->next_;
  23. allocator->next_ = newBlock;
  24. }
  25. // Initialize the nodes. Free nodes are always chained to the first (parent) allocator
  26. u8* nodePtr = blockPtr + sizeof(AllocatorBlock);
  27. AllocatorNode* firstNewNode = reinterpret_cast<AllocatorNode*>(nodePtr);
  28. for (i32 i = 0; i < capacity - 1; ++i)
  29. {
  30. AllocatorNode* newNode = reinterpret_cast<AllocatorNode*>(nodePtr);
  31. newNode->next_ = reinterpret_cast<AllocatorNode*>(nodePtr + sizeof(AllocatorNode) + nodeSize);
  32. nodePtr += sizeof(AllocatorNode) + nodeSize;
  33. }
  34. // i == capacity - 1
  35. {
  36. AllocatorNode* newNode = reinterpret_cast<AllocatorNode*>(nodePtr);
  37. newNode->next_ = nullptr;
  38. }
  39. allocator->free_ = firstNewNode;
  40. return newBlock;
  41. }
  42. AllocatorBlock* AllocatorInitialize(i32 nodeSize, i32 initialCapacity)
  43. {
  44. return AllocatorReserveBlock(nullptr, nodeSize, initialCapacity);
  45. }
  46. void AllocatorUninitialize(AllocatorBlock* allocator)
  47. {
  48. while (allocator)
  49. {
  50. AllocatorBlock* next = allocator->next_;
  51. delete[] reinterpret_cast<u8*>(allocator);
  52. allocator = next;
  53. }
  54. }
  55. void* AllocatorReserve(AllocatorBlock* allocator)
  56. {
  57. if (!allocator)
  58. return nullptr;
  59. if (!allocator->free_)
  60. {
  61. // Free nodes have been exhausted. Allocate a new larger block
  62. i32 newCapacity = (allocator->capacity_ + 1) >> 1u; // * 0.5 and round up
  63. AllocatorReserveBlock(allocator, allocator->nodeSize_, newCapacity);
  64. allocator->capacity_ += newCapacity;
  65. }
  66. // We should have new free node(s) chained
  67. AllocatorNode* freeNode = allocator->free_;
  68. void* ptr = reinterpret_cast<u8*>(freeNode) + sizeof(AllocatorNode);
  69. allocator->free_ = freeNode->next_;
  70. freeNode->next_ = nullptr;
  71. return ptr;
  72. }
  73. void AllocatorFree(AllocatorBlock* allocator, void* ptr)
  74. {
  75. if (!allocator || !ptr)
  76. return;
  77. u8* dataPtr = static_cast<u8*>(ptr);
  78. AllocatorNode* node = reinterpret_cast<AllocatorNode*>(dataPtr - sizeof(AllocatorNode));
  79. // Chain the node back to free nodes
  80. node->next_ = allocator->free_;
  81. allocator->free_ = node;
  82. }
  83. }