GpuMemoryPools.cpp

// Copyright (C) 2009-2022, Panagiotis Christopoulos Charitos and contributors.
// All rights reserved.
// Code licensed under the BSD License.
// http://www.anki3d.org/LICENSE

#include <AnKi/Core/GpuMemoryPools.h>
#include <AnKi/Core/ConfigSet.h>
#include <AnKi/Gr/GrManager.h>
#include <AnKi/Util/Tracer.h>

namespace anki {

UnifiedGeometryMemoryPool::~UnifiedGeometryMemoryPool()
{
	// Do nothing
}

Error UnifiedGeometryMemoryPool::init(HeapMemoryPool* pool, GrManager* gr, const ConfigSet& cfg)
{
	ANKI_ASSERT(pool && gr);
	m_gr = gr;

	// Create the GPU buffer.
	BufferInitInfo bufferInit("Global vertex & index");
	bufferInit.m_size = cfg.getCoreGlobalVertexMemorySize();
	if(!isPowerOfTwo(bufferInit.m_size))
	{
		ANKI_CORE_LOGE("core_globalVertexMemorySize should be a power of two (because of the buddy allocator)");
		return Error::kUserData;
	}

	bufferInit.m_usage = BufferUsageBit::kVertex | BufferUsageBit::kIndex | BufferUsageBit::kTransferDestination;
	if(gr->getDeviceCapabilities().m_rayTracingEnabled)
	{
		bufferInit.m_usage |= BufferUsageBit::kAccelerationStructureBuild;
	}

	m_vertBuffer = gr->newBuffer(bufferInit);

	// Init the buddy allocator. The size is a power of two, so counting its trailing zeros gives log2(size)
	m_buddyAllocator.init(pool, __builtin_ctzll(bufferInit.m_size));

	return Error::kNone;
}

Error UnifiedGeometryMemoryPool::allocate(PtrSize size, U32 alignment, PtrSize& offset)
{
	U32 offset32;
	const Bool success = m_buddyAllocator.allocate(size, alignment, offset32);
	if(ANKI_UNLIKELY(!success))
	{
		BuddyAllocatorBuilderStats stats;
		m_buddyAllocator.getStats(stats);
		ANKI_CORE_LOGE("Failed to allocate vertex memory of size %zu. The allocator has %zu allocated "
					   "(%zu requested by users) out of %zu",
					   size, stats.m_realAllocatedSize, stats.m_userAllocatedSize, m_vertBuffer->getSize());
		return Error::kOutOfMemory;
	}

	offset = offset32;
	return Error::kNone;
}

void UnifiedGeometryMemoryPool::free(PtrSize size, U32 alignment, PtrSize offset)
{
	m_buddyAllocator.free(U32(offset), size, alignment);
}
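
// Illustrative sketch (not part of the original file): typical use of the unified geometry pool from the
// caller's side. The names heapPool, grManager, cfg and vertexDataSize are assumptions, not AnKi API.
//
//   UnifiedGeometryMemoryPool geomPool;
//   ANKI_CHECK(geomPool.init(&heapPool, grManager, cfg));
//
//   // Sub-allocate space for some vertex data inside the single global GPU buffer
//   PtrSize vertOffset;
//   ANKI_CHECK(geomPool.allocate(vertexDataSize, 4 /* alignment */, vertOffset));
//   // ...upload the data at vertOffset and bind the global buffer with that offset when drawing...
//
//   // Size and alignment must match the original allocation so the buddy allocator can locate the block
//   geomPool.free(vertexDataSize, 4, vertOffset);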

StagingGpuMemoryPool::~StagingGpuMemoryPool()
{
	m_gr->finish();

	for(auto& it : m_perFrameBuffers)
	{
		it.m_buff->unmap();
		it.m_buff.reset(nullptr);
	}
}

Error StagingGpuMemoryPool::init(GrManager* gr, const ConfigSet& cfg)
{
	m_gr = gr;

	m_perFrameBuffers[StagingGpuMemoryType::kUniform].m_size = cfg.getCoreUniformPerFrameMemorySize();
	m_perFrameBuffers[StagingGpuMemoryType::kStorage].m_size = cfg.getCoreStoragePerFrameMemorySize();
	m_perFrameBuffers[StagingGpuMemoryType::kVertex].m_size = cfg.getCoreVertexPerFrameMemorySize();
	m_perFrameBuffers[StagingGpuMemoryType::kTexture].m_size = cfg.getCoreTextureBufferPerFrameMemorySize();

	initBuffer(StagingGpuMemoryType::kUniform, gr->getDeviceCapabilities().m_uniformBufferBindOffsetAlignment,
			   gr->getDeviceCapabilities().m_uniformBufferMaxRange, BufferUsageBit::kAllUniform, *gr);

	initBuffer(StagingGpuMemoryType::kStorage,
			   max(gr->getDeviceCapabilities().m_storageBufferBindOffsetAlignment,
				   gr->getDeviceCapabilities().m_sbtRecordAlignment),
			   gr->getDeviceCapabilities().m_storageBufferMaxRange,
			   BufferUsageBit::kAllStorage | BufferUsageBit::kShaderBindingTable, *gr);

	initBuffer(StagingGpuMemoryType::kVertex, 16, kMaxU32, BufferUsageBit::kVertex | BufferUsageBit::kIndex, *gr);

	initBuffer(StagingGpuMemoryType::kTexture, gr->getDeviceCapabilities().m_textureBufferBindOffsetAlignment,
			   gr->getDeviceCapabilities().m_textureBufferMaxRange, BufferUsageBit::kAllTexture, *gr);

	return Error::kNone;
}

void StagingGpuMemoryPool::initBuffer(StagingGpuMemoryType type, U32 alignment, PtrSize maxAllocSize,
									  BufferUsageBit usage, GrManager& gr)
{
	PerFrameBuffer& perframe = m_perFrameBuffers[type];

	perframe.m_buff = gr.newBuffer(BufferInitInfo(perframe.m_size, usage, BufferMapAccessBit::kWrite, "Staging"));
	perframe.m_alloc.init(perframe.m_size, alignment, maxAllocSize);
	perframe.m_mappedMem = static_cast<U8*>(perframe.m_buff->map(0, perframe.m_size, BufferMapAccessBit::kWrite));
}

void* StagingGpuMemoryPool::allocateFrame(PtrSize size, StagingGpuMemoryType usage, StagingGpuMemoryToken& token)
{
	PerFrameBuffer& buff = m_perFrameBuffers[usage];
	const Error err = buff.m_alloc.allocate(size, token.m_offset);
	if(err)
	{
		ANKI_CORE_LOGF("Out of staging GPU memory. Usage: %u", U32(usage));
	}

	token.m_buffer = buff.m_buff;
	token.m_range = size;
	token.m_type = usage;

	return buff.m_mappedMem + token.m_offset;
}

void* StagingGpuMemoryPool::tryAllocateFrame(PtrSize size, StagingGpuMemoryType usage, StagingGpuMemoryToken& token)
{
	PerFrameBuffer& buff = m_perFrameBuffers[usage];
	const Error err = buff.m_alloc.allocate(size, token.m_offset);
	if(!err)
	{
		token.m_buffer = buff.m_buff;
		token.m_range = size;
		token.m_type = usage;
		return buff.m_mappedMem + token.m_offset;
	}
	else
	{
		token = {};
		return nullptr;
	}
}
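
// Illustrative sketch (not part of the original file): the difference between the two entry points.
// allocateFrame() treats exhaustion as fatal (ANKI_CORE_LOGF), while tryAllocateFrame() lets the caller
// recover. The struct MyUniforms, stagingPool and hugeSize are assumptions used only for the example.
//
//   StagingGpuMemoryToken token;
//   MyUniforms* unis = static_cast<MyUniforms*>(
//       stagingPool.allocateFrame(sizeof(MyUniforms), StagingGpuMemoryType::kUniform, token));
//   unis->m_someValue = 42.0f; // Write straight into the persistently mapped staging buffer
//   // Bind token.m_buffer at token.m_offset with token.m_range for this draw
//
//   StagingGpuMemoryToken tryToken;
//   void* mem = stagingPool.tryAllocateFrame(hugeSize, StagingGpuMemoryType::kStorage, tryToken);
//   if(mem == nullptr)
//   {
//       // The allocation didn't fit in this frame's ring buffer; fall back or split the work
//   }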

void StagingGpuMemoryPool::endFrame()
{
	for(StagingGpuMemoryType usage : EnumIterable<StagingGpuMemoryType>())
	{
		PerFrameBuffer& buff = m_perFrameBuffers[usage];

		if(buff.m_mappedMem)
		{
			// Increase the counters
			switch(usage)
			{
			case StagingGpuMemoryType::kUniform:
				ANKI_TRACE_INC_COUNTER(STAGING_UNIFORMS_SIZE, buff.m_alloc.getUnallocatedMemorySize());
				break;
			case StagingGpuMemoryType::kStorage:
				ANKI_TRACE_INC_COUNTER(STAGING_STORAGE_SIZE, buff.m_alloc.getUnallocatedMemorySize());
				break;
			default:
				break;
			}

			buff.m_alloc.endFrame();
		}
	}
}
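
// Illustrative sketch (not part of the original file): where endFrame() could sit in a frame loop. The
// names renderFrame and grManager are assumptions; the point is that endFrame() retires this frame's
// ring-buffer allocations so the space can be reused on a later frame.
//
//   while(running)
//   {
//       renderFrame(stagingPool); // allocateFrame()/tryAllocateFrame() calls happen in here
//       grManager->swapBuffers();
//       stagingPool.endFrame(); // Recycle the per-frame staging memory
//   }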

} // end namespace anki