
Remove the allocator from GR

Panagiotis Christopoulos Charitos, 3 years ago
commit a177b64a63
60 changed files with 370 additions and 366 deletions
  1. AnKi/Gr/GrManager.h  +5 -2
  2. AnKi/Gr/GrObject.cpp  +1 -1
  3. AnKi/Gr/GrObject.h  +1 -1
  4. AnKi/Gr/Vulkan/AccelerationStructure.cpp  +2 -2
  5. AnKi/Gr/Vulkan/AccelerationStructureImpl.cpp  +3 -3
  6. AnKi/Gr/Vulkan/Buffer.cpp  +2 -2
  7. AnKi/Gr/Vulkan/BufferImpl.cpp  +4 -4
  8. AnKi/Gr/Vulkan/BufferImpl.h  +1 -0
  9. AnKi/Gr/Vulkan/CommandBuffer.cpp  +3 -3
 10. AnKi/Gr/Vulkan/CommandBufferFactory.cpp  +13 -14
 11. AnKi/Gr/Vulkan/CommandBufferFactory.h  +8 -8
 12. AnKi/Gr/Vulkan/CommandBufferFactory.inl.h  +4 -4
 13. AnKi/Gr/Vulkan/CommandBufferImpl.cpp  +15 -15
 14. AnKi/Gr/Vulkan/CommandBufferImpl.h  +1 -1
 15. AnKi/Gr/Vulkan/CommandBufferImpl.inl.h  +11 -11
 16. AnKi/Gr/Vulkan/DeferredBarrierFactory.h  +5 -5
 17. AnKi/Gr/Vulkan/DeferredBarrierFactory.inl.h  +3 -3
 18. AnKi/Gr/Vulkan/DescriptorSet.cpp  +59 -62
 19. AnKi/Gr/Vulkan/DescriptorSet.h  +6 -6
 20. AnKi/Gr/Vulkan/Fence.cpp  +1 -1
 21. AnKi/Gr/Vulkan/FenceFactory.cpp  +5 -5
 22. AnKi/Gr/Vulkan/FenceFactory.h  +5 -5
 23. AnKi/Gr/Vulkan/FenceFactory.inl.h  +2 -2
 24. AnKi/Gr/Vulkan/FrameGarbageCollector.cpp  +8 -8
 25. AnKi/Gr/Vulkan/Framebuffer.cpp  +2 -2
 26. AnKi/Gr/Vulkan/FramebufferImpl.cpp  +2 -2
 27. AnKi/Gr/Vulkan/GpuMemoryManager.cpp  +20 -20
 28. AnKi/Gr/Vulkan/GpuMemoryManager.h  +3 -3
 29. AnKi/Gr/Vulkan/GrManager.cpp  +11 -9
 30. AnKi/Gr/Vulkan/GrManagerImpl.cpp  +30 -31
 31. AnKi/Gr/Vulkan/GrManagerImpl.h  +1 -0
 32. AnKi/Gr/Vulkan/GrUpscaler.cpp  +2 -2
 33. AnKi/Gr/Vulkan/MicroObjectRecycler.h  +5 -5
 34. AnKi/Gr/Vulkan/MicroObjectRecycler.inl.h  +11 -11
 35. AnKi/Gr/Vulkan/OcclusionQuery.cpp  +2 -2
 36. AnKi/Gr/Vulkan/Pipeline.cpp  +2 -2
 37. AnKi/Gr/Vulkan/Pipeline.h  +4 -3
 38. AnKi/Gr/Vulkan/PipelineCache.cpp  +8 -8
 39. AnKi/Gr/Vulkan/PipelineCache.h  +3 -3
 40. AnKi/Gr/Vulkan/PipelineLayout.cpp  +2 -2
 41. AnKi/Gr/Vulkan/PipelineLayout.h  +4 -3
 42. AnKi/Gr/Vulkan/QueryFactory.cpp  +5 -5
 43. AnKi/Gr/Vulkan/QueryFactory.h  +6 -5
 44. AnKi/Gr/Vulkan/Sampler.cpp  +2 -2
 45. AnKi/Gr/Vulkan/SamplerFactory.cpp  +7 -7
 46. AnKi/Gr/Vulkan/SamplerFactory.h  +1 -2
 47. AnKi/Gr/Vulkan/SemaphoreFactory.h  +7 -8
 48. AnKi/Gr/Vulkan/SemaphoreFactory.inl.h  +3 -3
 49. AnKi/Gr/Vulkan/Shader.cpp  +2 -2
 50. AnKi/Gr/Vulkan/ShaderImpl.cpp  +8 -8
 51. AnKi/Gr/Vulkan/ShaderProgram.cpp  +2 -2
 52. AnKi/Gr/Vulkan/ShaderProgramImpl.cpp  +18 -18
 53. AnKi/Gr/Vulkan/SwapchainFactory.cpp  +8 -8
 54. AnKi/Gr/Vulkan/SwapchainFactory.h  +1 -1
 55. AnKi/Gr/Vulkan/Texture.cpp  +2 -2
 56. AnKi/Gr/Vulkan/TextureImpl.cpp  +7 -7
 57. AnKi/Gr/Vulkan/TextureView.cpp  +2 -2
 58. AnKi/Gr/Vulkan/TimestampQuery.cpp  +2 -2
 59. AnKi/Util/ClassAllocatorBuilder.h  +2 -2
 60. AnKi/Util/ClassAllocatorBuilder.inl.h  +5 -4

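The pattern repeated across most of these 60 files: the copyable GrAllocator<U8> handle is dropped, and objects are created and destroyed through free functions that take a HeapMemoryPool directly. Below is a minimal sketch of that free-function style with a hypothetical stand-in pool; the real HeapMemoryPool takes user allocation callbacks, honors alignment, and is thread-safe, and the real function signatures may differ.

    #include <cstdlib>
    #include <new>
    #include <utility>

    // Hypothetical stand-in so the sketch is self-contained.
    class HeapMemoryPool
    {
    public:
        void* allocate(std::size_t size, std::size_t alignment)
        {
            (void)alignment; // malloc's alignment is enough for this sketch
            return std::malloc(size);
        }

        void free(void* ptr)
        {
            std::free(ptr);
        }
    };

    // The free-function style the commit switches to: construct into pool
    // memory with placement-new, destroy explicitly before freeing.
    template<typename T, typename... TArgs>
    T* newInstance(HeapMemoryPool& pool, TArgs&&... args)
    {
        void* mem = pool.allocate(sizeof(T), alignof(T));
        return ::new(mem) T(std::forward<TArgs>(args)...);
    }

    template<typename T>
    void deleteInstance(HeapMemoryPool& pool, T* obj)
    {
        if(obj != nullptr)
        {
            obj->~T(); // explicit destructor call; memory goes back to the pool
            pool.free(obj);
        }
    }
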
AnKi/Gr/GrManager.h  +5 -2

@@ -89,7 +89,7 @@ public:
 
 	GrManagerStats getStats() const;
 
-	ANKI_INTERNAL HeapMemoryPool& getMemoryPool()
+	ANKI_INTERNAL HeapMemoryPool& getMemoryPool() const
 	{
 		return m_pool;
 	}
@@ -115,7 +115,10 @@ public:
 	}
 
 protected:
-	HeapMemoryPool m_pool; ///< Keep it first to get deleted last
+	/// Keep it first to get deleted last. It's mutable because its methods are thread-safe and we want to use it in
+	/// const methods.
+	mutable HeapMemoryPool m_pool;
+
 	ConfigSet* m_config = nullptr;
 	String m_cacheDir;
 	Atomic<U64> m_uuidIndex = {1};

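The mutable on m_pool is what lets the const getMemoryPool() above compile while still returning a non-const reference. A minimal sketch of the idiom, with names invented for illustration; it is sound here only because, as the comment in the hunk says, the pool's methods are thread-safe:

    class Manager
    {
    public:
        // A const method may hand out a non-const reference to a mutable member.
        HeapMemoryPool& getMemoryPool() const
        {
            return m_pool;
        }

    private:
        mutable HeapMemoryPool m_pool; // mutable: usable even through const paths
    };
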
AnKi/Gr/GrObject.cpp  +1 -1

@@ -31,7 +31,7 @@ GrObject::~GrObject()
 	}
 }
 
-HeapMemoryPool& GrObject::getMemoryPool()
+HeapMemoryPool& GrObject::getMemoryPool() const
 {
 	return m_manager->getMemoryPool();
 }

AnKi/Gr/GrObject.h  +1 -1

@@ -62,7 +62,7 @@ public:
 		return *m_manager;
 	}
 
-	HeapMemoryPool& getMemoryPool();
+	HeapMemoryPool& getMemoryPool() const;
 
 	void retain() const
 	{

AnKi/Gr/Vulkan/AccelerationStructure.cpp  +2 -2

@@ -12,11 +12,11 @@ namespace anki {
 AccelerationStructure* AccelerationStructure::newInstance(GrManager* manager, const AccelerationStructureInitInfo& init)
 {
 	AccelerationStructureImpl* impl =
-		manager->getAllocator().newInstance<AccelerationStructureImpl>(manager, init.getName());
+		anki::newInstance<AccelerationStructureImpl>(manager->getMemoryPool(), manager, init.getName());
 	const Error err = impl->init(init);
 	if(err)
 	{
-		manager->getAllocator().deleteInstance(impl);
+		deleteInstance(manager->getMemoryPool(), impl);
 		impl = nullptr;
 	}
 	return impl;

AnKi/Gr/Vulkan/AccelerationStructureImpl.cpp  +3 -3

@@ -10,7 +10,7 @@ namespace anki {
 
 AccelerationStructureImpl::~AccelerationStructureImpl()
 {
-	m_topLevelInfo.m_blas.destroy(getAllocator());
+	m_topLevelInfo.m_blas.destroy(getMemoryPool());
 
 	if(m_handle)
 	{
@@ -90,7 +90,7 @@ Error AccelerationStructureImpl::init(const AccelerationStructureInitInfo& inf)
 	else
 	{
 		// Create the instances buffer
-		m_topLevelInfo.m_blas.resizeStorage(getAllocator(), inf.m_topLevel.m_instances.getSize());
+		m_topLevelInfo.m_blas.resizeStorage(getMemoryPool(), inf.m_topLevel.m_instances.getSize());
 
 		BufferInitInfo buffInit("AS instances");
 		buffInit.m_size = sizeof(VkAccelerationStructureInstanceKHR) * inf.m_topLevel.m_instances.getSize();
@@ -116,7 +116,7 @@ Error AccelerationStructureImpl::init(const AccelerationStructureInitInfo& inf)
 			ANKI_ASSERT(outInst.accelerationStructureReference != 0);
 
 			// Hold the reference
-			m_topLevelInfo.m_blas.emplaceBack(getAllocator(), inf.m_topLevel.m_instances[i].m_bottomLevel);
+			m_topLevelInfo.m_blas.emplaceBack(getMemoryPool(), inf.m_topLevel.m_instances[i].m_bottomLevel);
 		}
 
 		m_topLevelInfo.m_instancesBuffer->flush(0, kMaxPtrSize);

AnKi/Gr/Vulkan/Buffer.cpp  +2 -2

@@ -11,11 +11,11 @@ namespace anki {
 
 Buffer* Buffer::newInstance(GrManager* manager, const BufferInitInfo& init)
 {
-	BufferImpl* impl = manager->getAllocator().newInstance<BufferImpl>(manager, init.getName());
+	BufferImpl* impl = anki::newInstance<BufferImpl>(manager->getMemoryPool(), manager, init.getName());
 	const Error err = impl->init(init);
 	if(err)
 	{
-		manager->getAllocator().deleteInstance(impl);
+		deleteInstance(manager->getMemoryPool(), impl);
 		impl = nullptr;
 	}
 	return impl;

AnKi/Gr/Vulkan/BufferImpl.cpp  +4 -4

@@ -12,13 +12,13 @@ BufferImpl::~BufferImpl()
 {
 	ANKI_ASSERT(!m_mapped);
 
-	BufferGarbage* garbage = getAllocator().newInstance<BufferGarbage>();
+	BufferGarbage* garbage = anki::newInstance<BufferGarbage>(getMemoryPool());
 	garbage->m_bufferHandle = m_handle;
 	garbage->m_memoryHandle = m_memHandle;
 
 	if(m_views.getSize())
 	{
-		garbage->m_viewHandles.create(getAllocator(), U32(m_views.getSize()));
+		garbage->m_viewHandles.create(getMemoryPool(), U32(m_views.getSize()));
 
 		U32 count = 0;
 		for(auto it : m_views)
@@ -42,7 +42,7 @@ BufferImpl::~BufferImpl()
 	}
 #endif
 
-	m_views.destroy(getAllocator());
+	m_views.destroy(getMemoryPool());
 }
 
 Error BufferImpl::init(const BufferInitInfo& inf)
@@ -447,7 +447,7 @@ VkBufferView BufferImpl::getOrCreateBufferView(Format fmt, PtrSize offset, PtrSi
 	VkBufferView view;
 	ANKI_VK_CHECKF(vkCreateBufferView(getDevice(), &viewCreateInfo, nullptr, &view));
 
-	m_views.emplace(getAllocator(), hash, view);
+	m_views.emplace(getMemoryPool(), hash, view);
 
 	return view;
 }

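Calls like m_views.destroy(getMemoryPool()) look odd next to STL: AnKi's containers deliberately store no allocator, so every operation that allocates or frees receives the pool as an argument and the container itself stays small. A toy sketch of that calling convention, reusing the stand-in HeapMemoryPool from the first sketch (the real DynamicArray/HashMap interfaces are richer):

    // Toy array carrying no allocator; the pool arrives per call.
    template<typename T>
    class PoolArray
    {
    public:
        void create(HeapMemoryPool& pool, std::size_t size)
        {
            m_data = static_cast<T*>(pool.allocate(sizeof(T) * size, alignof(T)));
            for(std::size_t i = 0; i < size; ++i)
            {
                ::new(&m_data[i]) T(); // default-construct each element in place
            }
            m_size = size;
        }

        void destroy(HeapMemoryPool& pool)
        {
            for(std::size_t i = 0; i < m_size; ++i)
            {
                m_data[i].~T();
            }
            pool.free(m_data);
            m_data = nullptr;
            m_size = 0;
        }

    private:
        T* m_data = nullptr;
        std::size_t m_size = 0;
    };
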
AnKi/Gr/Vulkan/BufferImpl.h  +1 -0

@@ -90,6 +90,7 @@ public:
 	}
 
 	/// Only for texture buffers.
+	/// @note It's thread-safe
 	VkBufferView getOrCreateBufferView(Format fmt, PtrSize offset, PtrSize range) const;
 
 private:

AnKi/Gr/Vulkan/CommandBuffer.cpp  +3 -3

@@ -14,11 +14,11 @@ namespace anki {
 CommandBuffer* CommandBuffer::newInstance(GrManager* manager, const CommandBufferInitInfo& init)
 {
 	ANKI_TRACE_SCOPED_EVENT(VK_NEW_CCOMMAND_BUFFER);
-	CommandBufferImpl* impl = manager->getAllocator().newInstance<CommandBufferImpl>(manager, init.getName());
+	CommandBufferImpl* impl = anki::newInstance<CommandBufferImpl>(manager->getMemoryPool(), manager, init.getName());
 	const Error err = impl->init(init);
 	if(err)
 	{
-		manager->getAllocator().deleteInstance(impl);
+		deleteInstance(manager->getMemoryPool(), impl);
 		impl = nullptr;
 	}
 	return impl;
@@ -46,7 +46,7 @@ void CommandBuffer::flush(ConstWeakArray<FencePtr> waitFences, FencePtr* signalF
 		if(signalFence)
 		{
 			FenceImpl* fenceImpl =
-				self.getGrManagerImpl().getAllocator().newInstance<FenceImpl>(&getManager(), "SignalFence");
+				anki::newInstance<FenceImpl>(self.getGrManagerImpl().getMemoryPool(), &getManager(), "SignalFence");
 			fenceImpl->m_semaphore = signalSemaphore;
 			signalFence->reset(fenceImpl);
 		}

AnKi/Gr/Vulkan/CommandBufferFactory.cpp  +13 -14

@@ -46,10 +46,10 @@ void MicroCommandBuffer::reset()
 
 	for(GrObjectType type : EnumIterable<GrObjectType>())
 	{
-		m_objectRefs[type].destroy(m_fastAlloc);
+		m_objectRefs[type].destroy(m_fastPool);
 	}
 
-	m_fastAlloc.getMemoryPool().reset();
+	m_fastPool.reset();
 }
 
 Error CommandBufferThreadAllocator::init()
@@ -77,7 +77,7 @@ Error CommandBufferThreadAllocator::init()
 			{
 				MicroObjectRecycler<MicroCommandBuffer>& recycler = m_recyclers[secondLevel][smallBatch][queue];
 
-				recycler.init(m_factory->m_alloc);
+				recycler.init(m_factory->m_pool);
 			}
 		}
 	}
@@ -134,11 +134,10 @@ Error CommandBufferThreadAllocator::newCommandBuffer(CommandBufferFlag cmdbFlags
 		VkCommandBuffer cmdb;
 		ANKI_VK_CHECK(vkAllocateCommandBuffers(m_factory->m_dev, &ci, &cmdb));
 
-		MicroCommandBuffer* newCmdb = getAllocator().newInstance<MicroCommandBuffer>(this);
+		MicroCommandBuffer* newCmdb = newInstance<MicroCommandBuffer>(getMemoryPool(), this);
 
-		newCmdb->m_fastAlloc =
-			StackAllocator<U8>(m_factory->m_alloc.getMemoryPool().getAllocationCallback(),
-							   m_factory->m_alloc.getMemoryPool().getAllocationCallbackUserData(), 256_KB, 2.0f);
+		newCmdb->m_fastPool.init(m_factory->m_pool->getAllocationCallback(),
+								 m_factory->m_pool->getAllocationCallbackUserData(), 256_KB, 2.0f);
 
 		newCmdb->m_handle = cmdb;
 		newCmdb->m_flags = cmdbFlags;
@@ -172,11 +171,11 @@ void CommandBufferThreadAllocator::deleteCommandBuffer(MicroCommandBuffer* ptr)
 	m_recyclers[secondLevel][smallBatch][ptr->m_queue].recycle(ptr);
 }
 
-Error CommandBufferFactory::init(GrAllocator<U8> alloc, VkDevice dev, const VulkanQueueFamilies& queueFamilies)
+Error CommandBufferFactory::init(HeapMemoryPool* pool, VkDevice dev, const VulkanQueueFamilies& queueFamilies)
 {
-	ANKI_ASSERT(dev);
+	ANKI_ASSERT(pool && dev);
 
-	m_alloc = alloc;
+	m_pool = pool;
 	m_dev = dev;
 	m_queueFamilies = queueFamilies;
 	return Error::kNone;
@@ -203,10 +202,10 @@ void CommandBufferFactory::destroy()
 	for(CommandBufferThreadAllocator* talloc : m_threadAllocs)
 	{
 		talloc->destroy();
-		m_alloc.deleteInstance(talloc);
+		deleteInstance(*m_pool, talloc);
 	}
 
-	m_threadAllocs.destroy(m_alloc);
+	m_threadAllocs.destroy(*m_pool);
 }
 
 Error CommandBufferFactory::newCommandBuffer(ThreadId tid, CommandBufferFlag cmdbFlags, MicroCommandBufferPtr& ptr)
@@ -246,9 +245,9 @@ Error CommandBufferFactory::newCommandBuffer(ThreadId tid, CommandBufferFlag cmd
 
 			if(alloc == nullptr)
 			{
-				alloc = m_alloc.newInstance<CommandBufferThreadAllocator>(this, tid);
+				alloc = newInstance<CommandBufferThreadAllocator>(*m_pool, this, tid);
 
-				m_threadAllocs.resize(m_alloc, m_threadAllocs.getSize() + 1);
+				m_threadAllocs.resize(*m_pool, m_threadAllocs.getSize() + 1);
 				m_threadAllocs[m_threadAllocs.getSize() - 1] = alloc;
 
 				// Sort for fast find

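The per-command-buffer m_fastPool introduced above is a stack (linear) pool: allocations bump an offset, and reset() reclaims everything at once, which is why MicroCommandBuffer::reset() can drop all per-recording allocations cheaply. A toy fixed-capacity sketch of the idea; the real StackMemoryPool grows in chunks (by the 2.0 factor passed to init()) and honors alignment:

    #include <cstddef>
    #include <cstdlib>

    class StackPool
    {
    public:
        explicit StackPool(std::size_t capacity)
            : m_base(static_cast<unsigned char*>(std::malloc(capacity)))
            , m_capacity(capacity)
        {
        }

        ~StackPool()
        {
            std::free(m_base);
        }

        void* allocate(std::size_t size)
        {
            if(m_offset + size > m_capacity)
            {
                return nullptr; // the real pool would chain a larger chunk
            }
            void* out = m_base + m_offset;
            m_offset += size; // bump allocation, no per-object header
            return out;
        }

        void reset()
        {
            m_offset = 0; // frees everything in O(1)
        }

    private:
        unsigned char* m_base;
        std::size_t m_capacity;
        std::size_t m_offset = 0;
    };
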
AnKi/Gr/Vulkan/CommandBufferFactory.h  +8 -8

@@ -60,7 +60,7 @@ public:
 		return m_fence;
 	}
 
-	GrAllocator<U8>& getAllocator();
+	HeapMemoryPool& getMemoryPool();
 
 	/// Interface method.
 	void onFenceDone()
@@ -68,9 +68,9 @@ public:
 		reset();
 	}
 
-	StackAllocator<U8>& getFastAllocator()
+	StackMemoryPool& getFastMemoryPool()
 	{
-		return m_fastAlloc;
+		return m_fastPool;
 	}
 
 	VkCommandBuffer getHandle() const
@@ -105,7 +105,7 @@ public:
 private:
 	static constexpr U32 kMaxRefObjectSearch = 16;
 
-	StackAllocator<U8> m_fastAlloc;
+	StackMemoryPool m_fastPool;
 	VkCommandBuffer m_handle = {};
 
 	MicroFencePtr m_fence;
@@ -137,7 +137,7 @@ private:
 		}
 
 		// Not found in the temp cache, add it
-		arr.emplaceBack(m_fastAlloc, grobj);
+		arr.emplaceBack(m_fastPool, grobj);
 	}
 };
 
@@ -179,7 +179,7 @@ public:
 
 	void destroy();
 
-	GrAllocator<U8>& getAllocator();
+	HeapMemoryPool& getMemoryPool();
 
 	/// Request a new command buffer.
 	Error newCommandBuffer(CommandBufferFlag cmdbFlags, MicroCommandBufferPtr& ptr);
@@ -214,7 +214,7 @@ public:
 
 	CommandBufferFactory& operator=(const CommandBufferFactory&) = delete; // Non-copyable
 
-	Error init(GrAllocator<U8> alloc, VkDevice dev, const VulkanQueueFamilies& queueFamilies);
+	Error init(HeapMemoryPool* pool, VkDevice dev, const VulkanQueueFamilies& queueFamilies);
 
 	void destroy();
 
@@ -228,7 +228,7 @@ public:
 	}
 
 private:
-	GrAllocator<U8> m_alloc;
+	HeapMemoryPool* m_pool = nullptr;
 	VkDevice m_dev = VK_NULL_HANDLE;
 	VulkanQueueFamilies m_queueFamilies;
 

AnKi/Gr/Vulkan/CommandBufferFactory.inl.h  +4 -4

@@ -7,9 +7,9 @@
 
 namespace anki {
 
-inline GrAllocator<U8>& MicroCommandBuffer::getAllocator()
+inline HeapMemoryPool& MicroCommandBuffer::getMemoryPool()
 {
-	return m_threadAlloc->getAllocator();
+	return m_threadAlloc->getMemoryPool();
 }
 
 inline void MicroCommandBufferPtrDeleter::operator()(MicroCommandBuffer* ptr)
@@ -18,9 +18,9 @@ inline void MicroCommandBufferPtrDeleter::operator()(MicroCommandBuffer* ptr)
 	ptr->m_threadAlloc->deleteCommandBuffer(ptr);
 }
 
-inline GrAllocator<U8>& CommandBufferThreadAllocator::getAllocator()
+inline HeapMemoryPool& CommandBufferThreadAllocator::getMemoryPool()
 {
-	return m_factory->m_alloc;
+	return *m_factory->m_pool;
 }
 
 } // end namespace anki

AnKi/Gr/Vulkan/CommandBufferImpl.cpp  +15 -15

@@ -34,12 +34,12 @@ CommandBufferImpl::~CommandBufferImpl()
 		ANKI_VK_LOGW("Command buffer was not flushed");
 	}
 
-	m_imgBarriers.destroy(m_alloc);
-	m_buffBarriers.destroy(m_alloc);
-	m_memBarriers.destroy(m_alloc);
-	m_queryResetAtoms.destroy(m_alloc);
-	m_writeQueryAtoms.destroy(m_alloc);
-	m_secondLevelAtoms.destroy(m_alloc);
+	m_imgBarriers.destroy(*m_pool);
+	m_buffBarriers.destroy(*m_pool);
+	m_memBarriers.destroy(*m_pool);
+	m_queryResetAtoms.destroy(*m_pool);
+	m_writeQueryAtoms.destroy(*m_pool);
+	m_secondLevelAtoms.destroy(*m_pool);
 }
 
 Error CommandBufferImpl::init(const CommandBufferInitInfo& init)
@@ -50,7 +50,7 @@ Error CommandBufferImpl::init(const CommandBufferInitInfo& init)
 	ANKI_CHECK(getGrManagerImpl().getCommandBufferFactory().newCommandBuffer(m_tid, m_flags, m_microCmdb));
 	m_handle = m_microCmdb->getHandle();
 
-	m_alloc = m_microCmdb->getFastAllocator();
+	m_pool = &m_microCmdb->getFastMemoryPool();
 
 	// Store some of the init info for later
 	if(!!(m_flags & CommandBufferFlag::kSecondLevel))
@@ -64,7 +64,7 @@ Error CommandBufferImpl::init(const CommandBufferInitInfo& init)
 
 	for(DescriptorSetState& state : m_dsetState)
 	{
-		state.init(m_alloc);
+		state.init(m_pool);
 	}
 
 	return Error::kNone;
@@ -461,11 +461,11 @@ void CommandBufferImpl::flushBarriers()
 	// Batch
 	//
 
-	DynamicArrayRaii<VkImageMemoryBarrier> finalImgBarriers(m_alloc);
+	DynamicArrayRaii<VkImageMemoryBarrier> finalImgBarriers(m_pool);
 	U32 finalImgBarrierCount = 0;
 	if(m_imgBarrierCount > 0)
 	{
-		DynamicArrayRaii<VkImageMemoryBarrier> squashedBarriers(m_alloc);
+		DynamicArrayRaii<VkImageMemoryBarrier> squashedBarriers(m_pool);
 		U32 squashedBarrierCount = 0;
 
 		squashedBarriers.create(m_imgBarrierCount);
@@ -590,7 +590,7 @@ void CommandBufferImpl::flushQueryResets()
 
 	vkCmdResetQueryPool(m_handle, pool, firstQuery, queryCount);
 
-	m_queryResetAtoms.destroy(m_alloc);
+	m_queryResetAtoms.destroy(*m_pool);
 }
 
 void CommandBufferImpl::flushWriteQueryResults()
@@ -654,7 +654,7 @@ void CommandBufferImpl::flushWriteQueryResults()
 	vkCmdCopyQueryPoolResults(m_handle, pool, firstQuery, queryCount, buff, offset, sizeof(U32),
							  VK_QUERY_RESULT_PARTIAL_BIT);
 
-	m_writeQueryAtoms.resize(m_alloc, 0);
+	m_writeQueryAtoms.resize(*m_pool, 0);
 }
 
 void CommandBufferImpl::copyBufferToTextureViewInternal(const BufferPtr& buff, PtrSize offset,
@@ -894,9 +894,9 @@ void CommandBufferImpl::setPipelineBarrierInternal(
 {
 	commandCommon();
 
-	DynamicArrayRaii<VkImageMemoryBarrier> imageBarriers(m_alloc);
-	DynamicArrayRaii<VkBufferMemoryBarrier> bufferBarriers(m_alloc);
-	DynamicArrayRaii<VkMemoryBarrier> genericBarriers(m_alloc);
+	DynamicArrayRaii<VkImageMemoryBarrier> imageBarriers(m_pool);
+	DynamicArrayRaii<VkBufferMemoryBarrier> bufferBarriers(m_pool);
+	DynamicArrayRaii<VkMemoryBarrier> genericBarriers(m_pool);
 	VkPipelineStageFlags srcStageMask = 0;
 	VkPipelineStageFlags dstStageMask = 0;
 

AnKi/Gr/Vulkan/CommandBufferImpl.h  +1 -1

@@ -415,7 +415,7 @@ public:
 	void setLineWidthInternal(F32 width);
 
 private:
-	StackAllocator<U8> m_alloc;
+	StackMemoryPool* m_pool = nullptr;
 
 	MicroCommandBufferPtr m_microCmdb;
 	VkCommandBuffer m_handle = VK_NULL_HANDLE;

AnKi/Gr/Vulkan/CommandBufferImpl.inl.h  +11 -11

@@ -109,7 +109,7 @@ inline void CommandBufferImpl::setImageBarrier(VkPipelineStageFlags srcStage, Vk
 
 	if(m_imgBarriers.getSize() <= m_imgBarrierCount)
 	{
-		m_imgBarriers.resize(m_alloc, max<U32>(2, m_imgBarrierCount * 2));
+		m_imgBarriers.resize(*m_pool, max<U32>(2, m_imgBarrierCount * 2));
 	}
 
 	m_imgBarriers[m_imgBarrierCount++] = inf;
@@ -131,7 +131,7 @@ inline void CommandBufferImpl::setTextureBarrierRangeInternal(const TexturePtr&
 	ANKI_ASSERT(impl.usageValid(nextUsage));
 	ANKI_ASSERT(((nextUsage & TextureUsageBit::kGenerateMipmaps) == TextureUsageBit::kGenerateMipmaps
 				 || (nextUsage & TextureUsageBit::kGenerateMipmaps) == TextureUsageBit::kNone)
-				&& "GENERATE_MIPMAPS should be alone");
+				&& "kGenerateMipmaps should be alone");
 
 	VkPipelineStageFlags srcStage;
 	VkAccessFlags srcAccess;
@@ -227,7 +227,7 @@ inline void CommandBufferImpl::setBufferBarrierInternal(VkPipelineStageFlags src
 
 	if(m_buffBarriers.getSize() <= m_buffBarrierCount)
 	{
-		m_buffBarriers.resize(m_alloc, max<U32>(2, m_buffBarrierCount * 2));
+		m_buffBarriers.resize(*m_pool, max<U32>(2, m_buffBarrierCount * 2));
 	}
 
 	m_buffBarriers[m_buffBarrierCount++] = b;
@@ -279,7 +279,7 @@ CommandBufferImpl::setAccelerationStructureBarrierInternal([[maybe_unused]] cons
 
 	if(m_memBarriers.getSize() <= m_memBarrierCount)
 	{
-		m_memBarriers.resize(m_alloc, max<U32>(2, m_memBarrierCount * 2));
+		m_memBarriers.resize(*m_pool, max<U32>(2, m_memBarrierCount * 2));
 	}
 
 	m_memBarriers[m_memBarrierCount++] = memBarrier;
@@ -358,7 +358,7 @@ inline void CommandBufferImpl::dispatchComputeInternal(U32 groupCountX, U32 grou
 			Bool dirty;
 			Array<PtrSize, kMaxBindingsPerDescriptorSet> dynamicOffsetsPtrSize;
 			U32 dynamicOffsetCount;
-			if(getGrManagerImpl().getDescriptorSetFactory().newDescriptorSet(m_alloc, m_dsetState[i], dset, dirty,
+			if(getGrManagerImpl().getDescriptorSetFactory().newDescriptorSet(*m_pool, m_dsetState[i], dset, dirty,
																			 dynamicOffsetsPtrSize, dynamicOffsetCount))
 			{
 				ANKI_VK_LOGF("Cannot recover");
@@ -422,7 +422,7 @@ inline void CommandBufferImpl::traceRaysInternal(const BufferPtr& sbtBuffer, Ptr
 			Bool dirty;
 			Array<PtrSize, kMaxBindingsPerDescriptorSet> dynamicOffsetsPtrSize;
 			U32 dynamicOffsetCount;
-			if(getGrManagerImpl().getDescriptorSetFactory().newDescriptorSet(m_alloc, m_dsetState[i], dset, dirty,
+			if(getGrManagerImpl().getDescriptorSetFactory().newDescriptorSet(*m_pool, m_dsetState[i], dset, dirty,
																			 dynamicOffsetsPtrSize, dynamicOffsetCount))
 			{
 				ANKI_VK_LOGF("Cannot recover");
@@ -488,7 +488,7 @@ inline void CommandBufferImpl::resetOcclusionQueryInternal(const OcclusionQueryP
 	QueryResetAtom atom;
 	atom.m_pool = handle;
 	atom.m_queryIdx = idx;
-	m_queryResetAtoms.emplaceBack(m_alloc, atom);
+	m_queryResetAtoms.emplaceBack(*m_pool, atom);
 #else
 	ANKI_CMD(vkCmdResetQueryPool(m_handle, handle, idx, 1), kAnyOtherCommand);
 #endif
@@ -536,7 +536,7 @@ inline void CommandBufferImpl::resetTimestampQueryInternal(const TimestampQueryP
 	QueryResetAtom atom;
 	atom.m_pool = handle;
 	atom.m_queryIdx = idx;
-	m_queryResetAtoms.emplaceBack(m_alloc, atom);
+	m_queryResetAtoms.emplaceBack(*m_pool, atom);
 #else
 	ANKI_CMD(vkCmdResetQueryPool(m_handle, handle, idx, 1), kAnyOtherCommand);
 #endif
@@ -603,7 +603,7 @@ inline void CommandBufferImpl::pushSecondLevelCommandBufferInternal(const Comman
 
 	if(m_secondLevelAtoms.getSize() <= m_secondLevelAtomCount)
 	{
-		m_secondLevelAtoms.resize(m_alloc, max<U32>(8, m_secondLevelAtomCount * 2));
+		m_secondLevelAtoms.resize(*m_pool, max<U32>(8, m_secondLevelAtomCount * 2));
 	}
 
 	m_secondLevelAtoms[m_secondLevelAtomCount++] = static_cast<const CommandBufferImpl&>(*cmdb).m_handle;
@@ -654,7 +654,7 @@ inline void CommandBufferImpl::drawcallCommon()
 			Bool dirty;
 			Array<PtrSize, kMaxBindingsPerDescriptorSet> dynamicOffsetsPtrSize;
 			U32 dynamicOffsetCount;
-			if(getGrManagerImpl().getDescriptorSetFactory().newDescriptorSet(m_alloc, m_dsetState[i], dset, dirty,
+			if(getGrManagerImpl().getDescriptorSetFactory().newDescriptorSet(*m_pool, m_dsetState[i], dset, dirty,
																			 dynamicOffsetsPtrSize, dynamicOffsetCount))
 			{
 				ANKI_VK_LOGF("Cannot recover");
@@ -837,7 +837,7 @@ inline void CommandBufferImpl::writeOcclusionQueryResultToBufferInternal(const O
 	atom.m_buffer = impl.getHandle();
 	atom.m_offset = offset;
 
-	m_writeQueryAtoms.emplaceBack(m_alloc, atom);
+	m_writeQueryAtoms.emplaceBack(*m_pool, atom);
 #else
 	ANKI_CMD(vkCmdCopyQueryPoolResults(m_handle, q.m_handle.m_pool, q.m_handle.m_queryIndex, 1, impl.getHandle(),
									   offset, sizeof(U32), VK_QUERY_RESULT_PARTIAL_BIT),

AnKi/Gr/Vulkan/DeferredBarrierFactory.h  +5 -5

@@ -48,7 +48,7 @@ public:
 		return m_refcount.load();
 	}
 
-	GrAllocator<U8> getAllocator() const;
+	HeapMemoryPool& getMemoryPool();
 
 	void setFence(MicroFencePtr& f)
 	{
@@ -98,10 +98,10 @@ public:
 
 	DeferredBarrierFactory& operator=(const DeferredBarrierFactory&) = delete; // Non-copyable
 
-	void init(GrAllocator<U8> alloc, VkDevice dev)
+	void init(HeapMemoryPool* pool, VkDevice dev)
 	{
-		ANKI_ASSERT(dev);
-		m_alloc = alloc;
+		ANKI_ASSERT(pool && dev);
+		m_pool = pool;
 		m_dev = dev;
 	}
 
@@ -113,7 +113,7 @@ public:
 	MicroDeferredBarrierPtr newInstance();
 
 private:
-	GrAllocator<U8> m_alloc;
+	HeapMemoryPool* m_pool = nullptr;
 	VkDevice m_dev = VK_NULL_HANDLE;
 	MicroObjectRecycler<MicroDeferredBarrier> m_recycler;
 

AnKi/Gr/Vulkan/DeferredBarrierFactory.inl.h  +3 -3

@@ -25,9 +25,9 @@ inline MicroDeferredBarrier::~MicroDeferredBarrier()
 	}
 }
 
-inline GrAllocator<U8> MicroDeferredBarrier::getAllocator() const
+inline HeapMemoryPool& MicroDeferredBarrier::getMemoryPool()
 {
-	return m_factory->m_alloc;
+	return *m_factory->m_pool;
 }
 
 inline void MicroDeferredBarrierPtrDeleter::operator()(MicroDeferredBarrier* s)
@@ -43,7 +43,7 @@ inline MicroDeferredBarrierPtr DeferredBarrierFactory::newInstance()
 	if(out == nullptr)
 	{
 		// Create a new one
-		out = m_alloc.newInstance<MicroDeferredBarrier>(this);
+		out = anki::newInstance<MicroDeferredBarrier>(*m_pool, this);
 	}
 
 	return MicroDeferredBarrierPtr(out);

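DeferredBarrierFactory::newInstance above is one instance of the recycler idiom these factories share (fences, command buffers, barriers): ask the recycler for a retired object first, and only allocate from the heap pool on a miss. A toy single-threaded sketch; the real MicroObjectRecycler does more, e.g. locking and fence checks before reuse:

    #include <vector>

    template<typename T>
    class Recycler
    {
    public:
        // Returns a previously retired object, or nullptr on a miss.
        T* findToReuse()
        {
            if(m_objects.empty())
            {
                return nullptr;
            }
            T* out = m_objects.back();
            m_objects.pop_back();
            return out;
        }

        // The smart-pointer deleter calls this instead of destroying the object.
        void recycle(T* obj)
        {
            m_objects.push_back(obj);
        }

    private:
        std::vector<T*> m_objects;
    };
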
AnKi/Gr/Vulkan/DescriptorSet.cpp  +59 -62

@@ -21,7 +21,7 @@ class DescriptorSetFactory::BindlessDescriptorSet
 public:
 	~BindlessDescriptorSet();
 
-	Error init(const GrAllocator<U8>& alloc, VkDevice dev, const U32 bindlessTextureCount, U32 bindlessImageCount);
+	Error init(HeapMemoryPool* pool, VkDevice dev, const U32 bindlessTextureCount, U32 bindlessImageCount);
 
 	/// Bind a sampled image.
 	/// @note It's thread-safe.
@@ -58,10 +58,10 @@ public:
 	}
 
 private:
-	GrAllocator<U8> m_alloc;
+	HeapMemoryPool* m_memPool = nullptr;
 	VkDevice m_dev = VK_NULL_HANDLE;
 	VkDescriptorSetLayout m_layout = VK_NULL_HANDLE;
-	VkDescriptorPool m_pool = VK_NULL_HANDLE;
+	VkDescriptorPool m_dsPool = VK_NULL_HANDLE;
 	VkDescriptorSet m_dset = VK_NULL_HANDLE;
 	Mutex m_mtx;
 
@@ -103,12 +103,12 @@ public:
 	Error createNewPool();
 
 	Error getOrCreateSet(U64 hash, const Array<AnyBindingExtended, kMaxBindingsPerDescriptorSet>& bindings,
-						 StackAllocator<U8>& tmpAlloc, const DS*& out)
+						 StackMemoryPool& tmpPool, const DS*& out)
 	{
 		out = tryFindSet(hash);
 		if(out == nullptr)
 		{
-			ANKI_CHECK(newSet(hash, bindings, tmpAlloc, out));
+			ANKI_CHECK(newSet(hash, bindings, tmpPool, out));
 		}
 
 		return Error::kNone;
@@ -126,9 +126,9 @@ private:
 
 	[[nodiscard]] const DS* tryFindSet(U64 hash);
 	Error newSet(U64 hash, const Array<AnyBindingExtended, kMaxBindingsPerDescriptorSet>& bindings,
-				 StackAllocator<U8>& tmpAlloc, const DS*& out);
+				 StackMemoryPool& tmpPool, const DS*& out);
 	void writeSet(const Array<AnyBindingExtended, kMaxBindingsPerDescriptorSet>& bindings, const DS& set,
-				  StackAllocator<U8>& tmpAlloc);
+				  StackMemoryPool& tmpPool);
 };
 
 class alignas(ANKI_CACHE_LINE_SIZE) DescriptorSetFactory::ThreadLocal
@@ -176,10 +176,10 @@ DescriptorSetFactory::BindlessDescriptorSet::~BindlessDescriptorSet()
 	ANKI_ASSERT(m_freeTexelBufferIndexCount == m_freeTexelBufferIndices.getSize()
				&& "Forgot to unbind some texel buffers");
 
-	if(m_pool)
+	if(m_dsPool)
 	{
-		vkDestroyDescriptorPool(m_dev, m_pool, nullptr);
-		m_pool = VK_NULL_HANDLE;
+		vkDestroyDescriptorPool(m_dev, m_dsPool, nullptr);
+		m_dsPool = VK_NULL_HANDLE;
 		m_dset = VK_NULL_HANDLE;
 	}
 
@@ -189,15 +189,15 @@ DescriptorSetFactory::BindlessDescriptorSet::~BindlessDescriptorSet()
 		m_layout = VK_NULL_HANDLE;
 	}
 
-	m_freeTexIndices.destroy(m_alloc);
-	m_freeTexelBufferIndices.destroy(m_alloc);
+	m_freeTexIndices.destroy(*m_memPool);
+	m_freeTexelBufferIndices.destroy(*m_memPool);
 }
 
-Error DescriptorSetFactory::BindlessDescriptorSet::init(const GrAllocator<U8>& alloc, VkDevice dev,
-														U32 bindlessTextureCount, U32 bindlessTextureBuffers)
+Error DescriptorSetFactory::BindlessDescriptorSet::init(HeapMemoryPool* pool, VkDevice dev, U32 bindlessTextureCount,
+														U32 bindlessTextureBuffers)
 {
 	ANKI_ASSERT(dev);
-	m_alloc = alloc;
+	m_memPool = pool;
 	m_dev = dev;
 
 	// Create the layout
@@ -248,14 +248,14 @@ Error DescriptorSetFactory::BindlessDescriptorSet::init(const GrAllocator<U8>& a
 		ci.pPoolSizes = &sizes[0];
 		ci.flags = VK_DESCRIPTOR_POOL_CREATE_UPDATE_AFTER_BIND_BIT_EXT;
 
-		ANKI_VK_CHECK(vkCreateDescriptorPool(m_dev, &ci, nullptr, &m_pool));
+		ANKI_VK_CHECK(vkCreateDescriptorPool(m_dev, &ci, nullptr, &m_dsPool));
 	}
 
 	// Create the descriptor set
 	{
 		VkDescriptorSetAllocateInfo ci = {};
 		ci.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO;
-		ci.descriptorPool = m_pool;
+		ci.descriptorPool = m_dsPool;
 		ci.descriptorSetCount = 1;
 		ci.pSetLayouts = &m_layout;
 
@@ -264,7 +264,7 @@ Error DescriptorSetFactory::BindlessDescriptorSet::init(const GrAllocator<U8>& a
 
 	// Init the free arrays
 	{
-		m_freeTexIndices.create(m_alloc, bindlessTextureCount);
+		m_freeTexIndices.create(*m_memPool, bindlessTextureCount);
 		m_freeTexIndexCount = U16(m_freeTexIndices.getSize());
 
 		for(U32 i = 0; i < m_freeTexIndices.getSize(); ++i)
@@ -272,7 +272,7 @@ Error DescriptorSetFactory::BindlessDescriptorSet::init(const GrAllocator<U8>& a
 			m_freeTexIndices[i] = U16(m_freeTexIndices.getSize() - i - 1);
 		}
 
-		m_freeTexelBufferIndices.create(m_alloc, bindlessTextureBuffers);
+		m_freeTexelBufferIndices.create(*m_memPool, bindlessTextureBuffers);
 		m_freeTexelBufferIndexCount = U16(m_freeTexelBufferIndices.getSize());
 
 		for(U32 i = 0; i < m_freeTexelBufferIndices.getSize(); ++i)
@@ -367,23 +367,23 @@ void DescriptorSetFactory::BindlessDescriptorSet::unbindCommon(U32 idx, DynamicA
 
 DescriptorSetFactory::DSAllocator::~DSAllocator()
 {
-	auto alloc = m_layoutEntry->m_factory->m_alloc;
+	HeapMemoryPool& pool = *m_layoutEntry->m_factory->m_pool;
 
 	while(!m_list.isEmpty())
 	{
 		DS* ds = &m_list.getFront();
 		m_list.popFront();
 
-		alloc.deleteInstance(ds);
+		deleteInstance(pool, ds);
 	}
 
 	for(VkDescriptorPool pool : m_pools)
 	{
 		vkDestroyDescriptorPool(m_layoutEntry->m_factory->m_dev, pool, nullptr);
 	}
-	m_pools.destroy(alloc);
+	m_pools.destroy(pool);
 
-	m_hashmap.destroy(alloc);
+	m_hashmap.destroy(pool);
 }
 
 Error DescriptorSetFactory::DSAllocator::init()
@@ -419,7 +419,7 @@ Error DescriptorSetFactory::DSAllocator::createNewPool()
 	ANKI_TRACE_INC_COUNTER(VK_DESCRIPTOR_POOL_CREATE, 1);
 
 	// Push back
-	m_pools.resize(m_layoutEntry->m_factory->m_alloc, m_pools.getSize() + 1);
+	m_pools.resize(*m_layoutEntry->m_factory->m_pool, m_pools.getSize() + 1);
 	m_pools[m_pools.getSize() - 1] = pool;
 
 	return Error::kNone;
@@ -449,7 +449,7 @@ const DS* DescriptorSetFactory::DSAllocator::tryFindSet(U64 hash)
 
 Error DescriptorSetFactory::DSAllocator::newSet(U64 hash,
												const Array<AnyBindingExtended, kMaxBindingsPerDescriptorSet>& bindings,
-												StackAllocator<U8>& tmpAlloc, const DS*& out_)
+												StackMemoryPool& tmpPool, const DS*& out_)
 {
 	DS* out = nullptr;
 
@@ -466,11 +466,11 @@ Error DescriptorSetFactory::DSAllocator::newSet(U64 hash,
 			// Found something, recycle
 			auto it2 = m_hashmap.find(set->m_hash);
 			ANKI_ASSERT(it2 != m_hashmap.getEnd());
-			m_hashmap.erase(m_layoutEntry->m_factory->m_alloc, it2);
+			m_hashmap.erase(*m_layoutEntry->m_factory->m_pool, it2);
 			m_list.erase(set);
 
 			m_list.pushBack(set);
-			m_hashmap.emplace(m_layoutEntry->m_factory->m_alloc, hash, set);
+			m_hashmap.emplace(*m_layoutEntry->m_factory->m_pool, hash, set);
 
 			out = set;
 			break;
@@ -501,10 +501,10 @@ Error DescriptorSetFactory::DSAllocator::newSet(U64 hash,
 		ANKI_ASSERT(rez == VK_SUCCESS && "That allocation can't fail");
 		ANKI_TRACE_INC_COUNTER(VK_DESCRIPTOR_SET_CREATE, 1);
 
-		out = m_layoutEntry->m_factory->m_alloc.newInstance<DS>();
+		out = newInstance<DS>(*m_layoutEntry->m_factory->m_pool);
 		out->m_handle = handle;
 
-		m_hashmap.emplace(m_layoutEntry->m_factory->m_alloc, hash, out);
+		m_hashmap.emplace(*m_layoutEntry->m_factory->m_pool, hash, out);
 		m_list.pushBack(out);
 	}
 
@@ -513,21 +513,20 @@ Error DescriptorSetFactory::DSAllocator::newSet(U64 hash,
 	out->m_hash = hash;
 
 	// Finally, write it
-	writeSet(bindings, *out, tmpAlloc);
+	writeSet(bindings, *out, tmpPool);
 
 	out_ = out;
 	return Error::kNone;
 }
 
 void DescriptorSetFactory::DSAllocator::writeSet(
-	const Array<AnyBindingExtended, kMaxBindingsPerDescriptorSet>& bindings, const DS& set,
-	StackAllocator<U8>& tmpAlloc)
+	const Array<AnyBindingExtended, kMaxBindingsPerDescriptorSet>& bindings, const DS& set, StackMemoryPool& tmpPool)
 {
-	DynamicArrayRaii<VkWriteDescriptorSet> writeInfos(tmpAlloc);
-	DynamicArrayRaii<VkDescriptorImageInfo> texInfos(tmpAlloc);
-	DynamicArrayRaii<VkDescriptorBufferInfo> buffInfos(tmpAlloc);
-	DynamicArrayRaii<VkWriteDescriptorSetAccelerationStructureKHR> asInfos(tmpAlloc);
-	DynamicArrayRaii<VkBufferView> bufferViews(tmpAlloc);
+	DynamicArrayRaii<VkWriteDescriptorSet> writeInfos(&tmpPool);
+	DynamicArrayRaii<VkDescriptorImageInfo> texInfos(&tmpPool);
+	DynamicArrayRaii<VkDescriptorBufferInfo> buffInfos(&tmpPool);
+	DynamicArrayRaii<VkWriteDescriptorSetAccelerationStructureKHR> asInfos(&tmpPool);
+	DynamicArrayRaii<VkBufferView> bufferViews(&tmpPool);
 
 	// First pass: Populate the VkDescriptorImageInfo and VkDescriptorBufferInfo
 	for(U bindingIdx = m_layoutEntry->m_minBinding; bindingIdx <= m_layoutEntry->m_maxBinding; ++bindingIdx)
@@ -665,8 +664,6 @@ void DescriptorSetFactory::DSAllocator::writeSet(
 
 DSLayoutCacheEntry::~DSLayoutCacheEntry()
 {
-	auto alloc = m_factory->m_alloc;
-
 	if(m_layoutHandle)
 	{
 		vkDestroyDescriptorSetLayout(m_factory->m_dev, m_layoutHandle, nullptr);
@@ -756,24 +753,24 @@ Error DSLayoutCacheEntry::getOrCreateDSAllocator(DescriptorSetFactory::DSAllocat
 	DescriptorSetFactory::ThreadLocal* threadLocal = DescriptorSetFactory::m_threadLocal;
 	if(ANKI_UNLIKELY(threadLocal == nullptr))
 	{
-		threadLocal = m_factory->m_alloc.newInstance<DescriptorSetFactory::ThreadLocal>();
+		threadLocal = newInstance<DescriptorSetFactory::ThreadLocal>(*m_factory->m_pool);
 		DescriptorSetFactory::m_threadLocal = threadLocal;
 
 		LockGuard<Mutex> lock(m_factory->m_allThreadLocalsMtx);
-		m_factory->m_allThreadLocals.emplaceBack(m_factory->m_alloc, threadLocal);
+		m_factory->m_allThreadLocals.emplaceBack(*m_factory->m_pool, threadLocal);
 	}
 
 	// Get or create the allocator
 	if(ANKI_UNLIKELY(m_index >= threadLocal->m_allocators.getSize()))
 	{
-		threadLocal->m_allocators.resize(m_factory->m_alloc, m_index + 1, nullptr);
-		alloc = m_factory->m_alloc.newInstance<DescriptorSetFactory::DSAllocator>(this);
+		threadLocal->m_allocators.resize(*m_factory->m_pool, m_index + 1, nullptr);
+		alloc = newInstance<DescriptorSetFactory::DSAllocator>(*m_factory->m_pool, this);
 		ANKI_CHECK(alloc->init());
 		threadLocal->m_allocators[m_index] = alloc;
 	}
 	else if(ANKI_UNLIKELY(threadLocal->m_allocators[m_index] == nullptr))
 	{
-		alloc = m_factory->m_alloc.newInstance<DescriptorSetFactory::DSAllocator>(this);
+		alloc = newInstance<DescriptorSetFactory::DSAllocator>(*m_factory->m_pool, this);
 		ANKI_CHECK(alloc->init());
 		threadLocal->m_allocators[m_index] = alloc;
 	}
@@ -811,7 +808,7 @@ AnyBinding& DescriptorSetState::getBindingToPopulate(U32 bindingIdx, U32 arrayId
 	{
 		// Need to grow
 		const U32 newSize = max(extended.m_arraySize * 2, arrayIdx + 1);
-		AnyBinding* newArr = m_alloc.newArray<AnyBinding>(newSize);
+		AnyBinding* newArr = newArray<AnyBinding>(*m_pool, newSize);
 
 		if(extended.m_arraySize == 1)
 		{
@@ -983,14 +980,15 @@ DescriptorSetFactory::~DescriptorSetFactory()
 {
 }
 
-Error DescriptorSetFactory::init(const GrAllocator<U8>& alloc, VkDevice dev, U32 bindlessTextureCount,
+Error DescriptorSetFactory::init(HeapMemoryPool* pool, VkDevice dev, U32 bindlessTextureCount,
								 U32 bindlessTextureBuffers)
 {
-	m_alloc = alloc;
+	ANKI_ASSERT(pool);
+	m_pool = pool;
 	m_dev = dev;
 
-	m_bindless = m_alloc.newInstance<BindlessDescriptorSet>();
-	ANKI_CHECK(m_bindless->init(alloc, dev, bindlessTextureCount, bindlessTextureBuffers));
+	m_bindless = newInstance<BindlessDescriptorSet>(*m_pool);
+	ANKI_CHECK(m_bindless->init(pool, dev, bindlessTextureCount, bindlessTextureBuffers));
 	m_bindlessTextureCount = bindlessTextureCount;
 	m_bindlessUniformTexelBufferCount = bindlessTextureBuffers;
 
@@ -1003,25 +1001,25 @@ void DescriptorSetFactory::destroy()
 	{
 		for(DSAllocator* alloc : threadLocal->m_allocators)
 		{
-			m_alloc.deleteInstance(alloc);
+			deleteInstance(*m_pool, alloc);
 		}
 
-		threadLocal->m_allocators.destroy(m_alloc);
-		m_alloc.deleteInstance(threadLocal);
+		threadLocal->m_allocators.destroy(*m_pool);
+		deleteInstance(*m_pool, threadLocal);
 	}
 
-	m_allThreadLocals.destroy(m_alloc);
+	m_allThreadLocals.destroy(*m_pool);
 
 	for(DSLayoutCacheEntry* l : m_caches)
 	{
-		m_alloc.deleteInstance(l);
+		deleteInstance(*m_pool, l);
 	}
 
-	m_caches.destroy(m_alloc);
+	m_caches.destroy(*m_pool);
 
 	if(m_bindless)
 	{
-		m_alloc.deleteInstance(m_bindless);
+		deleteInstance(*m_pool, m_bindless);
 	}
 }
 
@@ -1097,10 +1095,10 @@ Error DescriptorSetFactory::newDescriptorSetLayout(const DescriptorSetLayoutInit
 
 		if(cache == nullptr)
 		{
-			cache = m_alloc.newInstance<DSLayoutCacheEntry>(this, m_caches.getSize());
+			cache = newInstance<DSLayoutCacheEntry>(*m_pool, this, m_caches.getSize());
 			ANKI_CHECK(cache->init(bindings.getBegin(), bindingCount, hash));
 
-			m_caches.emplaceBack(m_alloc, cache);
+			m_caches.emplaceBack(*m_pool, cache);
 		}
 
 		// Set the layout
@@ -1111,9 +1109,8 @@ Error DescriptorSetFactory::newDescriptorSetLayout(const DescriptorSetLayoutInit
 	return Error::kNone;
 }
 
-Error DescriptorSetFactory::newDescriptorSet(StackAllocator<U8>& tmpAlloc, DescriptorSetState& state,
-											 DescriptorSet& set, Bool& dirty,
-											 Array<PtrSize, kMaxBindingsPerDescriptorSet>& dynamicOffsets,
+Error DescriptorSetFactory::newDescriptorSet(StackMemoryPool& tmpPool, DescriptorSetState& state, DescriptorSet& set,
+											 Bool& dirty, Array<PtrSize, kMaxBindingsPerDescriptorSet>& dynamicOffsets,
											 U32& dynamicOffsetCount)
 {
 	ANKI_TRACE_SCOPED_EVENT(VK_DESCRIPTOR_SET_GET_OR_CREATE);
@@ -1142,7 +1139,7 @@ Error DescriptorSetFactory::newDescriptorSet(StackAllocator<U8>& tmpAlloc, Descr
 
 			// Finally, allocate
 			const DS* s;
-			ANKI_CHECK(alloc->getOrCreateSet(hash, state.m_bindings, tmpAlloc, s));
+			ANKI_CHECK(alloc->getOrCreateSet(hash, state.m_bindings, tmpPool, s));
 			set.m_handle = s->m_handle;
 			ANKI_ASSERT(set.m_handle != VK_NULL_HANDLE);
 		}

AnKi/Gr/Vulkan/DescriptorSet.h  +6 -6

@@ -174,9 +174,9 @@ class DescriptorSetState
 	friend class DescriptorSetFactory;
 
 public:
-	void init(StackAllocator<U8>& alloc)
+	void init(StackMemoryPool* pool)
 	{
-		m_alloc = alloc;
+		m_pool = pool;
 	}
 
 	void setLayout(const DescriptorSetLayout& layout)
@@ -326,7 +326,7 @@ public:
 	}
 
 private:
-	StackAllocator<U8> m_alloc;
+	StackMemoryPool* m_pool = nullptr;
 	DescriptorSetLayout m_layout;
 
 	Array<AnyBindingExtended, kMaxBindingsPerDescriptorSet> m_bindings;
@@ -361,7 +361,7 @@ public:
 	DescriptorSetFactory() = default;
 	~DescriptorSetFactory();
 
-	Error init(const GrAllocator<U8>& alloc, VkDevice dev, U32 bindlessTextureCount, U32 bindlessTextureBuffers);
+	Error init(HeapMemoryPool* pool, VkDevice dev, U32 bindlessTextureCount, U32 bindlessTextureBuffers);
 
 	void destroy();
 
@@ -369,7 +369,7 @@ public:
 	Error newDescriptorSetLayout(const DescriptorSetLayoutInitInfo& init, DescriptorSetLayout& layout);
 
 	/// @note It's thread-safe.
-	Error newDescriptorSet(StackAllocator<U8>& tmpAlloc, DescriptorSetState& state, DescriptorSet& set, Bool& dirty,
+	Error newDescriptorSet(StackMemoryPool& tmpPool, DescriptorSetState& state, DescriptorSet& set, Bool& dirty,
						   Array<PtrSize, kMaxBindingsPerDescriptorSet>& dynamicOffsets, U32& dynamicOffsetCount);
 
 	void endFrame()
@@ -400,7 +400,7 @@ private:
 	DynamicArray<ThreadLocal*> m_allThreadLocals;
 	Mutex m_allThreadLocalsMtx;
 
-	GrAllocator<U8> m_alloc;
+	HeapMemoryPool* m_pool = nullptr;
 	VkDevice m_dev = VK_NULL_HANDLE;
 	U64 m_frameCount = 0;
 

AnKi/Gr/Vulkan/Fence.cpp  +1 -1

@@ -11,7 +11,7 @@ namespace anki {
 
 Fence* Fence::newInstance(GrManager* manager)
 {
-	return manager->getAllocator().newInstance<FenceImpl>(manager, "N/A");
+	return anki::newInstance<FenceImpl>(manager->getMemoryPool(), manager, "N/A");
 }
 
 Bool Fence::clientWait(Second seconds)

AnKi/Gr/Vulkan/FenceFactory.cpp  +5 -5

@@ -11,10 +11,10 @@ void FenceFactory::destroy()
 {
 	for(MicroFence* fence : m_fences)
 	{
-		m_alloc.deleteInstance(fence);
+		deleteInstance(*m_pool, fence);
 	}
 
-	m_fences.destroy(m_alloc);
+	m_fences.destroy(*m_pool);
 }
 
 MicroFence* FenceFactory::newFence()
@@ -33,7 +33,7 @@ MicroFence* FenceFactory::newFence()
 				out = m_fences[i];
 
 				// Pop it
-				m_fences.erase(m_alloc, m_fences.getBegin() + i);
+				m_fences.erase(*m_pool, m_fences.getBegin() + i);
 				break;
 			}
 			else if(status != VK_NOT_READY)
@@ -62,7 +62,7 @@ MicroFence* FenceFactory::newFence()
 	if(out == nullptr)
 	{
 		// Create a new one
-		out = m_alloc.newInstance<MicroFence>(this);
+		out = anki::newInstance<MicroFence>(*m_pool, this);
 	}
 	else
 	{
@@ -79,7 +79,7 @@ void FenceFactory::deleteFence(MicroFence* fence)
 	ANKI_ASSERT(fence);
 
 	LockGuard<Mutex> lock(m_mtx);
-	m_fences.emplaceBack(m_alloc, fence);
+	m_fences.emplaceBack(*m_pool, fence);
 }
 
 } // end namespace anki

AnKi/Gr/Vulkan/FenceFactory.h  +5 -5

@@ -46,7 +46,7 @@ public:
 		return m_refcount.fetchSub(1);
 	}
 
-	GrAllocator<U8> getAllocator() const;
+	HeapMemoryPool& getMemoryPool();
 
 	void wait()
 	{
@@ -95,10 +95,10 @@ public:
 	{
 	}
 
-	void init(GrAllocator<U8> alloc, VkDevice dev)
+	void init(HeapMemoryPool* pool, VkDevice dev)
 	{
-		ANKI_ASSERT(dev);
-		m_alloc = std::move(alloc);
+		ANKI_ASSERT(pool && dev);
+		m_pool = pool;
 		m_dev = dev;
 	}
 
@@ -111,7 +111,7 @@ public:
 	}
 
 private:
-	GrAllocator<U8> m_alloc;
+	HeapMemoryPool* m_pool = nullptr;
 	VkDevice m_dev = VK_NULL_HANDLE;
 	DynamicArray<MicroFence*> m_fences;
 	U32 m_aliveFenceCount = 0;

AnKi/Gr/Vulkan/FenceFactory.inl.h  +2 -2

@@ -28,9 +28,9 @@ inline MicroFence::~MicroFence()
 	}
 }
 
-inline GrAllocator<U8> MicroFence::getAllocator() const
+inline HeapMemoryPool& MicroFence::getMemoryPool()
 {
-	return m_factory->m_alloc;
+	return *m_factory->m_pool;
 }
 
 inline Bool MicroFence::done() const

AnKi/Gr/Vulkan/FrameGarbageCollector.cpp  +8 -8

@@ -22,7 +22,7 @@ void FrameGarbageCollector::collectGarbage()
 	}
 
 	const VkDevice dev = m_gr->getDevice();
-	GrAllocator<U8> alloc = m_gr->getAllocator();
+	HeapMemoryPool& pool = m_gr->getMemoryPool();
 
 	IntrusiveList<FrameGarbage> newFrames;
 	while(!m_frames.isEmpty())
@@ -47,13 +47,13 @@ void FrameGarbageCollector::collectGarbage()
 			{
 				vkDestroyImageView(dev, viewHandle, nullptr);
 			}
-			textureGarbage->m_viewHandles.destroy(alloc);
+			textureGarbage->m_viewHandles.destroy(pool);
 
 			for(U32 bindlessIndex : textureGarbage->m_bindlessIndices)
 			{
 				m_gr->getDescriptorSetFactory().unbindBindlessTexture(bindlessIndex);
 			}
-			textureGarbage->m_bindlessIndices.destroy(alloc);
+			textureGarbage->m_bindlessIndices.destroy(pool);
 
 			if(textureGarbage->m_imageHandle)
 			{
@@ -65,7 +65,7 @@ void FrameGarbageCollector::collectGarbage()
 				m_gr->getGpuMemoryManager().freeMemory(textureGarbage->m_memoryHandle);
 			}
 
-			alloc.deleteInstance(textureGarbage);
+			deleteInstance(pool, textureGarbage);
 		}
 
 		// Dispose buffer garbage
@@ -77,7 +77,7 @@ void FrameGarbageCollector::collectGarbage()
 			{
 				vkDestroyBufferView(dev, view, nullptr);
 			}
-			bufferGarbage->m_viewHandles.destroy(alloc);
+			bufferGarbage->m_viewHandles.destroy(pool);
 
 			if(bufferGarbage->m_bufferHandle)
 			{
@@ -89,10 +89,10 @@ void FrameGarbageCollector::collectGarbage()
 				m_gr->getGpuMemoryManager().freeMemory(bufferGarbage->m_memoryHandle);
 			}
 
-			alloc.deleteInstance(bufferGarbage);
+			deleteInstance(pool, bufferGarbage);
 		}
 
-		alloc.deleteInstance(&frame);
+		deleteInstance(pool, &frame);
 	}
 
 	m_frames = std::move(newFrames);
@@ -106,7 +106,7 @@ FrameGarbageCollector::FrameGarbage& FrameGarbageCollector::getFrame()
 	}
 	else
 	{
-		FrameGarbage* newGarbage = m_gr->getAllocator().newInstance<FrameGarbage>();
+		FrameGarbage* newGarbage = newInstance<FrameGarbage>(m_gr->getMemoryPool());
 		m_frames.pushBack(newGarbage);
 	}
 

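A note on the FrameGarbageCollector hunks above: only the allocator plumbing changes; the deferred-destruction scheme stays as is. Garbage is parked on the frame that last used it and destroyed once that frame's fence proves the GPU is done. A minimal sketch of the idea in plain C++/Vulkan, using std::vector instead of the engine's pool-backed containers and hypothetical names:

	#include <vector>
	#include <vulkan/vulkan.h>

	struct FrameGarbageSketch
	{
		VkFence m_fence; // Signaled when the GPU has finished the frame
		std::vector<VkImageView> m_views; // Handles whose owners died during that frame
	};

	void collectGarbageSketch(VkDevice dev, std::vector<FrameGarbageSketch>& frames)
	{
		std::vector<FrameGarbageSketch> stillPending;
		for(FrameGarbageSketch& frame : frames)
		{
			if(vkGetFenceStatus(dev, frame.m_fence) == VK_SUCCESS)
			{
				// GPU is provably done with the frame, destroying is now safe
				for(VkImageView view : frame.m_views)
				{
					vkDestroyImageView(dev, view, nullptr);
				}
			}
			else
			{
				stillPending.push_back(std::move(frame)); // Retry on the next collect
			}
		}
		frames = std::move(stillPending);
	}
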
+ 2 - 2
AnKi/Gr/Vulkan/Framebuffer.cpp

@@ -11,11 +11,11 @@ namespace anki {
 
 Framebuffer* Framebuffer::newInstance(GrManager* manager, const FramebufferInitInfo& init)
 {
-	FramebufferImpl* impl = manager->getAllocator().newInstance<FramebufferImpl>(manager, init.getName());
+	FramebufferImpl* impl = anki::newInstance<FramebufferImpl>(manager->getMemoryPool(), manager, init.getName());
 	const Error err = impl->init(init);
 	if(err)
 	{
-		manager->getAllocator().deleteInstance(impl);
+		deleteInstance(manager->getMemoryPool(), impl);
 		impl = nullptr;
 	}
 	return impl;

+ 2 - 2
AnKi/Gr/Vulkan/FramebufferImpl.cpp

@@ -24,7 +24,7 @@ FramebufferImpl::~FramebufferImpl()
 		vkDestroyRenderPass(getDevice(), rpass, nullptr);
 	}
 
-	m_renderpassHandles.destroy(getAllocator());
+	m_renderpassHandles.destroy(getMemoryPool());
 
 	if(m_compatibleRenderpassHandle)
 	{
@@ -376,7 +376,7 @@ VkRenderPass FramebufferImpl::getRenderPassHandle(const Array<VkImageLayout, kMa
 			ANKI_VK_CHECKF(vkCreateRenderPass2KHR(getDevice(), &ci, nullptr, &out));
 			getGrManagerImpl().trySetVulkanHandleName(getName(), VK_OBJECT_TYPE_RENDER_PASS, out);
 
-			m_renderpassHandles.emplace(getAllocator(), hash, out);
+			m_renderpassHandles.emplace(getMemoryPool(), hash, out);
 		}
 	}
 

+ 20 - 20
AnKi/Gr/Vulkan/GpuMemoryManager.cpp

@@ -7,16 +7,16 @@
 
 namespace anki {
 
-static constexpr Array<GpuMemoryManagerClassInfo, 7> CLASSES{{{4_KB, 256_KB},
-															  {128_KB, 8_MB},
-															  {1_MB, 64_MB},
-															  {16_MB, 128_MB},
-															  {64_MB, 128_MB},
-															  {128_MB, 128_MB},
-															  {256_MB, 256_MB}}};
+static constexpr Array<GpuMemoryManagerClassInfo, 7> kClasses{{{4_KB, 256_KB},
+															   {128_KB, 8_MB},
+															   {1_MB, 64_MB},
+															   {16_MB, 128_MB},
+															   {64_MB, 128_MB},
+															   {128_MB, 128_MB},
+															   {256_MB, 256_MB}}};
 
 /// Special classes for the ReBAR memory. Have that as a special case because it's so limited and needs special care.
-static constexpr Array<GpuMemoryManagerClassInfo, 3> REBAR_CLASSES{{{1_MB, 1_MB}, {12_MB, 12_MB}, {24_MB, 24_MB}}};
+static constexpr Array<GpuMemoryManagerClassInfo, 3> kRebarClasses{{{1_MB, 1_MB}, {12_MB, 12_MB}, {24_MB, 24_MB}}};
 
 Error GpuMemoryManagerInterface::allocateChunk(U32 classIdx, GpuMemoryManagerChunk*& chunk)
 {
@@ -40,7 +40,7 @@ Error GpuMemoryManagerInterface::allocateChunk(U32 classIdx, GpuMemoryManagerChu
 					 m_classInfos[classIdx].m_suballocationSize);
 	}
 
-	chunk = m_parent->m_alloc.newInstance<GpuMemoryManagerChunk>();
+	chunk = newInstance<GpuMemoryManagerChunk>(*m_parent->m_pool);
 	chunk->m_handle = memHandle;
 	chunk->m_size = m_classInfos[classIdx].m_chunkSize;
 
@@ -64,7 +64,7 @@ void GpuMemoryManagerInterface::freeChunk(GpuMemoryManagerChunk* chunk)
 	ANKI_ASSERT(m_allocatedMemory >= chunk->m_size);
 	m_allocatedMemory -= chunk->m_size;
 
-	m_parent->m_alloc.deleteInstance(chunk);
+	deleteInstance(*m_parent->m_pool, chunk);
 }
 
 GpuMemoryManager::~GpuMemoryManager()
@@ -74,17 +74,17 @@ GpuMemoryManager::~GpuMemoryManager()
 void GpuMemoryManager::destroy()
 {
 	ANKI_VK_LOGV("Destroying memory manager");
-	m_callocs.destroy(m_alloc);
+	m_callocs.destroy(*m_pool);
 }
 
-void GpuMemoryManager::init(VkPhysicalDevice pdev, VkDevice dev, GrAllocator<U8> alloc, Bool exposeBufferGpuAddress)
+void GpuMemoryManager::init(VkPhysicalDevice pdev, VkDevice dev, HeapMemoryPool* pool, Bool exposeBufferGpuAddress)
 {
-	ANKI_ASSERT(pdev);
+	ANKI_ASSERT(pool && pdev);
 	ANKI_ASSERT(dev);
 
 	// Print some info
 	ANKI_VK_LOGV("Initializing memory manager");
-	for(const GpuMemoryManagerClassInfo& c : CLASSES)
+	for(const GpuMemoryManagerClassInfo& c : kClasses)
 	{
 		ANKI_VK_LOGV("\tGPU mem class. Chunk size: %lu, suballocationSize: %lu, allocsPerChunk %lu", c.m_chunkSize,
 					 c.m_suballocationSize, c.m_chunkSize / c.m_suballocationSize);
@@ -104,7 +104,7 @@ void GpuMemoryManager::init(VkPhysicalDevice pdev, VkDevice dev, GrAllocator<U8>
 				m_bufferImageGranularity);
 		}
 
-		for(const GpuMemoryManagerClassInfo& c : CLASSES)
+		for(const GpuMemoryManagerClassInfo& c : kClasses)
 		{
 			if(!isAligned(m_bufferImageGranularity, c.m_suballocationSize))
 			{
@@ -118,10 +118,10 @@ void GpuMemoryManager::init(VkPhysicalDevice pdev, VkDevice dev, GrAllocator<U8>
 
 	vkGetPhysicalDeviceMemoryProperties(pdev, &m_memoryProperties);
 
-	m_alloc = alloc;
+	m_pool = pool;
 	m_dev = dev;
 
-	m_callocs.create(alloc, m_memoryProperties.memoryTypeCount);
+	m_callocs.create(*pool, m_memoryProperties.memoryTypeCount);
 	for(U32 memTypeIdx = 0; memTypeIdx < m_callocs.getSize(); ++memTypeIdx)
 	{
 		GpuMemoryManagerInterface& iface = m_callocs[memTypeIdx].getInterface();
@@ -149,15 +149,15 @@ void GpuMemoryManager::init(VkPhysicalDevice pdev, VkDevice dev, GrAllocator<U8>
 		// Choose different classes
 		if(!isReBar)
 		{
-			iface.m_classInfos = CLASSES;
+			iface.m_classInfos = kClasses;
 		}
 		else
 		{
-			iface.m_classInfos = REBAR_CLASSES;
+			iface.m_classInfos = kRebarClasses;
 		}
 
 		// The interface is initialized, init the builder
-		m_callocs[memTypeIdx].init(m_alloc);
+		m_callocs[memTypeIdx].init(m_pool);
 	}
 }
 

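For readers skimming the kClasses table above: each entry pairs a suballocation size with a chunk size, and a request is served from the smallest class whose slot fits it. A rough, freestanding illustration of the class lookup (sizes and names illustrative; this is not the ClassAllocatorBuilder logic):

	#include <cstddef>

	struct ClassInfoSketch
	{
		size_t m_suballocationSize; // Size of one slot inside a chunk
		size_t m_chunkSize; // Size of the VkDeviceMemory block backing a chunk of this class
	};

	constexpr ClassInfoSketch kClassesSketch[] = {{4 * 1024, 256 * 1024}, {128 * 1024, 8 * 1024 * 1024}};

	inline int chooseClassSketch(size_t allocationSize)
	{
		for(int i = 0; i < int(sizeof(kClassesSketch) / sizeof(kClassesSketch[0])); ++i)
		{
			if(allocationSize <= kClassesSketch[i].m_suballocationSize)
			{
				return i; // Smallest class that fits; per-allocation waste is bounded by the class
			}
		}
		return -1; // Larger than every class: needs a dedicated allocation
	}
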
+ 3 - 3
AnKi/Gr/Vulkan/GpuMemoryManager.h

@@ -130,7 +130,7 @@ public:
 
 	GpuMemoryManager& operator=(const GpuMemoryManager&) = delete; // Non-copyable
 
-	void init(VkPhysicalDevice pdev, VkDevice dev, GrAllocator<U8> alloc, Bool exposeBufferGpuAddress);
+	void init(VkPhysicalDevice pdev, VkDevice dev, HeapMemoryPool* pool, Bool exposeBufferGpuAddress);
 
 	void destroy();
 
@@ -155,13 +155,13 @@ public:
 private:
 	using ClassAllocator = ClassAllocatorBuilder<GpuMemoryManagerChunk, GpuMemoryManagerInterface, Mutex>;
 
-	GrAllocator<U8> m_alloc;
+	HeapMemoryPool* m_pool = nullptr;
 
 	VkDevice m_dev = VK_NULL_HANDLE;
 
 	DynamicArray<ClassAllocator> m_callocs;
 
-	VkPhysicalDeviceMemoryProperties m_memoryProperties;
+	VkPhysicalDeviceMemoryProperties m_memoryProperties = {};
 	U32 m_bufferImageGranularity = 0;
 
 	// Dedicated allocation stats

+ 11 - 9
AnKi/Gr/Vulkan/GrManager.cpp

@@ -29,23 +29,24 @@ GrManager::GrManager()
 GrManager::~GrManager()
 {
 	// Destroy in reverse order
-	m_cacheDir.destroy(m_alloc);
+	m_cacheDir.destroy(m_pool);
 }
 
 Error GrManager::newInstance(GrManagerInitInfo& init, GrManager*& gr)
 {
-	auto alloc = HeapAllocator<U8>(init.m_allocCallback, init.m_allocCallbackUserData, "Gr");
-
-	GrManagerImpl* impl = alloc.newInstance<GrManagerImpl>();
+	GrManagerImpl* impl = static_cast<GrManagerImpl*>(
+		init.m_allocCallback(init.m_allocCallbackUserData, nullptr, sizeof(GrManagerImpl), alignof(GrManagerImpl)));
+	callConstructor(*impl);
 
 	// Init
-	impl->m_alloc = alloc;
-	impl->m_cacheDir.create(alloc, init.m_cacheDirectory);
+	impl->m_pool.init(init.m_allocCallback, init.m_allocCallbackUserData);
+	impl->m_cacheDir.create(impl->m_pool, init.m_cacheDirectory);
 	Error err = impl->init(init);
 
 	if(err)
 	{
-		alloc.deleteInstance(impl);
+		callDestructor(*impl);
+		init.m_allocCallback(init.m_allocCallbackUserData, impl, 0, 0);
 		gr = nullptr;
 	}
 	else
@@ -63,9 +64,10 @@ void GrManager::deleteInstance(GrManager* gr)
 		return;
 	}
 
-	auto alloc = gr->m_alloc;
+	AllocAlignedCallback callback = gr->m_pool.getAllocationCallback();
+	void* userData = gr->m_pool.getAllocationCallbackUserData();
 	gr->~GrManager();
-	alloc.deallocate(gr, 1);
+	callback(userData, gr, 0, 0);
 }
 
 TexturePtr GrManager::acquireNextPresentableTexture()

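The GrManager::newInstance rewrite above resolves a chicken-and-egg problem: the pool now lives inside the manager, so the manager itself cannot be pool-allocated. It takes raw bytes from the user's alloc callback and constructs in place. A sketch of what helpers like callConstructor/callDestructor conventionally reduce to (assumed semantics; the commit does not show their definitions):

	#include <new>
	#include <utility>

	// Hypothetical equivalents of the placement-construction helpers used above.
	template<typename T, typename... TArgs>
	void callConstructorSketch(T& obj, TArgs&&... args)
	{
		::new(static_cast<void*>(&obj)) T(std::forward<TArgs>(args)...); // Construct in pre-allocated memory
	}

	template<typename T>
	void callDestructorSketch(T& obj)
	{
		obj.~T(); // Destroy only; the raw memory is returned separately through the alloc callback
	}
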
+ 30 - 31
AnKi/Gr/Vulkan/GrManagerImpl.cpp

@@ -66,7 +66,7 @@ GrManagerImpl::~GrManagerImpl()
 	m_pplineLayoutFactory.destroy();
 	m_descrFactory.destroy();
 
-	m_pplineCache.destroy(m_device, m_physicalDevice, getAllocator());
+	m_pplineCache.destroy(m_device, m_physicalDevice, m_pool);
 
 	m_fenceFactory.destroy();
 
@@ -142,23 +142,23 @@ Error GrManagerImpl::initInternal(const GrManagerInitInfo& init)
 
 	m_crntSwapchain = m_swapchainFactory.newInstance();
 
-	ANKI_CHECK(m_pplineCache.init(m_device, m_physicalDevice, init.m_cacheDirectory, *m_config, getAllocator()));
+	ANKI_CHECK(m_pplineCache.init(m_device, m_physicalDevice, init.m_cacheDirectory, *m_config, m_pool));
 
 	ANKI_CHECK(initMemory());
 
-	ANKI_CHECK(m_cmdbFactory.init(getAllocator(), m_device, m_queueFamilyIndices));
+	ANKI_CHECK(m_cmdbFactory.init(&m_pool, m_device, m_queueFamilyIndices));
 
 	for(PerFrame& f : m_perFrame)
 	{
 		resetFrame(f);
 	}
 
-	m_fenceFactory.init(getAllocator(), m_device);
-	m_semaphoreFactory.init(getAllocator(), m_device);
+	m_fenceFactory.init(&m_pool, m_device);
+	m_semaphoreFactory.init(&m_pool, m_device);
 	m_samplerFactory.init(this);
-	m_barrierFactory.init(getAllocator(), m_device);
-	m_occlusionQueryFactory.init(getAllocator(), m_device, VK_QUERY_TYPE_OCCLUSION);
-	m_timestampQueryFactory.init(getAllocator(), m_device, VK_QUERY_TYPE_TIMESTAMP);
+	m_barrierFactory.init(&m_pool, m_device);
+	m_occlusionQueryFactory.init(&m_pool, m_device, VK_QUERY_TYPE_OCCLUSION);
+	m_timestampQueryFactory.init(&m_pool, m_device, VK_QUERY_TYPE_TIMESTAMP);
 
 	// See if unaligned formats are supported
 	{
@@ -195,8 +195,8 @@ Error GrManagerImpl::initInternal(const GrManagerInitInfo& init)
 		}
 	}
 
-	ANKI_CHECK(m_descrFactory.init(getAllocator(), m_device, kMaxBindlessTextures, kMaxBindlessReadonlyTextureBuffers));
-	m_pplineLayoutFactory.init(getAllocator(), m_device);
+	ANKI_CHECK(m_descrFactory.init(&m_pool, m_device, kMaxBindlessTextures, kMaxBindlessReadonlyTextureBuffers));
+	m_pplineLayoutFactory.init(&m_pool, m_device);
 
 	m_frameGarbageCollector.init(this);
 
@@ -227,14 +227,14 @@ Error GrManagerImpl::initInstance()
 	ci.pApplicationInfo = &app;
 
 	// Instance layers
-	DynamicArrayRaii<const char*> layersToEnable(getAllocator());
+	DynamicArrayRaii<const char*> layersToEnable(&m_pool);
 	{
 		U32 layerCount;
 		vkEnumerateInstanceLayerProperties(&layerCount, nullptr);
 
 		if(layerCount)
 		{
-			DynamicArrayRaii<VkLayerProperties> layerProps(getAllocator(), layerCount);
+			DynamicArrayRaii<VkLayerProperties> layerProps(&m_pool, layerCount);
 			vkEnumerateInstanceLayerProperties(&layerCount, &layerProps[0]);
 
 			ANKI_VK_LOGV("Found the following instance layers:");
@@ -265,8 +265,8 @@ Error GrManagerImpl::initInstance()
 	}
 
 	// Validation features
-	DynamicArrayRaii<VkValidationFeatureEnableEXT> enabledValidationFeatures(getAllocator());
-	DynamicArrayRaii<VkValidationFeatureDisableEXT> disabledValidationFeatures(getAllocator());
+	DynamicArrayRaii<VkValidationFeatureEnableEXT> enabledValidationFeatures(&m_pool);
+	DynamicArrayRaii<VkValidationFeatureDisableEXT> disabledValidationFeatures(&m_pool);
 	if(m_config->getGrDebugPrintf())
 	{
 		enabledValidationFeatures.emplaceBack(VK_VALIDATION_FEATURE_ENABLE_DEBUG_PRINTF_EXT);
@@ -292,8 +292,8 @@ Error GrManagerImpl::initInstance()
 	}
 
 	// Extensions
-	DynamicArrayRaii<const char*> instExtensions(getAllocator());
-	DynamicArrayRaii<VkExtensionProperties> instExtensionInf(getAllocator());
+	DynamicArrayRaii<const char*> instExtensions(&m_pool);
+	DynamicArrayRaii<VkExtensionProperties> instExtensionInf(&m_pool);
 	U32 extCount = 0;
 	vkEnumerateInstanceExtensionProperties(nullptr, &extCount, nullptr);
 	if(extCount)
@@ -429,7 +429,7 @@ Error GrManagerImpl::initInstance()
 
 	// Find the correct physical device
 	{
-		DynamicArrayRaii<VkPhysicalDevice> physicalDevices(m_alloc, count);
+		DynamicArrayRaii<VkPhysicalDevice> physicalDevices(&m_pool, count);
 		ANKI_VK_CHECK(vkEnumeratePhysicalDevices(m_instance, &count, &physicalDevices[0]));
 
 		VkPhysicalDevice firstChoice = VK_NULL_HANDLE;
@@ -554,7 +554,7 @@ Error GrManagerImpl::initDevice(const GrManagerInitInfo& init)
 	vkGetPhysicalDeviceQueueFamilyProperties(m_physicalDevice, &count, nullptr);
 	ANKI_VK_LOGI("Number of queue families: %u", count);
 
-	DynamicArrayRaii<VkQueueFamilyProperties> queueInfos(getAllocator());
+	DynamicArrayRaii<VkQueueFamilyProperties> queueInfos(&m_pool);
 	queueInfos.create(count);
 	vkGetPhysicalDeviceQueueFamilyProperties(m_physicalDevice, &count, &queueInfos[0]);
 
@@ -624,8 +624,8 @@ Error GrManagerImpl::initDevice(const GrManagerInitInfo& init)
 	U32 extCount = 0;
 	vkEnumerateDeviceExtensionProperties(m_physicalDevice, nullptr, &extCount, nullptr);
 
-	DynamicArrayRaii<VkExtensionProperties> extensionInfos(getAllocator()); // Keep it alive in the stack
-	DynamicArrayRaii<const char*> extensionsToEnable(getAllocator());
+	DynamicArrayRaii<VkExtensionProperties> extensionInfos(&m_pool); // Keep it alive in the stack
+	DynamicArrayRaii<const char*> extensionsToEnable(&m_pool);
 	if(extCount)
 	{
 		extensionInfos.create(extCount);
@@ -1141,7 +1141,7 @@ Error GrManagerImpl::initMemory()
 					 ANKI_FORMAT_U32(m_memoryProperties.memoryTypes[i].propertyFlags));
 	}
 
-	m_gpuMemManager.init(m_physicalDevice, m_device, getAllocator(),
+	m_gpuMemManager.init(m_physicalDevice, m_device, &m_pool,
 						 !!(m_extensions & VulkanExtensions::kKHR_buffer_device_address));
 
 	return Error::kNone;
@@ -1482,7 +1482,7 @@ VkBool32 GrManagerImpl::debugReportCallbackEXT(VkDebugUtilsMessageSeverityFlagBi
 
 	// Get all names of affected objects
 	GrManagerImpl* self = static_cast<GrManagerImpl*>(pUserData);
-	StringRaii objectNames(self->m_alloc);
+	StringRaii objectNames(&self->m_pool);
 	if(pCallbackData->objectCount)
 	{
 		for(U32 i = 0; i < pCallbackData->objectCount; ++i)
@@ -1537,7 +1537,7 @@ Error GrManagerImpl::printPipelineShaderInfoInternal(VkPipeline ppline, CString
 		if(!m_shaderStatsFile.isOpen())
 		{
 			ANKI_CHECK(m_shaderStatsFile.open(
-				StringRaii(getAllocator()).sprintf("%s/../ppline_stats.csv", m_cacheDir.cstr()).toCString(),
+				StringRaii(&m_pool).sprintf("%s/../ppline_stats.csv", m_cacheDir.cstr()).toCString(),
 				FileOpenFlag::kWrite));
 
 			ANKI_CHECK(m_shaderStatsFile.writeText("ppline name,hash,"
@@ -1551,7 +1551,7 @@ Error GrManagerImpl::printPipelineShaderInfoInternal(VkPipeline ppline, CString
 
 		ANKI_CHECK(m_shaderStatsFile.writeTextf("%s,0x%" PRIx64 ",", name.cstr(), hash));
 
-		StringRaii str(getAllocator());
+		StringRaii str(&m_pool);
 
 		for(ShaderType type = ShaderType::kFirst; type < ShaderType::kCount; ++type)
 		{
@@ -1566,9 +1566,8 @@ Error GrManagerImpl::printPipelineShaderInfoInternal(VkPipeline ppline, CString
 			ANKI_VK_CHECK(m_pfnGetShaderInfoAMD(m_device, ppline, VkShaderStageFlagBits(convertShaderTypeBit(stage)),
 												VK_SHADER_INFO_TYPE_STATISTICS_AMD, &size, &stats));
 
-			str.append(StringRaii(getAllocator())
-						   .sprintf("Stage %u: VGRPS %02u, SGRPS %02u ", U32(type), stats.resourceUsage.numUsedVgprs,
-									stats.resourceUsage.numUsedSgprs));
+			str.append(StringRaii(&m_pool).sprintf("Stage %u: VGRPS %02u, SGRPS %02u ", U32(type),
+												   stats.resourceUsage.numUsedVgprs, stats.resourceUsage.numUsedSgprs));
 
 			ANKI_CHECK(m_shaderStatsFile.writeTextf((type != ShaderType::kLast) ? "%u,%u," : "%u,%u\n",
 													stats.resourceUsage.numUsedVgprs,
@@ -1583,14 +1582,14 @@ Error GrManagerImpl::printPipelineShaderInfoInternal(VkPipeline ppline, CString
 
 	if(!!(m_extensions & VulkanExtensions::kKHR_pipeline_executable_properties))
 	{
-		StringListRaii log(m_alloc);
+		StringListRaii log(&m_pool);
 
 		VkPipelineInfoKHR pplineInf = {};
 		pplineInf.sType = VK_STRUCTURE_TYPE_PIPELINE_INFO_KHR;
 		pplineInf.pipeline = ppline;
 		U32 executableCount = 0;
 		ANKI_VK_CHECK(vkGetPipelineExecutablePropertiesKHR(m_device, &pplineInf, &executableCount, nullptr));
-		DynamicArrayRaii<VkPipelineExecutablePropertiesKHR> executableProps(m_alloc, executableCount);
+		DynamicArrayRaii<VkPipelineExecutablePropertiesKHR> executableProps(&m_pool, executableCount);
 		for(VkPipelineExecutablePropertiesKHR& prop : executableProps)
 		{
 			prop = {};
@@ -1612,7 +1611,7 @@ Error GrManagerImpl::printPipelineShaderInfoInternal(VkPipeline ppline, CString
 			exeInf.pipeline = ppline;
 			U32 statCount = 0;
 			vkGetPipelineExecutableStatisticsKHR(m_device, &exeInf, &statCount, nullptr);
-			DynamicArrayRaii<VkPipelineExecutableStatisticKHR> stats(m_alloc, statCount);
+			DynamicArrayRaii<VkPipelineExecutableStatisticKHR> stats(&m_pool, statCount);
 			for(VkPipelineExecutableStatisticKHR& s : stats)
 			{
 				s = {};
@@ -1651,7 +1650,7 @@ Error GrManagerImpl::printPipelineShaderInfoInternal(VkPipeline ppline, CString
 			}
 		}
 
-		StringRaii finalLog(m_alloc);
+		StringRaii finalLog(&m_pool);
 		log.join("", finalLog);
 		ANKI_VK_LOGV("%s", finalLog.cstr());
 	}

+ 1 - 0
AnKi/Gr/Vulkan/GrManagerImpl.h

@@ -214,6 +214,7 @@ public:
 	}
 	/// @}
 
+	/// @note It's thread-safe.
 	void printPipelineShaderInfo(VkPipeline ppline, CString name, ShaderTypeBit stages, U64 hash = 0) const;
 
 	FrameGarbageCollector& getFrameGarbageCollector()

+ 2 - 2
AnKi/Gr/Vulkan/GrUpscaler.cpp

@@ -11,11 +11,11 @@ namespace anki {
 
 GrUpscaler* GrUpscaler::newInstance(GrManager* manager, const GrUpscalerInitInfo& initInfo)
 {
-	GrUpscalerImpl* impl = manager->getAllocator().newInstance<GrUpscalerImpl>(manager, initInfo.getName());
+	GrUpscalerImpl* impl = anki::newInstance<GrUpscalerImpl>(manager->getMemoryPool(), manager, initInfo.getName());
 	const Error err = impl->initInternal(initInfo);
 	if(err)
 	{
-		manager->getAllocator().deleteInstance(impl);
+		deleteInstance(manager->getMemoryPool(), impl);
 		impl = nullptr;
 	}
 	return impl;

+ 5 - 5
AnKi/Gr/Vulkan/MicroObjectRecycler.h

@@ -22,9 +22,9 @@ public:
 	{
 	}
 
-	MicroObjectRecycler(GrAllocator<U8> alloc)
+	MicroObjectRecycler(HeapMemoryPool* pool)
 	{
-		init(alloc);
+		init(pool);
 	}
 
 	~MicroObjectRecycler()
@@ -32,9 +32,9 @@ public:
 		destroy();
 	}
 
-	void init(GrAllocator<U8> alloc)
+	void init(HeapMemoryPool* pool)
 	{
-		m_alloc = alloc;
+		m_pool = pool;
 	}
 
 	/// It's thread-safe.
@@ -67,7 +67,7 @@ private:
 		Bool m_fenceDone;
 	};
 
-	GrAllocator<U8> m_alloc;
+	HeapMemoryPool* m_pool = nullptr;
 	DynamicArray<Object> m_objects;
 	Mutex m_mtx;
 

+ 11 - 11
AnKi/Gr/Vulkan/MicroObjectRecycler.inl.h

@@ -20,14 +20,14 @@ inline void MicroObjectRecycler<T>::destroy()
 		ANKI_ASSERT(mobj);
 		ANKI_ASSERT(!mobj->getFence());
 
-		auto alloc = mobj->getAllocator();
-		alloc.deleteInstance(mobj);
+		auto& pool = mobj->getMemoryPool();
+		deleteInstance(pool, mobj);
 #if ANKI_EXTRA_CHECKS
 		--m_createdAndNotRecycled;
 #endif
 	}
 
-	m_objects.destroy(m_alloc);
+	m_objects.destroy(*m_pool);
 	ANKI_ASSERT(m_createdAndNotRecycled == 0 && "Destroying the recycler while objects have not recycled yet");
 }
 
@@ -49,7 +49,7 @@ inline T* MicroObjectRecycler<T>::findToReuse()
 		{
 			out = m_objects[i].m_microObject;
 			m_objects[i] = m_objects[m_objects.getSize() - 1];
-			m_objects.popBack(m_alloc);
+			m_objects.popBack(*m_pool);
 
 			break;
 		}
@@ -86,7 +86,7 @@ void MicroObjectRecycler<T>::recycle(T* mobj)
 		mobj->onFenceDone();
 	}
 
-	m_objects.emplaceBack(m_alloc, obj);
+	m_objects.emplaceBack(*m_pool, obj);
 	checkDoneFences();
 	trimCacheInternal(m_readyObjectsAfterTrim);
 }
@@ -125,18 +125,18 @@ void MicroObjectRecycler<T>::trimCacheInternal(U32 aliveObjectCountAfterTrim)
 		if(inUseByTheGpu)
 		{
 			// Can't delete it for sure
-			aliveObjects.emplaceBack(m_alloc, obj);
+			aliveObjects.emplaceBack(*m_pool, obj);
 		}
 		else if(aliveObjectCountAfterTrim > 0)
 		{
 			// Need to keep a few alive for recycling
-			aliveObjects.emplaceBack(m_alloc, obj);
+			aliveObjects.emplaceBack(*m_pool, obj);
 			--aliveObjectCountAfterTrim;
 		}
 		else
 		{
-			auto alloc = mobj.getAllocator();
-			alloc.deleteInstance(&mobj);
+			auto& pool = mobj.getMemoryPool();
+			deleteInstance(pool, &mobj);
#if ANKI_EXTRA_CHECKS
 			--m_createdAndNotRecycled;
 #endif
@@ -146,13 +146,13 @@ void MicroObjectRecycler<T>::trimCacheInternal(U32 aliveObjectCountAfterTrim)
 	if(aliveObjects.getSize() > 0)
 	{
 		// Some alive, store the alive
-		m_objects.destroy(m_alloc);
+		m_objects.destroy(*m_pool);
 		m_objects = std::move(aliveObjects);
 	}
 	else if(aliveObjects.getSize() == 0 && m_objects.getSize() > 0)
 	{
 		// All dead, destroy the array
-		m_objects.destroy(m_alloc);
+		m_objects.destroy(*m_pool);
 	}
 }
 

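Stripped of the fence tracking and cache trimming above, the recycler boils down to a mutex-guarded cache of idle objects. A reduced sketch of just the reuse idea (ownership and fences omitted; not the engine's class):

	#include <mutex>
	#include <vector>

	template<typename T>
	class TinyRecyclerSketch
	{
	public:
		T* findToReuse()
		{
			std::lock_guard<std::mutex> lock(m_mtx);
			if(m_objects.empty())
			{
				return nullptr; // Caller creates a fresh object on a miss
			}
			T* out = m_objects.back();
			m_objects.pop_back();
			return out;
		}

		void recycle(T* obj)
		{
			std::lock_guard<std::mutex> lock(m_mtx);
			m_objects.push_back(obj); // Parked for the next findToReuse()
		}

	private:
		std::vector<T*> m_objects;
		std::mutex m_mtx;
	};
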
+ 2 - 2
AnKi/Gr/Vulkan/OcclusionQuery.cpp

@@ -11,11 +11,11 @@ namespace anki {
 
 OcclusionQuery* OcclusionQuery::newInstance(GrManager* manager)
 {
-	OcclusionQueryImpl* impl = manager->getAllocator().newInstance<OcclusionQueryImpl>(manager, "N/A");
+	OcclusionQueryImpl* impl = anki::newInstance<OcclusionQueryImpl>(manager->getMemoryPool(), manager, "N/A");
 	const Error err = impl->init();
 	if(err)
 	{
-		manager->getAllocator().deleteInstance(impl);
+		deleteInstance(manager->getMemoryPool(), impl);
 		impl = nullptr;
 	}
 	return impl;

+ 2 - 2
AnKi/Gr/Vulkan/Pipeline.cpp

@@ -423,7 +423,7 @@ void PipelineFactory::destroy()
 		}
 	}
 
-	m_pplines.destroy(m_alloc);
+	m_pplines.destroy(*m_pool);
 }
 
 void PipelineFactory::getOrCreatePipeline(PipelineStateTracker& state, Pipeline& ppline, Bool& stateDirty)
@@ -489,7 +489,7 @@ void PipelineFactory::getOrCreatePipeline(PipelineStateTracker& state, Pipeline&
 
 	ANKI_TRACE_INC_COUNTER(VK_PIPELINES_CACHE_MISS, 1);
 
-	m_pplines.emplace(m_alloc, hash, pp);
+	m_pplines.emplace(*m_pool, hash, pp);
 	ppline.m_handle = pp.m_handle;
 
 	// Print shader info

+ 4 - 3
AnKi/Gr/Vulkan/Pipeline.h

@@ -549,14 +549,15 @@ public:
 	{
 	}
 
-	void init(GrAllocator<U8> alloc, VkDevice dev, VkPipelineCache pplineCache
+	void init(HeapMemoryPool* pool, VkDevice dev, VkPipelineCache pplineCache
 #if ANKI_PLATFORM_MOBILE
 			  ,
 			  Mutex* globalCreatePipelineMtx
 #endif
 	)
 	{
-		m_alloc = alloc;
+		ANKI_ASSERT(pool);
+		m_pool = pool;
 		m_dev = dev;
 		m_pplineCache = pplineCache;
 #if ANKI_PLATFORM_MOBILE
@@ -573,7 +574,7 @@ private:
 	class PipelineInternal;
 	class Hasher;
 
-	GrAllocator<U8> m_alloc;
+	HeapMemoryPool* m_pool = nullptr;
 	VkDevice m_dev = VK_NULL_HANDLE;
 	VkPipelineCache m_pplineCache = VK_NULL_HANDLE;
 

+ 8 - 8
AnKi/Gr/Vulkan/PipelineCache.cpp

@@ -11,14 +11,14 @@
 namespace anki {
 
 Error PipelineCache::init(VkDevice dev, VkPhysicalDevice pdev, CString cacheDir, const ConfigSet& cfg,
-						  GrAllocator<U8> alloc)
+						  HeapMemoryPool& pool)
 {
 	ANKI_ASSERT(cacheDir && dev && pdev);
 	m_dumpSize = cfg.getGrDiskShaderCacheMaxSize();
-	m_dumpFilename.sprintf(alloc, "%s/VkPipelineCache", &cacheDir[0]);
+	m_dumpFilename.sprintf(pool, "%s/VkPipelineCache", &cacheDir[0]);
 
 	// Try read the pipeline cache file.
-	DynamicArrayRaii<U8, PtrSize> diskDump(alloc);
+	DynamicArrayRaii<U8, PtrSize> diskDump(&pool);
 	if(fileExists(m_dumpFilename.toCString()))
 	{
 		File file;
@@ -69,18 +69,18 @@ Error PipelineCache::init(VkDevice dev, VkPhysicalDevice pdev, CString cacheDir,
 	return Error::kNone;
 }
 
-void PipelineCache::destroy(VkDevice dev, VkPhysicalDevice pdev, GrAllocator<U8> alloc)
+void PipelineCache::destroy(VkDevice dev, VkPhysicalDevice pdev, HeapMemoryPool& pool)
 {
-	const Error err = destroyInternal(dev, pdev, alloc);
+	const Error err = destroyInternal(dev, pdev, pool);
 	if(err)
 	{
 		ANKI_VK_LOGE("An error occurred while storing the pipeline cache to disk. Will ignore");
 	}
 
-	m_dumpFilename.destroy(alloc);
+	m_dumpFilename.destroy(pool);
 }
 
-Error PipelineCache::destroyInternal(VkDevice dev, VkPhysicalDevice pdev, GrAllocator<U8> alloc)
+Error PipelineCache::destroyInternal(VkDevice dev, VkPhysicalDevice pdev, HeapMemoryPool& pool)
 {
 	if(m_cacheHandle)
 	{
@@ -94,7 +94,7 @@ Error PipelineCache::destroyInternal(VkDevice dev, VkPhysicalDevice pdev, GrAllo
 		if(size > 0)
 		{
 			// Read cache
-			DynamicArrayRaii<U8, PtrSize> cacheData(alloc);
+			DynamicArrayRaii<U8, PtrSize> cacheData(&pool);
 			cacheData.create(size);
 			ANKI_VK_CHECK(vkGetPipelineCacheData(dev, m_cacheHandle, &size, &cacheData[0]));
 

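The destroy path above serializes the driver's pipeline cache before writing it to disk, via Vulkan's standard two-call pattern. In isolation, with std::vector standing in for the engine's DynamicArrayRaii and File wrappers:

	#include <vector>
	#include <vulkan/vulkan.h>

	std::vector<uint8_t> dumpPipelineCacheSketch(VkDevice dev, VkPipelineCache cache)
	{
		size_t size = 0;
		vkGetPipelineCacheData(dev, cache, &size, nullptr); // First call: query the blob size
		std::vector<uint8_t> data(size);
		if(size > 0)
		{
			vkGetPipelineCacheData(dev, cache, &size, data.data()); // Second call: fetch the blob
		}
		// Write this to disk; next run, feed it back via VkPipelineCacheCreateInfo::pInitialData.
		return data;
	}
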
+ 3 - 3
AnKi/Gr/Vulkan/PipelineCache.h

@@ -21,15 +21,15 @@ class PipelineCache
 public:
 	VkPipelineCache m_cacheHandle = VK_NULL_HANDLE;
 
-	Error init(VkDevice dev, VkPhysicalDevice pdev, CString cacheDir, const ConfigSet& cfg, GrAllocator<U8> alloc);
+	Error init(VkDevice dev, VkPhysicalDevice pdev, CString cacheDir, const ConfigSet& cfg, HeapMemoryPool& pool);
 
-	void destroy(VkDevice dev, VkPhysicalDevice pdev, GrAllocator<U8> alloc);
+	void destroy(VkDevice dev, VkPhysicalDevice pdev, HeapMemoryPool& pool);
 
 private:
 	String m_dumpFilename;
 	PtrSize m_dumpSize = 0;
 
-	Error destroyInternal(VkDevice dev, VkPhysicalDevice pdev, GrAllocator<U8> alloc);
+	Error destroyInternal(VkDevice dev, VkPhysicalDevice pdev, HeapMemoryPool& pool);
 };
 /// @}
 

+ 2 - 2
AnKi/Gr/Vulkan/PipelineLayout.cpp

@@ -13,7 +13,7 @@ void PipelineLayoutFactory::destroy()
 	{
 		auto it = m_layouts.getBegin();
 		VkPipelineLayout handle = *it;
-		m_layouts.erase(m_alloc, it);
+		m_layouts.erase(*m_pool, it);
 
 		vkDestroyPipelineLayout(m_dev, handle, nullptr);
 	}
@@ -66,7 +66,7 @@ Error PipelineLayoutFactory::newPipelineLayout(const WeakArray<DescriptorSetLayo
 		VkPipelineLayout pplineLayHandle;
 		ANKI_VK_CHECK(vkCreatePipelineLayout(m_dev, &ci, nullptr, &pplineLayHandle));
 
-		m_layouts.emplace(m_alloc, hash, pplineLayHandle);
+		m_layouts.emplace(*m_pool, hash, pplineLayHandle);
 
 		layout.m_handle = pplineLayHandle;
 	}

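The factory above hash-conses pipeline layouts: a hash of the descriptor set layouts keys the map, and vkCreatePipelineLayout only runs on a miss. The shape of that pattern, sketched with std::unordered_map instead of the engine's HashMap (hash computation and error handling omitted):

	#include <unordered_map>
	#include <vulkan/vulkan.h>

	class LayoutCacheSketch
	{
	public:
		VkPipelineLayout getOrCreate(VkDevice dev, uint64_t hash, const VkPipelineLayoutCreateInfo& ci)
		{
			auto it = m_layouts.find(hash);
			if(it != m_layouts.end())
			{
				return it->second; // Cache hit: layouts are immutable, reuse is safe
			}
			VkPipelineLayout handle = VK_NULL_HANDLE;
			vkCreatePipelineLayout(dev, &ci, nullptr, &handle); // Check the VkResult in real code
			m_layouts.emplace(hash, handle);
			return handle;
		}

	private:
		std::unordered_map<uint64_t, VkPipelineLayout> m_layouts; // Hash of the set layouts -> handle
	};
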
+ 4 - 3
AnKi/Gr/Vulkan/PipelineLayout.h

@@ -35,9 +35,10 @@ public:
 	PipelineLayoutFactory() = default;
 	~PipelineLayoutFactory() = default;
 
-	void init(GrAllocator<U8> alloc, VkDevice dev)
+	void init(HeapMemoryPool* pool, VkDevice dev)
 	{
-		m_alloc = alloc;
+		ANKI_ASSERT(pool);
+		m_pool = pool;
 		m_dev = dev;
 	}
 
@@ -48,7 +49,7 @@ public:
 							PipelineLayout& layout);
 
 private:
-	GrAllocator<U8> m_alloc;
+	HeapMemoryPool* m_pool = nullptr;
 	VkDevice m_dev = VK_NULL_HANDLE;
 
 	HashMap<U64, VkPipelineLayout> m_layouts;

+ 5 - 5
AnKi/Gr/Vulkan/QueryFactory.cpp

@@ -25,7 +25,7 @@ Error QueryFactory::newQuery(MicroQuery& handle)
 	Chunk* chunk = nullptr;
 	for(Chunk& c : m_chunks)
 	{
-		if(c.m_subAllocationCount < MAX_SUB_ALLOCATIONS_PER_QUERY_CHUNK)
+		if(c.m_subAllocationCount < kMaxSuballocationsPerQueryChunk)
 		{
 			// Found one
 
@@ -44,12 +44,12 @@ Error QueryFactory::newQuery(MicroQuery& handle)
 	if(chunk == nullptr)
 	{
 		// Create new chunk
-		chunk = m_alloc.newInstance<Chunk>();
+		chunk = newInstance<Chunk>(*m_pool);
 
 		VkQueryPoolCreateInfo ci = {};
 		ci.sType = VK_STRUCTURE_TYPE_QUERY_POOL_CREATE_INFO;
 		ci.queryType = m_poolType;
-		ci.queryCount = MAX_SUB_ALLOCATIONS_PER_QUERY_CHUNK;
+		ci.queryCount = kMaxSuballocationsPerQueryChunk;
 
 		ANKI_VK_CHECK(vkCreateQueryPool(m_dev, &ci, nullptr, &chunk->m_pool));
 		m_chunks.pushBack(chunk);
@@ -58,7 +58,7 @@ Error QueryFactory::newQuery(MicroQuery& handle)
 	ANKI_ASSERT(chunk);
 
 	// Allocate from chunk
-	for(U32 i = 0; i < MAX_SUB_ALLOCATIONS_PER_QUERY_CHUNK; ++i)
+	for(U32 i = 0; i < kMaxSuballocationsPerQueryChunk; ++i)
 	{
 		if(chunk->m_allocatedMask.get(i) == 0)
 		{
@@ -94,7 +94,7 @@ void QueryFactory::deleteQuery(MicroQuery& handle)
 		vkDestroyQueryPool(m_dev, chunk->m_pool, nullptr);
 
 		m_chunks.erase(chunk);
-		m_alloc.deleteInstance(chunk);
+		deleteInstance(*m_pool, chunk);
 	}
 	else
 	{

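kMaxSuballocationsPerQueryChunk being 64 is convenient: a chunk's occupancy fits in a single 64-bit mask, so allocating a query is a scan for a clear bit. A freestanding sketch of the same bookkeeping (plain uint64_t in place of the engine's BitSet):

	#include <cstdint>

	constexpr uint32_t kSlotsPerChunkSketch = 64;

	// Returns the first free slot in the chunk's occupancy mask, or -1 if the chunk is full.
	inline int allocateSlotSketch(uint64_t& allocatedMask)
	{
		for(uint32_t i = 0; i < kSlotsPerChunkSketch; ++i)
		{
			if((allocatedMask & (uint64_t(1) << i)) == 0)
			{
				allocatedMask |= uint64_t(1) << i; // Mark the slot as taken
				return int(i);
			}
		}
		return -1; // Chunk exhausted: the factory creates a new VkQueryPool
	}

	inline void freeSlotSketch(uint64_t& allocatedMask, uint32_t slot)
	{
		allocatedMask &= ~(uint64_t(1) << slot); // A fully cleared mask lets the pool be destroyed
	}
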
+ 6 - 5
AnKi/Gr/Vulkan/QueryFactory.h

@@ -17,7 +17,7 @@ class QueryFactoryChunk;
 /// @addtogroup vulkan
 /// @{
 
-const U MAX_SUB_ALLOCATIONS_PER_QUERY_CHUNK = 64;
+constexpr U kMaxSuballocationsPerQueryChunk = 64;
 
 /// The return handle of a query allocation.
 class MicroQuery
@@ -56,7 +56,7 @@ class QueryFactoryChunk : public IntrusiveListEnabled<QueryFactoryChunk>
 
 private:
 	VkQueryPool m_pool = VK_NULL_HANDLE;
-	BitSet<MAX_SUB_ALLOCATIONS_PER_QUERY_CHUNK> m_allocatedMask = {false};
+	BitSet<kMaxSuballocationsPerQueryChunk> m_allocatedMask = {false};
 	U32 m_subAllocationCount = 0;
 };
 
@@ -74,9 +74,10 @@ public:
 
 	QueryFactory& operator=(const QueryFactory&) = delete; // Non-copyable
 
-	void init(GrAllocator<U8> alloc, VkDevice dev, VkQueryType poolType)
+	void init(HeapMemoryPool* pool, VkDevice dev, VkQueryType poolType)
 	{
-		m_alloc = alloc;
+		ANKI_ASSERT(pool);
+		m_pool = pool;
 		m_dev = dev;
 		m_poolType = poolType;
 	}
@@ -90,7 +91,7 @@ public:
 private:
 	using Chunk = QueryFactoryChunk;
 
-	GrAllocator<U8> m_alloc;
+	HeapMemoryPool* m_pool = nullptr;
 	VkDevice m_dev;
 	IntrusiveList<Chunk> m_chunks;
 	Mutex m_mtx;

+ 2 - 2
AnKi/Gr/Vulkan/Sampler.cpp

@@ -11,11 +11,11 @@ namespace anki {
 
 Sampler* Sampler::newInstance(GrManager* manager, const SamplerInitInfo& init)
 {
-	SamplerImpl* impl = manager->getAllocator().newInstance<SamplerImpl>(manager, init.getName());
+	SamplerImpl* impl = anki::newInstance<SamplerImpl>(manager->getMemoryPool(), manager, init.getName());
 	const Error err = impl->init(init);
 	if(err)
 	{
-		manager->getAllocator().deleteInstance(impl);
+		deleteInstance(manager->getMemoryPool(), impl);
 		impl = nullptr;
 	}
 	return impl;

+ 7 - 7
AnKi/Gr/Vulkan/SamplerFactory.cpp

@@ -126,15 +126,15 @@ void SamplerFactory::destroy()
 		return;
 	}
 
-	GrAllocator<U8> alloc = m_gr->getAllocator();
+	HeapMemoryPool& pool = m_gr->getMemoryPool();
 	for(auto it : m_map)
 	{
 		MicroSampler* const sampler = it;
 		ANKI_ASSERT(sampler->getRefcount() == 0 && "Someone still holds a reference to a sampler");
-		alloc.deleteInstance(sampler);
+		deleteInstance(pool, sampler);
 	}
 
-	m_map.destroy(alloc);
+	m_map.destroy(pool);
 
 	m_gr = nullptr;
 }
@@ -159,19 +159,19 @@ Error SamplerFactory::newInstance(const SamplerInitInfo& inf, MicroSamplerPtr& p
 	{
 		// Create a new one
 
-		GrAllocator<U8> alloc = m_gr->getAllocator();
+		HeapMemoryPool& pool = m_gr->getMemoryPool();
 
-		out = alloc.newInstance<MicroSampler>(this);
+		out = anki::newInstance<MicroSampler>(pool, this);
 		err = out->init(inf);
 
 		if(err)
 		{
-			alloc.deleteInstance(out);
+			deleteInstance(pool, out);
 			out = nullptr;
 		}
 		else
 		{
-			m_map.emplace(alloc, hash, out);
+			m_map.emplace(pool, hash, out);
 		}
 	}
 

+ 1 - 2
AnKi/Gr/Vulkan/SamplerFactory.h

@@ -21,8 +21,7 @@ class MicroSampler
 {
 	friend class MicroSamplerPtrDeleter;
 	friend class SamplerFactory;
-	template<typename, typename>
-	friend class GenericPoolAllocator;
+	ANKI_FRIEND_CALL_CONSTRUCTOR
 
 public:
 	const VkSampler& getHandle() const

+ 7 - 8
AnKi/Gr/Vulkan/SemaphoreFactory.h

@@ -21,8 +21,7 @@ class MicroSemaphore
 {
 	friend class SemaphoreFactory;
 	friend class MicroSemaphorePtrDeleter;
-	template<typename, typename>
-	friend class GenericPoolAllocator;
+	ANKI_FRIEND_CALL_CONSTRUCTOR
 
 public:
 	MicroSemaphore(const MicroSemaphore&) = delete; // Non-copyable
@@ -35,7 +34,7 @@ public:
 		return m_handle;
 	}
 
-	GrAllocator<U8> getAllocator() const;
+	HeapMemoryPool& getMemoryPool();
 
 	void retain() const
 	{
@@ -124,13 +123,13 @@ class SemaphoreFactory
 	friend class MicroSemaphorePtrDeleter;
 
 public:
-	void init(GrAllocator<U8> alloc, VkDevice dev)
+	void init(HeapMemoryPool* pool, VkDevice dev)
 	{
 		ANKI_ASSERT(dev);
-		m_alloc = alloc;
+		m_pool = pool;
 		m_dev = dev;
-		m_binaryRecycler.init(alloc);
-		m_timelineRecycler.init(alloc);
+		m_binaryRecycler.init(m_pool);
+		m_timelineRecycler.init(m_pool);
 	}
 
 	void destroy()
@@ -142,7 +141,7 @@ public:
 	MicroSemaphorePtr newInstance(MicroFencePtr fence, Bool isTimeline);
 
 private:
-	GrAllocator<U8> m_alloc;
+	HeapMemoryPool* m_pool = nullptr;
 	VkDevice m_dev = VK_NULL_HANDLE;
 	MicroObjectRecycler<MicroSemaphore> m_binaryRecycler;
 	MicroObjectRecycler<MicroSemaphore> m_timelineRecycler;

+ 3 - 3
AnKi/Gr/Vulkan/SemaphoreFactory.inl.h

@@ -37,9 +37,9 @@ inline MicroSemaphore::~MicroSemaphore()
 	}
 }
 
-inline GrAllocator<U8> MicroSemaphore::getAllocator() const
+inline HeapMemoryPool& MicroSemaphore::getMemoryPool()
 {
-	return m_factory->m_alloc;
+	return *m_factory->m_pool;
 }
 
 inline Bool MicroSemaphore::clientWait(Second seconds)
@@ -86,7 +86,7 @@ inline MicroSemaphorePtr SemaphoreFactory::newInstance(MicroFencePtr fence, Bool
 	if(out == nullptr)
 	{
 		// Create a new one
-		out = m_alloc.newInstance<MicroSemaphore>(this, fence, isTimeline);
+		out = anki::newInstance<MicroSemaphore>(*m_pool, this, fence, isTimeline);
 	}
 	else
 	{

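The factory keeps two recyclers because binary and timeline semaphores are created differently and cannot be swapped for each other at submit time. For reference, a timeline semaphore is an ordinary VkSemaphore with one extra pNext struct (core since Vulkan 1.2); a standalone sketch:

	#include <vulkan/vulkan.h>

	VkSemaphore createTimelineSemaphoreSketch(VkDevice dev)
	{
		VkSemaphoreTypeCreateInfo typeCi = {};
		typeCi.sType = VK_STRUCTURE_TYPE_SEMAPHORE_TYPE_CREATE_INFO;
		typeCi.semaphoreType = VK_SEMAPHORE_TYPE_TIMELINE;
		typeCi.initialValue = 0; // The monotonically increasing counter starts here

		VkSemaphoreCreateInfo ci = {};
		ci.sType = VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO;
		ci.pNext = &typeCi; // Omit this chain and you get a binary semaphore instead

		VkSemaphore sem = VK_NULL_HANDLE;
		vkCreateSemaphore(dev, &ci, nullptr, &sem); // Check the VkResult in real code
		return sem;
	}
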
+ 2 - 2
AnKi/Gr/Vulkan/Shader.cpp

@@ -11,11 +11,11 @@ namespace anki {
 
 Shader* Shader::newInstance(GrManager* manager, const ShaderInitInfo& init)
 {
-	ShaderImpl* impl = manager->getAllocator().newInstance<ShaderImpl>(manager, init.getName());
+	ShaderImpl* impl = anki::newInstance<ShaderImpl>(manager->getMemoryPool(), manager, init.getName());
 	const Error err = impl->init(init);
 	if(err)
 	{
-		manager->getAllocator().deleteInstance(impl);
+		deleteInstance(manager->getMemoryPool(), impl);
 		impl = nullptr;
 	}
 	return impl;

+ 8 - 8
AnKi/Gr/Vulkan/ShaderImpl.cpp

@@ -27,7 +27,7 @@ ShaderImpl::~ShaderImpl()
 {
 	for(auto& x : m_bindings)
 	{
-		x.destroy(getAllocator());
+		x.destroy(getMemoryPool());
 	}
 
 	if(m_handle)
@@ -37,14 +37,14 @@ ShaderImpl::~ShaderImpl()
 
 	if(m_specConstInfo.pMapEntries)
 	{
-		getAllocator().deleteArray(const_cast<VkSpecializationMapEntry*>(m_specConstInfo.pMapEntries),
-								   m_specConstInfo.mapEntryCount);
+		deleteArray(getMemoryPool(), const_cast<VkSpecializationMapEntry*>(m_specConstInfo.pMapEntries),
+					m_specConstInfo.mapEntryCount);
 	}
 
 	if(m_specConstInfo.pData)
 	{
-		getAllocator().deleteArray(static_cast<I32*>(const_cast<void*>(m_specConstInfo.pData)),
-								   m_specConstInfo.dataSize / sizeof(I32));
+		deleteArray(getMemoryPool(), static_cast<I32*>(const_cast<void*>(m_specConstInfo.pData)),
+					m_specConstInfo.dataSize / sizeof(I32));
 	}
 }
 
@@ -82,9 +82,9 @@ Error ShaderImpl::init(const ShaderInitInfo& inf)
 		const U32 constCount = U32(specConstIds.m_vec.size());
 
 		m_specConstInfo.mapEntryCount = constCount;
-		m_specConstInfo.pMapEntries = getAllocator().newArray<VkSpecializationMapEntry>(constCount);
+		m_specConstInfo.pMapEntries = newArray<VkSpecializationMapEntry>(getMemoryPool(), constCount);
 		m_specConstInfo.dataSize = constCount * sizeof(U32);
-		m_specConstInfo.pData = getAllocator().newArray<U32>(constCount);
+		m_specConstInfo.pData = newArray<U32>(getMemoryPool(), constCount);
 
 		U32 count = 0;
 		for(const spirv_cross::SpecializationConstant& sconst : specConstIds.m_vec)
@@ -203,7 +203,7 @@ void ShaderImpl::doReflection(ConstWeakArray<U8> spirv, SpecConstsVector& specCo
 	{
 		if(counts[set])
 		{
-			m_bindings[set].create(getAllocator(), counts[set]);
+			m_bindings[set].create(getMemoryPool(), counts[set]);
 			memcpy(&m_bindings[set][0], &descriptors[set][0], counts[set] * sizeof(DescriptorBinding));
 		}
 	}

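The pMapEntries/pData arrays allocated and freed above populate VkSpecializationInfo, which is how Vulkan patches constants into SPIR-V at pipeline build time. Filled out statically for two 32-bit constants, the structure looks roughly like this (standalone illustration, not ShaderImpl's code):

	#include <cstdint>
	#include <vulkan/vulkan.h>

	// Two specialization constants with SPIR-V constant IDs 0 and 1.
	static const uint32_t kSpecDataSketch[2] = {16, 32};

	static const VkSpecializationMapEntry kSpecEntriesSketch[2] = {
		{0, 0 * sizeof(uint32_t), sizeof(uint32_t)}, // constantID, offset into pData, size
		{1, 1 * sizeof(uint32_t), sizeof(uint32_t)},
	};

	static const VkSpecializationInfo kSpecInfoSketch = {
		2, kSpecEntriesSketch, // mapEntryCount, pMapEntries
		sizeof(kSpecDataSketch), kSpecDataSketch, // dataSize, pData
	};
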
+ 2 - 2
AnKi/Gr/Vulkan/ShaderProgram.cpp

@@ -12,11 +12,11 @@ namespace anki {
 
 ShaderProgram* ShaderProgram::newInstance(GrManager* manager, const ShaderProgramInitInfo& init)
 {
-	ShaderProgramImpl* impl = manager->getAllocator().newInstance<ShaderProgramImpl>(manager, init.getName());
+	ShaderProgramImpl* impl = anki::newInstance<ShaderProgramImpl>(manager->getMemoryPool(), manager, init.getName());
 	const Error err = impl->init(init);
 	if(err)
 	{
-		manager->getAllocator().deleteInstance(impl);
+		deleteInstance(manager->getMemoryPool(), impl);
 		impl = nullptr;
 	}
 	return impl;

+ 18 - 18
AnKi/Gr/Vulkan/ShaderProgramImpl.cpp

@@ -16,7 +16,7 @@ ShaderProgramImpl::~ShaderProgramImpl()
 	if(m_graphics.m_pplineFactory)
 	{
 		m_graphics.m_pplineFactory->destroy();
-		getAllocator().deleteInstance(m_graphics.m_pplineFactory);
+		deleteInstance(getMemoryPool(), m_graphics.m_pplineFactory);
 	}
 
 	if(m_compute.m_ppline)
@@ -29,8 +29,8 @@ ShaderProgramImpl::~ShaderProgramImpl()
 		vkDestroyPipeline(getDevice(), m_rt.m_ppline, nullptr);
 	}
 
-	m_shaders.destroy(getAllocator());
-	m_rt.m_allHandles.destroy(getAllocator());
+	m_shaders.destroy(getMemoryPool());
+	m_rt.m_allHandles.destroy(getMemoryPool());
 }
 
 Error ShaderProgramImpl::init(const ShaderProgramInitInfo& inf)
@@ -39,10 +39,10 @@ Error ShaderProgramImpl::init(const ShaderProgramInitInfo& inf)
 
 	// Create the shader references
 	//
-	HashMapRaii<U64, U32> shaderUuidToMShadersIdx(getAllocator()); // Shader UUID to m_shaders idx
+	HashMapRaii<U64, U32> shaderUuidToMShadersIdx(&getMemoryPool()); // Shader UUID to m_shaders idx
 	if(inf.m_computeShader)
 	{
-		m_shaders.emplaceBack(getAllocator(), inf.m_computeShader);
+		m_shaders.emplaceBack(getMemoryPool(), inf.m_computeShader);
 	}
 	else if(inf.m_graphicsShaders[ShaderType::kVertex])
 	{
@@ -50,7 +50,7 @@ Error ShaderProgramImpl::init(const ShaderProgramInitInfo& inf)
 		{
 			if(s)
 			{
-				m_shaders.emplaceBack(getAllocator(), s);
+				m_shaders.emplaceBack(getMemoryPool(), s);
 			}
 		}
 	}
@@ -58,18 +58,18 @@ Error ShaderProgramImpl::init(const ShaderProgramInitInfo& inf)
 	{
 		// Ray tracing
 
-		m_shaders.resizeStorage(getAllocator(), inf.m_rayTracingShaders.m_rayGenShaders.getSize()
-													+ inf.m_rayTracingShaders.m_missShaders.getSize()
-													+ 1); // Plus at least one hit shader
+		m_shaders.resizeStorage(getMemoryPool(), inf.m_rayTracingShaders.m_rayGenShaders.getSize()
+													 + inf.m_rayTracingShaders.m_missShaders.getSize()
+													 + 1); // Plus at least one hit shader
 
 		for(const ShaderPtr& s : inf.m_rayTracingShaders.m_rayGenShaders)
 		{
-			m_shaders.emplaceBack(getAllocator(), s);
+			m_shaders.emplaceBack(getMemoryPool(), s);
 		}
 
 		for(const ShaderPtr& s : inf.m_rayTracingShaders.m_missShaders)
 		{
-			m_shaders.emplaceBack(getAllocator(), s);
+			m_shaders.emplaceBack(getMemoryPool(), s);
 		}
 
 		m_rt.m_missShaderCount = inf.m_rayTracingShaders.m_missShaders.getSize();
@@ -82,7 +82,7 @@ Error ShaderProgramImpl::init(const ShaderProgramInitInfo& inf)
 				if(it == shaderUuidToMShadersIdx.getEnd())
 				{
 					shaderUuidToMShadersIdx.emplace(group.m_anyHitShader->getUuid(), m_shaders.getSize());
-					m_shaders.emplaceBack(getAllocator(), group.m_anyHitShader);
+					m_shaders.emplaceBack(getMemoryPool(), group.m_anyHitShader);
 				}
 			}
 
@@ -92,7 +92,7 @@ Error ShaderProgramImpl::init(const ShaderProgramInitInfo& inf)
 				if(it == shaderUuidToMShadersIdx.getEnd())
 				{
 					shaderUuidToMShadersIdx.emplace(group.m_closestHitShader->getUuid(), m_shaders.getSize());
-					m_shaders.emplaceBack(getAllocator(), group.m_closestHitShader);
+					m_shaders.emplaceBack(getMemoryPool(), group.m_closestHitShader);
 				}
 			}
 		}
@@ -221,8 +221,8 @@ Error ShaderProgramImpl::init(const ShaderProgramInitInfo& inf)
 	//
 	if(graphicsProg)
 	{
-		m_graphics.m_pplineFactory = getAllocator().newInstance<PipelineFactory>();
-		m_graphics.m_pplineFactory->init(getGrManagerImpl().getAllocator(), getGrManagerImpl().getDevice(),
+		m_graphics.m_pplineFactory = anki::newInstance<PipelineFactory>(getMemoryPool());
+		m_graphics.m_pplineFactory->init(&getMemoryPool(), getGrManagerImpl().getDevice(),
 										 getGrManagerImpl().getPipelineCache()
 #if ANKI_PLATFORM_MOBILE
 											 ,
@@ -264,7 +264,7 @@ Error ShaderProgramImpl::init(const ShaderProgramInitInfo& inf)
 	if(!!(m_stages & ShaderTypeBit::kAllRayTracing))
 	{
 		// Create shaders
-		DynamicArrayRaii<VkPipelineShaderStageCreateInfo> stages(getAllocator(), m_shaders.getSize());
+		DynamicArrayRaii<VkPipelineShaderStageCreateInfo> stages(&getMemoryPool(), m_shaders.getSize());
 		for(U32 i = 0; i < stages.getSize(); ++i)
 		{
 			const ShaderImpl& impl = static_cast<const ShaderImpl&>(*m_shaders[i]);
@@ -289,7 +289,7 @@ Error ShaderProgramImpl::init(const ShaderProgramInitInfo& inf)
 		U32 groupCount = inf.m_rayTracingShaders.m_rayGenShaders.getSize()
 						 + inf.m_rayTracingShaders.m_missShaders.getSize()
 						 + inf.m_rayTracingShaders.m_hitGroups.getSize();
-		DynamicArrayRaii<VkRayTracingShaderGroupCreateInfoKHR> groups(getAllocator(), groupCount, defaultGroup);
+		DynamicArrayRaii<VkRayTracingShaderGroupCreateInfoKHR> groups(&getMemoryPool(), groupCount, defaultGroup);
 
 		// 1st group is the ray gen
 		groupCount = 0;
@@ -347,7 +347,7 @@ Error ShaderProgramImpl::init(const ShaderProgramInitInfo& inf)
 		// Get RT handles
 		const U32 handleArraySize =
 			getGrManagerImpl().getPhysicalDeviceRayTracingProperties().shaderGroupHandleSize * groupCount;
-		m_rt.m_allHandles.create(getAllocator(), handleArraySize, 0);
+		m_rt.m_allHandles.create(getMemoryPool(), handleArraySize, 0);
 		ANKI_VK_CHECK(vkGetRayTracingShaderGroupHandlesKHR(getDevice(), m_rt.m_ppline, 0, groupCount, handleArraySize,
 														   &m_rt.m_allHandles[0]));
 	}

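On the ray-tracing path above, the handle blob is groupCount tightly packed handles of shaderGroupHandleSize bytes each; it is the raw material for the shader binding table. The retrieval in isolation (std::vector in place of the engine's array):

	#include <vector>
	#include <vulkan/vulkan.h>

	std::vector<uint8_t> getGroupHandlesSketch(VkDevice dev, VkPipeline rtPipeline, uint32_t groupCount,
											   uint32_t shaderGroupHandleSize)
	{
		// shaderGroupHandleSize comes from VkPhysicalDeviceRayTracingPipelinePropertiesKHR.
		// Note: vkGetRayTracingShaderGroupHandlesKHR is an extension entry point; it must be
		// fetched via vkGetDeviceProcAddr unless a loader such as volk resolves it for you.
		std::vector<uint8_t> handles(size_t(shaderGroupHandleSize) * groupCount);
		vkGetRayTracingShaderGroupHandlesKHR(dev, rtPipeline, 0, groupCount, handles.size(), handles.data());
		return handles; // Copy slices into the SBT, respecting the group alignment rules
	}
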
+ 8 - 8
AnKi/Gr/Vulkan/SwapchainFactory.cpp

@@ -23,7 +23,7 @@ MicroSwapchain::~MicroSwapchain()
 {
 {
 	const VkDevice dev = m_factory->m_gr->getDevice();
 	const VkDevice dev = m_factory->m_gr->getDevice();
 
 
-	m_textures.destroy(getAllocator());
+	m_textures.destroy(getMemoryPool());
 
 
 	if(m_swapchain)
 	if(m_swapchain)
 	{
 	{
@@ -70,7 +70,7 @@ Error MicroSwapchain::initInternal()
 		ANKI_VK_CHECK(vkGetPhysicalDeviceSurfaceFormatsKHR(m_factory->m_gr->getPhysicalDevice(),
 														   m_factory->m_gr->getSurface(), &formatCount, nullptr));
 
-		DynamicArrayRaii<VkSurfaceFormatKHR> formats(getAllocator());
+		DynamicArrayRaii<VkSurfaceFormatKHR> formats(&getMemoryPool());
 		formats.create(formatCount);
 		ANKI_VK_CHECK(vkGetPhysicalDeviceSurfaceFormatsKHR(m_factory->m_gr->getPhysicalDevice(),
 														   m_factory->m_gr->getSurface(), &formatCount, &formats[0]));
@@ -222,7 +222,7 @@ Error MicroSwapchain::initInternal()
 			ANKI_VK_LOGI("Requested a swapchain with %u images but got one with %u", kMaxFramesInFlight, count);
 		}
 
-		m_textures.create(getAllocator(), count);
+		m_textures.create(getMemoryPool(), count);
 
 		ANKI_VK_LOGI("Created a swapchain. Image count: %u, present mode: %u, size: %ux%u, vsync: %u", count,
 					 presentMode, surfaceWidth, surfaceHeight, U32(m_factory->m_vsync));
@@ -242,7 +242,7 @@ Error MicroSwapchain::initInternal()
 			init.m_type = TextureType::k2D;
 
 			TextureImpl* tex =
-				m_factory->m_gr->getAllocator().newInstance<TextureImpl>(m_factory->m_gr, init.getName());
+				newInstance<TextureImpl>(m_factory->m_gr->getMemoryPool(), m_factory->m_gr, init.getName());
 			m_textures[i].reset(tex);
 			ANKI_CHECK(tex->initExternal(images[i], init));
 		}
@@ -251,9 +251,9 @@ Error MicroSwapchain::initInternal()
 	return Error::kNone;
 }
 
-GrAllocator<U8> MicroSwapchain::getAllocator() const
+HeapMemoryPool& MicroSwapchain::getMemoryPool()
 {
-	return m_factory->m_gr->getAllocator();
+	return m_factory->m_gr->getMemoryPool();
 }
 
 MicroSwapchainPtr SwapchainFactory::newInstance()
@@ -265,7 +265,7 @@ MicroSwapchainPtr SwapchainFactory::newInstance()
 	[[maybe_unused]] MicroSwapchain* dummy = m_recycler.findToReuse();
 	ANKI_ASSERT(dummy == nullptr);
 
-	return MicroSwapchainPtr(m_gr->getAllocator().newInstance<MicroSwapchain>(this));
+	return MicroSwapchainPtr(anki::newInstance<MicroSwapchain>(m_gr->getMemoryPool(), this));
 }
 
 void SwapchainFactory::init(GrManagerImpl* manager, Bool vsync)
@@ -273,7 +273,7 @@ void SwapchainFactory::init(GrManagerImpl* manager, Bool vsync)
 	ANKI_ASSERT(manager);
 	m_gr = manager;
 	m_vsync = vsync;
-	m_recycler.init(m_gr->getAllocator());
+	m_recycler.init(&m_gr->getMemoryPool());
 }
 
 } // end namespace anki

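This file also shows the commit's central substitution: member-style getAllocator().newInstance<T>(...) calls become the free function anki::newInstance<T>(pool, ...), with the pool as the first argument. A rough sketch of such helpers follows, assuming only a pool that exposes allocate/free; the real AnKi signatures and alignment handling may differ.

#include <cstddef>
#include <new>
#include <utility>

// Hypothetical pool; a real pool would honor the requested alignment.
struct Pool
{
	void* allocate(std::size_t size, std::size_t /*alignment*/) { return ::operator new(size); }
	void free(void* ptr) { ::operator delete(ptr); }
};

// Construct a T inside pool-owned memory, forwarding constructor arguments.
template<typename T, typename... TArgs>
T* newInstance(Pool& pool, TArgs&&... args)
{
	void* mem = pool.allocate(sizeof(T), alignof(T));
	return ::new(mem) T(std::forward<TArgs>(args)...);
}

// Destroy the object, then hand its memory back to the same pool.
template<typename T>
void deleteInstance(Pool& pool, T* ptr)
{
	if(ptr)
	{
		ptr->~T();
		pool.free(ptr);
	}
}

Making these free functions rather than allocator members means call sites only need a pool reference, which is what lets the GrAllocator wrapper disappear.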
+ 1 - 1
AnKi/Gr/Vulkan/SwapchainFactory.h

@@ -47,7 +47,7 @@ public:
 		return m_refcount.load();
 	}
 
-	GrAllocator<U8> getAllocator() const;
+	HeapMemoryPool& getMemoryPool();
 
 	void setFence(MicroFencePtr fence)
 	{

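The header change highlights a convention visible throughout the diff: calls that use the pool immediately take it by reference (m_textures.create(getMemoryPool(), count)), while objects that keep it for later take a pointer (DynamicArrayRaii(&getMemoryPool()), m_recycler.init(&m_gr->getMemoryPool())). A compact illustration with hypothetical types:

#include <cassert>

struct Pool
{
};

// Convention 1: pass by reference, use now, store nothing.
struct Transient
{
	void create(Pool& pool, unsigned count)
	{
		(void)pool; // Would allocate 'count' elements here and keep no pointer
		(void)count;
	}
};

// Convention 2: pass by pointer, because the callee stores it.
struct Storing
{
	void init(Pool* pool)
	{
		assert(pool); // Same guard ClassAllocatorBuilder::init adds later in this diff
		m_pool = pool;
	}

	Pool* m_pool = nullptr; // Non-owning; used by later calls
};

int main()
{
	Pool pool;
	Transient().create(pool, 4);
	Storing s;
	s.init(&pool);
	return 0;
}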
+ 2 - 2
AnKi/Gr/Vulkan/Texture.cpp

@@ -11,11 +11,11 @@ namespace anki {
 
 Texture* Texture::newInstance(GrManager* manager, const TextureInitInfo& init)
 {
-	TextureImpl* impl = manager->getAllocator().newInstance<TextureImpl>(manager, init.getName());
+	TextureImpl* impl = anki::newInstance<TextureImpl>(manager->getMemoryPool(), manager, init.getName());
 	const Error err = impl->init(init);
 	if(err)
 	{
-		manager->getAllocator().deleteInstance(impl);
+		deleteInstance(manager->getMemoryPool(), impl);
 		impl = nullptr;
 	}
 	return impl;

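Texture::newInstance is the canonical instance of the error-handling shape this backend pairs with pooled allocation: allocate from the pool, run a fallible init, and on failure free through the same pool and return null. TextureView.cpp and TimestampQuery.cpp below repeat it verbatim. Schematically, reusing the hypothetical Pool, newInstance, and deleteInstance from the sketch above:

// Two-phase creation: the constructor stays trivial; init() can fail without
// exceptions, and the failure path releases memory through the same pool.
template<typename TImpl, typename... TArgs>
TImpl* createChecked(Pool& pool, TArgs&&... args)
{
	TImpl* impl = newInstance<TImpl>(pool, std::forward<TArgs>(args)...);
	const bool failed = impl->init() != 0; // 0 plays the role of Error::kNone
	if(failed)
	{
		deleteInstance(pool, impl); // Free through the pool that allocated it
		impl = nullptr;
	}
	return impl;
}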
+ 7 - 7
AnKi/Gr/Vulkan/TextureImpl.cpp

@@ -71,30 +71,30 @@ TextureImpl::~TextureImpl()
 	}
 #endif
 
-	TextureGarbage* garbage = getAllocator().newInstance<TextureGarbage>();
+	TextureGarbage* garbage = anki::newInstance<TextureGarbage>(getMemoryPool());
 
 	for(MicroImageView& it : m_viewsMap)
 	{
-		garbage->m_viewHandles.emplaceBack(getAllocator(), it.m_handle);
+		garbage->m_viewHandles.emplaceBack(getMemoryPool(), it.m_handle);
 		it.m_handle = VK_NULL_HANDLE;
 
 		if(it.m_bindlessIndex != kMaxU32)
 		{
-			garbage->m_bindlessIndices.emplaceBack(getAllocator(), it.m_bindlessIndex);
+			garbage->m_bindlessIndices.emplaceBack(getMemoryPool(), it.m_bindlessIndex);
 			it.m_bindlessIndex = kMaxU32;
 		}
 	}
 
-	m_viewsMap.destroy(getAllocator());
+	m_viewsMap.destroy(getMemoryPool());
 
 	if(m_singleSurfaceImageView.m_handle != VK_NULL_HANDLE)
 	{
-		garbage->m_viewHandles.emplaceBack(getAllocator(), m_singleSurfaceImageView.m_handle);
+		garbage->m_viewHandles.emplaceBack(getMemoryPool(), m_singleSurfaceImageView.m_handle);
 		m_singleSurfaceImageView.m_handle = VK_NULL_HANDLE;
 
 		if(m_singleSurfaceImageView.m_bindlessIndex != kMaxU32)
 		{
-			garbage->m_bindlessIndices.emplaceBack(getAllocator(), m_singleSurfaceImageView.m_bindlessIndex);
+			garbage->m_bindlessIndices.emplaceBack(getMemoryPool(), m_singleSurfaceImageView.m_bindlessIndex);
 			m_singleSurfaceImageView.m_bindlessIndex = kMaxU32;
 		}
 	}
@@ -592,7 +592,7 @@ const MicroImageView& TextureImpl::getOrCreateView(const TextureSubresourceInfo&
 	ANKI_VK_CHECKF(vkCreateImageView(getDevice(), &viewCi, nullptr, &handle));
 	getGrManagerImpl().trySetVulkanHandleName(getName(), VK_OBJECT_TYPE_IMAGE_VIEW, ptrToNumber(handle));
 
-	it = m_viewsMap.emplace(getAllocator(), subresource);
+	it = m_viewsMap.emplace(getMemoryPool(), subresource);
 	it->m_handle = handle;
 	it->m_derivedTextureType = viewTexType;
 

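Note what the destructor above does not do: it never calls vkDestroyImageView directly. Handles and bindless indices are parked in a TextureGarbage and handed off, so they are destroyed only once the GPU has finished the frames that might still reference them. A stripped-down sketch of that deferral, with hypothetical types (AnKi's FrameGarbageCollector is more involved):

#include <cstdint>
#include <vector>

using ImageViewHandle = std::uint64_t; // Stand-in for VkImageView

// Everything a dead texture leaves behind; destroyed later, not now.
struct Garbage
{
	std::vector<ImageViewHandle> m_viewHandles;
	std::vector<std::uint32_t> m_bindlessIndices;
};

class GarbageCollector
{
public:
	// Called from destructors: ownership of the heap-allocated Garbage moves here.
	void add(Garbage* garbage)
	{
		m_pending.push_back(garbage);
	}

	// Called once the fence guarding the old frames has signaled.
	void collect()
	{
		for(Garbage* g : m_pending)
		{
			// The real code would destroy each view handle and return the
			// bindless indices to their free list before freeing the garbage.
			delete g;
		}
		m_pending.clear();
	}

private:
	std::vector<Garbage*> m_pending;
};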
+ 2 - 2
AnKi/Gr/Vulkan/TextureView.cpp

@@ -11,11 +11,11 @@ namespace anki {
 
 TextureView* TextureView::newInstance(GrManager* manager, const TextureViewInitInfo& init)
 {
-	TextureViewImpl* impl = manager->getAllocator().newInstance<TextureViewImpl>(manager, init.getName());
+	TextureViewImpl* impl = anki::newInstance<TextureViewImpl>(manager->getMemoryPool(), manager, init.getName());
 	const Error err = impl->init(init);
 	if(err)
 	{
-		manager->getAllocator().deleteInstance(impl);
+		deleteInstance(manager->getMemoryPool(), impl);
 		impl = nullptr;
 	}
 	return impl;

+ 2 - 2
AnKi/Gr/Vulkan/TimestampQuery.cpp

@@ -11,11 +11,11 @@ namespace anki {
 
 TimestampQuery* TimestampQuery::newInstance(GrManager* manager)
 {
-	TimestampQueryImpl* impl = manager->getAllocator().newInstance<TimestampQueryImpl>(manager, "N/A");
+	TimestampQueryImpl* impl = anki::newInstance<TimestampQueryImpl>(manager->getMemoryPool(), manager, "N/A");
 	const Error err = impl->init();
 	if(err)
 	{
-		manager->getAllocator().deleteInstance(impl);
+		deleteInstance(manager->getMemoryPool(), impl);
 		impl = nullptr;
 	}
 	return impl;

+ 2 - 2
AnKi/Util/ClassAllocatorBuilder.h

@@ -56,7 +56,7 @@ public:
 	ClassAllocatorBuilder& operator=(const ClassAllocatorBuilder&) = delete; // Non-copyable
 
 	/// Initialize it. Feel free to fiddle with the TInterface before you do that.
-	void init(GenericMemoryPoolAllocator<U8> alloc);
+	void init(BaseMemoryPool* pool);
 
 	/// Destroy the allocator builder.
 	void destroy();
@@ -110,7 +110,7 @@ private:
 		mutable TLock m_mtx;
 	};
 
-	GenericMemoryPoolAllocator<U8> m_alloc;
+	BaseMemoryPool* m_pool = nullptr;
 
 	/// The interface as described in the class docs.
 	TInterface m_interface;

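ClassAllocatorBuilder now stores a BaseMemoryPool* instead of owning a GenericMemoryPoolAllocator<U8> by value, so any pool implementation can back it and the builder no longer participates in the allocator's lifetime. One plausible shape for such a base is sketched below; this is an assumption, since the diff does not show BaseMemoryPool itself and AnKi may dispatch without virtual functions.

#include <cstddef>
#include <new>

// A common base lets one non-template member pointer accept any pool type.
class BaseMemoryPool
{
public:
	virtual ~BaseMemoryPool() = default;
	virtual void* allocate(std::size_t size) = 0;
	virtual void free(void* ptr) = 0;
};

// One concrete pool; HeapMemoryPool plays this role in the engine.
class HeapPool final : public BaseMemoryPool
{
public:
	void* allocate(std::size_t size) override { return ::operator new(size); }
	void free(void* ptr) override { ::operator delete(ptr); }
};

int main()
{
	HeapPool heap;
	BaseMemoryPool* pool = &heap; // What the builder's m_pool would point at
	void* p = pool->allocate(64);
	pool->free(p);
	return 0;
}

The ANKI_ASSERT(pool) added in the .inl.h below makes the borrowed-pointer contract explicit: the caller keeps ownership and must keep the pool alive until destroy().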
+ 5 - 4
AnKi/Util/ClassAllocatorBuilder.inl.h

@@ -8,11 +8,12 @@
 namespace anki {
 
 template<typename TChunk, typename TInterface, typename TLock>
-void ClassAllocatorBuilder<TChunk, TInterface, TLock>::init(GenericMemoryPoolAllocator<U8> alloc)
+void ClassAllocatorBuilder<TChunk, TInterface, TLock>::init(BaseMemoryPool* pool)
 {
-	m_alloc = std::move(alloc);
+	ANKI_ASSERT(pool);
+	m_pool = pool;
 
-	m_classes.create(m_alloc, m_interface.getClassCount());
+	m_classes.create(*m_pool, m_interface.getClassCount());
 
 	for(U32 classIdx = 0; classIdx < m_classes.getSize(); ++classIdx)
 	{
@@ -32,7 +33,7 @@ void ClassAllocatorBuilder<TChunk, TInterface, TLock>::destroy()
 		ANKI_ASSERT(c.m_chunkList.isEmpty() && "Forgot to deallocate");
 	}
 
-	m_classes.destroy(m_alloc);
+	m_classes.destroy(*m_pool);
 }
 
 template<typename TChunk, typename TInterface, typename TLock>