BufferImpl.h

// Copyright (C) 2009-2021, Panagiotis Christopoulos Charitos and contributors.
// All rights reserved.
// Code licensed under the BSD License.
// http://www.anki3d.org/LICENSE

#pragma once

#include <AnKi/Gr/Buffer.h>
#include <AnKi/Gr/Vulkan/VulkanObject.h>
#include <AnKi/Gr/Vulkan/GpuMemoryManager.h>
namespace anki {

/// @addtogroup vulkan
/// @{

/// Buffer implementation
class BufferImpl final : public Buffer, public VulkanObject<Buffer, BufferImpl>
{
public:
    BufferImpl(GrManager* manager, CString name)
        : Buffer(manager, name)
        , m_needsFlush(false)
        , m_needsInvalidate(false)
    {
    }

    ~BufferImpl();

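    /// Create the VkBuffer and allocate its backing memory; must succeed before the buffer is used.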
    ANKI_USE_RESULT Error init(const BufferInitInfo& inf);

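    /// Map a range of the buffer into host address space. A typical host-write sequence looks roughly like this
    /// (sketch only; assumes a buffer created with BufferMapAccessBit::WRITE and some source data/size):
    /// @code
    /// void* ptr = buffer.map(0, size, BufferMapAccessBit::WRITE);
    /// memcpy(ptr, data, size);
    /// buffer.flush(0, size); // No-op unless the memory actually needs flushing
    /// buffer.unmap();
    /// @endcode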
    ANKI_USE_RESULT void* map(PtrSize offset, PtrSize range, BufferMapAccessBit access);

    void unmap()
    {
        ANKI_ASSERT(isCreated());
        ANKI_ASSERT(m_mapped);
#if ANKI_EXTRA_CHECKS
        m_mapped = false;
#endif
    }

    VkBuffer getHandle() const
    {
        ANKI_ASSERT(isCreated());
        return m_handle;
    }

    Bool usageValid(BufferUsageBit usage) const
    {
        return (m_usage & usage) == usage;
    }

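    /// Size of the underlying allocation; may be larger than the size requested at creation (e.g. due to alignment
    /// padding).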
    PtrSize getActualSize() const
    {
        ANKI_ASSERT(m_actualSize > 0);
        return m_actualSize;
    }

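    /// Compute the source/destination pipeline stages and access masks for a buffer barrier that transitions the
    /// buffer's usage from @p before to @p after.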
    void computeBarrierInfo(BufferUsageBit before, BufferUsageBit after, VkPipelineStageFlags& srcStages,
                            VkAccessFlags& srcAccesses, VkPipelineStageFlags& dstStages,
                            VkAccessFlags& dstAccesses) const;

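    /// Flush a mapped range so that CPU writes become visible to the GPU. Does nothing when m_needsFlush is false
    /// (presumably the case for host-coherent memory).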
    ANKI_FORCE_INLINE void flush(PtrSize offset, PtrSize range) const
    {
        ANKI_ASSERT(!!(m_access & BufferMapAccessBit::WRITE) && "No need to flush when the CPU doesn't write");
        if(m_needsFlush)
        {
            VkMappedMemoryRange vkrange = setVkMappedMemoryRange(offset, range);
            ANKI_VK_CHECKF(vkFlushMappedMemoryRanges(getDevice(), 1, &vkrange));
#if ANKI_EXTRA_CHECKS
            m_flushCount.fetchAdd(1);
#endif
        }
    }

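    /// Invalidate a mapped range so that GPU writes become visible to the CPU. Does nothing when m_needsInvalidate
    /// is false (presumably the case for host-coherent memory).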
    ANKI_FORCE_INLINE void invalidate(PtrSize offset, PtrSize range) const
    {
        ANKI_ASSERT(!!(m_access & BufferMapAccessBit::READ) && "No need to invalidate when the CPU doesn't read");
        if(m_needsInvalidate)
        {
            VkMappedMemoryRange vkrange = setVkMappedMemoryRange(offset, range);
            ANKI_VK_CHECKF(vkInvalidateMappedMemoryRanges(getDevice(), 1, &vkrange));
#if ANKI_EXTRA_CHECKS
            m_invalidateCount.fetchAdd(1);
#endif
        }
    }

private:
    VkBuffer m_handle = VK_NULL_HANDLE;
    GpuMemoryHandle m_memHandle;
    VkMemoryPropertyFlags m_memoryFlags = 0;
    PtrSize m_actualSize = 0;
    PtrSize m_mappedMemoryRangeAlignment = 0; ///< Cache this value.
    Bool m_needsFlush : 1;
    Bool m_needsInvalidate : 1;
#if ANKI_EXTRA_CHECKS
    Bool m_mapped = false;
    mutable Atomic<U32> m_flushCount = {0};
    mutable Atomic<U32> m_invalidateCount = {0};
#endif

    Bool isCreated() const
    {
        return m_handle != VK_NULL_HANDLE;
    }

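    /// Translate a BufferUsageBit mask to the corresponding Vulkan pipeline stages and access flags.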
    static VkPipelineStageFlags computePplineStage(BufferUsageBit usage);
    static VkAccessFlags computeAccessMask(BufferUsageBit usage);

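    /// Build a VkMappedMemoryRange for vkFlush/vkInvalidateMappedMemoryRanges: translate the buffer-relative offset
    /// to an offset inside the backing VkDeviceMemory and align offset/size to m_mappedMemoryRangeAlignment
    /// (presumably VkPhysicalDeviceLimits::nonCoherentAtomSize).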
    ANKI_FORCE_INLINE VkMappedMemoryRange setVkMappedMemoryRange(PtrSize offset, PtrSize range) const
    {
        // First the offset
        ANKI_ASSERT(offset < m_size);
        offset += m_memHandle.m_offset; // Move from buffer offset to memory offset
        alignRoundDown(m_mappedMemoryRangeAlignment, offset);

        // And the range
        range = (range == MAX_PTR_SIZE) ? m_actualSize : range;
        alignRoundUp(m_mappedMemoryRangeAlignment, range);
        ANKI_ASSERT(offset + range <= m_memHandle.m_offset + m_actualSize);

        VkMappedMemoryRange vkrange = {};
        vkrange.sType = VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE;
        vkrange.memory = m_memHandle.m_memory;
        vkrange.offset = offset;
        vkrange.size = range;
        return vkrange;
    }
};
/// @}

} // end namespace anki