Explorar el Código

gobj: Don't pad ShaderBuffer to a 16-byte boundary — it breaks runtime-sized array lengths

rdb hace 1 año
padre
commit
fdfa3fad5a

+ 3 - 1
panda/src/glstuff/glGraphicsStateGuardian_src.cxx

@@ -7531,7 +7531,9 @@ prepare_shader_buffer(ShaderBuffer *data) {
     }
 
     // Some drivers require the buffer to be padded to 16 byte boundary.
-    uint64_t num_bytes = (data->get_data_size_bytes() + 15u) & ~15u;
+    //XXX rdb: actually, this breaks runtime-sized arrays.
+    //uint64_t num_bytes = (data->get_data_size_bytes() + 15u) & ~15u;
+    uint64_t num_bytes = data->get_data_size_bytes();
     if (_supports_buffer_storage) {
       _glBufferStorage(GL_SHADER_STORAGE_BUFFER, num_bytes, data->get_initial_data(), 0);
     } else {

+ 0 - 6
panda/src/gobj/shaderBuffer.I

@@ -32,12 +32,6 @@ ShaderBuffer(const std::string &name, vector_uchar initial_data, UsageHint usage
   _data_size_bytes(initial_data.size()),
   _usage_hint(usage_hint),
   _initial_data(std::move(initial_data)) {
-
-  // Make sure it is padded to 16 bytes.  Some drivers like that.
-  if ((_initial_data.size() & 15u) != 0) {
-    _initial_data.resize((_initial_data.size() + 15u) & ~15u, 0);
-    _data_size_bytes = _initial_data.size();
-  }
 }
 
 /**