Browse Source

Fix memory leak caused by awful realloc usage on Linux

gingerBill 2 years ago
parent
commit
a95b064d6d
3 changed files with 32 additions and 13 deletions
  1. src/common_memory.cpp (+26 −11)
  2. src/ptr_map.cpp (+3 −1)
  3. src/string_map.cpp (+3 −1)

+ 26 - 11
src/common_memory.cpp

@@ -1,3 +1,4 @@
+#include <malloc.h>
 
 gb_internal gb_inline void zero_size(void *ptr, isize len) {
 	memset(ptr, 0, len);
@@ -121,7 +122,6 @@ struct PlatformMemoryBlock {
 	PlatformMemoryBlock *prev, *next;
 };
 
-
 gb_global std::atomic<isize> global_platform_memory_total_usage;
 gb_global PlatformMemoryBlock global_platform_memory_block_sentinel;
 
@@ -177,12 +177,12 @@ gb_internal void platform_virtual_memory_protect(void *memory, isize size);
 			gb_printf_err("Total Usage: %lld bytes\n", cast(long long)global_platform_memory_total_usage);
 			GB_ASSERT_MSG(pmblock != nullptr, "Out of Virtual Memory, oh no...");
 		}
-		global_platform_memory_total_usage += total_size;
+		global_platform_memory_total_usage.fetch_add(total_size);
 		return pmblock;
 	}
 	gb_internal void platform_virtual_memory_free(PlatformMemoryBlock *block) {
 		isize size = block->total_size;
-		global_platform_memory_total_usage -= size;
+		global_platform_memory_total_usage.fetch_sub(size);
 		munmap(block, size);
 	}
 	gb_internal void platform_virtual_memory_protect(void *memory, isize size) {
@@ -396,6 +396,8 @@ gb_internal gbAllocator heap_allocator(void) {
 	return a;
 }
 
+gb_internal std::atomic<isize> total_heap_memory_allocated;
+
 
 gb_internal GB_ALLOCATOR_PROC(heap_allocator_proc) {
 	void *ptr = nullptr;
@@ -403,7 +405,6 @@ gb_internal GB_ALLOCATOR_PROC(heap_allocator_proc) {
 	gb_unused(old_size);
 
 
-
 // TODO(bill): Throughly test!
 	switch (type) {
 #if defined(GB_COMPILER_MSVC)
@@ -436,28 +437,34 @@ gb_internal GB_ALLOCATOR_PROC(heap_allocator_proc) {
 #elif defined(GB_SYSTEM_LINUX)
 	// TODO(bill): *nix version that's decent
 	case gbAllocation_Alloc: {
-		ptr = aligned_alloc(alignment, (size + alignment - 1) & ~(alignment - 1));
+		isize total_size = (size + alignment - 1) & ~(alignment - 1);
+		total_heap_memory_allocated.fetch_add(total_size);
+		ptr = aligned_alloc(alignment, total_size);
 		gb_zero_size(ptr, size);
 	} break;
 
 	case gbAllocation_Free:
 		if (old_memory != nullptr) {
+			total_heap_memory_allocated.fetch_sub(malloc_usable_size(old_memory));
 			free(old_memory);
 		}
 		break;
 
-	case gbAllocation_Resize:
+	case gbAllocation_Resize: {
 		if (size == 0) {
 			if (old_memory != nullptr) {
+				total_heap_memory_allocated.fetch_sub(malloc_usable_size(old_memory));
 				free(old_memory);
 			}
 			break;
 		}
-		
+
 		alignment = gb_max(alignment, gb_align_of(max_align_t));
-		
+
 		if (old_memory == nullptr) {
-			ptr = aligned_alloc(alignment, (size + alignment - 1) & ~(alignment - 1));
+			isize total_size = (size + alignment - 1) & ~(alignment - 1);
+			total_heap_memory_allocated.fetch_add(total_size);
+			ptr = aligned_alloc(alignment, total_size);
 			gb_zero_size(ptr, size);
 			break;
 		}
@@ -466,11 +473,19 @@ gb_internal GB_ALLOCATOR_PROC(heap_allocator_proc) {
 			break;
 		}
 
-		ptr = aligned_alloc(alignment, (size + alignment - 1) & ~(alignment - 1));
+		size_t actual_old_size = malloc_usable_size(old_memory);
+		if (size <= actual_old_size) {
+			ptr = old_memory;
+			break;
+		}
+
+		isize total_size = (size + alignment - 1) & ~(alignment - 1);
+		total_heap_memory_allocated.fetch_add(total_size);
+		ptr = aligned_alloc(alignment, total_size);
 		gb_memmove(ptr, old_memory, old_size);
 		free(old_memory);
 		gb_zero_size(cast(u8 *)ptr + old_size, gb_max(size-old_size, 0));
-		break;
+	} break;
 #else
 	// TODO(bill): *nix version that's decent
 	case gbAllocation_Alloc: {

+ 3 - 1
src/ptr_map.cpp

@@ -114,7 +114,9 @@ gb_internal MapIndex map__add_entry(PtrMap<K, V> *h, K key) {
 	PtrMapEntry<K, V> e = {};
 	e.key = key;
 	e.next = MAP_SENTINEL;
-	map__reserve_entries(h, h->count+1);
+	if (h->count+1 >= h->entries_capacity) {
+		map__reserve_entries(h, gb_max(h->entries_capacity*2, 4));
+	}
 	h->entries[h->count++] = e;
 	return cast(MapIndex)(h->count-1);
 }

+ 3 - 1
src/string_map.cpp

@@ -96,7 +96,9 @@ gb_internal MapIndex string_map__add_entry(StringMap<T> *h, u32 hash, String con
 	e.key = key;
 	e.hash = hash;
 	e.next = MAP_SENTINEL;
-	string_map__reserve_entries(h, h->count+1);
+	if (h->count+1 >= h->entries_capacity) {
+		string_map__reserve_entries(h, gb_max(h->entries_capacity*2, 4));
+	}
 	h->entries[h->count++] = e;
 	return cast(MapIndex)(h->count-1);
 }