//+build !freestanding
package mem

import "base:runtime"
import "core:sync"
// An entry recording a single allocation made through the tracking allocator.
Tracking_Allocator_Entry :: struct {
	memory:    rawptr,
	size:      int,
	alignment: int,
	mode:      Allocator_Mode,
	err:       Allocator_Error,
	location:  runtime.Source_Code_Location,
}
// An entry recording an attempt to free a pointer that was not allocated
// through the tracking allocator (or was already freed).
Tracking_Allocator_Bad_Free_Entry :: struct {
	memory:   rawptr,
	location: runtime.Source_Code_Location,
}
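
// Tracking_Allocator is a wrapping allocator that records every allocation
// and free routed through it, so leaks and bad frees can be reported.
//
// A minimal usage sketch (an illustration, not part of this file; `do_stuff`
// is a placeholder for the program's own code):
//
//	package example
//
//	import "core:fmt"
//	import "core:mem"
//
//	main :: proc() {
//		track: mem.Tracking_Allocator
//		mem.tracking_allocator_init(&track, context.allocator)
//		defer mem.tracking_allocator_destroy(&track)
//		context.allocator = mem.tracking_allocator(&track)
//
//		do_stuff()
//
//		for _, entry in track.allocation_map {
//			fmt.printf("%v leaked %v bytes\n", entry.location, entry.size)
//		}
//		for entry in track.bad_free_array {
//			fmt.printf("%v freed %p badly\n", entry.location, entry.memory)
//		}
//	}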
Tracking_Allocator :: struct {
	backing:           Allocator,
	allocation_map:    map[rawptr]Tracking_Allocator_Entry,
	bad_free_array:    [dynamic]Tracking_Allocator_Bad_Free_Entry,
	mutex:             sync.Mutex,
	clear_on_free_all: bool,

	total_memory_allocated:   i64,
	total_allocation_count:   i64,
	total_memory_freed:       i64,
	total_free_count:         i64,
	peak_memory_allocated:    i64,
	current_memory_allocated: i64,
}
// tracking_allocator_init initializes a Tracking_Allocator to wrap `backing_allocator`.
// The internal allocation map and bad free array use `internals_allocator`.
tracking_allocator_init :: proc(t: ^Tracking_Allocator, backing_allocator: Allocator, internals_allocator := context.allocator) {
	t.backing = backing_allocator
	t.allocation_map.allocator = internals_allocator
	t.bad_free_array.allocator = internals_allocator

	if .Free_All in query_features(t.backing) {
		t.clear_on_free_all = true
	}
}
// tracking_allocator_destroy frees the internal allocation map and bad free array.
tracking_allocator_destroy :: proc(t: ^Tracking_Allocator) {
	delete(t.allocation_map)
	delete(t.bad_free_array)
}
// tracking_allocator_clear clears the allocation map and bad free array and
// resets the current memory counter, without touching the backing allocator.
tracking_allocator_clear :: proc(t: ^Tracking_Allocator) {
	sync.mutex_lock(&t.mutex)
	clear(&t.allocation_map)
	clear(&t.bad_free_array)
	t.current_memory_allocated = 0
	sync.mutex_unlock(&t.mutex)
}
// tracking_allocator returns an Allocator that routes all requests through `data`.
@(require_results)
tracking_allocator :: proc(data: ^Tracking_Allocator) -> Allocator {
	return Allocator{
		data = data,
		procedure = tracking_allocator_proc,
	}
}
tracking_allocator_proc :: proc(allocator_data: rawptr, mode: Allocator_Mode,
                                size, alignment: int,
                                old_memory: rawptr, old_size: int, loc := #caller_location) -> (result: []byte, err: Allocator_Error) {
	track_alloc :: proc(data: ^Tracking_Allocator, entry: ^Tracking_Allocator_Entry) {
		data.total_memory_allocated += i64(entry.size)
		data.total_allocation_count += 1
		data.current_memory_allocated += i64(entry.size)
		if data.current_memory_allocated > data.peak_memory_allocated {
			data.peak_memory_allocated = data.current_memory_allocated
		}
	}

	track_free :: proc(data: ^Tracking_Allocator, entry: ^Tracking_Allocator_Entry) {
		data.total_memory_freed += i64(entry.size)
		data.total_free_count += 1
		data.current_memory_allocated -= i64(entry.size)
	}

	data := (^Tracking_Allocator)(allocator_data)

	sync.mutex_guard(&data.mutex)

	if mode == .Query_Info {
		info := (^Allocator_Query_Info)(old_memory)
		if info != nil && info.pointer != nil {
			if entry, ok := data.allocation_map[info.pointer]; ok {
				info.size = entry.size
				info.alignment = entry.alignment
			}
			info.pointer = nil
		}

		return
	}

	if mode == .Free && old_memory != nil && old_memory not_in data.allocation_map {
		// Freeing a pointer this allocator never handed out: record it as a
		// bad free instead of forwarding it to the backing allocator.
		append(&data.bad_free_array, Tracking_Allocator_Bad_Free_Entry{
			memory = old_memory,
			location = loc,
		})
	} else {
		result = data.backing.procedure(data.backing.data, mode, size, alignment, old_memory, old_size, loc) or_return
	}
	result_ptr := raw_data(result)

	// If the allocator was not set up through tracking_allocator_init,
	// fall back to context.allocator for the internal map.
	if data.allocation_map.allocator.procedure == nil {
		data.allocation_map.allocator = context.allocator
	}

	switch mode {
	case .Alloc, .Alloc_Non_Zeroed:
		data.allocation_map[result_ptr] = Tracking_Allocator_Entry{
			memory = result_ptr,
			size = size,
			mode = mode,
			alignment = alignment,
			err = err,
			location = loc,
		}
		track_alloc(data, &data.allocation_map[result_ptr])
	case .Free:
		if old_memory != nil && old_memory in data.allocation_map {
			track_free(data, &data.allocation_map[old_memory])
		}
		delete_key(&data.allocation_map, old_memory)
	case .Free_All:
		if data.clear_on_free_all {
			clear_map(&data.allocation_map)
			data.current_memory_allocated = 0
		}
	case .Resize, .Resize_Non_Zeroed:
		if old_memory != nil && old_memory in data.allocation_map {
			track_free(data, &data.allocation_map[old_memory])
		}
		if old_memory != result_ptr {
			delete_key(&data.allocation_map, old_memory)
		}
		data.allocation_map[result_ptr] = Tracking_Allocator_Entry{
			memory = result_ptr,
			size = size,
			mode = mode,
			alignment = alignment,
			err = err,
			location = loc,
		}
		track_alloc(data, &data.allocation_map[result_ptr])

	case .Query_Features:
		set := (^Allocator_Mode_Set)(old_memory)
		if set != nil {
			set^ = {.Alloc, .Alloc_Non_Zeroed, .Free, .Free_All, .Resize, .Query_Features, .Query_Info}
		}
		return nil, nil

	case .Query_Info:
		// Handled before the switch.
		unreachable()
	}

	return
}