Skip to content

Commit

Permalink
track peak allocation
Browse files Browse the repository at this point in the history
  • Loading branch information
PSeitz committed Apr 4, 2023
1 parent be31f74 commit d925d3c
Show file tree
Hide file tree
Showing 2 changed files with 45 additions and 24 deletions.
4 changes: 1 addition & 3 deletions rustfmt.toml
Original file line number Diff line number Diff line change
Expand Up @@ -4,11 +4,9 @@ where_single_line = true
format_strings = true
imports_indent = "Block"
imports_layout = "HorizontalVertical"
imports_granularity = "Crate"
match_block_trailing_comma = true
reorder_impl_items = true
report_todo = "Always"
report_fixme = "Always"
use_field_init_shorthand = true
use_try_shorthand = true
unstable_features = true
Expand Down
65 changes: 44 additions & 21 deletions src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -53,6 +53,8 @@ pub struct StatsAlloc<T: GlobalAlloc> {
bytes_allocated: AtomicUsize,
bytes_deallocated: AtomicUsize,
bytes_reallocated: AtomicIsize,
peak_bytes_allocated_tracker: AtomicIsize,
peak_bytes_allocated: AtomicUsize,
inner: T,
}

Expand Down Expand Up @@ -87,6 +89,10 @@ pub struct Stats {
/// positive value indicates that resizable structures are growing, while
/// a negative value indicates that such structures are shrinking.
pub bytes_reallocated: isize,
/// Internally tracks the running difference between bytes allocated and
/// bytes deallocated (net live bytes) and records the highest value seen.
/// Call [Region::reset_peak_memory] to reset the peak counter.
pub peak_bytes_allocated: usize,
}

/// An instrumented instance of the system allocator.
Expand All @@ -97,6 +103,8 @@ pub static INSTRUMENTED_SYSTEM: StatsAlloc<System> = StatsAlloc {
bytes_allocated: AtomicUsize::new(0),
bytes_deallocated: AtomicUsize::new(0),
bytes_reallocated: AtomicIsize::new(0),
peak_bytes_allocated_tracker: AtomicIsize::new(0),
peak_bytes_allocated: AtomicUsize::new(0),
inner: System,
};

Expand All @@ -110,6 +118,8 @@ impl StatsAlloc<System> {
bytes_allocated: AtomicUsize::new(0),
bytes_deallocated: AtomicUsize::new(0),
bytes_reallocated: AtomicIsize::new(0),
peak_bytes_allocated_tracker: AtomicIsize::new(0),
peak_bytes_allocated: AtomicUsize::new(0),
inner: System,
}
}
Expand All @@ -118,7 +128,6 @@ impl StatsAlloc<System> {
impl<T: GlobalAlloc> StatsAlloc<T> {
/// Provides access to an instrumented instance of the given global
/// allocator.
#[cfg(feature = "nightly")]
pub const fn new(inner: T) -> Self {
StatsAlloc {
allocations: AtomicUsize::new(0),
Expand All @@ -127,21 +136,8 @@ impl<T: GlobalAlloc> StatsAlloc<T> {
bytes_allocated: AtomicUsize::new(0),
bytes_deallocated: AtomicUsize::new(0),
bytes_reallocated: AtomicIsize::new(0),
inner,
}
}

/// Provides access to an instrumented instance of the given global
/// allocator.
#[cfg(not(feature = "nightly"))]
pub fn new(inner: T) -> Self {
StatsAlloc {
allocations: AtomicUsize::new(0),
deallocations: AtomicUsize::new(0),
reallocations: AtomicUsize::new(0),
bytes_allocated: AtomicUsize::new(0),
bytes_deallocated: AtomicUsize::new(0),
bytes_reallocated: AtomicIsize::new(0),
peak_bytes_allocated_tracker: AtomicIsize::new(0),
peak_bytes_allocated: AtomicUsize::new(0),
inner,
}
}
Expand All @@ -155,8 +151,29 @@ impl<T: GlobalAlloc> StatsAlloc<T> {
bytes_allocated: self.bytes_allocated.load(Ordering::SeqCst),
bytes_deallocated: self.bytes_deallocated.load(Ordering::SeqCst),
bytes_reallocated: self.bytes_reallocated.load(Ordering::SeqCst),
peak_bytes_allocated: self.peak_bytes_allocated.load(Ordering::SeqCst),
}
}

/// Resets both the stored peak and the live-bytes tracker to zero.
///
/// NOTE(review): zeroing `peak_bytes_allocated_tracker` means any peak
/// measured afterwards is growth *relative to the moment of reset*, not the
/// absolute number of live bytes — allocations made before the reset and
/// freed afterwards drive the tracker negative (clamped in `track_alloc`).
/// Confirm this relative semantics is intended.
fn reset_peak_memory(&self) {
    self.peak_bytes_allocated.store(0, Ordering::SeqCst);
    self.peak_bytes_allocated_tracker.store(0, Ordering::SeqCst);
}

/// Records an allocation of `bytes` bytes.
///
/// Adds to the cumulative `bytes_allocated` counter, bumps the live-bytes
/// tracker, and folds the resulting level into `peak_bytes_allocated` with
/// `fetch_max`, keeping the peak lock-free.
fn track_alloc(&self, bytes: usize) {
    self.bytes_allocated.fetch_add(bytes, Ordering::SeqCst);
    // `fetch_add` returns the value *before* the addition, so
    // `prev + bytes` is the live-byte level just after this allocation.
    let prev = self
        .peak_bytes_allocated_tracker
        .fetch_add(bytes as isize, Ordering::SeqCst);
    // Clamp at zero: the tracker can be negative after `reset_peak_memory`
    // when memory allocated before the reset is freed afterwards.
    let current_peak = (prev + bytes as isize).max(0) as usize;
    self.peak_bytes_allocated.fetch_max(current_peak, Ordering::SeqCst);
}

/// Records a deallocation of `bytes` bytes.
///
/// Adds to the cumulative `bytes_deallocated` counter and lowers the
/// live-bytes tracker; the stored peak itself is left untouched.
fn track_dealloc(&self, bytes: usize) {
    self.bytes_deallocated.fetch_add(bytes, Ordering::SeqCst);
    self.peak_bytes_allocated_tracker
        .fetch_sub(bytes as isize, Ordering::SeqCst);
}
}

impl ops::Sub for Stats {
Expand Down Expand Up @@ -228,6 +245,12 @@ impl<'a, T: GlobalAlloc + 'a> Region<'a, T> {
/// Resets this region's measurement baseline by snapshotting the
/// allocator's current statistics into `initial_stats`.
pub fn reset(&mut self) {
    self.initial_stats = self.alloc.stats();
}

/// Resets the peak memory tracker to zero
///
/// Delegates to the instrumented allocator's `reset_peak_memory`; note the
/// peak state lives on the allocator, so this affects every `Region`
/// created from the same allocator, not just this one.
#[inline]
pub fn reset_peak_memory(&mut self) {
    self.alloc.reset_peak_memory();
}
}

unsafe impl<'a, T: GlobalAlloc + 'a> GlobalAlloc for &'a StatsAlloc<T> {
Expand All @@ -251,30 +274,30 @@ unsafe impl<'a, T: GlobalAlloc + 'a> GlobalAlloc for &'a StatsAlloc<T> {
unsafe impl<T: GlobalAlloc> GlobalAlloc for StatsAlloc<T> {
/// Counts the allocation and its size, then forwards to the wrapped
/// allocator.
unsafe fn alloc(&self, layout: Layout) -> *mut u8 {
    self.allocations.fetch_add(1, Ordering::SeqCst);
    // `track_alloc` updates `bytes_allocated` and the peak tracker itself;
    // a separate `bytes_allocated.fetch_add` here (as in the pre-refactor
    // code this text retained) would double-count every allocation.
    self.track_alloc(layout.size());
    self.inner.alloc(layout)
}

/// Counts the deallocation and its size, then forwards to the wrapped
/// allocator.
unsafe fn dealloc(&self, ptr: *mut u8, layout: Layout) {
    self.deallocations.fetch_add(1, Ordering::SeqCst);
    // `track_dealloc` updates `bytes_deallocated` itself; adding to it
    // directly here as well would double-count every deallocation.
    self.track_dealloc(layout.size());
    self.inner.dealloc(ptr, layout)
}

/// Counts the zeroed allocation and its size, then forwards to the wrapped
/// allocator.
unsafe fn alloc_zeroed(&self, layout: Layout) -> *mut u8 {
    self.allocations.fetch_add(1, Ordering::SeqCst);
    // `track_alloc` updates `bytes_allocated` and the peak tracker itself;
    // a separate `bytes_allocated.fetch_add` here would double-count.
    self.track_alloc(layout.size());
    self.inner.alloc_zeroed(layout)
}

unsafe fn realloc(&self, ptr: *mut u8, layout: Layout, new_size: usize) -> *mut u8 {
self.reallocations.fetch_add(1, Ordering::SeqCst);
if new_size > layout.size() {
let difference = new_size - layout.size();
self.bytes_allocated.fetch_add(difference, Ordering::SeqCst);
self.track_alloc(difference);
} else if new_size < layout.size() {
let difference = layout.size() - new_size;
self.bytes_deallocated.fetch_add(difference, Ordering::SeqCst);
self.track_dealloc(difference);
}
self.bytes_reallocated
.fetch_add(new_size.wrapping_sub(layout.size()) as isize, Ordering::SeqCst);
Expand Down

0 comments on commit d925d3c

Please sign in to comment.