From f84afe19d0e298b02858acbb7545cacbdfd2ed8c Mon Sep 17 00:00:00 2001 From: Aaron Green Date: Mon, 18 Nov 2024 16:59:25 +0000 Subject: [PATCH] pw_allocator: Parameterize BlockAllocator on blocks MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit This CL switches the template parameter type for BlockAllocator from an offset type and poison interval to just a block type. The poison interval is now controlled by the module configuration. A type alias for the legacy type is provided. Change-Id: Id0add6254caf56dfb9a65b7f0bc9ed44249c72a4 Reviewed-on: https://pigweed-review.googlesource.com/c/pigweed/pigweed/+/234812 Lint: Lint 🤖 Pigweed-Auto-Submit: Aaron Green Commit-Queue: Aaron Green Reviewed-by: Taylor Cramer --- pw_allocator/BUILD.bazel | 19 +- pw_allocator/BUILD.gn | 20 +- pw_allocator/CMakeLists.txt | 16 +- pw_allocator/api.rst | 2 +- pw_allocator/best_fit_block_allocator_test.cc | 2 + pw_allocator/block/CMakeLists.txt | 1 + .../public/pw_allocator/block/contiguous.h | 4 +- pw_allocator/block_allocator_testing.cc | 34 ++-- pw_allocator/bucket.cc | 4 +- pw_allocator/bucket_allocator_test.cc | 82 +++++++- .../dual_first_fit_block_allocator_test.cc | 4 + pw_allocator/examples/block_allocator.cc | 3 +- pw_allocator/examples/size_report.cc | 2 +- .../first_fit_block_allocator_test.cc | 104 +--------- pw_allocator/last_fit_block_allocator_test.cc | 2 + .../pw_allocator/best_fit_block_allocator.h | 28 +-- .../public/pw_allocator/block_allocator.h | 181 ++++++++++-------- .../pw_allocator/block_allocator_testing.h | 67 ++++++- pw_allocator/public/pw_allocator/bucket.h | 4 +- .../public/pw_allocator/bucket_allocator.h | 68 +++---- .../pw_allocator/bucket_block_allocator.h | 5 +- .../public/pw_allocator/buddy_allocator.h | 2 +- .../dual_first_fit_block_allocator.h | 33 ++-- .../pw_allocator/first_fit_block_allocator.h | 28 +-- .../public/pw_allocator/freelist_heap.h | 7 +- .../pw_allocator/last_fit_block_allocator.h | 29 +-- pw_allocator/public/pw_allocator/testing.h | 7 +- .../pw_allocator/worst_fit_block_allocator.h | 28 +-- .../size_report/best_fit_block_allocator.cc | 2 +- pw_allocator/size_report/bucket_allocator.cc | 4 +- .../dual_first_fit_block_allocator.cc | 4 +- .../size_report/fallback_allocator.cc | 4 +- .../size_report/fallback_allocator_base.cc | 4 +- .../size_report/first_fit_block_allocator.cc | 2 +- .../size_report/last_fit_block_allocator.cc | 2 +- pw_allocator/size_report/pmr_allocator.cc | 2 +- .../size_report/pmr_allocator_base.cc | 2 +- .../size_report/synchronized_allocator_isl.cc | 2 +- .../synchronized_allocator_mutex.cc | 2 +- .../tracking_allocator_all_metrics.cc | 2 +- .../tracking_allocator_no_metrics.cc | 2 +- .../size_report/worst_fit_block_allocator.cc | 2 +- pw_allocator/tracking_allocator_test.cc | 2 +- .../worst_fit_block_allocator_test.cc | 2 + pw_malloc/best_fit_block_allocator.cc | 3 +- pw_malloc/bucket_allocator.cc | 4 +- pw_malloc/docs.rst | 2 - pw_malloc/dual_first_fit_block_allocator.cc | 5 +- pw_malloc/first_fit_block_allocator.cc | 3 +- pw_malloc/last_fit_block_allocator.cc | 3 +- pw_malloc/public/pw_malloc/config.h | 24 --- pw_malloc/worst_fit_block_allocator.cc | 4 +- 52 files changed, 483 insertions(+), 391 deletions(-) diff --git a/pw_allocator/BUILD.bazel b/pw_allocator/BUILD.bazel index c5bae893ba..20251370f5 100644 --- a/pw_allocator/BUILD.bazel +++ b/pw_allocator/BUILD.bazel @@ -34,7 +34,10 @@ label_flag( cc_library( name = "test_config", - defines = ["PW_ALLOCATOR_STRICT_VALIDATION=1"], + defines = [ + 
"PW_ALLOCATOR_STRICT_VALIDATION=1", + "PW_ALLOCATOR_BLOCK_POISON_INTERVAL=4", + ], ) # Libraries @@ -80,6 +83,7 @@ cc_library( deps = [ ":block_allocator", ":config", + "//pw_allocator/block:detailed_block", ], ) @@ -91,7 +95,11 @@ cc_library( deps = [ ":allocator", ":fragmentation", - "//pw_allocator/block:detailed_block", + "//pw_allocator/block:allocatable", + "//pw_allocator/block:basic", + "//pw_allocator/block:iterable", + "//pw_allocator/block:poisonable", + "//pw_allocator/block:with_layout", "//pw_assert", "//pw_bytes:alignment", "//pw_result", @@ -125,6 +133,7 @@ cc_library( deps = [ ":block_allocator", ":bucket", + "//pw_allocator/block:detailed_block", "//pw_status", ], ) @@ -241,6 +250,7 @@ cc_library( deps = [ ":block_allocator", ":config", + "//pw_allocator/block:detailed_block", ], ) @@ -269,6 +279,7 @@ cc_library( deps = [ ":block_allocator", ":config", + "//pw_allocator/block:detailed_block", ], ) @@ -287,7 +298,6 @@ cc_library( strip_include_prefix = "public", deps = [ ":bucket_allocator", - "//pw_allocator/block:detailed_block", "//pw_assert", "//pw_bytes", "//pw_preprocessor", @@ -301,6 +311,7 @@ cc_library( deps = [ ":block_allocator", ":config", + "//pw_allocator/block:detailed_block", ], ) @@ -402,6 +413,7 @@ cc_library( deps = [ ":block_allocator", ":config", + "//pw_allocator/block:detailed_block", ], ) @@ -530,6 +542,7 @@ pw_cc_test( deps = [ ":block_allocator_testing", ":bucket_allocator", + ":bucket_block_allocator", "//pw_unit_test", ], ) diff --git a/pw_allocator/BUILD.gn b/pw_allocator/BUILD.gn index 73b7e3dd2f..269f6b3af9 100644 --- a/pw_allocator/BUILD.gn +++ b/pw_allocator/BUILD.gn @@ -48,7 +48,10 @@ pw_source_set("config") { } config("test_config") { - defines = [ "PW_ALLOCATOR_STRICT_VALIDATION=1" ] + defines = [ + "PW_ALLOCATOR_STRICT_VALIDATION=1", + "PW_ALLOCATOR_BLOCK_POISON_INTERVAL=4", + ] } # Libraries @@ -82,6 +85,7 @@ pw_source_set("best_fit_block_allocator") { public_deps = [ ":block_allocator", ":config", + "block:detailed_block", ] } @@ -92,7 +96,11 @@ pw_source_set("block_allocator") { ":allocator", ":bucket", ":fragmentation", - "block:detailed_block", + "block:allocatable", + "block:basic", + "block:iterable", + "block:poisonable", + "block:with_layout", dir_pw_bytes, dir_pw_result, dir_pw_status, @@ -119,6 +127,7 @@ pw_source_set("bucket_allocator") { public_deps = [ ":block_allocator", ":bucket", + "block:detailed_block", dir_pw_status, ] } @@ -214,6 +223,7 @@ pw_source_set("dual_first_fit_block_allocator") { public_deps = [ ":block_allocator", ":config", + "block:detailed_block", ] } @@ -236,6 +246,7 @@ pw_source_set("first_fit_block_allocator") { public_deps = [ ":block_allocator", ":config", + "block:detailed_block", ] } @@ -250,7 +261,6 @@ pw_source_set("freelist_heap") { public = [ "public/pw_allocator/freelist_heap.h" ] public_deps = [ ":bucket_allocator", - "block:detailed_block", dir_pw_assert, dir_pw_bytes, dir_pw_preprocessor, @@ -263,6 +273,7 @@ pw_source_set("last_fit_block_allocator") { public_deps = [ ":block_allocator", ":config", + "block:detailed_block", ] } @@ -340,6 +351,7 @@ pw_source_set("worst_fit_block_allocator") { public_deps = [ ":block_allocator", ":config", + "block:detailed_block", ] } @@ -373,6 +385,7 @@ pw_source_set("block_allocator_testing") { deps = [ "$dir_pw_bytes:alignment", "$dir_pw_third_party/fuchsia:stdcompat", + "block:detailed_block", dir_pw_assert, dir_pw_status, ] @@ -432,6 +445,7 @@ pw_test("bucket_allocator_test") { deps = [ ":block_allocator_testing", ":bucket_allocator", + 
":bucket_block_allocator", ] sources = [ "bucket_allocator_test.cc" ] } diff --git a/pw_allocator/CMakeLists.txt b/pw_allocator/CMakeLists.txt index 81494b1373..c49f01cc91 100644 --- a/pw_allocator/CMakeLists.txt +++ b/pw_allocator/CMakeLists.txt @@ -30,6 +30,7 @@ pw_add_library(pw_allocator.config INTERFACE pw_add_library(pw_allocator.test_config INTERFACE PUBLIC_DEFINES PW_ALLOCATOR_STRICT_VALIDATION=1 + PW_ALLOCATOR_BLOCK_POISON_INTERVAL=4 ) # Libraries @@ -67,6 +68,7 @@ pw_add_library(pw_allocator.best_fit_block_allocator INTERFACE public PUBLIC_DEPS pw_allocator.block_allocator + pw_allocator.block.detailed_block pw_allocator.config ) @@ -84,7 +86,11 @@ pw_add_library(pw_allocator.block_allocator STATIC public PUBLIC_DEPS pw_allocator.allocator - pw_allocator.block.detailed_block + pw_allocator.block.allocatable + pw_allocator.block.basic + pw_allocator.block.iterable + pw_allocator.block.poisonable + pw_allocator.block.with_layout pw_allocator.fragmentation pw_bytes.alignment pw_result @@ -117,6 +123,7 @@ pw_add_library(pw_allocator.bucket_allocator INTERFACE public PUBLIC_DEPS pw_allocator.block_allocator + pw_allocator.block.detailed_block pw_allocator.bucket pw_status ) @@ -214,6 +221,7 @@ pw_add_library(pw_allocator.dual_first_fit_block_allocator INTERFACE public PUBLIC_DEPS pw_allocator.block_allocator + pw_allocator.block.detailed_block pw_allocator.config ) @@ -240,6 +248,7 @@ pw_add_library(pw_allocator.first_fit_block_allocator INTERFACE public PUBLIC_DEPS pw_allocator.block_allocator + pw_allocator.block.detailed_block pw_allocator.config ) @@ -258,7 +267,6 @@ pw_add_library(pw_allocator.freelist_heap INTERFACE PUBLIC_INCLUDES public PUBLIC_DEPS - pw_allocator.block.detailed_block pw_allocator.bucket_allocator pw_assert pw_bytes @@ -272,6 +280,7 @@ pw_add_library(pw_allocator.last_fit_block_allocator INTERFACE public PUBLIC_DEPS pw_allocator.block_allocator + pw_allocator.block.detailed_block pw_allocator.config ) @@ -360,6 +369,7 @@ pw_add_library(pw_allocator.worst_fit_block_allocator INTERFACE public PUBLIC_DEPS pw_allocator.block_allocator + pw_allocator.block.detailed_block pw_allocator.config ) @@ -394,6 +404,7 @@ pw_add_library(pw_allocator.block_allocator_testing STATIC PUBLIC_DEPS pw_allocator.block.testing pw_allocator.block_allocator + pw_allocator.block.detailed_block pw_unit_test PRIVATE_DEPS pw_assert @@ -473,6 +484,7 @@ pw_add_test(pw_allocator.bucket_allocator_test PRIVATE_DEPS pw_allocator.block_allocator_testing pw_allocator.bucket_allocator + pw_allocator.bucket_block_allocator GROUPS modules pw_allocator diff --git a/pw_allocator/api.rst b/pw_allocator/api.rst index 9d6609fd8d..d68ce3c326 100644 --- a/pw_allocator/api.rst +++ b/pw_allocator/api.rst @@ -312,7 +312,7 @@ uses the mix-ins above. Bucket ====== -.. doxygenclass:: pw::allocator::internal::Bucket +.. doxygenclass:: pw::allocator::Bucket :members: .. 
_module-pw_allocator-api-metrics_adapter: diff --git a/pw_allocator/best_fit_block_allocator_test.cc b/pw_allocator/best_fit_block_allocator_test.cc index abb5385659..64db3ee9ec 100644 --- a/pw_allocator/best_fit_block_allocator_test.cc +++ b/pw_allocator/best_fit_block_allocator_test.cc @@ -119,4 +119,6 @@ TEST_F(BestFitBlockAllocatorTest, CanMeasureFragmentation) { CanMeasureFragmentation(); } +TEST_F(BestFitBlockAllocatorTest, PoisonPeriodically) { PoisonPeriodically(); } + } // namespace diff --git a/pw_allocator/block/CMakeLists.txt b/pw_allocator/block/CMakeLists.txt index 4836271e66..d726827cc2 100644 --- a/pw_allocator/block/CMakeLists.txt +++ b/pw_allocator/block/CMakeLists.txt @@ -45,6 +45,7 @@ pw_add_library(pw_allocator.block.allocatable INTERFACE PUBLIC_INCLUDES public PUBLIC_DEPS + pw_allocator.config pw_allocator.block.contiguous pw_allocator.block.result pw_allocator.deallocator diff --git a/pw_allocator/block/public/pw_allocator/block/contiguous.h b/pw_allocator/block/public/pw_allocator/block/contiguous.h index d933eb9b08..276564ce06 100644 --- a/pw_allocator/block/public/pw_allocator/block/contiguous.h +++ b/pw_allocator/block/public/pw_allocator/block/contiguous.h @@ -86,7 +86,7 @@ class ContiguousBlock : public internal::ContiguousBase { inline Derived* Next() const; protected: - /// Split a block into two smaller blocks and allocates the leading one. + /// Split a block into two smaller blocks. /// /// This method splits a block into a leading block of the given /// `new_inner_size` and a trailing block, and returns the trailing space as a @@ -97,7 +97,7 @@ class ContiguousBlock : public internal::ContiguousBase { /// @pre The space remaining after a split can hold a new block. Derived* DoSplitFirst(size_t new_inner_size); - /// Split a block into two smaller blocks and allocates the trailing one. + /// Split a block into two smaller blocks. /// /// This method splits a block into a leading block and a trailing block of /// the given `new_inner_size`, and returns the trailing space is returned as diff --git a/pw_allocator/block_allocator_testing.cc b/pw_allocator/block_allocator_testing.cc index 5c370dd379..883b8d0a34 100644 --- a/pw_allocator/block_allocator_testing.cc +++ b/pw_allocator/block_allocator_testing.cc @@ -50,14 +50,14 @@ void BlockAllocatorTestBase::UseMemory(void* ptr, size_t size) { // Unit tests. 
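A note for orientation: the shared tests in this file now go through the type-erased `GetGenericAllocator()`, while derived fixtures keep a typed `GetAllocator()` that returns the concrete allocator. A condensed sketch of the pair as introduced in block_allocator_testing.h later in this patch:

    // Type-erased accessor used by the shared tests below.
    Allocator& GetGenericAllocator() override { return GetAllocator(); }

    // Typed accessor: initializes the concrete allocator with the test
    // memory and returns it, so allocator-specific tests can use members
    // such as blocks() without a downcast.
    BlockAllocatorType& GetAllocator() {
      allocator_.Init(GetBytes());
      return allocator_;
    }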
void BlockAllocatorTestBase::GetCapacity() { - Allocator& allocator = GetAllocator(); + Allocator& allocator = GetGenericAllocator(); StatusWithSize capacity = allocator.GetCapacity(); EXPECT_EQ(capacity.status(), OkStatus()); EXPECT_EQ(capacity.size(), kCapacity); } void BlockAllocatorTestBase::AllocateLarge() { - Allocator& allocator = GetAllocator(); + Allocator& allocator = GetGenericAllocator(); constexpr Layout layout = Layout::Of(); Store(0, allocator.Allocate(layout)); ASSERT_NE(Fetch(0), nullptr); @@ -68,7 +68,7 @@ void BlockAllocatorTestBase::AllocateLarge() { } void BlockAllocatorTestBase::AllocateSmall() { - Allocator& allocator = GetAllocator(); + Allocator& allocator = GetGenericAllocator(); constexpr Layout layout = Layout::Of(); Store(0, allocator.Allocate(layout)); ASSERT_NE(Fetch(0), nullptr); @@ -79,13 +79,13 @@ void BlockAllocatorTestBase::AllocateSmall() { } void BlockAllocatorTestBase::AllocateTooLarge() { - Allocator& allocator = GetAllocator(); + Allocator& allocator = GetGenericAllocator(); Store(0, allocator.Allocate(Layout::Of())); EXPECT_EQ(Fetch(0), nullptr); } void BlockAllocatorTestBase::AllocateLargeAlignment() { - Allocator& allocator = GetAllocator(); + Allocator& allocator = GetGenericAllocator(); constexpr size_t kAlignment = 64; Store(0, allocator.Allocate(Layout(kLargeInnerSize, kAlignment))); @@ -106,7 +106,7 @@ void BlockAllocatorTestBase::AllocateAlignmentFailure() { size_t outer_size = GetAlignedOffsetAfter(bytes.data(), kAlignment, kSmallInnerSize) + kAlignment; - Allocator& allocator = GetAllocator({ + Allocator& allocator = GetGenericAllocator({ {outer_size, Preallocation::kUsed}, {kLargeOuterSize, Preallocation::kFree}, {Preallocation::kSizeRemaining, Preallocation::kUsed}, @@ -118,12 +118,12 @@ void BlockAllocatorTestBase::AllocateAlignmentFailure() { } void BlockAllocatorTestBase::DeallocateNull() { - Allocator& allocator = GetAllocator(); + Allocator& allocator = GetGenericAllocator(); allocator.Deallocate(nullptr); } void BlockAllocatorTestBase::DeallocateShuffled() { - Allocator& allocator = GetAllocator(); + Allocator& allocator = GetGenericAllocator(); constexpr Layout layout = Layout::Of(); for (size_t i = 0; i < kNumPtrs; ++i) { Store(i, allocator.Allocate(layout)); @@ -150,13 +150,13 @@ void BlockAllocatorTestBase::DeallocateShuffled() { } void BlockAllocatorTestBase::ResizeNull() { - Allocator& allocator = GetAllocator(); + Allocator& allocator = GetGenericAllocator(); size_t new_size = 1; EXPECT_FALSE(allocator.Resize(nullptr, new_size)); } void BlockAllocatorTestBase::ResizeLargeSame() { - Allocator& allocator = GetAllocator({ + Allocator& allocator = GetGenericAllocator({ {kLargeOuterSize, Preallocation::kUsed}, {Preallocation::kSizeRemaining, Preallocation::kUsed}, }); @@ -166,7 +166,7 @@ void BlockAllocatorTestBase::ResizeLargeSame() { } void BlockAllocatorTestBase::ResizeLargeSmaller() { - Allocator& allocator = GetAllocator({ + Allocator& allocator = GetGenericAllocator({ {kLargeOuterSize, Preallocation::kUsed}, {Preallocation::kSizeRemaining, Preallocation::kUsed}, }); @@ -176,7 +176,7 @@ void BlockAllocatorTestBase::ResizeLargeSmaller() { } void BlockAllocatorTestBase::ResizeLargeLarger() { - Allocator& allocator = GetAllocator({ + Allocator& allocator = GetGenericAllocator({ {kLargeOuterSize, Preallocation::kUsed}, {kLargeOuterSize, Preallocation::kFree}, {Preallocation::kSizeRemaining, Preallocation::kUsed}, @@ -187,7 +187,7 @@ void BlockAllocatorTestBase::ResizeLargeLarger() { } void 
BlockAllocatorTestBase::ResizeLargeLargerFailure() { - Allocator& allocator = GetAllocator({ + Allocator& allocator = GetGenericAllocator({ {kLargeOuterSize, Preallocation::kUsed}, {Preallocation::kSizeRemaining, Preallocation::kUsed}, }); @@ -197,7 +197,7 @@ void BlockAllocatorTestBase::ResizeLargeLargerFailure() { } void BlockAllocatorTestBase::ResizeSmallSame() { - Allocator& allocator = GetAllocator({ + Allocator& allocator = GetGenericAllocator({ {kSmallOuterSize, Preallocation::kUsed}, {Preallocation::kSizeRemaining, Preallocation::kUsed}, }); @@ -207,7 +207,7 @@ void BlockAllocatorTestBase::ResizeSmallSame() { } void BlockAllocatorTestBase::ResizeSmallSmaller() { - Allocator& allocator = GetAllocator({ + Allocator& allocator = GetGenericAllocator({ {kSmallOuterSize, Preallocation::kUsed}, {Preallocation::kSizeRemaining, Preallocation::kUsed}, }); @@ -217,7 +217,7 @@ void BlockAllocatorTestBase::ResizeSmallSmaller() { } void BlockAllocatorTestBase::ResizeSmallLarger() { - Allocator& allocator = GetAllocator({ + Allocator& allocator = GetGenericAllocator({ {kSmallOuterSize, Preallocation::kUsed}, {kSmallOuterSize, Preallocation::kFree}, {Preallocation::kSizeRemaining, Preallocation::kUsed}, @@ -228,7 +228,7 @@ void BlockAllocatorTestBase::ResizeSmallLarger() { } void BlockAllocatorTestBase::ResizeSmallLargerFailure() { - Allocator& allocator = GetAllocator({ + Allocator& allocator = GetGenericAllocator({ {kSmallOuterSize, Preallocation::kUsed}, {Preallocation::kSizeRemaining, Preallocation::kUsed}, }); diff --git a/pw_allocator/bucket.cc b/pw_allocator/bucket.cc index 7b28a9284a..e6ffc3e174 100644 --- a/pw_allocator/bucket.cc +++ b/pw_allocator/bucket.cc @@ -16,7 +16,7 @@ #include "pw_assert/check.h" -namespace pw::allocator::internal { +namespace pw::allocator { Bucket::Bucket() { Init(); } @@ -94,4 +94,4 @@ std::byte* Bucket::Remove(Chunk* chunk) { return chunk->AsBytes(); } -} // namespace pw::allocator::internal +} // namespace pw::allocator diff --git a/pw_allocator/bucket_allocator_test.cc b/pw_allocator/bucket_allocator_test.cc index 3eece23e92..ef56b8f4fd 100644 --- a/pw_allocator/bucket_allocator_test.cc +++ b/pw_allocator/bucket_allocator_test.cc @@ -16,6 +16,7 @@ #include "pw_allocator/allocator.h" #include "pw_allocator/block_allocator_testing.h" +#include "pw_allocator/bucket_block_allocator.h" #include "pw_unit_test/framework.h" namespace { @@ -26,13 +27,13 @@ constexpr size_t kMinChunkSize = 64; constexpr size_t kNumBuckets = 4; using ::pw::allocator::Layout; +using ::pw::allocator::test::BlockAllocatorTest; using ::pw::allocator::test::Preallocation; +using BlockType = ::pw::allocator::BucketBlock; using BucketAllocator = - ::pw::allocator::BucketAllocator; -using BlockAllocatorTest = - ::pw::allocator::test::BlockAllocatorTest; + ::pw::allocator::BucketAllocator; -class BucketAllocatorTest : public BlockAllocatorTest { +class BucketAllocatorTest : public BlockAllocatorTest { public: BucketAllocatorTest() : BlockAllocatorTest(allocator_) {} @@ -253,4 +254,77 @@ TEST_F(BucketAllocatorTest, LaterSmallSplitNotIsRecycled) { } } +TEST_F(BucketAllocatorTest, PoisonPeriodically) { PoisonPeriodically(); } + +// TODO(b/376730645): Remove this test when the legacy alias is deprecated. 
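For the bucket arithmetic exercised by the tests below: with kMinChunkSize = 64 and kNumBuckets = 4, the sized buckets double from the minimum and the final bucket is unbounded. The sizing rule, inferred from this test's expectations:

    // chunk_size(i) = kMinChunkSize << i for the first kNumBuckets - 1
    // buckets, i.e. 64, 128, 256; bucket 3 is the implicit, unbounded
    // bucket that accepts any larger chunk.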
+using BucketBlockAllocator = ::pw::allocator::BucketBlockAllocator; + +class BucketBlockAllocatorTest + : public BlockAllocatorTest { + public: + BucketBlockAllocatorTest() : BlockAllocatorTest(allocator_) {} + + private: + BucketBlockAllocator allocator_; +}; + +TEST_F(BucketBlockAllocatorTest, AllocatesFromCompatibleBucket) { + // Bucket sizes are: [ 64, 128, 256 ] + // Start with everything allocated in order to recycle blocks into buckets. + auto& allocator = GetAllocator({ + {63 + BlockType::kBlockOverhead, Preallocation::kUsed}, + {kSmallerOuterSize, Preallocation::kUsed}, + {128 + BlockType::kBlockOverhead, Preallocation::kUsed}, + {kSmallerOuterSize, Preallocation::kUsed}, + {255 + BlockType::kBlockOverhead, Preallocation::kUsed}, + {kSmallerOuterSize, Preallocation::kUsed}, + {257 + BlockType::kBlockOverhead, Preallocation::kUsed}, + {Preallocation::kSizeRemaining, Preallocation::kUsed}, + }); + + // Deallocate to fill buckets. + void* bucket0_ptr = Fetch(0); + Store(0, nullptr); + allocator.Deallocate(bucket0_ptr); + + void* bucket1_ptr = Fetch(2); + Store(2, nullptr); + allocator.Deallocate(bucket1_ptr); + + void* bucket2_ptr = Fetch(4); + Store(4, nullptr); + allocator.Deallocate(bucket2_ptr); + + // Bucket 3 is the implicit, unbounded bucket. + void* bucket3_ptr = Fetch(6); + Store(6, nullptr); + allocator.Deallocate(bucket3_ptr); + + // Allocate in a different order. The correct bucket should be picked for each + // allocation + + // The allocation from bucket 2 splits a trailing block off the chunk. + Store(4, allocator.Allocate(Layout(129, 1))); + auto* block2 = BlockType::FromUsableSpace(bucket2_ptr); + EXPECT_TRUE(block2->IsFree()); + EXPECT_EQ(Fetch(4), block2->Next()->UsableSpace()); + + // This allocation exactly matches the chunk size of bucket 1. + Store(2, allocator.Allocate(Layout(128, 1))); + EXPECT_EQ(Fetch(2), bucket1_ptr); + + // 129 should start with bucket 2, then use bucket 3 since 2 is empty. + // The allocation from bucket 3 splits a trailing block off the chunk. + auto* block3 = BlockType::FromUsableSpace(bucket3_ptr); + Store(6, allocator.Allocate(Layout(129, 1))); + EXPECT_TRUE(block3->IsFree()); + EXPECT_EQ(Fetch(6), block3->Next()->UsableSpace()); + + // The allocation from bucket 0 splits a trailing block off the chunk. + auto* block0 = BlockType::FromUsableSpace(bucket0_ptr); + Store(0, allocator.Allocate(Layout(32, 1))); + EXPECT_TRUE(block0->IsFree()); + EXPECT_EQ(Fetch(0), block0->Next()->UsableSpace()); +} + } // namespace diff --git a/pw_allocator/dual_first_fit_block_allocator_test.cc b/pw_allocator/dual_first_fit_block_allocator_test.cc index d2369fb387..e28ece67fa 100644 --- a/pw_allocator/dual_first_fit_block_allocator_test.cc +++ b/pw_allocator/dual_first_fit_block_allocator_test.cc @@ -163,4 +163,8 @@ TEST_F(DualFirstFitBlockAllocatorTest, CanMeasureFragmentation) { CanMeasureFragmentation(); } +TEST_F(DualFirstFitBlockAllocatorTest, PoisonPeriodically) { + PoisonPeriodically(); +} + } // namespace diff --git a/pw_allocator/examples/block_allocator.cc b/pw_allocator/examples/block_allocator.cc index f7d519a128..c3ba14e2e7 100644 --- a/pw_allocator/examples/block_allocator.cc +++ b/pw_allocator/examples/block_allocator.cc @@ -24,8 +24,7 @@ namespace examples { std::array buffer; // DOCSTAG: [pw_allocator-examples-block_allocator-poison] -// Poisons every third deallocation. 
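The per-instantiation poison interval removed here is now a module-wide setting. A project wanting the old every-third-deallocation behavior would set the configuration macro in its build instead; the allocator declaration itself carries no poison parameter (illustrative, assuming the default template arguments):

    // Assumes the build defines PW_ALLOCATOR_BLOCK_POISON_INTERVAL=3.
    pw::allocator::LastFitBlockAllocator<> allocator(buffer);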
-pw::allocator::LastFitBlockAllocator allocator(buffer); +pw::allocator::LastFitBlockAllocator allocator(buffer); // DOCSTAG: [pw_allocator-examples-block_allocator-poison] // DOCSTAG: [pw_allocator-examples-block_allocator-layout_of] diff --git a/pw_allocator/examples/size_report.cc b/pw_allocator/examples/size_report.cc index 707cc08804..0c3d037c9d 100644 --- a/pw_allocator/examples/size_report.cc +++ b/pw_allocator/examples/size_report.cc @@ -23,7 +23,7 @@ int main() { pw::allocator::SizeReporter reporter; reporter.SetBaseline(); - pw::allocator::FirstFitBlockAllocator allocator(reporter.buffer()); + pw::allocator::FirstFitBlockAllocator<> allocator(reporter.buffer()); examples::CustomAllocator custom(allocator, 128); reporter.Measure(custom); diff --git a/pw_allocator/first_fit_block_allocator_test.cc b/pw_allocator/first_fit_block_allocator_test.cc index fe6450fe39..0a7044ea27 100644 --- a/pw_allocator/first_fit_block_allocator_test.cc +++ b/pw_allocator/first_fit_block_allocator_test.cc @@ -57,7 +57,9 @@ TEST_F(FirstFitBlockAllocatorTest, AllocateSmall) { AllocateSmall(); } TEST_F(FirstFitBlockAllocatorTest, AllocateLargeAlignment) { AllocateLargeAlignment(); - alignas(BlockType::kAlignment) std::array buffer; + alignas(FirstFitBlockAllocator::BlockType::kAlignment) + std::array + buffer; pw::ByteSpan bytes(buffer); auto addr = cpp20::bit_cast(bytes.data()); size_t offset = 64 - (addr % 64); @@ -135,104 +137,6 @@ TEST_F(FirstFitBlockAllocatorTest, CanMeasureFragmentation) { CanMeasureFragmentation(); } -TEST_F(FirstFitBlockAllocatorTest, DisablePoisoning) { - auto& allocator = GetAllocator(); - constexpr Layout layout = Layout::Of(); - - // Allocate 3 blocks to prevent the middle one from being merged when freed. - std::array ptrs; - for (auto& ptr : ptrs) { - ptr = allocator.Allocate(layout); - ASSERT_NE(ptr, nullptr); - } - - // Modify the contents of the block and check if it is still valid. - auto* bytes = cpp20::bit_cast(ptrs[1]); - auto* block = BlockType::FromUsableSpace(bytes); - allocator.Deallocate(bytes); - EXPECT_TRUE(block->IsFree()); - EXPECT_TRUE(block->IsValid()); - bytes[0] = ~bytes[0]; - EXPECT_TRUE(block->IsValid()); - - allocator.Deallocate(ptrs[0]); - allocator.Deallocate(ptrs[2]); -} - -TEST(PoisonedFirstFitBlockAllocatorTest, PoisonEveryFreeBlock) { - using PoisonedFirstFitBlockAllocator = - ::pw::allocator::FirstFitBlockAllocator; - using BlockType = PoisonedFirstFitBlockAllocator::BlockType; - - pw::allocator::WithBuffer - allocator; - allocator->Init(allocator.as_bytes()); - constexpr Layout layout = - Layout::Of(); - - // Allocate 3 blocks to prevent the middle one from being merged when freed. - std::array ptrs; - for (auto& ptr : ptrs) { - ptr = allocator->Allocate(layout); - ASSERT_NE(ptr, nullptr); - } - - // Modify the contents of the block and check if it is still valid. - auto* bytes = cpp20::bit_cast(ptrs[1]); - auto* block = BlockType::FromUsableSpace(bytes); - allocator->Deallocate(bytes); - - EXPECT_TRUE(block->IsFree()); - EXPECT_TRUE(block->IsValid()); - bytes[0] = ~bytes[0]; - EXPECT_FALSE(block->IsValid()); - - // Fix the block to prevent crashing on teardown. 
- bytes[0] = ~bytes[0]; - allocator->Deallocate(ptrs[0]); - allocator->Deallocate(ptrs[2]); -} - -TEST(PoisonedFirstFitBlockAllocatorTest, PoisonPeriodically) { - using PoisonedFirstFitBlockAllocator = - ::pw::allocator::FirstFitBlockAllocator; - using BlockType = PoisonedFirstFitBlockAllocator::BlockType; - - pw::allocator::WithBuffer - allocator; - allocator->Init(allocator.as_bytes()); - constexpr Layout layout = - Layout::Of(); - - // Allocate 9 blocks to prevent every other from being merged when freed. - std::array ptrs; - for (auto& ptr : ptrs) { - ptr = allocator->Allocate(layout); - ASSERT_NE(ptr, nullptr); - } - - for (size_t i = 1; i < ptrs.size(); i += 2) { - auto* bytes = cpp20::bit_cast(ptrs[i]); - auto* block = BlockType::FromUsableSpace(bytes); - allocator->Deallocate(bytes); - EXPECT_TRUE(block->IsFree()); - EXPECT_TRUE(block->IsValid()); - bytes[0] = ~bytes[0]; - - // Corruption is only detected on the fourth freed block. - if (i == 7) { - EXPECT_FALSE(block->IsValid()); - bytes[0] = ~bytes[0]; - } else { - EXPECT_TRUE(block->IsValid()); - } - } - - for (size_t i = 0; i < ptrs.size(); i += 2) { - allocator->Deallocate(ptrs[i]); - } -} +TEST_F(FirstFitBlockAllocatorTest, PoisonPeriodically) { PoisonPeriodically(); } } // namespace diff --git a/pw_allocator/last_fit_block_allocator_test.cc b/pw_allocator/last_fit_block_allocator_test.cc index 43f7f967ad..aceabfa23a 100644 --- a/pw_allocator/last_fit_block_allocator_test.cc +++ b/pw_allocator/last_fit_block_allocator_test.cc @@ -110,4 +110,6 @@ TEST_F(LastFitBlockAllocatorTest, CanMeasureFragmentation) { CanMeasureFragmentation(); } +TEST_F(LastFitBlockAllocatorTest, PoisonPeriodically) { PoisonPeriodically(); } + } // namespace diff --git a/pw_allocator/public/pw_allocator/best_fit_block_allocator.h b/pw_allocator/public/pw_allocator/best_fit_block_allocator.h index 4d215cd09a..e66246a3bf 100644 --- a/pw_allocator/public/pw_allocator/best_fit_block_allocator.h +++ b/pw_allocator/public/pw_allocator/best_fit_block_allocator.h @@ -16,11 +16,17 @@ #include #include +#include "pw_allocator/block/detailed_block.h" #include "pw_allocator/block_allocator.h" #include "pw_allocator/config.h" namespace pw::allocator { +/// Alias for a default block type that is compatible with +/// `BestFitBlockAllocator`. +template +using BestFitBlock = DetailedBlock; + /// Block allocator that uses a "best-fit" allocation strategy. /// /// In this strategy, the allocator handles an allocation request by looking at @@ -30,24 +36,24 @@ namespace pw::allocator { /// This algorithm may make better use of available memory by wasting less on /// unused fragments, but may also lead to worse fragmentation as those /// fragments are more likely to be too small to be useful to other requests. -template -class BestFitBlockAllocator - : public BlockAllocator { +template +class BestFitBlockAllocator : public BlockAllocator> { public: - using Base = BlockAllocator; - using BlockType = typename Base::BlockType; + using BlockType = BestFitBlock; + + private: + using Base = BlockAllocator; + public: /// Constexpr constructor. Callers must explicitly call `Init`. - constexpr BestFitBlockAllocator() : Base() {} + constexpr BestFitBlockAllocator() = default; /// Non-constexpr constructor that automatically calls `Init`. /// /// @param[in] region Region of memory to use when satisfying allocation - /// requests. The region MUST be large enough to fit an - /// aligned block with overhead. It MUST NOT be larger - /// than what is addressable by `OffsetType`. 
- explicit BestFitBlockAllocator(ByteSpan region) : Base(region) {} + /// requests. The region MUST be valid as an argument to + /// `BlockType::Init`. + explicit BestFitBlockAllocator(ByteSpan region) { Base::Init(region); } private: /// @copydoc Allocator::Allocate diff --git a/pw_allocator/public/pw_allocator/block_allocator.h b/pw_allocator/public/pw_allocator/block_allocator.h index ec90b782b1..5ea32a996c 100644 --- a/pw_allocator/public/pw_allocator/block_allocator.h +++ b/pw_allocator/public/pw_allocator/block_allocator.h @@ -16,9 +16,13 @@ #include #include "pw_allocator/allocator.h" -#include "pw_allocator/block/detailed_block.h" +#include "pw_allocator/block/basic.h" +#include "pw_allocator/block/iterable.h" +#include "pw_allocator/block/poisonable.h" #include "pw_allocator/block/result.h" +#include "pw_allocator/block/with_layout.h" #include "pw_allocator/capability.h" +#include "pw_allocator/config.h" #include "pw_allocator/fragmentation.h" #include "pw_assert/assert.h" #include "pw_bytes/span.h" @@ -39,11 +43,6 @@ namespace internal { /// one of its specializations. class GenericBlockAllocator : public Allocator { public: - static constexpr Capabilities kCapabilities = - kImplementsGetRequestedLayout | kImplementsGetUsableLayout | - kImplementsGetAllocatedLayout | kImplementsGetCapacity | - kImplementsRecognizes; - // Not copyable or movable. GenericBlockAllocator(const GenericBlockAllocator&) = delete; GenericBlockAllocator& operator=(const GenericBlockAllocator&) = delete; @@ -51,7 +50,20 @@ class GenericBlockAllocator : public Allocator { GenericBlockAllocator& operator=(GenericBlockAllocator&&) = delete; protected: - constexpr GenericBlockAllocator() : Allocator(kCapabilities) {} + template + static constexpr Capabilities GetCapabilities() { + Capabilities common = kImplementsGetUsableLayout | + kImplementsGetAllocatedLayout | + kImplementsGetCapacity | kImplementsRecognizes; + if constexpr (has_layout_v) { + return common | kImplementsGetRequestedLayout; + } else { + return common; + } + } + + constexpr explicit GenericBlockAllocator(Capabilities capabilities) + : Allocator(capabilities) {} /// Crashes with an informational message that a given block is allocated. /// @@ -79,36 +91,34 @@ class GenericBlockAllocator : public Allocator { /// another allocator. If this is done, the `Query` method may incorrectly /// think pointers returned by that allocator were created by this one, and /// report that this allocator can de/reallocate them. -template +template class BlockAllocator : public internal::GenericBlockAllocator { + private: + using Base = internal::GenericBlockAllocator; + public: - using BlockType = DetailedBlock; + using BlockType = BlockType_; using Range = typename BlockType::Range; - /// Constexpr constructor. Callers must explicitly call `Init`. - constexpr BlockAllocator() : internal::GenericBlockAllocator() {} - - /// Non-constexpr constructor that automatically calls `Init`. - /// - /// @param[in] region Region of memory to use when satisfying allocation - /// requests. The region MUST be large enough to fit an - /// aligned block with overhead. It MUST NOT be larger - /// than what is addressable by `OffsetType`. - explicit BlockAllocator(ByteSpan region) : BlockAllocator() { Init(region); } + static constexpr Capabilities kCapabilities = + Base::GetCapabilities(); + static constexpr size_t kPoisonInterval = PW_ALLOCATOR_BLOCK_POISON_INTERVAL; ~BlockAllocator() override { Reset(); } /// Returns a ``Range`` of blocks tracking the memory of this allocator. 
Range blocks() const; + /// Returns fragmentation information for the block allocator's memory region. + Fragmentation MeasureFragmentation() const; + /// Sets the memory region to be used by this allocator. /// /// This method will instantiate an initial block using the memory region. /// /// @param[in] region Region of memory to use when satisfying allocation - /// requests. The region MUST be large enough to fit an - /// aligned block with overhead. It MUST NOT be larger - /// than what is addressable by `OffsetType`. + /// requests. The region MUST be valid as an argument to + /// `BlockType::Init`. void Init(ByteSpan region); /// Sets the blocks to be used by this allocator. @@ -118,7 +128,7 @@ class BlockAllocator : public internal::GenericBlockAllocator { /// /// @param[in] begin The first block for this allocator. /// The block must not have a previous block. - void Init(BlockType* begin) { return Init(begin, nullptr); } + void Init(BlockType* begin) { Init(begin, nullptr); } /// Sets the blocks to be used by this allocator. /// @@ -129,10 +139,7 @@ class BlockAllocator : public internal::GenericBlockAllocator { /// which the sequence including and following `begin` is /// used. If not null, the block must not have a next /// block. - virtual void Init(BlockType* begin, BlockType* end); - - /// Returns fragmentation information for the block allocator's memory region. - Fragmentation MeasureFragmentation() const; + void Init(BlockType* begin, BlockType* end); /// Resets the allocator to an uninitialized state. /// @@ -143,6 +150,8 @@ class BlockAllocator : public internal::GenericBlockAllocator { protected: using ReverseRange = typename BlockType::ReverseRange; + constexpr explicit BlockAllocator() : Base(kCapabilities) {} + /// Returns a ``ReverseRange`` of blocks tracking the memory of this /// allocator. ReverseRange rblocks(); @@ -164,12 +173,9 @@ class BlockAllocator : public internal::GenericBlockAllocator { /// OUT_OF_RANGE: Given pointer is outside the allocator's memory. /// /// @endrst - template >, - const BlockType*, - BlockType*>> - Result FromUsableSpace(PtrType ptr) const; + template + Result> FromUsableSpace( + Ptr ptr) const; private: using BlockResultPrev = internal::GenericBlockResult::Prev; @@ -209,7 +215,7 @@ class BlockAllocator : public internal::GenericBlockAllocator { /// bookkeeeping. /// /// @param block The block being freed. - virtual void ReserveBlock(BlockType*) {} + virtual void ReserveBlock(BlockType&) {} /// Indicates that a block is now free. /// @@ -217,7 +223,7 @@ class BlockAllocator : public internal::GenericBlockAllocator { /// bookkeeeping. /// /// @param block The block being freed. - virtual void RecycleBlock(BlockType*) {} + virtual void RecycleBlock(BlockType&) {} /// Returns if the previous block exists and is free. 
static bool PrevIsFree(const BlockType* block) { @@ -245,30 +251,32 @@ class BlockAllocator : public internal::GenericBlockAllocator { // Template method implementations -template -typename BlockAllocator::Range -BlockAllocator::blocks() const { +template +typename BlockAllocator::Range BlockAllocator::blocks() + const { return Range(first_); } -template -typename BlockAllocator::ReverseRange -BlockAllocator::rblocks() { - PW_ASSERT(last_ == nullptr || last_->Next() == nullptr); - return ReverseRange(last_); +template +typename BlockAllocator::ReverseRange +BlockAllocator::rblocks() { + if constexpr (is_reverse_iterable_v) { + PW_ASSERT(last_ == nullptr || last_->Next() == nullptr); + return ReverseRange(last_); + } } -template -void BlockAllocator::Init(ByteSpan region) { +template +void BlockAllocator::Init(ByteSpan region) { Result result = BlockType::Init(region); Init(*result, nullptr); } -template -void BlockAllocator::Init(BlockType* begin, - BlockType* end) { +template +void BlockAllocator::Init(BlockType* begin, BlockType* end) { PW_ASSERT(begin != nullptr); PW_ASSERT(begin->Prev() == nullptr); + Reset(); if (end == nullptr) { end = begin; for (BlockType* next = end->Next(); next != nullptr; next = end->Next()) { @@ -280,22 +288,31 @@ void BlockAllocator::Init(BlockType* begin, } first_ = begin; last_ = end; - for (const auto& block : blocks()) { + + for (auto* block : blocks()) { capacity_ += block->OuterSize(); + if (block->IsFree()) { + RecycleBlock(*block); + } } } -template -void BlockAllocator::Reset() { +template +void BlockAllocator::Reset() { for (auto* block : blocks()) { if (!block->IsFree()) { CrashOnAllocated(block); } + ReserveBlock(*block); } + capacity_ = 0; + first_ = nullptr; + last_ = nullptr; + unpoisoned_ = 0; } -template -void* BlockAllocator::DoAllocate(Layout layout) { +template +void* BlockAllocator::DoAllocate(Layout layout) { if (capacity_ == 0) { // Not initialized. return nullptr; @@ -312,7 +329,7 @@ void* BlockAllocator::DoAllocate(Layout layout) { switch (result.prev()) { case BlockResultPrev::kSplitNew: // New free blocks may be created when allocating. - RecycleBlock(block->Prev()); + RecycleBlock(*(block->Prev())); break; case BlockResultPrev::kResizedLarger: // Extra bytes may be appended to the previous block. @@ -323,16 +340,17 @@ void* BlockAllocator::DoAllocate(Layout layout) { break; } if (result.next() == BlockResultNext::kSplitNew) { - RecycleBlock(block->Next()); + RecycleBlock(*(block->Next())); } UpdateLast(block); PW_ASSERT(block <= last_); + return block->UsableSpace(); } -template -void BlockAllocator::DoDeallocate(void* ptr) { +template +void BlockAllocator::DoDeallocate(void* ptr) { auto from_usable_space_result = FromUsableSpace(ptr); if (!from_usable_space_result.ok()) { CrashOnInvalidFree(ptr); @@ -344,10 +362,10 @@ void BlockAllocator::DoDeallocate(void* ptr) { // Neighboring blocks may be merged when freeing. if (auto* prev = block->Prev(); prev != nullptr && prev->IsFree()) { - ReserveBlock(prev); + ReserveBlock(*prev); } if (auto* next = block->Next(); next != nullptr && next->IsFree()) { - ReserveBlock(next); + ReserveBlock(*next); } // Free the block and merge it with its neighbors, if possible. 
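Since `ReserveBlock` and `RecycleBlock` now take `BlockType&` and are invoked around merges as shown above, a subclass can keep simple bookkeeping. A hypothetical sketch (names and body are illustrative, not part of this CL):

    // Hypothetical subclass counting free blocks via the new hooks.
    template <typename BlockType>
    class CountingBlockAllocator
        : public pw::allocator::BlockAllocator<BlockType> {
     private:
      // A real subclass must also implement ChooseBlock.
      void ReserveBlock(BlockType&) override { --num_free_; }
      void RecycleBlock(BlockType&) override { ++num_free_; }
      size_t num_free_ = 0;
    };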
@@ -361,7 +379,7 @@ void BlockAllocator::DoDeallocate(void* ptr) { allocated_ -= free_result.size(); } - if constexpr (kPoisonInterval != 0) { + if constexpr (is_poisonable_v && kPoisonInterval != 0) { ++unpoisoned_; if (unpoisoned_ >= kPoisonInterval) { block->Poison(); @@ -369,12 +387,11 @@ void BlockAllocator::DoDeallocate(void* ptr) { } } - RecycleBlock(block); + RecycleBlock(*block); } -template -bool BlockAllocator::DoResize(void* ptr, - size_t new_size) { +template +bool BlockAllocator::DoResize(void* ptr, size_t new_size) { auto result = FromUsableSpace(ptr); if (!result.ok()) { return false; @@ -383,7 +400,7 @@ bool BlockAllocator::DoResize(void* ptr, // Neighboring blocks may be merged when resizing. if (auto* next = block->Next(); next != nullptr && next->IsFree()) { - ReserveBlock(block->Next()); + ReserveBlock(*next); } size_t old_size = block->OuterSize(); @@ -395,15 +412,15 @@ bool BlockAllocator::DoResize(void* ptr, UpdateLast(block); if (auto* next = block->Next(); next != nullptr && next->IsFree()) { - RecycleBlock(block->Next()); + RecycleBlock(*next); } return true; } -template -Result BlockAllocator::DoGetInfo( - InfoType info_type, const void* ptr) const { +template +Result BlockAllocator::DoGetInfo(InfoType info_type, + const void* ptr) const { // Handle types not related to a block first. if (info_type == InfoType::kCapacity) { return Layout(capacity_); @@ -417,9 +434,12 @@ Result BlockAllocator::DoGetInfo( if (block->IsFree()) { return Status::FailedPrecondition(); } - switch (info_type) { - case InfoType::kRequestedLayoutOf: + if constexpr (kCapabilities.has(kImplementsGetRequestedLayout)) { + if (info_type == InfoType::kRequestedLayoutOf) { return block->RequestedLayout(); + } + } + switch (info_type) { case InfoType::kUsableLayoutOf: return Layout(block->InnerSize(), BlockType::kAlignment); case InfoType::kAllocatedLayoutOf: @@ -427,14 +447,14 @@ Result BlockAllocator::DoGetInfo( case InfoType::kRecognizes: return Layout(); case InfoType::kCapacity: + case InfoType::kRequestedLayoutOf: default: return Status::Unimplemented(); } } -template -Fragmentation -BlockAllocator::MeasureFragmentation() const { +template +Fragmentation BlockAllocator::MeasureFragmentation() const { Fragmentation fragmentation; for (auto block : blocks()) { if (block->IsFree()) { @@ -444,19 +464,18 @@ BlockAllocator::MeasureFragmentation() const { return fragmentation; } -template -template -Result -BlockAllocator::FromUsableSpace( - PtrType ptr) const { +template +template +Result> +BlockAllocator::FromUsableSpace(Ptr ptr) const { if (ptr < first_->UsableSpace() || last_->UsableSpace() < ptr) { return Status::OutOfRange(); } return BlockType::FromUsableSpace(ptr); } -template -void BlockAllocator::UpdateLast(BlockType* block) { +template +void BlockAllocator::UpdateLast(BlockType* block) { BlockType* next = block->Next(); if (next == nullptr) { last_ = block; diff --git a/pw_allocator/public/pw_allocator/block_allocator_testing.h b/pw_allocator/public/pw_allocator/block_allocator_testing.h index 869e2b7616..98825be498 100644 --- a/pw_allocator/public/pw_allocator/block_allocator_testing.h +++ b/pw_allocator/public/pw_allocator/block_allocator_testing.h @@ -16,6 +16,7 @@ #include #include +#include "pw_allocator/block/detailed_block.h" #include "pw_allocator/block/testing.h" #include "pw_allocator/block_allocator.h" #include "pw_status/status.h" @@ -61,13 +62,13 @@ class BlockAllocatorTestBase : public ::testing::Test { virtual ByteSpan GetBytes() = 0; /// Initialize the allocator with a 
region of memory and return it. - virtual Allocator& GetAllocator() = 0; + virtual Allocator& GetGenericAllocator() = 0; /// Initialize the allocator with a sequence of preallocated blocks and return /// it. /// /// See also ``Preallocation``. - virtual Allocator& GetAllocator( + virtual Allocator& GetGenericAllocator( std::initializer_list preallocations) = 0; /// Gets the next allocation from an allocated pointer. @@ -128,10 +129,17 @@ class BlockAllocatorTest : public BlockAllocatorTestBase { ByteSpan GetBytes() override { return bytes_; } - Allocator& GetAllocator() override; + Allocator& GetGenericAllocator() override { return GetAllocator(); } - Allocator& GetAllocator( - std::initializer_list preallocations) override; + BlockAllocatorType& GetAllocator(); + + Allocator& GetGenericAllocator( + std::initializer_list preallocations) override { + return GetAllocator(preallocations); + } + + BlockAllocatorType& GetAllocator( + std::initializer_list preallocations); void* NextAfter(size_t index) override; @@ -142,6 +150,7 @@ class BlockAllocatorTest : public BlockAllocatorTestBase { void CanExplicitlyInit(BlockAllocatorType& allocator); void IterateOverBlocks(); void CanMeasureFragmentation(); + void PoisonPeriodically(); private: BlockAllocatorType& allocator_; @@ -152,13 +161,13 @@ class BlockAllocatorTest : public BlockAllocatorTestBase { // Test fixture template method implementations. template -Allocator& BlockAllocatorTest::GetAllocator() { +BlockAllocatorType& BlockAllocatorTest::GetAllocator() { allocator_.Init(GetBytes()); return allocator_; } template -Allocator& BlockAllocatorTest::GetAllocator( +BlockAllocatorType& BlockAllocatorTest::GetAllocator( std::initializer_list preallocations) { auto* first = Preallocate(GetBytes(), preallocations); size_t index = 0; @@ -214,7 +223,7 @@ void BlockAllocatorTest::CanExplicitlyInit( template void BlockAllocatorTest::IterateOverBlocks() { - Allocator& allocator = GetAllocator({ + Allocator& allocator = GetGenericAllocator({ {kSmallOuterSize, Preallocation::kFree}, {kLargeOuterSize, Preallocation::kUsed}, {kSmallOuterSize, Preallocation::kFree}, @@ -243,7 +252,7 @@ void BlockAllocatorTest::IterateOverBlocks() { template void BlockAllocatorTest::CanMeasureFragmentation() { - Allocator& allocator = GetAllocator({ + Allocator& allocator = GetGenericAllocator({ {0x020, Preallocation::kFree}, {0x040, Preallocation::kUsed}, {0x080, Preallocation::kFree}, @@ -270,4 +279,44 @@ void BlockAllocatorTest::CanMeasureFragmentation() { EXPECT_EQ(fragmentation.sum, sum); } +template +void BlockAllocatorTest::PoisonPeriodically() { + // Allocate 8 blocks to prevent every other from being merged when freed. + Allocator& allocator = GetGenericAllocator({ + {kSmallOuterSize, Preallocation::kUsed}, + {kSmallOuterSize, Preallocation::kUsed}, + {kSmallOuterSize, Preallocation::kUsed}, + {kSmallOuterSize, Preallocation::kUsed}, + {kSmallOuterSize, Preallocation::kUsed}, + {kSmallOuterSize, Preallocation::kUsed}, + {kSmallOuterSize, Preallocation::kUsed}, + {Preallocation::kSizeRemaining, Preallocation::kUsed}, + }); + ASSERT_LT(BlockType::kPoisonOffset, kSmallInnerSize); + + // Since the test poisons blocks, it cannot iterate over the blocks without + // crashing. Use `Fetch` instead. 
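The expectation at i == 6 below follows from the test configuration: with PW_ALLOCATOR_BLOCK_POISON_INTERVAL=4, the loop's fourth deallocation (i = 0, 2, 4, then 6) is the first one poisoned, per the counter in DoDeallocate:

    // From BlockAllocator::DoDeallocate in this patch: poison every
    // kPoisonInterval-th freed block.
    ++unpoisoned_;
    if (unpoisoned_ >= kPoisonInterval) {
      block->Poison();
      unpoisoned_ = 0;
    }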
+ for (size_t i = 0; i < 8; ++i) { + if (i % 2 != 0) { + continue; + } + auto* bytes = cpp20::bit_cast(Fetch(i)); + auto* block = BlockType::FromUsableSpace(bytes); + allocator.Deallocate(bytes); + EXPECT_TRUE(block->IsFree()); + EXPECT_TRUE(block->IsValid()); + bytes[BlockType::kPoisonOffset] = ~bytes[BlockType::kPoisonOffset]; + + if (i == 6) { + // The test_config is defined to only detect corruption is on every fourth + // freed block. Fix up the block to avoid crashing on teardown. + EXPECT_FALSE(block->IsValid()); + bytes[BlockType::kPoisonOffset] = ~bytes[BlockType::kPoisonOffset]; + } else { + EXPECT_TRUE(block->IsValid()); + } + Store(i, nullptr); + } +} + } // namespace pw::allocator::test diff --git a/pw_allocator/public/pw_allocator/bucket.h b/pw_allocator/public/pw_allocator/bucket.h index f36ab3d10d..308faaed43 100644 --- a/pw_allocator/public/pw_allocator/bucket.h +++ b/pw_allocator/public/pw_allocator/bucket.h @@ -20,7 +20,7 @@ #include "pw_function/function.h" #include "pw_span/span.h" -namespace pw::allocator::internal { +namespace pw::allocator { /// Doubly linked list of free memory regions, or "chunks", of a maximum size or /// less. @@ -120,4 +120,4 @@ class Bucket final { size_t chunk_size_; }; -} // namespace pw::allocator::internal +} // namespace pw::allocator diff --git a/pw_allocator/public/pw_allocator/bucket_allocator.h b/pw_allocator/public/pw_allocator/bucket_allocator.h index f643ed3bed..991c56bf6d 100644 --- a/pw_allocator/public/pw_allocator/bucket_allocator.h +++ b/pw_allocator/public/pw_allocator/bucket_allocator.h @@ -24,6 +24,11 @@ namespace pw::allocator { +/// Alias for a default block type that is compatible with +/// `BucketAllocator`. +template +using BucketBlock = DetailedBlock; + /// Block allocator that uses sized buckets of free blocks. /// /// In this strategy, the allocator handles an allocation request by starting @@ -49,14 +54,19 @@ namespace pw::allocator { /// /// Note that since this allocator stores information in free chunks, it does /// not currently support poisoning. -template -class BucketAllocator : public BlockAllocator { - public: - using Base = BlockAllocator; - using BlockType = typename Base::BlockType; +template , + size_t kMinBucketChunkSize = 32, + size_t kNumBuckets = 5> +class BucketAllocator : public BlockAllocator { + private: + using Base = BlockAllocator; + public: /// Constexpr constructor. Callers must explicitly call `Init`. - constexpr BucketAllocator() : Base() {} + constexpr BucketAllocator() { + Bucket::Init(span(buckets_.data(), buckets_.size() - 1), + kMinBucketChunkSize); + } /// Non-constexpr constructor that automatically calls `Init`. 
/// @@ -68,31 +78,11 @@ class BucketAllocator : public BlockAllocator { Base::Init(region); } - /// @copydoc BlockAllocator::Init - void Init(ByteSpan region) { Base::Init(region); } - - /// @copydoc BlockAllocator::Init - void Init(BlockType* begin) { Base::Init(begin); } - - /// @copydoc BlockAllocator::Init - void Init(BlockType* begin, BlockType* end) override { - Base::Init(begin, end); - internal::Bucket::Init(span(buckets_.data(), buckets_.size() - 1), - kMinBucketChunkSize); - buckets_.back().Init(); - for (auto* block : Base::blocks()) { - if (block->IsFree()) { - RecycleBlock(block); - } - } - } - private: /// @copydoc BlockAllocator::ChooseBlock BlockResult ChooseBlock(Layout layout) override { - layout = - Layout(std::max(layout.size(), sizeof(internal::Bucket::Chunk)), - std::max(layout.alignment(), alignof(internal::Bucket::Chunk))); + layout = Layout(std::max(layout.size(), sizeof(Bucket::Chunk)), + std::max(layout.alignment(), alignof(Bucket::Chunk))); for (auto& bucket : buckets_) { if (bucket.chunk_size() < layout.size()) { continue; @@ -111,31 +101,31 @@ class BucketAllocator : public BlockAllocator { } /// @copydoc BlockAllocator::ReserveBlock - void ReserveBlock(BlockType* block) override { - PW_ASSERT(block->IsFree()); - size_t inner_size = block->InnerSize(); - if (inner_size < sizeof(internal::Bucket::Chunk)) { + void ReserveBlock(BlockType& block) override { + PW_ASSERT(block.IsFree()); + size_t inner_size = block.InnerSize(); + if (inner_size < sizeof(Bucket::Chunk)) { return; } - internal::Bucket::Remove(block->UsableSpace()); + Bucket::Remove(block.UsableSpace()); } /// @copydoc BlockAllocator::RecycleBlock - void RecycleBlock(BlockType* block) override { - PW_ASSERT(block->IsFree()); - size_t inner_size = block->InnerSize(); - if (inner_size < sizeof(internal::Bucket::Chunk)) { + void RecycleBlock(BlockType& block) override { + PW_ASSERT(block.IsFree()); + size_t inner_size = block.InnerSize(); + if (inner_size < sizeof(Bucket::Chunk)) { return; } for (auto& bucket : buckets_) { if (inner_size <= bucket.chunk_size()) { - bucket.Add(block->UsableSpace()); + bucket.Add(block.UsableSpace()); break; } } } - std::array buckets_; + std::array buckets_; }; } // namespace pw::allocator diff --git a/pw_allocator/public/pw_allocator/bucket_block_allocator.h b/pw_allocator/public/pw_allocator/bucket_block_allocator.h index 4a8dd5c355..68788c9f1d 100644 --- a/pw_allocator/public/pw_allocator/bucket_block_allocator.h +++ b/pw_allocator/public/pw_allocator/bucket_block_allocator.h @@ -20,7 +20,6 @@ #include "pw_allocator/config.h" namespace pw::allocator { -namespace internal {} /// Legacy alias for `BucketAllocator`. /// @@ -31,10 +30,10 @@ namespace internal {} /// the `BlockAllocator`. Since the free blocks' usable space is interpreted as /// intrusive items, they must have a pointer-compatible alignment. As a result, /// the first template parameter is ignored. 
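In terms of usage, the deprecated alias maps the legacy spelling onto the block-typed form roughly as follows (template arguments reconstructed for illustration):

    // Legacy: parameterized on an offset type (poison interval dropped).
    pw::allocator::BucketBlockAllocator<uint16_t> legacy(buffer);
    // Post-change equivalent: parameterized on the block type.
    pw::allocator::BucketAllocator<pw::allocator::BucketBlock<uint16_t>>
        current(buffer);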
-template using BucketBlockAllocator PW_ALLOCATOR_DEPRECATED = - BucketAllocator; + BucketAllocator, kMinBucketChunkSize, kNumBuckets>; } // namespace pw::allocator diff --git a/pw_allocator/public/pw_allocator/buddy_allocator.h b/pw_allocator/public/pw_allocator/buddy_allocator.h index 7b6826c248..55baaa8393 100644 --- a/pw_allocator/public/pw_allocator/buddy_allocator.h +++ b/pw_allocator/public/pw_allocator/buddy_allocator.h @@ -151,7 +151,7 @@ class BuddyAllocator : public Allocator { } } - std::array buckets_; + std::array buckets_; internal::GenericBuddyAllocator impl_; }; diff --git a/pw_allocator/public/pw_allocator/dual_first_fit_block_allocator.h b/pw_allocator/public/pw_allocator/dual_first_fit_block_allocator.h index 9fc4b8dbfb..4915576d08 100644 --- a/pw_allocator/public/pw_allocator/dual_first_fit_block_allocator.h +++ b/pw_allocator/public/pw_allocator/dual_first_fit_block_allocator.h @@ -13,11 +13,16 @@ // the License. #pragma once +#include "pw_allocator/block/detailed_block.h" #include "pw_allocator/block_allocator.h" -#include "pw_allocator/config.h" namespace pw::allocator { +/// Alias for a default block type that is compatible with +/// `DualFirstFitBlockAllocator`. +template +using DualFirstFitBlock = DetailedBlock; + /// Block allocator that uses a "dual first-fit" allocation strategy split /// between large and small allocations. /// @@ -27,25 +32,29 @@ namespace pw::allocator { /// /// This algorithm approaches the performance of `FirstFit` and `LastFit` while /// improving on those algorithms fragmentation. -template +template class DualFirstFitBlockAllocator - : public BlockAllocator { + : public BlockAllocator> { public: - using Base = BlockAllocator; - using BlockType = typename Base::BlockType; + using BlockType = DualFirstFitBlock; + + private: + using Base = BlockAllocator; + public: /// Constexpr constructor. Callers must explicitly call `Init`. - constexpr DualFirstFitBlockAllocator() : Base() {} + constexpr DualFirstFitBlockAllocator() = default; /// Non-constexpr constructor that automatically calls `Init`. /// - /// @param[in] region Region of memory to use when satisfying allocation - /// requests. The region MUST be large enough to fit an - /// aligned block with overhead. It MUST NOT be larger - /// than what is addressable by `OffsetType`. + /// @param[in] region Region of memory to use when satisfying allocation + /// requests. The region MUST be valid as an argument + /// to `BlockType::Init`. + /// @param[in] threshold Value for which requests are considered "large". DualFirstFitBlockAllocator(ByteSpan region, size_t threshold) - : Base(region), threshold_(threshold) {} + : threshold_(threshold) { + Base::Init(region); + } /// Sets the threshold value for which requests are considered "large". void set_threshold(size_t threshold) { threshold_ = threshold; } diff --git a/pw_allocator/public/pw_allocator/first_fit_block_allocator.h b/pw_allocator/public/pw_allocator/first_fit_block_allocator.h index 0cf1366f54..5c0980c093 100644 --- a/pw_allocator/public/pw_allocator/first_fit_block_allocator.h +++ b/pw_allocator/public/pw_allocator/first_fit_block_allocator.h @@ -13,11 +13,16 @@ // the License. #pragma once +#include "pw_allocator/block/detailed_block.h" #include "pw_allocator/block_allocator.h" -#include "pw_allocator/config.h" namespace pw::allocator { +/// Alias for a default block type that is compatible with +/// `FirstFitBlockAllocator`. 
+template +using FirstFitBlock = DetailedBlock; + /// Block allocator that uses a "first-fit" allocation strategy. /// /// In this strategy, the allocator handles an allocation request by starting at @@ -27,24 +32,25 @@ namespace pw::allocator { /// This strategy may result in slightly worse fragmentation than the /// corresponding "last-fit" strategy, since the alignment may result in unused /// fragments both before and after an allocated block. -template +template class FirstFitBlockAllocator - : public BlockAllocator { + : public BlockAllocator> { public: - using Base = BlockAllocator; - using BlockType = typename Base::BlockType; + using BlockType = FirstFitBlock; + + private: + using Base = BlockAllocator; + public: /// Constexpr constructor. Callers must explicitly call `Init`. - constexpr FirstFitBlockAllocator() : Base() {} + constexpr FirstFitBlockAllocator() = default; /// Non-constexpr constructor that automatically calls `Init`. /// /// @param[in] region Region of memory to use when satisfying allocation - /// requests. The region MUST be large enough to fit an - /// aligned block with overhead. It MUST NOT be larger - /// than what is addressable by `OffsetType`. - explicit FirstFitBlockAllocator(ByteSpan region) : Base(region) {} + /// requests. The region MUST be valid as an argument to + /// `BlockType::Init`. + explicit FirstFitBlockAllocator(ByteSpan region) { Base::Init(region); } private: /// @copydoc Allocator::Allocate diff --git a/pw_allocator/public/pw_allocator/freelist_heap.h b/pw_allocator/public/pw_allocator/freelist_heap.h index 90d2be1ed2..58ece818c5 100644 --- a/pw_allocator/public/pw_allocator/freelist_heap.h +++ b/pw_allocator/public/pw_allocator/freelist_heap.h @@ -22,10 +22,10 @@ namespace pw::allocator { -/// Legacy interface to BucketBlockAllocator. +/// Legacy interface to BucketAllocator. /// /// This interface is deprecated, and is only maintained for compatibility -/// reasons. New projects should use ``BucketBlockAllocator``. +/// reasons. New projects should use ``BucketAllocator``. template class FreeListHeapBuffer { public: @@ -50,8 +50,9 @@ class FreeListHeapBuffer { } private: + using BlockType = BucketBlock<>; static constexpr size_t kMinChunkSize = 16; - BucketAllocator allocator_; + BucketAllocator allocator_; }; } // namespace pw::allocator diff --git a/pw_allocator/public/pw_allocator/last_fit_block_allocator.h b/pw_allocator/public/pw_allocator/last_fit_block_allocator.h index d83302f5ab..2dc2beb5a3 100644 --- a/pw_allocator/public/pw_allocator/last_fit_block_allocator.h +++ b/pw_allocator/public/pw_allocator/last_fit_block_allocator.h @@ -13,11 +13,16 @@ // the License. #pragma once +#include "pw_allocator/block/detailed_block.h" #include "pw_allocator/block_allocator.h" -#include "pw_allocator/config.h" namespace pw::allocator { +/// Alias for a default block type that is compatible with +/// `LastFitBlockAllocator`. +template +using LastFitBlock = DetailedBlock; + /// Block allocator that uses a "last-fit" allocation strategy. /// /// In this strategy, the allocator handles an allocation request by starting at @@ -27,24 +32,24 @@ namespace pw::allocator { /// This strategy may result in slightly better fragmentation than the /// corresponding "first-fit" strategy, since even with alignment it will result /// in at most one unused fragment before the allocated block. 
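Each of these wrapper allocators fixes a DetailedBlock-based block type and derives from BlockAllocator on it, so callers still select only an offset type. A usage sketch (the buffer size and uint16_t offsets are assumptions, not from this CL):

    #include <array>
    #include <cstddef>

    // 16-bit offsets shrink per-block overhead; the region must be valid
    // as an argument to BlockType::Init.
    std::array<std::byte, 1024> buffer;
    pw::allocator::LastFitBlockAllocator<uint16_t> allocator(buffer);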
diff --git a/pw_allocator/public/pw_allocator/last_fit_block_allocator.h b/pw_allocator/public/pw_allocator/last_fit_block_allocator.h
index d83302f5ab..2dc2beb5a3 100644
--- a/pw_allocator/public/pw_allocator/last_fit_block_allocator.h
+++ b/pw_allocator/public/pw_allocator/last_fit_block_allocator.h
@@ -13,11 +13,16 @@
 // the License.
 #pragma once
 
+#include "pw_allocator/block/detailed_block.h"
 #include "pw_allocator/block_allocator.h"
-#include "pw_allocator/config.h"
 
 namespace pw::allocator {
 
+/// Alias for a default block type that is compatible with
+/// `LastFitBlockAllocator`.
+template <typename OffsetType = uintptr_t>
+using LastFitBlock = DetailedBlock<OffsetType>;
+
 /// Block allocator that uses a "last-fit" allocation strategy.
 ///
 /// In this strategy, the allocator handles an allocation request by starting at
 /// the end of the range of blocks and looking for the last one which can
 /// satisfy the request.
 ///
 /// This strategy may result in slightly better fragmentation than the
 /// corresponding "first-fit" strategy, since even with alignment it will result
 /// in at most one unused fragment before the allocated block.
-template <typename OffsetType = uintptr_t, uint16_t kPoisonInterval = 0>
-class LastFitBlockAllocator
-    : public BlockAllocator<OffsetType, kPoisonInterval> {
+template <typename OffsetType = uintptr_t>
+class LastFitBlockAllocator : public BlockAllocator<LastFitBlock<OffsetType>> {
  public:
-  using Base = BlockAllocator<OffsetType, kPoisonInterval>;
-  using BlockType = typename Base::BlockType;
+  using BlockType = LastFitBlock<OffsetType>;
+
+ private:
+  using Base = BlockAllocator<BlockType>;
+
+ public:
   /// Constexpr constructor. Callers must explicitly call `Init`.
-  constexpr LastFitBlockAllocator() : Base() {}
+  constexpr LastFitBlockAllocator() = default;
 
   /// Non-constexpr constructor that automatically calls `Init`.
   ///
   /// @param[in] region  Region of memory to use when satisfying allocation
-  ///                    requests. The region MUST be large enough to fit an
-  ///                    aligned block with overhead. It MUST NOT be larger
-  ///                    than what is addressable by `OffsetType`.
-  explicit LastFitBlockAllocator(ByteSpan region) : Base(region) {}
+  ///                    requests. The region MUST be valid as an argument to
+  ///                    `BlockType::Init`.
+  explicit LastFitBlockAllocator(ByteSpan region) { Base::Init(region); }
 
  private:
   /// @copydoc Allocator::Allocate
diff --git a/pw_allocator/public/pw_allocator/testing.h b/pw_allocator/public/pw_allocator/testing.h
index f8f9a82a9a..561816fdfa 100644
--- a/pw_allocator/public/pw_allocator/testing.h
+++ b/pw_allocator/public/pw_allocator/testing.h
@@ -17,6 +17,7 @@
 #include <cstddef>
 
 #include "pw_allocator/allocator.h"
+#include "pw_allocator/block/detailed_block.h"
 #include "pw_allocator/buffer.h"
 #include "pw_allocator/config.h"
 #include "pw_allocator/first_fit_block_allocator.h"
@@ -70,7 +71,7 @@ template
 class AllocatorForTest : public Allocator {
  public:
   using AllocatorType = FirstFitBlockAllocator;
-  using BlockType = AllocatorType::BlockType;
+  using BlockType = typename AllocatorType::BlockType;
 
   AllocatorForTest()
       : Allocator(AllocatorType::kCapabilities), tracker_(kToken, *allocator_) {
@@ -83,8 +84,8 @@
     allocator_->Reset();
   }
 
-  AllocatorType::Range blocks() const { return allocator_->blocks(); }
-  AllocatorType::Range blocks() { return allocator_->blocks(); }
+  typename BlockType::Range blocks() const { return allocator_->blocks(); }
+  typename BlockType::Range blocks() { return allocator_->blocks(); }
 
   const metric::Group& metric_group() const { return tracker_.metric_group(); }
   metric::Group& metric_group() { return tracker_.metric_group(); }
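A sketch of how a test might use the updated accessor, which now returns the block type's own range (the 256-byte capacity, the `InspectBlocks` wrapper, and the iteration details are assumptions for illustration):

    #include "pw_allocator/testing.h"

    void InspectBlocks() {
      pw::allocator::test::AllocatorForTest<256> allocator;
      // blocks() yields pointers to the allocator's BlockType.
      for (auto* block : allocator.blocks()) {
        (void)block;  // A real test would assert on each block's state here.
      }
    }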
diff --git a/pw_allocator/public/pw_allocator/worst_fit_block_allocator.h b/pw_allocator/public/pw_allocator/worst_fit_block_allocator.h
index 6498801c35..624a90aef3 100644
--- a/pw_allocator/public/pw_allocator/worst_fit_block_allocator.h
+++ b/pw_allocator/public/pw_allocator/worst_fit_block_allocator.h
@@ -13,7 +13,7 @@
 // the License.
 #pragma once
 
-#include "pw_allocator/block/allocatable.h"
+#include "pw_allocator/block/detailed_block.h"
 #include "pw_allocator/block_allocator.h"
 #include "pw_allocator/config.h"
 #include "pw_preprocessor/compiler.h"
@@ -21,6 +21,11 @@
 
 namespace pw::allocator {
 
+/// Alias for a default block type that is compatible with
+/// `WorstFitBlockAllocator`.
+template <typename OffsetType = uintptr_t>
+using WorstFitBlock = DetailedBlock<OffsetType>;
+
 /// Block allocator that uses a "worst-fit" allocation strategy.
 ///
 /// In this strategy, the allocator handles an allocation request by looking at
 /// all unused blocks and finding the biggest one which can satisfy the
 /// request.
 ///
 /// This algorithm may lead to less fragmentation as any unused fragments are
 /// more likely to be large enough to be useful to other requests.
-template <typename OffsetType = uintptr_t, uint16_t kPoisonInterval = 0>
+template <typename OffsetType = uintptr_t>
 class WorstFitBlockAllocator
-    : public BlockAllocator<OffsetType, kPoisonInterval> {
+    : public BlockAllocator<WorstFitBlock<OffsetType>> {
  public:
-  using Base = BlockAllocator<OffsetType, kPoisonInterval>;
-  using BlockType = typename Base::BlockType;
+  using BlockType = WorstFitBlock<OffsetType>;
+
+ private:
+  using Base = BlockAllocator<BlockType>;
+
+ public:
   /// Constexpr constructor. Callers must explicitly call `Init`.
-  constexpr WorstFitBlockAllocator() : Base() {}
+  constexpr WorstFitBlockAllocator() = default;
 
   /// Non-constexpr constructor that automatically calls `Init`.
   ///
   /// @param[in] region  Region of memory to use when satisfying allocation
-  ///                    requests. The region MUST be large enough to fit an
-  ///                    aligned block with overhead. It MUST NOT be larger
-  ///                    than what is addressable by `OffsetType`.
-  explicit WorstFitBlockAllocator(ByteSpan region) : Base(region) {}
+  ///                    requests. The region MUST be valid as an argument to
+  ///                    `BlockType::Init`.
+  explicit WorstFitBlockAllocator(ByteSpan region) { Base::Init(region); }
 
  private:
   /// @copydoc Allocator::Allocate
diff --git a/pw_allocator/size_report/best_fit_block_allocator.cc b/pw_allocator/size_report/best_fit_block_allocator.cc
index 70ef893b29..7df6a9b72e 100644
--- a/pw_allocator/size_report/best_fit_block_allocator.cc
+++ b/pw_allocator/size_report/best_fit_block_allocator.cc
@@ -20,7 +20,7 @@ int main() {
   pw::allocator::SizeReporter reporter;
   reporter.SetBaseline();
 
-  pw::allocator::BestFitBlockAllocator<uintptr_t> allocator(reporter.buffer());
+  pw::allocator::BestFitBlockAllocator<> allocator(reporter.buffer());
   reporter.Measure(allocator);
 
   return 0;
diff --git a/pw_allocator/size_report/bucket_allocator.cc b/pw_allocator/size_report/bucket_allocator.cc
index 6b536b4bff..ce49bd8988 100644
--- a/pw_allocator/size_report/bucket_allocator.cc
+++ b/pw_allocator/size_report/bucket_allocator.cc
@@ -17,10 +17,12 @@
 #include "pw_allocator/size_reporter.h"
 
 int main() {
+  using BlockType = ::pw::allocator::BucketBlock<>;
+
   pw::allocator::SizeReporter reporter;
   reporter.SetBaseline();
 
-  pw::allocator::BucketAllocator<4, 8> allocator(reporter.buffer());
+  pw::allocator::BucketAllocator<BlockType, 4, 8> allocator(reporter.buffer());
   reporter.Measure(allocator);
 
   return 0;
diff --git a/pw_allocator/size_report/dual_first_fit_block_allocator.cc b/pw_allocator/size_report/dual_first_fit_block_allocator.cc
index b23a0404bb..01961d21bc 100644
--- a/pw_allocator/size_report/dual_first_fit_block_allocator.cc
+++ b/pw_allocator/size_report/dual_first_fit_block_allocator.cc
@@ -21,8 +21,8 @@ int main() {
   reporter.SetBaseline();
 
   constexpr size_t kThreshold = 128;
-  pw::allocator::DualFirstFitBlockAllocator<uintptr_t> allocator(
-      reporter.buffer(), kThreshold);
+  pw::allocator::DualFirstFitBlockAllocator<> allocator(reporter.buffer(),
+                                                        kThreshold);
   reporter.Measure(allocator);
 
   return 0;
diff --git a/pw_allocator/size_report/fallback_allocator.cc b/pw_allocator/size_report/fallback_allocator.cc
index 04727a1dc4..fe698d60d0 100644
--- a/pw_allocator/size_report/fallback_allocator.cc
+++ b/pw_allocator/size_report/fallback_allocator.cc
@@ -22,8 +22,8 @@ int main() {
   reporter.SetBaseline();
 
   std::array buffer;
-  pw::allocator::FirstFitBlockAllocator<uintptr_t> primary(reporter.buffer());
-  pw::allocator::FirstFitBlockAllocator<uintptr_t> secondary(buffer);
+  pw::allocator::FirstFitBlockAllocator<> primary(reporter.buffer());
+  pw::allocator::FirstFitBlockAllocator<> secondary(buffer);
 
   reporter.Measure(secondary);
   reporter.Measure(primary);
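The two size reports above compose a primary and a secondary allocator. As a usage sketch (buffer sizes and the `Example` wrapper are arbitrary, and the two-allocator constructor is assumed from `pw_allocator/fallback_allocator.h`):

    #include <array>
    #include <cstddef>

    #include "pw_allocator/fallback_allocator.h"
    #include "pw_allocator/first_fit_block_allocator.h"

    void Example() {
      std::array<std::byte, 256> primary_buffer;
      std::array<std::byte, 1024> secondary_buffer;
      pw::allocator::FirstFitBlockAllocator<> primary(primary_buffer);
      pw::allocator::FirstFitBlockAllocator<> secondary(secondary_buffer);
      // Requests that fail in `primary` are retried in `secondary`.
      pw::allocator::FallbackAllocator allocator(primary, secondary);
    }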
diff --git a/pw_allocator/size_report/fallback_allocator_base.cc b/pw_allocator/size_report/fallback_allocator_base.cc
index f4c5b6e446..390e288861 100644
--- a/pw_allocator/size_report/fallback_allocator_base.cc
+++ b/pw_allocator/size_report/fallback_allocator_base.cc
@@ -20,8 +20,8 @@ int main() {
   reporter.SetBaseline();
 
   std::array buffer;
-  pw::allocator::FirstFitBlockAllocator<uintptr_t> primary(reporter.buffer());
-  pw::allocator::FirstFitBlockAllocator<uintptr_t> secondary(buffer);
+  pw::allocator::FirstFitBlockAllocator<> primary(reporter.buffer());
+  pw::allocator::FirstFitBlockAllocator<> secondary(buffer);
 
   reporter.Measure(primary);
   reporter.Measure(secondary);
diff --git a/pw_allocator/size_report/first_fit_block_allocator.cc b/pw_allocator/size_report/first_fit_block_allocator.cc
index 61c1615061..f25978aca8 100644
--- a/pw_allocator/size_report/first_fit_block_allocator.cc
+++ b/pw_allocator/size_report/first_fit_block_allocator.cc
@@ -20,7 +20,7 @@ int main() {
   pw::allocator::SizeReporter reporter;
   reporter.SetBaseline();
 
-  pw::allocator::FirstFitBlockAllocator<uintptr_t> allocator(reporter.buffer());
+  pw::allocator::FirstFitBlockAllocator<> allocator(reporter.buffer());
   reporter.Measure(allocator);
 
   return 0;
diff --git a/pw_allocator/size_report/last_fit_block_allocator.cc b/pw_allocator/size_report/last_fit_block_allocator.cc
index c7d69c6724..3c28318996 100644
--- a/pw_allocator/size_report/last_fit_block_allocator.cc
+++ b/pw_allocator/size_report/last_fit_block_allocator.cc
@@ -20,7 +20,7 @@ int main() {
   pw::allocator::SizeReporter reporter;
   reporter.SetBaseline();
 
-  pw::allocator::LastFitBlockAllocator<uintptr_t> allocator(reporter.buffer());
+  pw::allocator::LastFitBlockAllocator<> allocator(reporter.buffer());
   reporter.Measure(allocator);
 
   return 0;
diff --git a/pw_allocator/size_report/pmr_allocator.cc b/pw_allocator/size_report/pmr_allocator.cc
index e55b27d333..f3d575f992 100644
--- a/pw_allocator/size_report/pmr_allocator.cc
+++ b/pw_allocator/size_report/pmr_allocator.cc
@@ -26,7 +26,7 @@ int main() {
   pw::allocator::SizeReporter reporter;
   reporter.SetBaseline();
 
-  pw::allocator::FirstFitBlockAllocator<uintptr_t> base(reporter.buffer());
+  pw::allocator::FirstFitBlockAllocator<> base(reporter.buffer());
   pw::allocator::PmrAllocator allocator(base);
   std::pmr::vector vec(allocator);
   vec.emplace_back(1);
diff --git a/pw_allocator/size_report/pmr_allocator_base.cc b/pw_allocator/size_report/pmr_allocator_base.cc
index f8bf5c6e99..2551f89128 100644
--- a/pw_allocator/size_report/pmr_allocator_base.cc
+++ b/pw_allocator/size_report/pmr_allocator_base.cc
@@ -24,7 +24,7 @@ int main() {
   pw::allocator::SizeReporter reporter;
   reporter.SetBaseline();
 
-  pw::allocator::FirstFitBlockAllocator<uintptr_t> base(reporter.buffer());
+  pw::allocator::FirstFitBlockAllocator<> base(reporter.buffer());
   std::vector vec;
   auto* bar = base.New(1);
   vec.emplace_back(std::move(*bar));
diff --git a/pw_allocator/size_report/synchronized_allocator_isl.cc b/pw_allocator/size_report/synchronized_allocator_isl.cc
index f689cc718c..ccba686b39 100644
--- a/pw_allocator/size_report/synchronized_allocator_isl.cc
+++ b/pw_allocator/size_report/synchronized_allocator_isl.cc
@@ -21,7 +21,7 @@ int main() {
   pw::allocator::SizeReporter reporter;
   reporter.SetBaseline();
 
-  pw::allocator::FirstFitBlockAllocator<uintptr_t> allocator(reporter.buffer());
+  pw::allocator::FirstFitBlockAllocator<> allocator(reporter.buffer());
   pw::allocator::SynchronizedAllocator<pw::sync::InterruptSpinLock> synchronized(
       allocator);
 
   reporter.Measure(synchronized);
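A sketch of the synchronized wrapper measured by these reports (the region size and `Example` wrapper are arbitrary; the lock type is the interrupt spin lock from the report above):

    #include <array>
    #include <cstddef>

    #include "pw_allocator/first_fit_block_allocator.h"
    #include "pw_allocator/synchronized_allocator.h"
    #include "pw_sync/interrupt_spin_lock.h"

    void Example() {
      std::array<std::byte, 1024> buffer;
      pw::allocator::FirstFitBlockAllocator<> allocator(buffer);
      // pw::sync::Mutex is the other lock type measured by these reports.
      pw::allocator::SynchronizedAllocator<pw::sync::InterruptSpinLock>
          synchronized(allocator);
    }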
diff --git a/pw_allocator/size_report/synchronized_allocator_mutex.cc b/pw_allocator/size_report/synchronized_allocator_mutex.cc
index 8b3e55d1cd..98e9edab5f 100644
--- a/pw_allocator/size_report/synchronized_allocator_mutex.cc
+++ b/pw_allocator/size_report/synchronized_allocator_mutex.cc
@@ -21,7 +21,7 @@ int main() {
   pw::allocator::SizeReporter reporter;
   reporter.SetBaseline();
 
-  pw::allocator::FirstFitBlockAllocator<uintptr_t> allocator(reporter.buffer());
+  pw::allocator::FirstFitBlockAllocator<> allocator(reporter.buffer());
   pw::allocator::SynchronizedAllocator<pw::sync::Mutex> synchronized(allocator);
 
   reporter.Measure(synchronized);
diff --git a/pw_allocator/size_report/tracking_allocator_all_metrics.cc b/pw_allocator/size_report/tracking_allocator_all_metrics.cc
index b1b0e6d713..edb4794d5a 100644
--- a/pw_allocator/size_report/tracking_allocator_all_metrics.cc
+++ b/pw_allocator/size_report/tracking_allocator_all_metrics.cc
@@ -21,7 +21,7 @@ int main() {
   pw::allocator::SizeReporter reporter;
   reporter.SetBaseline();
 
-  pw::allocator::FirstFitBlockAllocator<uintptr_t> allocator(reporter.buffer());
+  pw::allocator::FirstFitBlockAllocator<> allocator(reporter.buffer());
   pw::allocator::TrackingAllocator tracker(
       1, allocator);
   reporter.Measure(tracker);
diff --git a/pw_allocator/size_report/tracking_allocator_no_metrics.cc b/pw_allocator/size_report/tracking_allocator_no_metrics.cc
index 9b189cc476..2daa8e45f5 100644
--- a/pw_allocator/size_report/tracking_allocator_no_metrics.cc
+++ b/pw_allocator/size_report/tracking_allocator_no_metrics.cc
@@ -21,7 +21,7 @@ int main() {
   pw::allocator::SizeReporter reporter;
   reporter.SetBaseline();
 
-  pw::allocator::FirstFitBlockAllocator<uintptr_t> allocator(reporter.buffer());
+  pw::allocator::FirstFitBlockAllocator<> allocator(reporter.buffer());
   pw::allocator::TrackingAllocator tracker(1, allocator);
 
   reporter.Measure(tracker);
diff --git a/pw_allocator/size_report/worst_fit_block_allocator.cc b/pw_allocator/size_report/worst_fit_block_allocator.cc
index ab2133fa2c..b186a31211 100644
--- a/pw_allocator/size_report/worst_fit_block_allocator.cc
+++ b/pw_allocator/size_report/worst_fit_block_allocator.cc
@@ -20,7 +20,7 @@ int main() {
   pw::allocator::SizeReporter reporter;
   reporter.SetBaseline();
 
-  pw::allocator::WorstFitBlockAllocator<uintptr_t> allocator(reporter.buffer());
+  pw::allocator::WorstFitBlockAllocator<> allocator(reporter.buffer());
   reporter.Measure(allocator);
 
   return 0;
diff --git a/pw_allocator/tracking_allocator_test.cc b/pw_allocator/tracking_allocator_test.cc
index e354a972f4..824bcd0efa 100644
--- a/pw_allocator/tracking_allocator_test.cc
+++ b/pw_allocator/tracking_allocator_test.cc
@@ -45,7 +45,7 @@ class TrackingAllocatorForTest : public TrackingAllocator {
 
 class TrackingAllocatorTest : public ::testing::Test {
  protected:
   using AllocatorType = ::pw::allocator::FirstFitBlockAllocator;
-  using BlockType = AllocatorType::BlockType;
+  using BlockType = typename AllocatorType::BlockType;
 
   constexpr static size_t kCapacity = 256;
   constexpr static pw::metric::Token kToken = 1U;
diff --git a/pw_allocator/worst_fit_block_allocator_test.cc b/pw_allocator/worst_fit_block_allocator_test.cc
index 691616700c..0b2b24fd87 100644
--- a/pw_allocator/worst_fit_block_allocator_test.cc
+++ b/pw_allocator/worst_fit_block_allocator_test.cc
@@ -122,4 +122,6 @@ TEST_F(WorstFitBlockAllocatorTest, CanMeasureFragmentation) {
   CanMeasureFragmentation();
 }
 
+TEST_F(WorstFitBlockAllocatorTest, PoisonPeriodically) { PoisonPeriodically(); }
+
 }  // namespace
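The new `PoisonPeriodically` tests exercise the poison interval, which after this change comes from the module configuration rather than a template argument. A hypothetical project would opt in with a compile-time define (the value 8 is an arbitrary example):

    // Hypothetical build setting, e.g. passed as a define to the
    // pw_allocator config target:
    //   -DPW_ALLOCATOR_BLOCK_POISON_INTERVAL=8
    // Every 8th freed block is then poisoned, and its poisoning is verified
    // when the block is next allocated.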
diff --git a/pw_malloc/best_fit_block_allocator.cc b/pw_malloc/best_fit_block_allocator.cc
index 831e153f24..7a82d6c15f 100644
--- a/pw_malloc/best_fit_block_allocator.cc
+++ b/pw_malloc/best_fit_block_allocator.cc
@@ -20,8 +20,7 @@
 namespace pw::malloc {
 
 using BestFitBlockAllocator =
-    ::pw::allocator::BestFitBlockAllocator<PW_MALLOC_BLOCK_OFFSET_TYPE,
-                                           PW_MALLOC_BLOCK_POISON_INTERVAL>;
+    ::pw::allocator::BestFitBlockAllocator<PW_MALLOC_BLOCK_OFFSET_TYPE>;
 
 void InitSystemAllocator(ByteSpan heap) {
   InitSystemAllocator<BestFitBlockAllocator>(heap);
diff --git a/pw_malloc/bucket_allocator.cc b/pw_malloc/bucket_allocator.cc
index 74e645a680..00a333b837 100644
--- a/pw_malloc/bucket_allocator.cc
+++ b/pw_malloc/bucket_allocator.cc
@@ -19,8 +19,10 @@
 
 namespace pw::malloc {
 
+using BlockType = ::pw::allocator::BucketBlock<PW_MALLOC_BLOCK_OFFSET_TYPE>;
 using BucketAllocator =
-    ::pw::allocator::BucketAllocator<PW_MALLOC_BLOCK_OFFSET_TYPE,
-                                     PW_MALLOC_MIN_BUCKET_SIZE,
-                                     PW_MALLOC_NUM_BUCKETS>;
+    ::pw::allocator::BucketAllocator<BlockType,
+                                     PW_MALLOC_MIN_BUCKET_SIZE,
+                                     PW_MALLOC_NUM_BUCKETS>;
 
 void InitSystemAllocator(ByteSpan heap) {
diff --git a/pw_malloc/docs.rst b/pw_malloc/docs.rst
index d9a3df7820..91a043f35e 100644
--- a/pw_malloc/docs.rst
+++ b/pw_malloc/docs.rst
@@ -52,8 +52,6 @@ This module has configuration options that globally affect the behavior of
 .. doxygendefine:: PW_MALLOC_LOCK_TYPE
 .. doxygendefine:: PW_MALLOC_METRICS_TYPE
 .. doxygendefine:: PW_MALLOC_BLOCK_OFFSET_TYPE
-.. doxygendefine:: PW_MALLOC_BLOCK_POISON_INTERVAL
-.. doxygendefine:: PW_MALLOC_BLOCK_ALIGNMENT
 .. doxygendefine:: PW_MALLOC_MIN_BUCKET_SIZE
 .. doxygendefine:: PW_MALLOC_NUM_BUCKETS
 .. doxygendefine:: PW_MALLOC_DUAL_FIRST_FIT_THRESHOLD
diff --git a/pw_malloc/dual_first_fit_block_allocator.cc b/pw_malloc/dual_first_fit_block_allocator.cc
index ab8dbdf611..4061e128b7 100644
--- a/pw_malloc/dual_first_fit_block_allocator.cc
+++ b/pw_malloc/dual_first_fit_block_allocator.cc
@@ -20,9 +20,8 @@
 namespace pw::malloc {
 namespace {
 
-using DualFirstFitBlockAllocator = ::pw::allocator::DualFirstFitBlockAllocator<
-    PW_MALLOC_BLOCK_OFFSET_TYPE,
-    PW_MALLOC_BLOCK_POISON_INTERVAL>;
+using DualFirstFitBlockAllocator =
+    ::pw::allocator::DualFirstFitBlockAllocator<PW_MALLOC_BLOCK_OFFSET_TYPE>;
 
 DualFirstFitBlockAllocator& GetDualFirstFitBlockAllocator() {
   static DualFirstFitBlockAllocator system_allocator;
diff --git a/pw_malloc/first_fit_block_allocator.cc b/pw_malloc/first_fit_block_allocator.cc
index ea466d7534..4e2cf03d89 100644
--- a/pw_malloc/first_fit_block_allocator.cc
+++ b/pw_malloc/first_fit_block_allocator.cc
@@ -20,8 +20,7 @@
 namespace pw::malloc {
 
 using FirstFitBlockAllocator =
-    ::pw::allocator::FirstFitBlockAllocator<PW_MALLOC_BLOCK_OFFSET_TYPE,
-                                            PW_MALLOC_BLOCK_POISON_INTERVAL>;
+    ::pw::allocator::FirstFitBlockAllocator<PW_MALLOC_BLOCK_OFFSET_TYPE>;
 
 void InitSystemAllocator(ByteSpan heap) {
   InitSystemAllocator<FirstFitBlockAllocator>(heap);
diff --git a/pw_malloc/last_fit_block_allocator.cc b/pw_malloc/last_fit_block_allocator.cc
index c69c1013e3..46ce4c9382 100644
--- a/pw_malloc/last_fit_block_allocator.cc
+++ b/pw_malloc/last_fit_block_allocator.cc
@@ -20,8 +20,7 @@
 namespace pw::malloc {
 
 using LastFitBlockAllocator =
-    ::pw::allocator::LastFitBlockAllocator<PW_MALLOC_BLOCK_OFFSET_TYPE,
-                                           PW_MALLOC_BLOCK_POISON_INTERVAL>;
+    ::pw::allocator::LastFitBlockAllocator<PW_MALLOC_BLOCK_OFFSET_TYPE>;
 
 void InitSystemAllocator(ByteSpan heap) {
   InitSystemAllocator<LastFitBlockAllocator>(heap);
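Each backend above exposes the same entry point. A sketch of how hypothetical startup code hands the heap region to whichever backend the build selected (`InitHeap` is an assumed name; `InitSystemAllocator(ByteSpan)` is taken from the sources above):

    #include "pw_bytes/span.h"
    #include "pw_malloc/malloc.h"

    void InitHeap(pw::ByteSpan heap) {
      pw::malloc::InitSystemAllocator(heap);
    }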
diff --git a/pw_malloc/public/pw_malloc/config.h b/pw_malloc/public/pw_malloc/config.h
index 965990122a..86dac3c622 100644
--- a/pw_malloc/public/pw_malloc/config.h
+++ b/pw_malloc/public/pw_malloc/config.h
@@ -51,30 +51,6 @@
 #define PW_MALLOC_BLOCK_OFFSET_TYPE uintptr_t
 #endif  // PW_MALLOC_BLOCK_OFFSET_TYPE
 
-#ifndef PW_MALLOC_BLOCK_POISON_INTERVAL
-/// Sets how frequently `pw::allocator::BlockAllocator`s poison free blocks.
-///
-/// Poisoned free blocks are checked on allocation to ensure nothing has
-/// modified their usable space while deallocated. Setting this value to a
-/// nonzero value N will poison every N-th free block.
-///
-/// Defaults to 0, which disables poisoning.
-#define PW_MALLOC_BLOCK_POISON_INTERVAL 0
-#endif  // PW_MALLOC_BLOCK_POISON_INTERVAL
-
-#ifndef PW_MALLOC_BLOCK_ALIGNMENT
-/// Sets the minimum alignment for a `pw::allocator::BlockAllocator`s memory.
-///
-/// Must be a power of two.
-///
-/// Defaults to the block offset type's alignment, which is the smallest value
-/// that has any effect on the block allocator.
-#define PW_MALLOC_BLOCK_ALIGNMENT alignof(PW_MALLOC_BLOCK_OFFSET_TYPE)
-#endif  // PW_MALLOC_BLOCK_ALIGNMENT
-static_assert(((PW_MALLOC_BLOCK_ALIGNMENT - 1) & PW_MALLOC_BLOCK_ALIGNMENT) ==
-                  0,
-              "PW_MALLOC_BLOCK_ALIGNMENT must be a power of two");
-
 #ifndef PW_MALLOC_MIN_BUCKET_SIZE
 /// Sets the size of the smallest ``pw::allocator::Bucket`` used by an
 /// allocator.
 ///
diff --git a/pw_malloc/worst_fit_block_allocator.cc b/pw_malloc/worst_fit_block_allocator.cc
index eeeb6f6ea5..0870dcf0fa 100644
--- a/pw_malloc/worst_fit_block_allocator.cc
+++ b/pw_malloc/worst_fit_block_allocator.cc
@@ -20,8 +20,8 @@
 namespace pw::malloc {
 
 using WorstFitBlockAllocator =
-    ::pw::allocator::WorstFitBlockAllocator<PW_MALLOC_BLOCK_OFFSET_TYPE,
-                                            PW_MALLOC_BLOCK_POISON_INTERVAL>;
+    ::pw::allocator::WorstFitBlockAllocator<PW_MALLOC_BLOCK_OFFSET_TYPE>;
+
 void InitSystemAllocator(ByteSpan heap) {
   InitSystemAllocator<WorstFitBlockAllocator>(heap);
 }
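To summarize the migration this patch performs, a hypothetical downstream use would change roughly as follows (the `uint16_t` offset type, the old interval value 8, and the `Example` wrapper are illustrative assumptions):

    #include <cstdint>

    #include "pw_allocator/first_fit_block_allocator.h"
    #include "pw_bytes/span.h"

    void Example(pw::ByteSpan region) {
      // Before this change, offset type and poison interval were both
      // template arguments:
      //   pw::allocator::FirstFitBlockAllocator<uint16_t, 8> allocator(region);
      // After, only the offset type remains; poisoning is configured with
      // PW_ALLOCATOR_BLOCK_POISON_INTERVAL instead.
      pw::allocator::FirstFitBlockAllocator<uint16_t> allocator(region);
    }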