From 82759ccb711c3f34320ae9ae37bf70a029baec57 Mon Sep 17 00:00:00 2001 From: jcomito Date: Fri, 13 Sep 2024 21:42:47 +0000 Subject: [PATCH] pw_allocator: Check for room to split the first block MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Changing the check in Block::AllocLast(). Usually, during an allocation, a free block is checked for enough space for the allocation. If there is more than enough space, but not enough to hold another block, the excess is appended to the previous Block. This is not possible with the first Block since there isn't a previous Block. In the case of the first Block, the extra space will become part of the block and the Block will be returned to the caller. Before this change, even when there was not enough room for the first Block to be split into two, it was split anyway. This caused heap corruption and eventually a crash. As part of this fix, the unit tests were made less brittle and more exhaustive by adding test utilities that manage aligned and unaligned buffers, and then ensuring unit test coverage of every path through AllocFirst, CanAllocLast, and AllocLast. 
Bug: 366175024 Change-Id: Ic3c9538e5e33eaf507f08f0d3ac54361b5dcf998 Reviewed-on: https://pigweed-review.googlesource.com/c/pigweed/pigweed/+/235312 Pigweed-Auto-Submit: Aaron Green Reviewed-by: John Comito Reviewed-by: Ewout van Bekkum Lint: Lint 🤖 Reviewed-by: Keir Mierle Commit-Queue: Auto-Submit Docs-Not-Needed: Aaron Green Presubmit-Verified: CQ Bot Account --- pw_allocator/BUILD.bazel | 1 + pw_allocator/BUILD.gn | 1 + pw_allocator/CMakeLists.txt | 1 + pw_allocator/block_test.cc | 738 ++++++++++++++---- pw_allocator/freelist_heap_test.cc | 19 +- pw_allocator/public/pw_allocator/block.h | 110 +-- .../public/pw_allocator/block_testing.h | 25 + .../public/pw_allocator/freelist_heap.h | 2 +- 8 files changed, 685 insertions(+), 212 deletions(-) diff --git a/pw_allocator/BUILD.bazel b/pw_allocator/BUILD.bazel index 359b170ad7..d4c82d7cb6 100644 --- a/pw_allocator/BUILD.bazel +++ b/pw_allocator/BUILD.bazel @@ -625,6 +625,7 @@ pw_cc_test( "freelist_heap_test.cc", ], deps = [ + ":block_testing", ":freelist_heap", "//pw_bytes:alignment", ], diff --git a/pw_allocator/BUILD.gn b/pw_allocator/BUILD.gn index 6d50f04ea6..e36fa36efb 100644 --- a/pw_allocator/BUILD.gn +++ b/pw_allocator/BUILD.gn @@ -486,6 +486,7 @@ pw_test("fragmentation_test") { pw_test("freelist_heap_test") { deps = [ + ":block_testing", ":freelist_heap", "$dir_pw_bytes:alignment", ] diff --git a/pw_allocator/CMakeLists.txt b/pw_allocator/CMakeLists.txt index 9a71f7221e..ec541e1e42 100644 --- a/pw_allocator/CMakeLists.txt +++ b/pw_allocator/CMakeLists.txt @@ -573,6 +573,7 @@ pw_add_test(pw_allocator.freelist_heap_test SOURCES freelist_heap_test.cc PRIVATE_DEPS + pw_allocator.block_testing pw_allocator.freelist_heap pw_bytes.alignment GROUPS diff --git a/pw_allocator/block_test.cc b/pw_allocator/block_test.cc index 699fbec8b2..2383e592cd 100644 --- a/pw_allocator/block_test.cc +++ b/pw_allocator/block_test.cc @@ -21,6 +21,8 @@ #include "pw_allocator/block_testing.h" #include "pw_assert/check.h" #include 
"pw_assert/internal/check_impl.h" +#include "pw_bytes/alignment.h" +#include "pw_bytes/span.h" #include "pw_span/span.h" #include "pw_unit_test/framework.h" @@ -29,6 +31,8 @@ namespace { // Test fixtures. using ::pw::allocator::BlockAllocType; using ::pw::allocator::Layout; +using ::pw::allocator::test::GetAlignedOffsetAfter; +using ::pw::allocator::test::GetOuterSize; using ::pw::allocator::test::Preallocate; using ::pw::allocator::test::Preallocation; @@ -80,22 +84,10 @@ class PoisonedBlockTest : public BlockTest { /// ``layout.size()``. template size_t GetFirstAlignedOffset(pw::ByteSpan bytes, Layout layout) { - auto start = reinterpret_cast(bytes.data()); - uintptr_t end = start + bytes.size(); - size_t alignment = std::max(BlockType::kAlignment, layout.alignment()); - - // Find the minimum address of the usable space of the second block. - size_t min_addr = start; - PW_CHECK_ADD(min_addr, BlockType::kBlockOverhead + 1, &min_addr); - PW_CHECK_ADD(min_addr, BlockType::kBlockOverhead, &min_addr); - - // Align the usable space and ensure it fits. - uintptr_t addr = pw::AlignUp(min_addr, alignment); - PW_CHECK_UINT_LE(addr + layout.size(), end); - - // Return the offset to the start of the block. - PW_CHECK_SUB(addr, BlockType::kBlockOverhead, &addr); - return addr - start; + size_t min_block = BlockType::kBlockOverhead + 1; + size_t offset = GetAlignedOffsetAfter( + bytes.data(), layout.alignment(), min_block + BlockType::kBlockOverhead); + return min_block + offset; } /// Returns the largest offset into the given memory region at which a block @@ -105,24 +97,23 @@ size_t GetFirstAlignedOffset(pw::ByteSpan bytes, Layout layout) { /// ``layout.size()``. template size_t GetLastAlignedOffset(pw::ByteSpan bytes, Layout layout) { - auto start = reinterpret_cast(bytes.data()); - uintptr_t end = start + bytes.size(); - size_t alignment = std::max(BlockType::kAlignment, layout.alignment()); - - // Find the minimum address of the usable space of the second block. 
- size_t min_addr = start; - PW_CHECK_ADD(min_addr, BlockType::kBlockOverhead + 1, &min_addr); - PW_CHECK_ADD(min_addr, BlockType::kBlockOverhead, &min_addr); - - // Align the usable space and ensure it fits. - uintptr_t addr; - PW_CHECK_SUB(end, layout.size(), &addr); - addr = pw::AlignDown(addr, alignment); - PW_CHECK_UINT_LE(min_addr, addr); + size_t min_offset = GetFirstAlignedOffset(bytes, layout); + return min_offset + + pw::AlignDown(bytes.subspan(min_offset).size() - layout.size(), + layout.alignment()); +} - // Return the offset to the start of the block. - PW_CHECK_SUB(addr, BlockType::kBlockOverhead, &addr); - return addr - start; +/// Iterates to each block reachable from the given one and asserts that it is +/// valid. +template +void CheckAllReachableBlock(BlockType* block) { + // Rewind to start. + for (BlockType* prev = block->Prev(); prev != nullptr; prev = block->Prev()) { + block = prev; + } + for (; block != nullptr; block = block->Next()) { + block->CrashIfInvalid(); + } } // Macro to provide type-parameterized tests for the various block types above. @@ -159,6 +150,7 @@ TEST_FOR_EACH_BLOCK_TYPE(CanCreateSingleAlignedBlock) { EXPECT_EQ(block->Next(), nullptr); EXPECT_FALSE(block->Used()); EXPECT_TRUE(block->Last()); + CheckAllReachableBlock(block); } TEST_FOR_EACH_BLOCK_TYPE(CanCreateUnalignedSingleBlock) { @@ -180,298 +172,722 @@ TEST(BlockTest, CannotCreateTooLargeBlock) { EXPECT_EQ(result.status(), pw::Status::OutOfRange()); } -TEST_FOR_EACH_BLOCK_TYPE(CanAllocFirst) { - constexpr Layout kLayout(256, kAlign); +TEST_FOR_EACH_BLOCK_TYPE(CannotAllocFirst_Null) { + constexpr Layout kLayout(1, 1); + + BlockType* block = nullptr; + + auto result = BlockType::AllocFirst(block, kLayout); + EXPECT_EQ(result.status(), pw::Status::InvalidArgument()); +} + +TEST_FOR_EACH_BLOCK_TYPE(CannotAllocFirst_ZeroSize) { + constexpr Layout kLayout(0, 1); - // Make sure the block's usable space is aligned. 
- size_t outer_size = GetFirstAlignedOffset(bytes_, kLayout); auto* block = Preallocate( bytes_, { - {outer_size, Preallocation::kUsed}, {Preallocation::kSizeRemaining, Preallocation::kFree}, }); + + auto result = BlockType::AllocFirst(block, kLayout); + EXPECT_EQ(result.status(), pw::Status::InvalidArgument()); + CheckAllReachableBlock(block); +} + +TEST_FOR_EACH_BLOCK_TYPE(CannotAllocFirst_Used) { + constexpr Layout kLayout(1, 1); + + auto* block = Preallocate( + bytes_, + { + {Preallocation::kSizeRemaining, Preallocation::kUsed}, + }); + + auto result = BlockType::AllocFirst(block, kLayout); + EXPECT_EQ(result.status(), pw::Status::FailedPrecondition()); + CheckAllReachableBlock(block); +} + +TEST_FOR_EACH_BLOCK_TYPE(CannotAllocFirst_TooSmall) { + constexpr Layout kLayout(256, kAlign); + + // Trim the buffer so that the layout does not fit. + pw::ByteSpan bytes = bytes_.subspan( + 0, GetOuterSize(kLayout.size()) - BlockType::kAlignment); + + auto* block = Preallocate( + bytes, + { + {Preallocation::kSizeRemaining, Preallocation::kFree}, + }); + auto result = BlockType::AllocFirst(block, kLayout); + EXPECT_EQ(result.status(), pw::Status::OutOfRange()); + CheckAllReachableBlock(block); +} + +TEST_FOR_EACH_BLOCK_TYPE(CanAllocFirst_Exact_FirstBlock) { + constexpr Layout kLayout(256, kAlign); + + // Trim the front of the buffer so that the first block is aligned. + size_t trim = + GetAlignedOffsetAfter(bytes_.data(), kAlign, BlockType::kBlockOverhead); + pw::ByteSpan bytes = bytes_.subspan(trim); + + // Leave enough space free for the requested block. + size_t available = GetOuterSize(kLayout.size()); + + auto* block = Preallocate( + bytes, + { + {available, Preallocation::kFree}, + {Preallocation::kSizeRemaining, Preallocation::kUsed}, + }); + + // Allocate from the front of the block. 
+ auto result = BlockType::AllocFirst(block, kLayout); + ASSERT_EQ(result.status(), pw::OkStatus()); + EXPECT_EQ(*result, BlockAllocType::kExact); + + EXPECT_GE(block->InnerSize(), kLayout.size()); + auto addr = reinterpret_cast(block->UsableSpace()); + EXPECT_EQ(addr % kAlign, 0U); + EXPECT_TRUE(block->Used()); + CheckAllReachableBlock(block); +} + +TEST_FOR_EACH_BLOCK_TYPE(CanAllocFirst_Exact_SubsequentBlock) { + constexpr Layout kLayout(256, kAlign); + + // Preallocate a first block so that the next block is aligned. + size_t leading = GetFirstAlignedOffset(bytes_, kLayout); + + // Leave enough space free for the requested block. + size_t available = GetOuterSize(kLayout.size()); + + auto* block = Preallocate( + bytes_, + { + {leading, Preallocation::kUsed}, + {available, Preallocation::kFree}, + {Preallocation::kSizeRemaining, Preallocation::kUsed}, + }); block = block->Next(); // Allocate from the front of the block. - BlockType* prev = block->Prev(); + auto result = BlockType::AllocFirst(block, kLayout); + ASSERT_EQ(result.status(), pw::OkStatus()); + EXPECT_EQ(*result, BlockAllocType::kExact); + + EXPECT_GE(block->InnerSize(), kLayout.size()); + auto addr = reinterpret_cast(block->UsableSpace()); + EXPECT_EQ(addr % kAlign, 0U); + EXPECT_TRUE(block->Used()); + CheckAllReachableBlock(block); +} + +TEST_FOR_EACH_BLOCK_TYPE(CanAllocFirst_NewNext_FirstBlock) { + constexpr Layout kLayout(256, kAlign); + + // Trim the front of the buffer so that the first block is aligned. + size_t trim = + GetAlignedOffsetAfter(bytes_.data(), kAlign, BlockType::kBlockOverhead); + pw::ByteSpan bytes = bytes_.subspan(trim); + + // Leave enough space free for the requested block and one more block. + size_t available = + GetOuterSize(kLayout.size()) + GetOuterSize(1); + + auto* block = Preallocate( + bytes, + { + {available, Preallocation::kFree}, + {Preallocation::kSizeRemaining, Preallocation::kUsed}, + }); + + // Allocate from the front of the block. 
auto result = BlockType::AllocFirst(block, kLayout); ASSERT_EQ(result.status(), pw::OkStatus()); EXPECT_EQ(*result, BlockAllocType::kNewNext); - EXPECT_EQ(block->InnerSize(), kLayout.size()); + EXPECT_GE(block->InnerSize(), kLayout.size()); auto addr = reinterpret_cast(block->UsableSpace()); EXPECT_EQ(addr % kAlign, 0U); EXPECT_TRUE(block->Used()); + CheckAllReachableBlock(block); +} - // No new padding block was allocated. - EXPECT_EQ(prev, block->Prev()); +TEST_FOR_EACH_BLOCK_TYPE(CanAllocFirst_NewNext_SubsequentBlock) { + constexpr Layout kLayout(256, kAlign); + + // Preallocate a first block so that the next block is aligned. + size_t leading = GetFirstAlignedOffset(bytes_, kLayout); + + // Leave enough space free for the requested block and one more block. + size_t available = + GetOuterSize(kLayout.size()) + GetOuterSize(1); + + auto* block = Preallocate( + bytes_, + { + {leading, Preallocation::kUsed}, + {available, Preallocation::kFree}, + {Preallocation::kSizeRemaining, Preallocation::kUsed}, + }); + block = block->Next(); + + // Allocate from the front of the block. + auto result = BlockType::AllocFirst(block, kLayout); + ASSERT_EQ(result.status(), pw::OkStatus()); + EXPECT_EQ(*result, BlockAllocType::kNewNext); + + EXPECT_GE(block->InnerSize(), kLayout.size()); + auto addr = reinterpret_cast(block->UsableSpace()); + EXPECT_EQ(addr % kAlign, 0U); + EXPECT_TRUE(block->Used()); + CheckAllReachableBlock(block); +} + +TEST_FOR_EACH_BLOCK_TYPE(CanAllocFirst_NewPrev_FirstBlock) { + constexpr Layout kLayout(256, kAlign); + + // Trim the front of the buffer so that there is room for a block before the + // first alignment boundary. + size_t trim = + GetAlignedOffsetAfter(bytes_.data(), kAlign, BlockType::kBlockOverhead) + + kAlign - GetOuterSize(1); + pw::ByteSpan bytes = bytes_.subspan(trim); + + // Leave enough space free for a block and the requested block. 
+ size_t available = + GetOuterSize(1) + GetOuterSize(kLayout.size()); + + auto* block = Preallocate( + bytes, + { + {available, Preallocation::kFree}, + {Preallocation::kSizeRemaining, Preallocation::kUsed}, + }); + + // Allocate from the front of the block. + auto result = BlockType::AllocFirst(block, kLayout); + ASSERT_EQ(result.status(), pw::OkStatus()); + EXPECT_EQ(*result, BlockAllocType::kNewPrev); - // Extra was split from the end of the block. - EXPECT_FALSE(block->Last()); + EXPECT_GE(block->InnerSize(), kLayout.size()); + auto addr = reinterpret_cast(block->UsableSpace()); + EXPECT_EQ(addr % kAlign, 0U); + EXPECT_TRUE(block->Used()); + CheckAllReachableBlock(block); } -TEST_FOR_EACH_BLOCK_TYPE(CanAllocFirstWithNewPrevBlock) { +TEST_FOR_EACH_BLOCK_TYPE(CanAllocFirst_NewPrev_SubsequentBlock) { constexpr Layout kLayout(256, kAlign); - // Make sure the block's usable space is not aligned. - size_t outer_size = GetFirstAlignedOffset(bytes_, kLayout); - outer_size += BlockType::kAlignment; + // Preallocate a first block with room for another block before the next + // alignment boundary. + size_t leading = GetFirstAlignedOffset(bytes_, kLayout) + kAlign - + GetOuterSize(1); + + // Leave enough space free for a block and the requested block. + size_t available = + GetOuterSize(1) + GetOuterSize(kLayout.size()); auto* block = Preallocate( bytes_, { - {outer_size, Preallocation::kUsed}, - {Preallocation::kSizeRemaining, Preallocation::kFree}, + {leading, Preallocation::kUsed}, + {available, Preallocation::kFree}, + {Preallocation::kSizeRemaining, Preallocation::kUsed}, }); block = block->Next(); - BlockType* prev = block->Prev(); // Allocate from the front of the block. 
auto result = BlockType::AllocFirst(block, kLayout); ASSERT_EQ(result.status(), pw::OkStatus()); - EXPECT_EQ(*result, BlockAllocType::kNewPrevAndNewNext); + EXPECT_EQ(*result, BlockAllocType::kNewPrev); - EXPECT_EQ(block->InnerSize(), kLayout.size()); + EXPECT_GE(block->InnerSize(), kLayout.size()); auto addr = reinterpret_cast(block->UsableSpace()); EXPECT_EQ(addr % kAlign, 0U); EXPECT_TRUE(block->Used()); + CheckAllReachableBlock(block); +} + +TEST_FOR_EACH_BLOCK_TYPE(CanAllocFirst_NewPrevAndNewNext_FirstBlock) { + constexpr Layout kLayout(256, kAlign); + + // Trim the front of the buffer so that there is room for a block before the + // first alignment boundary. + size_t trim = + GetAlignedOffsetAfter(bytes_.data(), kAlign, BlockType::kBlockOverhead) + + kAlign - GetOuterSize(1); + pw::ByteSpan bytes = bytes_.subspan(trim); - // A new free block was added. - EXPECT_LT(prev, block->Prev()); - EXPECT_FALSE(block->Prev()->Used()); + // Leave enough space free for a block, the requested block, and one more + // block. + size_t available = GetOuterSize(1) + + GetOuterSize(kLayout.size()) + + GetOuterSize(1); - // Extra was split from the end of the block. - EXPECT_FALSE(block->Last()); + auto* block = Preallocate( + bytes, + { + {available, Preallocation::kFree}, + {Preallocation::kSizeRemaining, Preallocation::kUsed}, + }); + + // Allocate from the front of the block. 
+ auto result = BlockType::AllocFirst(block, kLayout); + ASSERT_EQ(result.status(), pw::OkStatus()); + EXPECT_EQ(*result, BlockAllocType::kNewPrevAndNewNext); + + EXPECT_GE(block->InnerSize(), kLayout.size()); + auto addr = reinterpret_cast(block->UsableSpace()); + EXPECT_EQ(addr % kAlign, 0U); + EXPECT_TRUE(block->Used()); + CheckAllReachableBlock(block); } -TEST_FOR_EACH_BLOCK_TYPE(CanAllocFirstWithNoNewNextBlock) { +TEST_FOR_EACH_BLOCK_TYPE(CanAllocFirst_NewPrevAndNewNext_SubsequentBlock) { constexpr Layout kLayout(256, kAlign); - constexpr size_t kOuterSize = BlockType::kBlockOverhead + kLayout.size(); - // Make sure the block's usable space is aligned. - size_t outer_size = GetFirstAlignedOffset(bytes_, kLayout); + // Preallocate a first block with room for another block before the next + // alignment boundary. + size_t leading = GetFirstAlignedOffset(bytes_, kLayout) + kAlign - + GetOuterSize(1); + + // Leave enough space free for a block and the requested block and one more + // block. + size_t available = kAlign + GetOuterSize(kLayout.size()); + auto* block = Preallocate( bytes_, { - {outer_size, Preallocation::kUsed}, - {kOuterSize, Preallocation::kFree}, + {leading, Preallocation::kUsed}, + {available, Preallocation::kFree}, {Preallocation::kSizeRemaining, Preallocation::kUsed}, }); block = block->Next(); - BlockType* next = block->Next(); + // Allocate from the front of the block. auto result = BlockType::AllocFirst(block, kLayout); ASSERT_EQ(result.status(), pw::OkStatus()); - EXPECT_EQ(*result, BlockAllocType::kExact); + EXPECT_EQ(*result, BlockAllocType::kNewPrevAndNewNext); - EXPECT_EQ(block->InnerSize(), kLayout.size()); + EXPECT_GE(block->InnerSize(), kLayout.size()); auto addr = reinterpret_cast(block->UsableSpace()); EXPECT_EQ(addr % kAlign, 0U); EXPECT_TRUE(block->Used()); + CheckAllReachableBlock(block); +} - // No new trailing block was created. 
- EXPECT_EQ(next, block->Next()); +TEST_FOR_EACH_BLOCK_TYPE(CannotAllocFirst_ShiftToPrev_FirstBlock) { + constexpr Layout kLayout(256, kAlign); + + // Trim the front of the buffer so that there is `kAlignment` bytes before + // where the aligned block would start. + size_t trim = + GetAlignedOffsetAfter(bytes_.data(), kAlign, BlockType::kBlockOverhead) + + kAlign - BlockType::kAlignment; + pw::ByteSpan bytes = bytes_.subspan(trim); + + // Leave enough space free for the `kAlignment` bytes and the requested block. + size_t available = + BlockType::kAlignment + GetOuterSize(kLayout.size()); + + auto* block = Preallocate( + bytes, + { + {available, Preallocation::kFree}, + {Preallocation::kSizeRemaining, Preallocation::kUsed}, + }); + + // Attempt and fail to allocate from the front of the block. + auto result = BlockType::AllocFirst(block, kLayout); + EXPECT_EQ(result.status(), pw::Status::ResourceExhausted()); + CheckAllReachableBlock(block); } -TEST_FOR_EACH_BLOCK_TYPE(CanAllocFirstWithResizedPrevBlock) { +TEST_FOR_EACH_BLOCK_TYPE(CanAllocFirst_ShiftToPrev_SubsequentBlock) { constexpr Layout kLayout(256, kAlign); - // Make sure the block's usable space is not aligned. - size_t outer_size = GetFirstAlignedOffset(bytes_, kLayout); - outer_size += - pw::AlignUp(BlockType::kBlockOverhead, kAlign) - BlockType::kAlignment; + // Preallocate a first block so that there is `kAlignment` bytes before + // where the aligned block would start. + size_t leading = GetFirstAlignedOffset(bytes_, kLayout) + kAlign - + BlockType::kAlignment; + + // Leave enough space free for the `kAlignment` bytes and the requested block. 
+ size_t available = + BlockType::kAlignment + GetOuterSize(kLayout.size()); + auto* block = Preallocate( bytes_, { - {outer_size, Preallocation::kUsed}, - {Preallocation::kSizeRemaining, Preallocation::kFree}, + {leading, Preallocation::kUsed}, + {available, Preallocation::kFree}, + {Preallocation::kSizeRemaining, Preallocation::kUsed}, }); block = block->Next(); - BlockType* prev = block->Prev(); - size_t prev_inner_size = prev->InnerSize(); // Allocate from the front of the block. auto result = BlockType::AllocFirst(block, kLayout); ASSERT_EQ(result.status(), pw::OkStatus()); - EXPECT_EQ(*result, BlockAllocType::kShiftToPrevAndNewNext); + EXPECT_EQ(*result, BlockAllocType::kShiftToPrev); - EXPECT_EQ(block->InnerSize(), kLayout.size()); + EXPECT_GE(block->InnerSize(), kLayout.size()); auto addr = reinterpret_cast(block->UsableSpace()); EXPECT_EQ(addr % kAlign, 0U); EXPECT_TRUE(block->Used()); + CheckAllReachableBlock(block); +} - /// Less than a minimum block was added to the previous block. - EXPECT_EQ(prev, block->Prev()); - EXPECT_EQ(prev->InnerSize() - prev_inner_size, BlockType::kAlignment); +TEST_FOR_EACH_BLOCK_TYPE(CannotAllocFirst_ShiftToPrevAndNewNext_FirstBlock) { + constexpr Layout kLayout(256, kAlign); - // Extra was split from the end of the block. - EXPECT_FALSE(block->Last()); + // Trim the front of the buffer so that there is `kAlignment` bytes before + // where the aligned block would start. + size_t trim = + GetAlignedOffsetAfter(bytes_.data(), kAlign, BlockType::kBlockOverhead) + + kAlign - BlockType::kAlignment; + pw::ByteSpan bytes = bytes_.subspan(trim); - // On freeing the block, the previous block goes back to its original size. - BlockType::Free(block); - EXPECT_EQ(prev->InnerSize(), prev_inner_size); + // Leave enough space free for the `kAlignment` bytes, the requested block, + // and one more block. 
+ size_t available = BlockType::kAlignment + + GetOuterSize(kLayout.size()) + + GetOuterSize(1); + + auto* block = Preallocate( + bytes, + { + {available, Preallocation::kFree}, + {Preallocation::kSizeRemaining, Preallocation::kUsed}, + }); + + // Attempt and fail to allocate from the front of the block. + auto result = BlockType::AllocFirst(block, kLayout); + EXPECT_EQ(result.status(), pw::Status::ResourceExhausted()); + CheckAllReachableBlock(block); } -TEST_FOR_EACH_BLOCK_TYPE(CannotAllocFirstIfTooSmallForAlignment) { +TEST_FOR_EACH_BLOCK_TYPE(CanAllocFirst_ShiftToPrevAndNewNext_SubsequentBlock) { constexpr Layout kLayout(256, kAlign); - constexpr size_t kOuterSize = BlockType::kBlockOverhead + kLayout.size(); - // Make sure the block's usable space is not aligned. - size_t outer_size = GetFirstAlignedOffset(bytes_, kLayout) + 1; + // Preallocate a first block so that there is `kAlignment` bytes before + // where the aligned block would start. + size_t leading = GetFirstAlignedOffset(bytes_, kLayout) + kAlign - + BlockType::kAlignment; + + // Leave enough space free for the `kAlignment` bytes, the requested block, + // and one more block. + size_t available = BlockType::kAlignment + + GetOuterSize(kLayout.size()) + + GetOuterSize(1); + auto* block = Preallocate( bytes_, { - {outer_size, Preallocation::kUsed}, - {kOuterSize, Preallocation::kFree}, + {leading, Preallocation::kUsed}, + {available, Preallocation::kFree}, {Preallocation::kSizeRemaining, Preallocation::kUsed}, }); block = block->Next(); - // Cannot allocate without room to a split a block for alignment. + // Allocate from the front of the block. 
auto result = BlockType::AllocFirst(block, kLayout); - EXPECT_EQ(result.status(), pw::Status::OutOfRange()); + ASSERT_EQ(result.status(), pw::OkStatus()); + EXPECT_EQ(*result, BlockAllocType::kShiftToPrevAndNewNext); + + EXPECT_GE(block->InnerSize(), kLayout.size()); + auto addr = reinterpret_cast(block->UsableSpace()); + EXPECT_EQ(addr % kAlign, 0U); + EXPECT_TRUE(block->Used()); + CheckAllReachableBlock(block); } -TEST_FOR_EACH_BLOCK_TYPE(CannotAllocFirstFromNull) { - BlockType* block = nullptr; +TEST_FOR_EACH_BLOCK_TYPE(CannotAllocLast_Null) { constexpr Layout kLayout(1, 1); - auto result = BlockType::AllocFirst(block, kLayout); + + BlockType* block = nullptr; + + // Attempt and fail to allocate from the front of the block. + auto result = BlockType::AllocLast(block, kLayout); EXPECT_EQ(result.status(), pw::Status::InvalidArgument()); } -TEST_FOR_EACH_BLOCK_TYPE(CannotAllocFirstZeroSize) { +TEST_FOR_EACH_BLOCK_TYPE(CannotAllocLast_ZeroSize) { + constexpr Layout kLayout(0, 1); + auto* block = Preallocate( bytes_, { {Preallocation::kSizeRemaining, Preallocation::kFree}, }); - constexpr Layout kLayout(0, 1); - auto result = BlockType::AllocFirst(block, kLayout); + + // Check if we expect this to succeed. + auto can_alloc_last = block->CanAllocLast(kLayout); + EXPECT_EQ(can_alloc_last.status(), pw::Status::InvalidArgument()); + + // Attempt and fail to allocate from the front of the block. + auto result = BlockType::AllocLast(block, kLayout); EXPECT_EQ(result.status(), pw::Status::InvalidArgument()); } -TEST_FOR_EACH_BLOCK_TYPE(CannotAllocFirstFromUsed) { +TEST_FOR_EACH_BLOCK_TYPE(CannotAllocLast_Used) { + constexpr Layout kLayout(1, 1); + auto* block = Preallocate( bytes_, { {Preallocation::kSizeRemaining, Preallocation::kUsed}, }); - constexpr Layout kLayout(1, 1); - auto result = BlockType::AllocFirst(block, kLayout); + + // Check if we expect this to succeed. 
+ auto can_alloc_last = block->CanAllocLast(kLayout); + EXPECT_EQ(can_alloc_last.status(), pw::Status::FailedPrecondition()); + + // Attempt and fail to allocate from the front of the block. + auto result = BlockType::AllocLast(block, kLayout); EXPECT_EQ(result.status(), pw::Status::FailedPrecondition()); + CheckAllReachableBlock(block); } -TEST_FOR_EACH_BLOCK_TYPE(CanAllocLastWithNewPrevBlock) { +TEST_FOR_EACH_BLOCK_TYPE(CannotAllocLast_TooSmall) { constexpr Layout kLayout(256, kAlign); + // Trim the buffer so that the layout does not fit. + pw::ByteSpan bytes = bytes_.subspan( + 0, GetOuterSize(kLayout.size()) - BlockType::kAlignment); + auto* block = Preallocate( - bytes_, + bytes, { {Preallocation::kSizeRemaining, Preallocation::kFree}, }); + // Check if we expect this to succeed. + auto can_alloc_last = block->CanAllocLast(kLayout); + EXPECT_EQ(can_alloc_last.status(), pw::Status::OutOfRange()); + + // Attempt and fail to allocate from the front of the block. + auto result = BlockType::AllocLast(block, kLayout); + EXPECT_EQ(result.status(), pw::Status::OutOfRange()); +} + +TEST_FOR_EACH_BLOCK_TYPE(CanAllocLast_Exact_FirstBlock) { + constexpr Layout kLayout(256, kAlign); + + // Trim the front of the buffer so that the first block is aligned. + size_t trim = + GetAlignedOffsetAfter(bytes_.data(), kAlign, BlockType::kBlockOverhead); + pw::ByteSpan bytes = bytes_.subspan(trim); + + // Leave enough space free for the requested block. + size_t available = GetOuterSize(kLayout.size()); + + auto* block = Preallocate( + bytes, + { + {available, Preallocation::kFree}, + {Preallocation::kSizeRemaining, Preallocation::kUsed}, + }); + + // Check if we expect this to succeed. + auto can_alloc_last = block->CanAllocLast(kLayout); + ASSERT_EQ(can_alloc_last.status(), pw::OkStatus()); + EXPECT_EQ(can_alloc_last.size(), 0U); + // Allocate from the back of the block. 
auto result = BlockType::AllocLast(block, kLayout); ASSERT_EQ(result.status(), pw::OkStatus()); - EXPECT_EQ(*result, BlockAllocType::kNewPrev); + EXPECT_EQ(*result, BlockAllocType::kExact); EXPECT_GE(block->InnerSize(), kLayout.size()); auto addr = reinterpret_cast(block->UsableSpace()); EXPECT_EQ(addr % kAlign, 0U); EXPECT_TRUE(block->Used()); - - // Extra was split from the front of the block. - EXPECT_FALSE(block->Prev()->Used()); - EXPECT_TRUE(block->Last()); + CheckAllReachableBlock(block); } -TEST_FOR_EACH_BLOCK_TYPE(CanAllocLastWithResizedPrevBlock) { +TEST_FOR_EACH_BLOCK_TYPE(CanAllocLast_Exact_SubsequentBlock) { constexpr Layout kLayout(256, kAlign); - // Make sure the block's usable space is not aligned. - size_t outer_size = GetLastAlignedOffset(bytes_, kLayout); - outer_size -= BlockType::kAlignment; + // Preallocate a first block so that the next block is aligned. + size_t leading = GetFirstAlignedOffset(bytes_, kLayout); + + // Leave enough space free for the requested block. + size_t available = GetOuterSize(kLayout.size()); + auto* block = Preallocate( bytes_, { - {outer_size, Preallocation::kUsed}, - {Preallocation::kSizeRemaining, Preallocation::kFree}, + {leading, Preallocation::kUsed}, + {available, Preallocation::kFree}, + {Preallocation::kSizeRemaining, Preallocation::kUsed}, }); block = block->Next(); - BlockType* next = block->Next(); - BlockType* prev = block->Prev(); - size_t prev_inner_size = prev->InnerSize(); + + // Check if we expect this to succeed. + auto can_alloc_last = block->CanAllocLast(kLayout); + ASSERT_EQ(can_alloc_last.status(), pw::OkStatus()); + EXPECT_EQ(can_alloc_last.size(), 0U); // Allocate from the back of the block. 
auto result = BlockType::AllocLast(block, kLayout); ASSERT_EQ(result.status(), pw::OkStatus()); - EXPECT_EQ(*result, BlockAllocType::kShiftToPrev); + EXPECT_EQ(*result, BlockAllocType::kExact); EXPECT_GE(block->InnerSize(), kLayout.size()); auto addr = reinterpret_cast(block->UsableSpace()); EXPECT_EQ(addr % kAlign, 0U); EXPECT_TRUE(block->Used()); + CheckAllReachableBlock(block); +} - /// Less than a minimum block was added to the previous block. - EXPECT_EQ(prev, block->Prev()); - EXPECT_EQ(prev->InnerSize() - prev_inner_size, BlockType::kAlignment); +TEST_FOR_EACH_BLOCK_TYPE(CanAllocLast_NewPrev_FirstBlock) { + constexpr Layout kLayout(256, kAlign); - // No new trailing block was created. - EXPECT_EQ(next, block->Next()); + // Trim the front of the buffer so that there is room for a block before the + // first alignment boundary. + size_t trim = + GetAlignedOffsetAfter(bytes_.data(), kAlign, BlockType::kBlockOverhead) + + kAlign - GetOuterSize(1); + pw::ByteSpan bytes = bytes_.subspan(trim); - // On freeing the block, the previous block goes back to its original size. - BlockType::Free(block); - EXPECT_EQ(prev->InnerSize(), prev_inner_size); + // Leave enough space free for a block and the requested block. + size_t available = + GetOuterSize(1) + GetOuterSize(kLayout.size()); + + auto* block = Preallocate( + bytes, + { + {available, Preallocation::kFree}, + {Preallocation::kSizeRemaining, Preallocation::kUsed}, + }); + + // Check if we expect this to succeed. + auto can_alloc_last = block->CanAllocLast(kLayout); + ASSERT_EQ(can_alloc_last.status(), pw::OkStatus()); + EXPECT_EQ(can_alloc_last.size(), GetOuterSize(1)); + + // Allocate from the back of the block. 
+ auto result = BlockType::AllocLast(block, kLayout); + ASSERT_EQ(result.status(), pw::OkStatus()); + EXPECT_EQ(*result, BlockAllocType::kNewPrev); + + EXPECT_GE(block->InnerSize(), kLayout.size()); + auto addr = reinterpret_cast(block->UsableSpace()); + EXPECT_EQ(addr % kAlign, 0U); + EXPECT_TRUE(block->Used()); + CheckAllReachableBlock(block); } -TEST_FOR_EACH_BLOCK_TYPE(CannotAllocLastIfTooSmallForAlignment) { +TEST_FOR_EACH_BLOCK_TYPE(CanAllocLast_NewPrev_SubsequentBlock) { constexpr Layout kLayout(256, kAlign); - constexpr size_t kOuterSize = BlockType::kBlockOverhead + kLayout.size(); - // Make sure the block's usable space is not aligned. - size_t outer_size = GetFirstAlignedOffset(bytes_, kLayout) + 1; + // Preallocate a first block with room for another block before the next + // alignment boundary. + size_t leading = GetFirstAlignedOffset(bytes_, kLayout) + kAlign - + GetOuterSize(1); + + // Leave enough space free for a block and the requested block. + size_t available = + GetOuterSize(1) + GetOuterSize(kLayout.size()); + auto* block = Preallocate( bytes_, { - {outer_size, Preallocation::kUsed}, - {kOuterSize, Preallocation::kFree}, + {leading, Preallocation::kUsed}, + {available, Preallocation::kFree}, {Preallocation::kSizeRemaining, Preallocation::kUsed}, }); block = block->Next(); - // Cannot allocate without room to a split a block for alignment. - auto result = BlockType::AllocLast(block, kLayout); - EXPECT_EQ(result.status(), pw::Status::ResourceExhausted()); -} + // Check if we expect this to succeed. + auto can_alloc_last = block->CanAllocLast(kLayout); + ASSERT_EQ(can_alloc_last.status(), pw::OkStatus()); + EXPECT_EQ(can_alloc_last.size(), GetOuterSize(1)); -TEST_FOR_EACH_BLOCK_TYPE(CannotAllocLastFromNull) { - BlockType* block = nullptr; - constexpr Layout kLayout(1, 1); + // Allocate from the back of the block. 
auto result = BlockType::AllocLast(block, kLayout); - EXPECT_EQ(result.status(), pw::Status::InvalidArgument()); + ASSERT_EQ(result.status(), pw::OkStatus()); + EXPECT_EQ(*result, BlockAllocType::kNewPrev); + + EXPECT_GE(block->InnerSize(), kLayout.size()); + auto addr = reinterpret_cast(block->UsableSpace()); + EXPECT_EQ(addr % kAlign, 0U); + EXPECT_TRUE(block->Used()); + CheckAllReachableBlock(block); } -TEST_FOR_EACH_BLOCK_TYPE(CannotAllocLastZeroSize) { +TEST_FOR_EACH_BLOCK_TYPE(CannotAllocLast_ShiftToPrev_FirstBlock) { + constexpr Layout kLayout(256, kAlign); + + // Trim the front of the buffer so that there is `kAlignment` bytes before + // where the aligned block would start. + size_t trim = + GetAlignedOffsetAfter(bytes_.data(), kAlign, BlockType::kBlockOverhead) + + kAlign - BlockType::kAlignment; + pw::ByteSpan bytes = bytes_.subspan(trim); + + // Leave enough space free for the `kAlignment` bytes and the requested block. + size_t available = + BlockType::kAlignment + GetOuterSize(kLayout.size()); + auto* block = Preallocate( - bytes_, + bytes, { - {Preallocation::kSizeRemaining, Preallocation::kFree}, + {available, Preallocation::kFree}, + {Preallocation::kSizeRemaining, Preallocation::kUsed}, }); - constexpr Layout kLayout(0, 1); - auto result = BlockType::AllocLast(block, kLayout); - EXPECT_EQ(result.status(), pw::Status::InvalidArgument()); + + // Check if we expect this to succeed. + auto can_alloc_last = block->CanAllocLast(kLayout); + EXPECT_EQ(can_alloc_last.status(), pw::Status::ResourceExhausted()); + + // Attempt and fail to allocate from the back of the block. 
+ auto result = BlockType::AllocFirst(block, kLayout); + EXPECT_EQ(result.status(), pw::Status::ResourceExhausted()); + CheckAllReachableBlock(block); } -TEST_FOR_EACH_BLOCK_TYPE(CannotAllocLastFromUsed) { +TEST_FOR_EACH_BLOCK_TYPE(CanAllocLast_ShiftToPrev_SubsequentBlock) { + constexpr Layout kLayout(256, kAlign); + + // Preallocate a first block so that there is `kAlignment` bytes before + // where the aligned block would start. + size_t leading = GetFirstAlignedOffset(bytes_, kLayout) + kAlign - + BlockType::kAlignment; + + // Leave enough space free for the `kAlignment` bytes and the requested block. + size_t available = + BlockType::kAlignment + GetOuterSize(kLayout.size()); + auto* block = Preallocate( bytes_, { + {leading, Preallocation::kUsed}, + {available, Preallocation::kFree}, {Preallocation::kSizeRemaining, Preallocation::kUsed}, }); - constexpr Layout kLayout(1, 1); + block = block->Next(); + + // Check if we expect this to succeed. + auto can_alloc_last = block->CanAllocLast(kLayout); + ASSERT_EQ(can_alloc_last.status(), pw::OkStatus()); + EXPECT_EQ(can_alloc_last.size(), BlockType::kAlignment); + + // Allocate from the back of the block. 
auto result = BlockType::AllocLast(block, kLayout); - EXPECT_EQ(result.status(), pw::Status::FailedPrecondition()); + ASSERT_EQ(result.status(), pw::OkStatus()); + EXPECT_EQ(*result, BlockAllocType::kShiftToPrev); + + EXPECT_GE(block->InnerSize(), kLayout.size()); + auto addr = reinterpret_cast(block->UsableSpace()); + EXPECT_EQ(addr % kAlign, 0U); + EXPECT_TRUE(block->Used()); + CheckAllReachableBlock(block); } TEST_FOR_EACH_BLOCK_TYPE(FreeingNullDoesNothing) { @@ -486,6 +902,7 @@ TEST_FOR_EACH_BLOCK_TYPE(FreeingFreeBlockDoesNothing) { {Preallocation::kSizeRemaining, Preallocation::kFree}, }); BlockType::Free(block); + CheckAllReachableBlock(block); } TEST_FOR_EACH_BLOCK_TYPE(CanFree) { @@ -498,6 +915,7 @@ TEST_FOR_EACH_BLOCK_TYPE(CanFree) { BlockType::Free(block); EXPECT_FALSE(block->Used()); EXPECT_EQ(block->OuterSize(), kN); + CheckAllReachableBlock(block); } TEST_FOR_EACH_BLOCK_TYPE(CanFreeBlockWithoutMerging) { @@ -518,6 +936,7 @@ TEST_FOR_EACH_BLOCK_TYPE(CanFreeBlockWithoutMerging) { EXPECT_FALSE(block->Used()); EXPECT_EQ(next, block->Next()); EXPECT_EQ(prev, block->Prev()); + CheckAllReachableBlock(block); } TEST_FOR_EACH_BLOCK_TYPE(CanFreeBlockAndMergeWithPrev) { @@ -537,6 +956,7 @@ TEST_FOR_EACH_BLOCK_TYPE(CanFreeBlockAndMergeWithPrev) { EXPECT_FALSE(block->Used()); EXPECT_EQ(block->Prev(), nullptr); EXPECT_EQ(block->Next(), next); + CheckAllReachableBlock(block); } TEST_FOR_EACH_BLOCK_TYPE(CanFreeBlockAndMergeWithNext) { @@ -556,6 +976,7 @@ TEST_FOR_EACH_BLOCK_TYPE(CanFreeBlockAndMergeWithNext) { EXPECT_FALSE(block->Used()); EXPECT_EQ(block->Prev(), prev); EXPECT_TRUE(block->Last()); + CheckAllReachableBlock(block); } TEST_FOR_EACH_BLOCK_TYPE(CanFreeBlockAndMergeWithBoth) { @@ -574,6 +995,7 @@ TEST_FOR_EACH_BLOCK_TYPE(CanFreeBlockAndMergeWithBoth) { EXPECT_FALSE(block->Used()); EXPECT_EQ(block->Prev(), nullptr); EXPECT_TRUE(block->Last()); + CheckAllReachableBlock(block); } TEST_FOR_EACH_BLOCK_TYPE(CanResizeBlockSameSize) { @@ -589,6 +1011,7 @@ 
TEST_FOR_EACH_BLOCK_TYPE(CanResizeBlockSameSize) { block = block->Next(); EXPECT_EQ(BlockType::Resize(block, block->InnerSize()), pw::OkStatus()); + CheckAllReachableBlock(block); } TEST_FOR_EACH_BLOCK_TYPE(CannotResizeFreeBlock) { @@ -605,6 +1028,7 @@ TEST_FOR_EACH_BLOCK_TYPE(CannotResizeFreeBlock) { EXPECT_EQ(BlockType::Resize(block, block->InnerSize()), pw::Status::FailedPrecondition()); + CheckAllReachableBlock(block); } TEST_FOR_EACH_BLOCK_TYPE(CanResizeBlockSmallerWithNextFree) { @@ -630,6 +1054,7 @@ TEST_FOR_EACH_BLOCK_TYPE(CanResizeBlockSmallerWithNextFree) { BlockType* next = block->Next(); EXPECT_FALSE(next->Used()); EXPECT_EQ(next->InnerSize(), next_inner_size + delta); + CheckAllReachableBlock(block); } TEST_FOR_EACH_BLOCK_TYPE(CanResizeBlockLargerWithNextFree) { @@ -654,6 +1079,7 @@ TEST_FOR_EACH_BLOCK_TYPE(CanResizeBlockLargerWithNextFree) { BlockType* next = block->Next(); EXPECT_FALSE(next->Used()); EXPECT_EQ(next->InnerSize(), next_inner_size - delta); + CheckAllReachableBlock(block); } TEST_FOR_EACH_BLOCK_TYPE(CannotResizeBlockMuchLargerWithNextFree) { @@ -669,6 +1095,7 @@ TEST_FOR_EACH_BLOCK_TYPE(CannotResizeBlockMuchLargerWithNextFree) { size_t new_inner_size = block->InnerSize() + kOuterSize + 1; EXPECT_EQ(BlockType::Resize(block, new_inner_size), pw::Status::OutOfRange()); + CheckAllReachableBlock(block); } TEST_FOR_EACH_BLOCK_TYPE(CanResizeBlockSmallerWithNextUsed) { @@ -691,6 +1118,7 @@ TEST_FOR_EACH_BLOCK_TYPE(CanResizeBlockSmallerWithNextUsed) { BlockType* next = block->Next(); EXPECT_FALSE(next->Used()); EXPECT_EQ(next->OuterSize(), delta); + CheckAllReachableBlock(block); } TEST_FOR_EACH_BLOCK_TYPE(CannotResizeBlockLargerWithNextUsed) { @@ -831,7 +1259,7 @@ TEST_FOR_EACH_BLOCK_TYPE(CanGetAlignmentFromUsedBlock) { EXPECT_EQ(block2->Alignment(), kAlign * 2); } -TEST_FOR_EACH_BLOCK_TYPE(FreeBlockAlignmentIsAlwaysOne) { +TEST_FOR_EACH_BLOCK_TYPE(FreeBlocksHaveDefaultAlignment) { constexpr Layout kLayout1(128, kAlign); constexpr Layout 
kLayout2(384, kAlign * 2); @@ -850,7 +1278,7 @@ TEST_FOR_EACH_BLOCK_TYPE(FreeBlockAlignmentIsAlwaysOne) { EXPECT_EQ(block1->Alignment(), kAlign); BlockType::Free(block1); - EXPECT_EQ(block1->Alignment(), 1U); + EXPECT_EQ(block1->Alignment(), BlockType::kAlignment); } } // namespace diff --git a/pw_allocator/freelist_heap_test.cc b/pw_allocator/freelist_heap_test.cc index 2676139659..a77c096e65 100644 --- a/pw_allocator/freelist_heap_test.cc +++ b/pw_allocator/freelist_heap_test.cc @@ -14,6 +14,7 @@ #include "pw_allocator/freelist_heap.h" +#include "pw_allocator/block_testing.h" #include "pw_bytes/alignment.h" #include "pw_unit_test/framework.h" @@ -22,6 +23,7 @@ namespace { // Test fixtures. using ::pw::allocator::FreeListHeapBuffer; +using ::pw::allocator::test::GetAlignedOffsetAfter; class FreeListHeapBufferTest : public ::testing::Test { protected: @@ -29,7 +31,7 @@ class FreeListHeapBufferTest : public ::testing::Test { static constexpr size_t kN = 2048; - alignas(BlockType) std::array buffer_; + alignas(BlockType::kAlignment) std::array buffer_; }; // Unit tests. 
@@ -98,13 +100,13 @@ TEST_F(FreeListHeapBufferTest, ReturnsNullWhenAllocationTooLarge) { } TEST_F(FreeListHeapBufferTest, ReturnsNullWhenFull) { - FreeListHeapBuffer allocator(buffer_); - - auto start = reinterpret_cast(buffer_.data()); - uintptr_t usable = - pw::AlignUp(start + BlockType::kBlockOverhead, alignof(std::max_align_t)); + size_t offset = GetAlignedOffsetAfter( + buffer_.data(), alignof(std::max_align_t), BlockType::kBlockOverhead); + auto buffer = pw::ByteSpan(buffer_).subspan(offset); + size_t inner_size = buffer.size() - BlockType::kBlockOverhead; - void* ptr1 = allocator.Allocate(kN - (usable - start)); + FreeListHeapBuffer allocator(buffer); + void* ptr1 = allocator.Allocate(inner_size); ASSERT_NE(ptr1, nullptr); void* ptr2 = allocator.Allocate(1); @@ -161,7 +163,8 @@ TEST_F(FreeListHeapBufferTest, ReallocHasSameContent) { ASSERT_NE(ptr2, nullptr); std::memcpy(&val2, ptr2, sizeof(size_t)); - // Verify that data inside the allocated and reallocated chunks are the same. + // Verify that data inside the allocated and reallocated chunks are the + // same. EXPECT_EQ(val1, val2); // All pointers must be freed before the allocator goes out of scope. diff --git a/pw_allocator/public/pw_allocator/block.h b/pw_allocator/public/pw_allocator/block.h index 70c6c4ec1e..674687d39f 100644 --- a/pw_allocator/public/pw_allocator/block.h +++ b/pw_allocator/public/pw_allocator/block.h @@ -35,7 +35,7 @@ namespace internal { // Types of corrupted blocks, and functions to crash with an error message // corresponding to each type. These functions are implemented independent of // any template parameters to allow them to use `PW_CHECK`. -enum BlockStatus { +enum class BlockStatus { kValid, kMisaligned, kPrevMismatched, @@ -50,7 +50,7 @@ void CrashPoisonCorrupted(uintptr_t addr); } // namespace internal /// Describes the side effects of fulfilling and allocation request. 
-enum BlockAllocType { +enum class BlockAllocType { /// The allocation fit exactly, and no other blocks were affected. kExact, @@ -76,11 +76,11 @@ enum BlockAllocType { /// /// The blocks do not encode their size directly. Instead, they encode offsets /// to the next and previous blocks using the type given by the `OffsetType` -/// template parameter. The encoded offsets are simply the offsets divded by the -/// minimum block alignment, `kAlignment`. +/// template parameter. The encoded offsets are simply the offsets divided by +/// the minimum block alignment, `kAlignment`. /// -/// The `kAlignment` constant provided by the derived block is typically the -/// minimum value of `alignof(OffsetType)`. Since the addressable range of a +/// `kAlignment` is set by the `kAlign` template parameter, which defaults to +/// its minimum value of `alignof(OffsetType)`. Since the addressable range of a /// block is given by `std::numeric_limits::max() * kAlignment`, it /// may be advantageous to set a higher alignment if it allows using a smaller /// offset type, even if this wastes some bytes in order to align block headers. @@ -94,7 +94,7 @@ enum BlockAllocType { /// use-after-frees. /// /// As an example, the diagram below represents two contiguous -/// `Block`s. The indices indicate byte offsets: +/// `Block`s. 
The indices indicate byte offsets: /// /// @code{.unparsed} /// Block 1: @@ -103,7 +103,7 @@ enum BlockAllocType { /// +----------+----------+------+--------------+ /// | Prev | Next | | | /// | 0......3 | 4......7 | 8..9 | 10.......280 | -/// | 00000000 | 00000046 | 8008 | | +/// | 00000000 | 00000046 | 8004 | | /// +----------+----------+------+--------------+ /// Block 2: /// +---------------------+------+--------------+ @@ -111,32 +111,31 @@ enum BlockAllocType { /// +----------+----------+------+--------------+ /// | Prev | Next | | | /// | 0......3 | 4......7 | 8..9 | 10......1056 | -/// | 00000046 | 00000106 | 6008 | f7f7....f7f7 | +/// | 00000046 | 00000106 | 6004 | f7f7....f7f7 | /// +----------+----------+------+--------------+ /// @endcode /// /// The overall size of the block (e.g. 280 bytes) is given by its next offset -/// multiplied by the alignment (e.g. 0x106 * 4). Also, the next offset of a +/// multiplied by the alignment (e.g. 0x46 * 4). Also, the next offset of a /// block matches the previous offset of its next block. The first block in a /// list is denoted by having a previous offset of `0`. /// /// @tparam OffsetType Unsigned integral type used to encode offsets. Larger /// types can address more memory, but consume greater /// overhead. -/// @tparam kCanPoison Indicates whether to enable poisoning free blocks. /// @tparam kAlign Sets the overall alignment for blocks. Minimum is /// `alignof(OffsetType)` (the default). Larger values can /// address more memory, but consume greater overhead. 
-template class Block { public: - using offset_type = OffsetType; - static_assert(std::is_unsigned_v, - "offset type must be unsigned"); + using OffsetType = OffsetType_; + static_assert(std::is_unsigned_v, "offset type must be unsigned"); - static constexpr size_t kAlignment = std::max(kAlign, alignof(offset_type)); + static constexpr size_t kAlignment = std::max(kAlign, alignof(OffsetType)); static constexpr size_t kBlockOverhead = AlignUp(sizeof(Block), kAlignment); // No copy or move. @@ -240,8 +239,8 @@ class Block { /// /// .. pw-status-codes:: /// - /// OK: Returns the number of bytes to shift this block in order to align - /// its usable space. + /// OK: Returns the number of bytes from this block that would precede + /// a block allocated from this one and aligned according to `layout`. /// /// FAILED_PRECONDITION: This block is in use and cannot be split. /// @@ -347,7 +346,7 @@ class Block { } /// Returns the current alignment of a block. - size_t Alignment() const { return Used() ? info_.alignment : 1; } + size_t Alignment() const { return Used() ? info_.alignment : kAlignment; } /// Indicates whether the block is in use. /// @@ -372,7 +371,7 @@ class Block { void MarkLast() { info_.last = 1; } /// Clears the last bit from this block. - void ClearLast() { info_.last = 1; } + void ClearLast() { info_.last = 0; } /// Poisons the block's usable space. /// @@ -392,7 +391,9 @@ class Block { /// * The block is aligned. /// * The prev/next fields match with the previous and next blocks. /// * The poisoned bytes are not damaged (if poisoning is enabled). - bool IsValid() const { return CheckStatus() == internal::kValid; } + bool IsValid() const { + return CheckStatus() == internal::BlockStatus::kValid; + } /// @brief Crashes with an informtaional message if a block is invalid. /// @@ -460,12 +461,12 @@ class Block { /// Offset (in increments of the minimum alignment) from this block to the /// previous block. 0 if this is the first block.
- offset_type prev_ = 0; + OffsetType prev_ = 0; /// Offset (in increments of the minimum alignment) from this block to the /// next block. Valid even if this is the last block, since it equals the /// size of the block. - offset_type next_ = 0; + OffsetType next_ = 0; /// Information about the current state of the block: /// * If the `used` flag is set, the block's usable memory has been allocated @@ -485,7 +486,7 @@ class Block { } info_; /// Number of bytes allocated beyond what was requested. This will be at most - /// the minimum alignment, i.e. `alignof(offset_type).` + /// the minimum alignment, i.e. `alignof(OffsetType).` uint16_t padding_ = 0; public: @@ -609,6 +610,9 @@ Result Block::AllocFirst( return Status::FailedPrecondition(); } Block* prev = block->Prev(); + if (block->InnerSize() < layout.size()) { + return Status::OutOfRange(); + } // Check if padding will be needed at the front to align the usable space. size_t alignment = std::max(layout.alignment(), kAlignment); @@ -630,7 +634,7 @@ Result Block::AllocFirst( // Make sure everything fits. 
size_t inner_size = AlignUp(layout.size(), kAlignment); if (block->InnerSize() < pad_size + inner_size) { - return Status::OutOfRange(); + return Status::ResourceExhausted(); } BlockAllocType alloc_type = ShiftBlock(block, pad_size); @@ -641,13 +645,13 @@ Result Block::AllocFirst( trailing->Poison(should_poison); switch (alloc_type) { case BlockAllocType::kExact: - alloc_type = kNewNext; + alloc_type = BlockAllocType::kNewNext; break; case BlockAllocType::kNewPrev: - alloc_type = kNewPrevAndNewNext; + alloc_type = BlockAllocType::kNewPrevAndNewNext; break; case BlockAllocType::kShiftToPrev: - alloc_type = kShiftToPrevAndNewNext; + alloc_type = BlockAllocType::kShiftToPrevAndNewNext; break; case BlockAllocType::kNewNext: case BlockAllocType::kNewPrevAndNewNext: @@ -667,6 +671,9 @@ Result Block::AllocFirst( template StatusWithSize Block::CanAllocLast( Layout layout) const { + if (layout.size() == 0) { + return StatusWithSize::InvalidArgument(); + } if (Used()) { return StatusWithSize::FailedPrecondition(); } @@ -684,13 +691,26 @@ StatusWithSize Block::CanAllocLast( // Requested size does not fit. return StatusWithSize::ResourceExhausted(); } - return StatusWithSize(next - addr); + size_t extra = next - addr; + if (extra > kBlockOverhead) { + // Sufficient extra room for a new block. + return StatusWithSize(extra); + } + if (Prev() != nullptr) { + // Extra can be shifted to the previous block. + return StatusWithSize(extra); + } + if (extra % alignment == 0) { + // Pad the end of the block. + return StatusWithSize(); + } + return StatusWithSize::ResourceExhausted(); } template Result Block::AllocLast( Block*& block, Layout layout) { - if (block == nullptr || layout.size() == 0) { + if (block == nullptr) { return Status::InvalidArgument(); } size_t pad_size = 0; @@ -711,20 +731,14 @@ BlockAllocType Block::ShiftBlock( return BlockAllocType::kExact; } - // Check if this is the first block. 
- Block* prev = block->Prev(); - if (prev == nullptr && pad_size <= kBlockOverhead) { - pad_size += kBlockOverhead; - } - bool should_poison = block->info_.poisoned; + Block* prev = block->Prev(); if (pad_size <= kBlockOverhead) { // The small amount of padding can be appended to the previous block. Block::Resize(prev, prev->InnerSize() + pad_size).IgnoreError(); prev->padding_ += pad_size; block = prev->Next(); return BlockAllocType::kShiftToPrev; - } else { // Split the large padding off the front. Block* leading = block; @@ -925,38 +939,38 @@ template internal::BlockStatus Block::CheckStatus() const { if (reinterpret_cast(this) % kAlignment != 0) { - return internal::kMisaligned; + return internal::BlockStatus::kMisaligned; } if (!Last() && (this >= Next() || this != Next()->Prev())) { - return internal::kNextMismatched; + return internal::BlockStatus::kNextMismatched; } if (Prev() && (this <= Prev() || this != Prev()->Next())) { - return internal::kPrevMismatched; + return internal::BlockStatus::kPrevMismatched; } if (!Used() && !CheckPoison()) { - return internal::kPoisonCorrupted; + return internal::BlockStatus::kPoisonCorrupted; } - return internal::kValid; + return internal::BlockStatus::kValid; } template void Block::CrashIfInvalid() const { uintptr_t addr = reinterpret_cast(this); switch (CheckStatus()) { - case internal::kValid: + case internal::BlockStatus::kValid: break; - case internal::kMisaligned: + case internal::BlockStatus::kMisaligned: internal::CrashMisaligned(addr); break; - case internal::kNextMismatched: + case internal::BlockStatus::kNextMismatched: internal::CrashNextMismatched( addr, reinterpret_cast(Next()->Prev())); break; - case internal::kPrevMismatched: + case internal::BlockStatus::kPrevMismatched: internal::CrashPrevMismatched( addr, reinterpret_cast(Prev()->Next())); break; - case internal::kPoisonCorrupted: + case internal::BlockStatus::kPoisonCorrupted: internal::CrashPoisonCorrupted(addr); break; } diff --git 
a/pw_allocator/public/pw_allocator/block_testing.h b/pw_allocator/public/pw_allocator/block_testing.h index 9e1f6957b8..09193a4900 100644 --- a/pw_allocator/public/pw_allocator/block_testing.h +++ b/pw_allocator/public/pw_allocator/block_testing.h @@ -24,6 +24,31 @@ namespace pw::allocator::test { +/// Utility function that returns the offset from an address a given number of +/// bytes `after` a given `ptr` to the next address that has a given +/// `alignment`. +/// +/// In other words, if offset is `GetAlignedOffsetAfter(ptr, alignment, after)`, +/// then `((uintptr_t)ptr + after + offset) % alignment` is 0. +/// +/// This is useful when dealing with blocks that need their usable space to be +/// aligned, e.g. +/// GetAlignedOffsetAfter(bytes_.data(), layout.alignment(), kBlockOverhead); +inline size_t GetAlignedOffsetAfter(const void* ptr, + size_t alignment, + size_t after) { + auto addr = reinterpret_cast(ptr) + after; + return pw::AlignUp(addr, alignment) - addr; +} + +/// Returns the minimum outer size for a block allocated from a layout with the +/// given `min_inner_size`. +template +size_t GetOuterSize(size_t min_inner_size) { + return BlockType::kBlockOverhead + + pw::AlignUp(min_inner_size, BlockType::kAlignment); +} + /// Represents an initial state for a memory block. /// /// Unit tests can specify an initial block layout by passing a list of these diff --git a/pw_allocator/public/pw_allocator/freelist_heap.h b/pw_allocator/public/pw_allocator/freelist_heap.h index 7f381cd22e..5bd2249cdc 100644 --- a/pw_allocator/public/pw_allocator/freelist_heap.h +++ b/pw_allocator/public/pw_allocator/freelist_heap.h @@ -51,7 +51,7 @@ class FreeListHeapBuffer { } private: - using OffsetType = Block<>::offset_type; + using OffsetType = Block<>::OffsetType; static constexpr size_t kMinChunkSize = 16; BucketBlockAllocator allocator_; };