Overhaul Unique<T>
PiperOrigin-RevId: 640889773
jcking authored and copybara-github committed Jun 10, 2024
1 parent ad1deff commit a4cf957
Showing 19 changed files with 380 additions and 1,582 deletions.
12 changes: 4 additions & 8 deletions common/BUILD
@@ -342,24 +342,17 @@ cc_library(
hdrs = ["memory.h"],
deps = [
":allocator",
":casting",
":arena",
":data",
":native_type",
":reference_count",
"//common/internal:metadata",
"//common/internal:reference_count",
"//internal:align",
"//internal:exceptions",
"//internal:new",
"//internal:page_size",
"@com_google_absl//absl/base:config",
"@com_google_absl//absl/base:core_headers",
"@com_google_absl//absl/base:no_destructor",
"@com_google_absl//absl/base:nullability",
"@com_google_absl//absl/log:absl_check",
"@com_google_absl//absl/log:absl_log",
"@com_google_absl//absl/log:die_if_null",
"@com_google_absl//absl/meta:type_traits",
"@com_google_absl//absl/numeric:bits",
"@com_google_protobuf//:protobuf",
],
@@ -392,6 +385,7 @@ cc_library(
"//internal:testing",
"@com_google_absl//absl/strings",
"@com_google_absl//absl/types:optional",
"@com_google_protobuf//:protobuf",
],
)

@@ -582,6 +576,7 @@ cc_test(
deps = [
":casting",
":memory",
":memory_testing",
":native_type",
":type",
"//internal:testing",
@@ -688,6 +683,7 @@ cc_test(
":casting",
":json",
":memory",
":memory_testing",
":native_type",
":type",
":value",
298 changes: 7 additions & 291 deletions common/memory.cc
@@ -14,292 +14,18 @@

#include "common/memory.h"

#include <algorithm>
#include <cstddef>
#include <cstdint>
#include <cstring>
#include <deque>
#include <limits>
#include <memory>
#include <new> // IWYU pragma: keep
#include <new>
#include <ostream>

#include "absl/base/attributes.h"
#include "absl/base/config.h" // IWYU pragma: keep
#include "absl/base/no_destructor.h"
#include "absl/base/nullability.h"
#include "absl/base/optimization.h"
#include "absl/log/absl_check.h"
#include "absl/log/absl_log.h"
#include "absl/log/die_if_null.h"
#include "absl/numeric/bits.h"
#include "common/native_type.h"
#include "internal/align.h"
#include "internal/new.h"
#include "internal/page_size.h"

#ifdef ABSL_HAVE_ADDRESS_SANITIZER
#include <sanitizer/asan_interface.h>
#else
#define ASAN_POISON_MEMORY_REGION(p, n)
#define ASAN_UNPOISON_MEMORY_REGION(p, n)
#endif
#include "google/protobuf/arena.h"

namespace cel {

namespace {

static_assert(sizeof(char) == 1);

struct CleanupAction final {
void* pointer;
void (*destruct)(void*);
};

struct Region final {
static Region* Create(size_t size, Region* prev) {
auto sized_ptr = internal::SizeReturningNew(size + sizeof(Region));
return ::new (sized_ptr.first)
Region(sized_ptr.second - sizeof(Region), prev);
}

const size_t size;
Region* const prev;

Region(size_t size, Region* prev) noexcept : size(size), prev(prev) {
ASAN_POISON_MEMORY_REGION(reinterpret_cast<void*>(begin()), size);
}

uintptr_t begin() const noexcept {
return reinterpret_cast<uintptr_t>(this) + sizeof(Region);
}

uintptr_t end() const noexcept { return begin() + size; }

bool Contains(uintptr_t address) const noexcept {
return address >= begin() && address < end();
}

void Destroy() noexcept {
ASAN_UNPOISON_MEMORY_REGION(reinterpret_cast<void*>(begin()), size);
void* const address = this;
const auto total_size = size + sizeof(Region);
this->~Region();
internal::SizedDelete(address, total_size);
}
};

ABSL_ATTRIBUTE_NORETURN void ThrowStdBadAlloc() {
#ifdef ABSL_HAVE_EXCEPTIONS
throw std::bad_alloc();
#else
ABSL_LOG(FATAL) << "std::bad_alloc";
#endif
}

constexpr bool IsSizeTooLarge(size_t size) {
return size > static_cast<size_t>(std::numeric_limits<ptrdiff_t>::max());
}

bool IsAlignmentTooLarge(size_t alignment) {
return alignment > internal::PageSize();
}

class ThreadCompatiblePoolingMemoryManager final : public PoolingMemoryManager {
public:
~ThreadCompatiblePoolingMemoryManager() override {
while (!cleanup_actions_.empty()) {
auto cleanup_action = cleanup_actions_.front();
cleanup_actions_.pop_front();
(*cleanup_action.destruct)(cleanup_action.pointer);
}
auto* last = last_;
while (last != nullptr) {
auto* prev = last->prev;
last->Destroy();
last = prev;
}
}

private:
size_t CalculateRegionSize(size_t min_capacity) const {
if (min_capacity <= min_region_size_) {
return min_region_size_;
}
if (min_capacity >= max_region_size_) {
return min_capacity;
}
size_t capacity = min_region_size_;
while (capacity < min_capacity) {
capacity *= 2;
}
return capacity;
}

absl::Nonnull<void*> AllocateImpl(size_t size, size_t align) override {
ABSL_DCHECK_NE(size, 0);
ABSL_DCHECK(absl::has_single_bit(align));
if (ABSL_PREDICT_FALSE(IsSizeTooLarge(size))) {
ThrowStdBadAlloc();
}
if (ABSL_PREDICT_FALSE(IsAlignmentTooLarge(align))) {
ThrowStdBadAlloc();
}
ABSL_ATTRIBUTE_UNUSED auto prev = prev_;
prev_ = next_;
#ifdef ABSL_HAVE_EXCEPTIONS
try {
#endif
if (ABSL_PREDICT_FALSE(next_ == 0)) {
// Allocate first region.
ABSL_DCHECK(first_ == nullptr);
ABSL_DCHECK(last_ == nullptr);
const size_t capacity = CalculateRegionSize(
internal::AlignUp(size + sizeof(Region), align));
first_ = last_ = Region::Create(capacity - sizeof(Region), nullptr);
prev_ = next_ = last_->begin();
}
uintptr_t address = internal::AlignUp(next_, align);
if (ABSL_PREDICT_FALSE(address < next_ || address >= last_->end() ||
last_->end() - address < size)) {
// Allocate new region.
const size_t capacity = CalculateRegionSize(
internal::AlignUp(size + sizeof(Region), align));
min_region_size_ = std::min(min_region_size_ * 2, max_region_size_);
last_ = Region::Create(capacity - sizeof(Region), last_);
address = internal::AlignUp(last_->begin(), align);
}
void* pointer = reinterpret_cast<void*>(address);
ABSL_DCHECK(internal::IsAligned(pointer, align));
next_ = address + size;
ASAN_UNPOISON_MEMORY_REGION(reinterpret_cast<void*>(address), size);
return pointer;
#ifdef ABSL_HAVE_EXCEPTIONS
} catch (...) {
prev_ = prev;
throw;
}
#endif
}

bool DeallocateImpl(absl::Nonnull<void*> pointer, size_t size,
size_t align) noexcept override {
ABSL_DCHECK(absl::has_single_bit(align));
ABSL_DCHECK_NE(size, 0);
ABSL_DCHECK(internal::IsAligned(pointer, align));
ABSL_DCHECK(!IsSizeTooLarge(size));
ABSL_DCHECK(!IsAlignmentTooLarge(align));
auto address = reinterpret_cast<uintptr_t>(pointer);
ABSL_DCHECK(address != 0);
if (next_ == 0 || prev_ == 0 || next_ == prev_ || address + size != next_) {
return false;
}
if (!last_->Contains(prev_)) {
auto* second_to_last = ABSL_DIE_IF_NULL(last_->prev); // Crash OK
ABSL_CHECK(second_to_last->Contains(prev_)); // Crash OK
last_->Destroy();
last_ = second_to_last;
}
next_ = prev_;
ASAN_POISON_MEMORY_REGION(reinterpret_cast<void*>(next_),
last_->end() - next_);
return true;
}

void OwnCustomDestructorImpl(
void* object, absl::Nonnull<void (*)(void*)> destruct) override {
ABSL_DCHECK(object != nullptr);
ABSL_DCHECK(destruct != nullptr);
cleanup_actions_.push_back(CleanupAction{object, destruct});
}

NativeTypeId GetNativeTypeId() const noexcept override {
return NativeTypeId::For<ThreadCompatiblePoolingMemoryManager>();
}

uintptr_t next_ = 0;
uintptr_t prev_ = 0;
Region* last_ = nullptr;
std::deque<CleanupAction> cleanup_actions_;
Region* first_ = nullptr;
// Currently we use the same constants that protobuf uses for its arenas.
// We could make these tunable.
size_t min_region_size_ = 256;
const size_t max_region_size_ = 32768;
};

class UnreachablePoolingMemoryManager final : public PoolingMemoryManager {
private:
absl::Nonnull<void*> AllocateImpl(size_t, size_t) override {
ABSL_LOG(FATAL) << "MemoryManager used after being moved";
}

bool DeallocateImpl(absl::Nonnull<void*>, size_t, size_t) noexcept override {
ABSL_LOG(FATAL) << "MemoryManager used after being moved";
}

void OwnCustomDestructorImpl(void*, absl::Nonnull<void (*)(void*)>) override {
ABSL_LOG(FATAL) << "MemoryManager used after being moved";
}

NativeTypeId GetNativeTypeId() const noexcept override {
return NativeTypeId::For<UnreachablePoolingMemoryManager>();
}
};

struct UnmanagedPoolingMemoryManager {
UnmanagedPoolingMemoryManager() = default;
UnmanagedPoolingMemoryManager(const UnmanagedPoolingMemoryManager&) = delete;
UnmanagedPoolingMemoryManager(UnmanagedPoolingMemoryManager&&) = delete;
UnmanagedPoolingMemoryManager& operator=(
const UnmanagedPoolingMemoryManager&) = delete;
UnmanagedPoolingMemoryManager& operator=(UnmanagedPoolingMemoryManager&&) =
delete;
};

absl::Nonnull<void*> UnmanagedPoolingMemoryManagerAllocate(absl::Nonnull<void*>,
size_t size,
size_t align) {
if (align <= __STDCPP_DEFAULT_NEW_ALIGNMENT__) {
return ::operator new(size);
}
return ::operator new(size, static_cast<std::align_val_t>(align));
}

bool UnmanagedPoolingMemoryManagerDeallocate(absl::Nonnull<void*>,
absl::Nonnull<void*> ptr,
size_t size,
size_t alignment) noexcept {
if (alignment <= __STDCPP_DEFAULT_NEW_ALIGNMENT__) {
#if defined(__cpp_sized_deallocation) && __cpp_sized_deallocation >= 201309L
::operator delete(ptr, size);
#else
::operator delete(ptr);
#endif
} else {
#if defined(__cpp_sized_deallocation) && __cpp_sized_deallocation >= 201309L
::operator delete(ptr, size, static_cast<std::align_val_t>(alignment));
#else
::operator delete(ptr, static_cast<std::align_val_t>(alignment));
#endif
}
return true;
}

void UnmanagedPoolingMemoryManagerOwnCustomDestructor(
absl::Nonnull<void*>, void*, absl::Nonnull<void (*)(void*)>) {}

const PoolingMemoryManagerVirtualTable& UnmanagedMemoryManagerVirtualTable() {
static const PoolingMemoryManagerVirtualTable vtable{
NativeTypeId::For<UnmanagedPoolingMemoryManager>(),
&UnmanagedPoolingMemoryManagerAllocate,
&UnmanagedPoolingMemoryManagerDeallocate,
&UnmanagedPoolingMemoryManagerOwnCustomDestructor};
return vtable;
}

} // namespace

std::ostream& operator<<(std::ostream& out,
MemoryManagement memory_management) {
switch (memory_management) {
@@ -347,21 +73,11 @@ bool ReferenceCountingMemoryManager::Deallocate(void* ptr, size_t size,
return true;
}

absl::Nonnull<std::unique_ptr<PoolingMemoryManager>>
NewThreadCompatiblePoolingMemoryManager() {
return std::make_unique<ThreadCompatiblePoolingMemoryManager>();
}

absl::Nonnull<PoolingMemoryManager*>
MemoryManager::UnreachablePooling() noexcept {
static absl::NoDestructor<UnreachablePoolingMemoryManager> instance;
return &*instance;
}

MemoryManagerRef MemoryManagerRef::Unmanaged() {
static UnmanagedPoolingMemoryManager instance;
return MemoryManagerRef::Pooling(UnmanagedMemoryManagerVirtualTable(),
instance);
MemoryManager MemoryManager::Unmanaged() {
// A static singleton arena, using `absl::NoDestructor` to avoid warnings
// related to static variables without trivial destructors.
static absl::NoDestructor<google::protobuf::Arena> arena;
return MemoryManager::Pooling(&*arena);
}

} // namespace cel
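
For context (not part of this commit): a minimal sketch of the pattern the new MemoryManager::Unmanaged() relies on, namely a process-lifetime google::protobuf::Arena held in absl::NoDestructor. The UnmanagedArena helper and the std::string payload are illustrative assumptions, not CEL API.

#include <string>

#include "absl/base/no_destructor.h"
#include "google/protobuf/arena.h"

// Returns a pointer to a process-lifetime arena. absl::NoDestructor skips the
// exit-time destructor, so memory owned by this arena is never released,
// matching the "unmanaged" semantics above. (Hypothetical helper for
// illustration only.)
google::protobuf::Arena* UnmanagedArena() {
  static absl::NoDestructor<google::protobuf::Arena> arena;
  return &*arena;
}

int main() {
  // Arena::Create constructs the object in arena-owned memory and, for types
  // with non-trivial destructors, registers the destructor with the arena.
  std::string* s =
      google::protobuf::Arena::Create<std::string>(UnmanagedArena(), "hello");
  return s->size() == 5 ? 0 : 1;
}
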