author    Abseil Team <absl-team@google.com>  2022-09-19 09:30:00 -0700
committer Copybara-Service <copybara-worker@google.com>  2022-09-19 09:30:43 -0700
commit  d4607b3f05f3f22dda383efcb0aba7e7d9359823 (patch)
tree    7e194615cf3beeeb43b041f7cbb5d7f914a81709
parent  5937b7f9d123e6b01064fdb488d6c96d28d99a75 (diff)
Make BTrees work with custom allocators that recycle memory.
The btree data structure poisons regions of memory it's not using. It leaves
these regions poisoned when it frees memory. This means that a custom memory
allocator that tries to reuse freed memory will trigger an ASAN
use-after-poison error. The fix is to unpoison each memory region right
before freeing it.

PiperOrigin-RevId: 475309671
Change-Id: I29d55c298d3d89a83e1f960deb6e93118891ff83
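For context, the poisoning in question is done with the AddressSanitizer
annotation macros. The following standalone sketch is illustrative only (it
is not part of this change); it uses <sanitizer/asan_interface.h> and must be
built with -fsanitize=address:

    #include <sanitizer/asan_interface.h>

    #include <cstdlib>

    int main() {
      void* region = std::malloc(64);
      // btree marks node memory it is not currently using as off-limits:
      ASAN_POISON_MEMORY_REGION(region, 64);

      // Before this change, the region was freed while still poisoned, so a
      // recycling allocator's next write into it reported use-after-poison.
      // The fix unpoisons right before the memory is handed back:
      ASAN_UNPOISON_MEMORY_REGION(region, 64);
      std::free(region);
      return 0;
    }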
-rw-r--r--  absl/container/btree_test.cc     71
-rw-r--r--  absl/container/internal/btree.h   1
2 files changed, 72 insertions(+), 0 deletions(-)
diff --git a/absl/container/btree_test.cc b/absl/container/btree_test.cc
index 4dae02c3..9386a6b1 100644
--- a/absl/container/btree_test.cc
+++ b/absl/container/btree_test.cc
@@ -3249,6 +3249,77 @@ TEST(Btree, NotAssignableType) {
}
}
+struct ArenaLike {
+ void* recycled = nullptr;
+ size_t recycled_size = 0;
+};
+
+// A very simple implementation of arena allocation.
+template <typename T>
+class ArenaLikeAllocator : public std::allocator<T> {
+ public:
+  // Standard library containers require the ability to allocate objects of
+  // different types, which they do via rebind<U>::other.
+ template <typename U>
+ struct rebind {
+ using other = ArenaLikeAllocator<U>;
+ };
+
+ explicit ArenaLikeAllocator(ArenaLike* arena) noexcept : arena_(arena) {}
+
+ ~ArenaLikeAllocator() {
+ if (arena_->recycled != nullptr) {
+ delete [] static_cast<T*>(arena_->recycled);
+ arena_->recycled = nullptr;
+ }
+ }
+
+ template<typename U>
+ explicit ArenaLikeAllocator(const ArenaLikeAllocator<U>& other) noexcept
+ : arena_(other.arena_) {}
+
+ T* allocate(size_t num_objects, const void* = nullptr) {
+ size_t size = num_objects * sizeof(T);
+ if (arena_->recycled != nullptr && arena_->recycled_size == size) {
+ T* result = static_cast<T*>(arena_->recycled);
+ arena_->recycled = nullptr;
+ return result;
+ }
+ return new T[num_objects];
+ }
+
+ void deallocate(T* p, size_t num_objects) {
+ size_t size = num_objects * sizeof(T);
+
+ // Simulate writing to the freed memory as an actual arena allocator might
+ // do. This triggers an error report if the memory is poisoned.
+ memset(p, 0xde, size);
+
+ if (arena_->recycled == nullptr) {
+ arena_->recycled = p;
+ arena_->recycled_size = size;
+ } else {
+ delete [] p;
+ }
+ }
+
+ ArenaLike* arena_;
+};
+
+// This test verifies that an arena allocator that reuses memory will not be
+// asked to free poisoned BTree memory.
+TEST(Btree, ReusePoisonMemory) {
+ using Alloc = ArenaLikeAllocator<int64_t>;
+ using Set = absl::btree_set<int64_t, std::less<int64_t>, Alloc>;
+ ArenaLike arena;
+ Alloc alloc(&arena);
+ Set set(alloc);
+
+ set.insert(0);
+ set.erase(0);
+ set.insert(0);
+}
+
} // namespace
} // namespace container_internal
ABSL_NAMESPACE_END
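An aside on the rebind machinery the test allocator provides: btree_set does
not allocate int64_t objects directly; it rebinds the user's allocator to its
internal node type through std::allocator_traits. A minimal sketch of that
mechanism (Node and RebindSketch are illustrative names, not from the
commit):

    #include <memory>

    struct Node {};  // stand-in for a container's internal node type

    template <typename Alloc>
    void RebindSketch(const Alloc& alloc) {
      // allocator_traits consults Alloc::rebind<Node>::other to obtain an
      // allocator of Node, then constructs it through the converting
      // constructor, e.g. ArenaLikeAllocator(const ArenaLikeAllocator<U>&).
      using NodeAlloc =
          typename std::allocator_traits<Alloc>::template rebind_alloc<Node>;
      NodeAlloc node_alloc(alloc);
      Node* n = node_alloc.allocate(1);
      node_alloc.deallocate(n, 1);
    }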
diff --git a/absl/container/internal/btree.h b/absl/container/internal/btree.h
index bdf2446e..e559bece 100644
--- a/absl/container/internal/btree.h
+++ b/absl/container/internal/btree.h
@@ -958,6 +958,7 @@ class btree_node {
static void deallocate(const size_type size, btree_node *node,
allocator_type *alloc) {
+ absl::container_internal::SanitizerUnpoisonMemoryRegion(node, size);
absl::container_internal::Deallocate<Alignment()>(alloc, node, size);
}