 absl/container/btree_test.cc    | 71 +++++++++++++++++++++++++++++++++++++++
 absl/container/internal/btree.h |  1 +
 2 files changed, 72 insertions(+), 0 deletions(-)
diff --git a/absl/container/btree_test.cc b/absl/container/btree_test.cc
index 4dae02c3..9386a6b1 100644
--- a/absl/container/btree_test.cc
+++ b/absl/container/btree_test.cc
@@ -3249,6 +3249,77 @@ TEST(Btree, NotAssignableType) {
   }
 }
 
+struct ArenaLike {
+  void* recycled = nullptr;
+  size_t recycled_size = 0;
+};
+
+// A very simple implementation of arena allocation.
+template <typename T>
+class ArenaLikeAllocator : public std::allocator<T> {
+ public:
+  // Standard library containers require the ability to allocate objects of
+  // different types, which they do via rebind::other.
+  template <typename U>
+  struct rebind {
+    using other = ArenaLikeAllocator<U>;
+  };
+
+  explicit ArenaLikeAllocator(ArenaLike* arena) noexcept : arena_(arena) {}
+
+  ~ArenaLikeAllocator() {
+    if (arena_->recycled != nullptr) {
+      delete [] static_cast<T*>(arena_->recycled);
+      arena_->recycled = nullptr;
+    }
+  }
+
+  template<typename U>
+  explicit ArenaLikeAllocator(const ArenaLikeAllocator<U>& other) noexcept
+      : arena_(other.arena_) {}
+
+  T* allocate(size_t num_objects, const void* = nullptr) {
+    size_t size = num_objects * sizeof(T);
+    if (arena_->recycled != nullptr && arena_->recycled_size == size) {
+      T* result = static_cast<T*>(arena_->recycled);
+      arena_->recycled = nullptr;
+      return result;
+    }
+    return new T[num_objects];
+  }
+
+  void deallocate(T* p, size_t num_objects) {
+    size_t size = num_objects * sizeof(T);
+
+    // Simulate writing to the freed memory as an actual arena allocator might
+    // do. This triggers an error report if the memory is poisoned.
+    memset(p, 0xde, size);
+
+    if (arena_->recycled == nullptr) {
+      arena_->recycled = p;
+      arena_->recycled_size = size;
+    } else {
+      delete [] p;
+    }
+  }
+
+  ArenaLike* arena_;
+};
+
+// This test verifies that an arena allocator that reuses memory will not be
+// asked to free poisoned BTree memory.
+TEST(Btree, ReusePoisonMemory) {
+  using Alloc = ArenaLikeAllocator<int64_t>;
+  using Set = absl::btree_set<int64_t, std::less<int64_t>, Alloc>;
+  ArenaLike arena;
+  Alloc alloc(&arena);
+  Set set(alloc);
+
+  set.insert(0);
+  set.erase(0);
+  set.insert(0);
+}
+
 }  // namespace
 }  // namespace container_internal
 ABSL_NAMESPACE_END
diff --git a/absl/container/internal/btree.h b/absl/container/internal/btree.h
index bdf2446e..e559bece 100644
--- a/absl/container/internal/btree.h
+++ b/absl/container/internal/btree.h
@@ -958,6 +958,7 @@ class btree_node {
   static void deallocate(const size_type size, btree_node *node,
                          allocator_type *alloc) {
+    absl::container_internal::SanitizerUnpoisonMemoryRegion(node, size);
     absl::container_internal::Deallocate<Alignment()>(alloc, node, size);
   }
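
The one-line fix in btree.h calls SanitizerUnpoisonMemoryRegion, Abseil's wrapper over the sanitizer poisoning interface, before handing the node back to the allocator. The ordering it enforces can be reproduced in isolation with the raw ASan macros. The following is a standalone sketch, not Abseil code, and it only shows an effect when built with -fsanitize=address:

#include <sanitizer/asan_interface.h>

#include <cstdlib>
#include <cstring>

int main() {
  char* node = static_cast<char*>(std::malloc(64));

  // The b-tree poisons unused parts of its nodes so that stray reads and
  // writes are reported by ASan.
  ASAN_POISON_MEMORY_REGION(node, 64);

  // Before the fix, btree_node::deallocate returned the node still poisoned.
  // An allocator that writes to recycled memory, as
  // ArenaLikeAllocator::deallocate does with memset, would then trigger a
  // use-after-poison report. Unpoisoning first makes the write legal:
  ASAN_UNPOISON_MEMORY_REGION(node, 64);
  std::memset(node, 0xde, 64);

  std::free(node);
  return 0;
}

Deleting the ASAN_UNPOISON_MEMORY_REGION line reproduces the failure mode that the new ReusePoisonMemory test guards against.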
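Back in the test, the comment above rebind is worth unpacking: absl::btree_set is instantiated with an allocator of int64_t, yet it never allocates individual int64_t values; it rebinds the allocator to its internal node type and allocates whole nodes. Below is a minimal self-contained sketch of that mechanism (C++17), where MiniAllocator and Node are made-up stand-ins rather than Abseil's actual types:

#include <cstdint>
#include <memory>
#include <type_traits>

template <typename T>
struct MiniAllocator {
  using value_type = T;

  // Same shape as ArenaLikeAllocator's rebind in the test above.
  template <typename U>
  struct rebind {
    using other = MiniAllocator<U>;
  };

  T* allocate(std::size_t n) { return std::allocator<T>().allocate(n); }
  void deallocate(T* p, std::size_t n) { std::allocator<T>().deallocate(p, n); }
};

// Stand-in for btree_node; the real node layout lives in btree.h.
struct Node {
  std::int64_t slots[16];
};

// A container given MiniAllocator<int64_t> obtains an allocator for its
// nodes by rebinding, which resolves to MiniAllocator<Node>:
using NodeAlloc =
    std::allocator_traits<MiniAllocator<std::int64_t>>::rebind_alloc<Node>;
static_assert(std::is_same_v<NodeAlloc, MiniAllocator<Node>>);

int main() {}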