Diffstat (limited to 'absl/container')
 absl/container/inlined_vector.h              |  6 +++---
 absl/container/internal/raw_hash_set.h       | 13 ++++++++++++-
 absl/container/internal/raw_hash_set_test.cc | 41 +++++++++++++++++++++++++-
 3 files changed, 55 insertions(+), 5 deletions(-)
diff --git a/absl/container/inlined_vector.h b/absl/container/inlined_vector.h
index 0f066860..9b699b57 100644
--- a/absl/container/inlined_vector.h
+++ b/absl/container/inlined_vector.h
@@ -1303,10 +1303,10 @@ bool operator>=(const absl::InlinedVector<T, N, A>& a,
   return !(a < b);
 }
 
-// AbslHashValue()
+// `AbslHashValue()`
 //
-// Provides `absl::Hash` support for inlined vectors. You do not normally call
-// this function directly.
+// Provides `absl::Hash` support for `absl::InlinedVector`. You do not normally
+// call this function directly.
 template <typename H, typename TheT, size_t TheN, typename TheA>
 H AbslHashValue(H h, const absl::InlinedVector<TheT, TheN, TheA>& a) {
   auto a_data = a.data();
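
As a usage sketch (not part of the patch): `absl::Hash` picks up this `AbslHashValue()` overload via ADL, which is what lets an `absl::InlinedVector` serve directly as a key in Abseil's hashed containers.

    // Illustrative only; not from this commit.
    #include <cstddef>
    #include <cstdint>

    #include "absl/container/flat_hash_set.h"
    #include "absl/container/inlined_vector.h"
    #include "absl/hash/hash.h"

    int main() {
      absl::InlinedVector<int64_t, 4> key = {1, 2, 3};

      // absl::Hash finds the AbslHashValue() overload by ADL; user code
      // does not call it directly.
      const size_t h = absl::Hash<absl::InlinedVector<int64_t, 4>>{}(key);
      static_cast<void>(h);

      // The same hook is what makes InlinedVector usable as a key in
      // Abseil's hashed containers.
      absl::flat_hash_set<absl::InlinedVector<int64_t, 4>> keys;
      keys.insert(key);
      return 0;
    }
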
diff --git a/absl/container/internal/raw_hash_set.h b/absl/container/internal/raw_hash_set.h
index 85e33344..9c926812 100644
--- a/absl/container/internal/raw_hash_set.h
+++ b/absl/container/internal/raw_hash_set.h
@@ -1437,7 +1437,18 @@ class raw_hash_set {
   void initialize_slots() {
     assert(capacity_);
-    if (slots_ == nullptr) {
+    // Folks with custom allocators often make unwarranted assumptions about
+    // the behavior of their classes vis-a-vis trivial destructibility and what
+    // calls they will or won't make. Avoid sampling for people with custom
+    // allocators to get us out of this mess. This is not a hard guarantee but
+    // a workaround while we plan the exact guarantee we want to provide.
+    //
+    // People are often sloppy with the exact type of their allocator
+    // (sometimes it has an extra const or is missing the pair, but rebinds
+    // made it work anyway). To avoid the ambiguity, we work off SlotAlloc,
+    // which we have bound more carefully.
+    if (std::is_same<SlotAlloc, std::allocator<slot_type>>::value &&
+        slots_ == nullptr) {
       infoz_ = Sample();
     }
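
Why compare against SlotAlloc rather than the user-supplied allocator type? A minimal sketch of the idea (the names Slot and SlotAllocFor are illustrative stand-ins, not the actual raw_hash_set internals): rebinding first normalizes away an extra const or a mismatched value type, so a sloppy spelling of std::allocator is still recognized as the default allocator.

    // Illustrative only; Slot and SlotAllocFor stand in for the internal
    // slot type and the carefully rebound allocator alias.
    #include <memory>
    #include <type_traits>
    #include <utility>

    using Slot = std::pair<const int, int>;  // e.g. a map's slot type

    template <class Alloc>
    using SlotAllocFor =
        typename std::allocator_traits<Alloc>::template rebind_alloc<Slot>;

    // A user who wrote std::allocator<int> for a pair-slot table still
    // ends up with std::allocator<Slot> after the rebind, so the sampling
    // check above still sees the default allocator.
    static_assert(
        std::is_same<SlotAllocFor<std::allocator<int>>,
                     std::allocator<Slot>>::value,
        "rebinding normalizes sloppy std::allocator spellings");
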
diff --git a/absl/container/internal/raw_hash_set_test.cc b/absl/container/internal/raw_hash_set_test.cc
index 7d96ed90..871d85f1 100644
--- a/absl/container/internal/raw_hash_set_test.cc
+++ b/absl/container/internal/raw_hash_set_test.cc
@@ -343,7 +343,25 @@ struct IntTable
     : raw_hash_set<IntPolicy, container_internal::hash_default_hash<int64_t>,
                    std::equal_to<int64_t>, std::allocator<int64_t>> {
   using Base = typename IntTable::raw_hash_set;
-  IntTable() {}
+  using Base::Base;
+};
+
+template <typename T>
+struct CustomAlloc : std::allocator<T> {
+  CustomAlloc() {}
+
+  template <typename U>
+  CustomAlloc(const CustomAlloc<U>& other) {}
+
+  template <class U> struct rebind {
+    using other = CustomAlloc<U>;
+  };
+};
+
+struct CustomAllocIntTable
+    : raw_hash_set<IntPolicy, container_internal::hash_default_hash<int64_t>,
+                   std::equal_to<int64_t>, CustomAlloc<int64_t>> {
+  using Base = typename CustomAllocIntTable::raw_hash_set;
   using Base::Base;
 };
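
One detail worth noting about this fixture: CustomAlloc derives from std::allocator, yet it still disables sampling, because std::is_same compares exact types and ignores inheritance. A standalone sketch of that point (illustrative, mirroring the fixture above):

    // Illustrative only; mirrors the CustomAlloc fixture above.
    #include <cstdint>
    #include <memory>
    #include <type_traits>

    template <typename T>
    struct CustomAlloc : std::allocator<T> {
      CustomAlloc() {}
      template <typename U>
      CustomAlloc(const CustomAlloc<U>&) {}
      template <class U>
      struct rebind {
        using other = CustomAlloc<U>;
      };
    };

    // Exact-type comparison: deriving from std::allocator does not make
    // CustomAlloc "the" default allocator, so tables built on it skip
    // sampling under the new check in initialize_slots().
    static_assert(
        !std::is_same<CustomAlloc<int64_t>, std::allocator<int64_t>>::value,
        "CustomAlloc is a distinct type despite inheriting std::allocator");
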
@@ -1869,6 +1887,27 @@ TEST(RawHashSamplerTest, Sample) {
               0.01, 0.005);
 }
 
+TEST(RawHashSamplerTest, DoNotSampleCustomAllocators) {
+  // Enable the feature even if the prod default is off.
+  SetHashtablezEnabled(true);
+  SetHashtablezSampleParameter(100);
+
+  auto& sampler = HashtablezSampler::Global();
+  size_t start_size = 0;
+  start_size += sampler.Iterate([&](const HashtablezInfo&) { ++start_size; });
+
+  std::vector<CustomAllocIntTable> tables;
+  for (int i = 0; i < 1000000; ++i) {
+    tables.emplace_back();
+    tables.back().insert(1);
+  }
+  size_t end_size = 0;
+  end_size += sampler.Iterate([&](const HashtablezInfo&) { ++end_size; });
+
+  EXPECT_NEAR((end_size - start_size) / static_cast<double>(tables.size()),
+              0.00, 0.001);
+}
+
 #ifdef ADDRESS_SANITIZER
 TEST(Sanitizer, PoisoningUnused) {
   IntTable t;