-rw-r--r--  absl/container/internal/raw_hash_set.h                  | 22
-rw-r--r--  absl/container/internal/raw_hash_set_allocator_test.cc  | 25
-rw-r--r--  absl/container/internal/raw_hash_set_test.cc            | 13
3 files changed, 45 insertions(+), 15 deletions(-)
diff --git a/absl/container/internal/raw_hash_set.h b/absl/container/internal/raw_hash_set.h
index 9eee4fc7..a8fd80ca 100644
--- a/absl/container/internal/raw_hash_set.h
+++ b/absl/container/internal/raw_hash_set.h
@@ -846,9 +846,10 @@ class CommonFieldsGenerationInfoEnabled {
if (reserved_growth_ > 0) {
if (--reserved_growth_ == 0) reserved_growth_ = kReservedGrowthJustRanOut;
} else {
- *generation_ = NextGeneration(*generation_);
+ increment_generation();
}
}
+ void increment_generation() { *generation_ = NextGeneration(*generation_); }
void reset_reserved_growth(size_t reservation, size_t size) {
reserved_growth_ = reservation - size;
}
@@ -895,6 +896,7 @@ class CommonFieldsGenerationInfoDisabled {
return false;
}
void maybe_increment_generation_on_insert() {}
+ void increment_generation() {}
void reset_reserved_growth(size_t, size_t) {}
size_t reserved_growth() const { return 0; }
void set_reserved_growth(size_t) {}
@@ -1066,6 +1068,10 @@ class CommonFields : public CommonFieldsGenerationInfo {
return CommonFieldsGenerationInfo::
should_rehash_for_bug_detection_on_insert(control(), capacity());
}
+ void maybe_increment_generation_on_move() {
+ if (capacity() == 0) return;
+ increment_generation();
+ }
void reset_reserved_growth(size_t reservation) {
CommonFieldsGenerationInfo::reset_reserved_growth(reservation, size());
}
@@ -1219,7 +1225,7 @@ inline void AssertIsFull(const ctrl_t* ctrl, GenerationType generation,
if (ABSL_PREDICT_FALSE(generation != *generation_ptr)) {
ABSL_RAW_LOG(FATAL,
"%s called on invalid iterator. The table could have "
- "rehashed since this iterator was initialized.",
+ "rehashed or moved since this iterator was initialized.",
operation);
}
if (ABSL_PREDICT_FALSE(!IsFull(*ctrl))) {
@@ -1250,8 +1256,8 @@ inline void AssertIsValidForComparison(const ctrl_t* ctrl,
if (SwisstableGenerationsEnabled()) {
if (ABSL_PREDICT_FALSE(generation != *generation_ptr)) {
ABSL_RAW_LOG(FATAL,
- "Invalid iterator comparison. The table could have "
- "rehashed since this iterator was initialized.");
+ "Invalid iterator comparison. The table could have rehashed "
+ "or moved since this iterator was initialized.");
}
if (ABSL_PREDICT_FALSE(!ctrl_is_valid_for_comparison)) {
ABSL_RAW_LOG(
@@ -1338,8 +1344,8 @@ inline void AssertSameContainer(const ctrl_t* ctrl_a, const ctrl_t* ctrl_b,
ABSL_HARDENING_ASSERT(
AreItersFromSameContainer(ctrl_a, ctrl_b, slot_a, slot_b) &&
"Invalid iterator comparison. The iterators may be from different "
- "containers or the container might have rehashed. Consider running "
- "with --config=asan to diagnose rehashing issues.");
+ "containers or the container might have rehashed or moved. Consider "
+ "running with --config=asan to diagnose issues.");
}
}
@@ -1926,6 +1932,7 @@ class raw_hash_set {
settings_(std::move(that.common()), that.hash_ref(), that.eq_ref(),
that.alloc_ref()) {
that.common() = CommonFields{};
+ common().maybe_increment_generation_on_move();
}
raw_hash_set(raw_hash_set&& that, const allocator_type& a)
@@ -1935,6 +1942,7 @@ class raw_hash_set {
} else {
move_elements_allocs_unequal(std::move(that));
}
+ common().maybe_increment_generation_on_move();
}
raw_hash_set& operator=(const raw_hash_set& that) {
@@ -2707,6 +2715,7 @@ class raw_hash_set {
CopyAlloc(alloc_ref(), that.alloc_ref(),
std::integral_constant<bool, propagate_alloc>());
that.common() = CommonFields{};
+ common().maybe_increment_generation_on_move();
return *this;
}
@@ -2741,6 +2750,7 @@ class raw_hash_set {
// TODO(b/296061262): move instead of copying hash/eq.
hash_ref() = that.hash_ref();
eq_ref() = that.eq_ref();
+ common().maybe_increment_generation_on_move();
return move_elements_allocs_unequal(std::move(that));
}
diff --git a/absl/container/internal/raw_hash_set_allocator_test.cc b/absl/container/internal/raw_hash_set_allocator_test.cc
index 63448f4c..0bb61cff 100644
--- a/absl/container/internal/raw_hash_set_allocator_test.cc
+++ b/absl/container/internal/raw_hash_set_allocator_test.cc
@@ -284,32 +284,36 @@ TEST_F(NoPropagateOnCopy, CopyConstructorWithDifferentAlloc) {
}
TEST_F(PropagateOnAll, MoveConstructor) {
- auto it = t1.insert(0).first;
+ t1.insert(0);
Table u(std::move(t1));
+ auto it = u.begin();
EXPECT_EQ(1, a1.num_allocs());
EXPECT_EQ(0, it->num_moves());
EXPECT_EQ(0, it->num_copies());
}
TEST_F(NoPropagateOnMove, MoveConstructor) {
- auto it = t1.insert(0).first;
+ t1.insert(0);
Table u(std::move(t1));
+ auto it = u.begin();
EXPECT_EQ(1, a1.num_allocs());
EXPECT_EQ(0, it->num_moves());
EXPECT_EQ(0, it->num_copies());
}
TEST_F(PropagateOnAll, MoveConstructorWithSameAlloc) {
- auto it = t1.insert(0).first;
+ t1.insert(0);
Table u(std::move(t1), a1);
+ auto it = u.begin();
EXPECT_EQ(1, a1.num_allocs());
EXPECT_EQ(0, it->num_moves());
EXPECT_EQ(0, it->num_copies());
}
TEST_F(NoPropagateOnMove, MoveConstructorWithSameAlloc) {
- auto it = t1.insert(0).first;
+ t1.insert(0);
Table u(std::move(t1), a1);
+ auto it = u.begin();
EXPECT_EQ(1, a1.num_allocs());
EXPECT_EQ(0, it->num_moves());
EXPECT_EQ(0, it->num_copies());
@@ -378,9 +382,10 @@ TEST_F(NoPropagateOnCopy, CopyAssignmentWithDifferentAlloc) {
}
TEST_F(PropagateOnAll, MoveAssignmentWithSameAlloc) {
- auto it = t1.insert(0).first;
+ t1.insert(0);
Table u(0, a1);
u = std::move(t1);
+ auto it = u.begin();
EXPECT_EQ(a1, u.get_allocator());
EXPECT_EQ(1, a1.num_allocs());
EXPECT_EQ(0, it->num_moves());
@@ -388,9 +393,10 @@ TEST_F(PropagateOnAll, MoveAssignmentWithSameAlloc) {
}
TEST_F(NoPropagateOnMove, MoveAssignmentWithSameAlloc) {
- auto it = t1.insert(0).first;
+ t1.insert(0);
Table u(0, a1);
u = std::move(t1);
+ auto it = u.begin();
EXPECT_EQ(a1, u.get_allocator());
EXPECT_EQ(1, a1.num_allocs());
EXPECT_EQ(0, it->num_moves());
@@ -398,9 +404,10 @@ TEST_F(NoPropagateOnMove, MoveAssignmentWithSameAlloc) {
}
TEST_F(PropagateOnAll, MoveAssignmentWithDifferentAlloc) {
- auto it = t1.insert(0).first;
+ t1.insert(0);
Table u(0, a2);
u = std::move(t1);
+ auto it = u.begin();
EXPECT_EQ(a1, u.get_allocator());
EXPECT_EQ(1, a1.num_allocs());
EXPECT_EQ(0, a2.num_allocs());
@@ -409,10 +416,10 @@ TEST_F(PropagateOnAll, MoveAssignmentWithDifferentAlloc) {
}
TEST_F(NoPropagateOnMove, MoveAssignmentWithDifferentAlloc) {
- auto it = t1.insert(0).first;
+ t1.insert(0);
Table u(0, a2);
u = std::move(t1);
- it = u.find(0);
+ auto it = u.find(0);
EXPECT_EQ(a2, u.get_allocator());
EXPECT_EQ(1, a1.num_allocs());
EXPECT_EQ(1, a2.num_allocs());
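The allocator tests above are adjusted to obtain iterators from the destination table after the move, since iterators captured from the source beforehand now count as invalidated. A minimal sketch of that pattern (illustrative only, not part of this diff):

  // Illustrative only (not part of the diff): take iterators from the
  // destination table after the move, not from the source before it.
  #include <utility>
  #include "absl/container/flat_hash_set.h"

  void IterateAfterMove() {
    absl::flat_hash_set<int> src;
    src.insert(0);
    absl::flat_hash_set<int> dst(std::move(src));
    auto it = dst.begin();  // obtained after the move, so it is valid
    (void)*it;
  }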
diff --git a/absl/container/internal/raw_hash_set_test.cc b/absl/container/internal/raw_hash_set_test.cc
index 61eb6d95..d194ca1b 100644
--- a/absl/container/internal/raw_hash_set_test.cc
+++ b/absl/container/internal/raw_hash_set_test.cc
@@ -2370,6 +2370,19 @@ TEST(Iterator, InvalidUseWithReserveCrashesWithSanitizers) {
#endif
}
+TEST(Iterator, InvalidUseWithMoveCrashesWithSanitizers) {
+ if (!SwisstableGenerationsEnabled()) GTEST_SKIP() << "Generations disabled.";
+ if (kMsvc) GTEST_SKIP() << "MSVC doesn't support | in regexp.";
+
+ IntTable t1, t2;
+ t1.insert(1);
+ auto it = t1.begin();
+ t2 = std::move(t1);
+ EXPECT_DEATH_IF_SUPPORTED(*it, kInvalidIteratorDeathMessage);
+ EXPECT_DEATH_IF_SUPPORTED(void(it == t2.begin()),
+ kInvalidIteratorDeathMessage);
+}
+
TEST(Table, ReservedGrowthUpdatesWhenTableDoesntGrow) {
IntTable t;
for (int i = 0; i < 8; ++i) t.insert(i);