author    Abseil Team <absl-team@google.com>             2023-12-12 12:40:28 -0800
committer Copybara-Service <copybara-worker@google.com>  2023-12-12 12:41:18 -0800
commit ad0a6d2faf803645c8126f0b67eee2eaad98bc3f (patch)
tree   cfa0b4934ff762c9715542e4e7adc5a90811153f
parent 011aeedefed45e37789671cd7c6cda08e5efca70 (diff)
Add `MaskFull` to `Group`.
It is not used at the moment. Usage is planned to be submitted separately.
Useful for faster iteration over all slots in the internal functions.

PiperOrigin-RevId: 590300049
Change-Id: I081f33113268761db868771d29796d94c24e4e7a
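The point of the new mask is to visit only the full slots of a group in one step, rather than probing each control byte individually. A minimal sketch of the intended pattern, assuming access to the raw_hash_set internals (`ForEachFullInGroup` and its callback are illustrative, not part of this commit):

    // Sketch only: invokes cb(i) for every full slot index i in one group.
    // `ctrl` points at the group's control bytes.
    template <class Callback>
    void ForEachFullInGroup(const ctrl_t* ctrl, Callback cb) {
      Group g{ctrl};
      for (uint32_t i : g.MaskFull()) {  // BitMask iterates set positions
        cb(i);
      }
    }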
-rw-r--r--  absl/container/internal/raw_hash_set.h       | 27
-rw-r--r--  absl/container/internal/raw_hash_set_test.cc | 44
2 files changed, 59 insertions(+), 12 deletions(-)
diff --git a/absl/container/internal/raw_hash_set.h b/absl/container/internal/raw_hash_set.h
index b6d2cf93..3b3c5aa3 100644
--- a/absl/container/internal/raw_hash_set.h
+++ b/absl/container/internal/raw_hash_set.h
@@ -637,6 +637,14 @@ struct GroupSse2Impl {
#endif
}
+ // Returns a bitmask representing the positions of full slots.
+ // Note: for `is_small()` tables, the group may contain the "same" slot
+ // twice: original and mirrored.
+ BitMask<uint16_t, kWidth> MaskFull() const {
+ return BitMask<uint16_t, kWidth>(
+ static_cast<uint16_t>(_mm_movemask_epi8(ctrl) ^ 0xffff));
+ }
+
// Returns a bitmask representing the positions of empty or deleted slots.
NonIterableBitMask<uint16_t, kWidth> MaskEmptyOrDeleted() const {
auto special = _mm_set1_epi8(static_cast<char>(ctrl_t::kSentinel));
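For reference: full control bytes hold a 7-bit hash, so their sign bit is clear, while kEmpty (-128), kDeleted (-2), and kSentinel (-1) all have it set. `_mm_movemask_epi8` gathers the sign bit of each of the 16 bytes into an int, producing a 1 for every non-full slot; XOR with 0xffff flips that into a mask of full slots. A standalone demonstration of the bit trick, with illustrative byte values:

    #include <emmintrin.h>
    #include <cstdint>
    #include <cstdio>

    int main() {
      // 0x80 stands in for an empty/deleted/sentinel byte (sign bit set);
      // 0x01 stands in for a full slot's 7-bit hash (sign bit clear).
      alignas(16) const uint8_t ctrl[16] = {0x80, 0x01, 0x80, 0x01, 0x80, 0x01,
                                            0x80, 0x01, 0x80, 0x80, 0x80, 0x80,
                                            0x80, 0x80, 0x80, 0x01};
      const __m128i v = _mm_load_si128(reinterpret_cast<const __m128i*>(ctrl));
      const auto non_full = static_cast<uint16_t>(_mm_movemask_epi8(v));
      const auto full = static_cast<uint16_t>(non_full ^ 0xffff);
      std::printf("full mask = 0x%04x\n", full);  // 0x80aa: slots 1,3,5,7,15
    }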
@@ -692,6 +700,17 @@ struct GroupAArch64Impl {
return NonIterableBitMask<uint64_t, kWidth, 3>(mask);
}
+ // Returns a bitmask representing the positions of full slots.
+ // Note: for `is_small()` tables, the group may contain the "same" slot
+ // twice: original and mirrored.
+ BitMask<uint64_t, kWidth, 3> MaskFull() const {
+ uint64_t mask = vget_lane_u64(
+ vreinterpret_u64_u8(vcge_s8(vreinterpret_s8_u8(ctrl),
+ vdup_n_s8(static_cast<int8_t>(0)))),
+ 0);
+ return BitMask<uint64_t, kWidth, 3>(mask);
+ }
+
NonIterableBitMask<uint64_t, kWidth, 3> MaskEmptyOrDeleted() const {
uint64_t mask =
vget_lane_u64(vreinterpret_u64_u8(vcgt_s8(
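AArch64 has no movemask equivalent, so `vcge_s8(ctrl, 0)` widens the comparison instead: each full byte (value >= 0) becomes 0xff in the 8-byte vector and each non-full byte becomes 0x00. Every full slot therefore contributes 8 set bits to the resulting uint64_t, which is why this BitMask is instantiated with Shift = 3 (slot index = bit position >> 3). A scalar model of what the intrinsics compute, using the same values as the 8-wide test case in raw_hash_set_test.cc:

    #include <cstdint>
    #include <cstdio>

    int main() {
      // Scalar model of vcge_s8(ctrl, 0): kEmpty = -128, kDeleted = -2,
      // kSentinel = -1; full bytes hold a non-negative 7-bit hash.
      const int8_t ctrl[8] = {-128, 1, -128, -2, 2, -1, -1, 1};
      uint64_t mask = 0;
      for (int i = 0; i < 8; ++i) {
        if (ctrl[i] >= 0) mask |= uint64_t{0xff} << (8 * i);  // full lane
      }
      // Full slots 1, 4, 7 -> bytes 1, 4, 7 are all-ones; Shift=3 maps
      // bit positions back to slot indices.
      std::printf("mask = 0x%016llx\n",
                  static_cast<unsigned long long>(mask));  // 0xff0000ff0000ff00
    }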
@@ -760,6 +779,14 @@ struct GroupPortableImpl {
msbs);
}
+ // Returns a bitmask representing the positions of full slots.
+ // Note: for `is_small()` tables, the group may contain the "same" slot
+ // twice: original and mirrored.
+ BitMask<uint64_t, kWidth, 3> MaskFull() const {
+ constexpr uint64_t msbs = 0x8080808080808080ULL;
+ return BitMask<uint64_t, kWidth, 3>((ctrl ^ msbs) & msbs);
+ }
+
NonIterableBitMask<uint64_t, kWidth, 3> MaskEmptyOrDeleted() const {
constexpr uint64_t msbs = 0x8080808080808080ULL;
return NonIterableBitMask<uint64_t, kWidth, 3>((ctrl & ~(ctrl << 7)) &
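The portable fallback uses the same sign-bit observation without SIMD: `ctrl ^ msbs` flips the most significant bit of every byte in the group word, so that bit ends up set exactly where the control byte was full, and `& msbs` clears the remaining seven bits of each byte. That leaves one set bit (bit 7 of its byte) per full slot, hence Shift = 3 again. A small check of the arithmetic, with byte values chosen to match the control constants:

    #include <cstdint>
    #include <cstdio>

    int main() {
      // Little-endian group word: byte i is slot i's control byte.
      // Bytes: 0x80 kEmpty, 0x01 full, 0x80 kEmpty, 0xfe kDeleted,
      //        0x02 full, 0xff kSentinel, 0xff kSentinel, 0x01 full.
      const uint64_t ctrl = 0x01ffff02fe800180ULL;
      constexpr uint64_t msbs = 0x8080808080808080ULL;
      const uint64_t full = (ctrl ^ msbs) & msbs;  // one MSB per full byte
      std::printf("full = 0x%016llx\n",
                  static_cast<unsigned long long>(full));  // 0x8000008000008000
    }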
diff --git a/absl/container/internal/raw_hash_set_test.cc b/absl/container/internal/raw_hash_set_test.cc
index 756b9df4..1a236e9a 100644
--- a/absl/container/internal/raw_hash_set_test.cc
+++ b/absl/container/internal/raw_hash_set_test.cc
@@ -232,6 +232,25 @@ TEST(Group, MaskEmpty) {
}
}
+TEST(Group, MaskFull) {
+ if (Group::kWidth == 16) {
+ ctrl_t group[] = {
+ ctrl_t::kEmpty, CtrlT(1), ctrl_t::kDeleted, CtrlT(3),
+ ctrl_t::kEmpty, CtrlT(5), ctrl_t::kSentinel, CtrlT(7),
+ CtrlT(7), CtrlT(5), ctrl_t::kDeleted, CtrlT(1),
+ CtrlT(1), ctrl_t::kSentinel, ctrl_t::kEmpty, CtrlT(1)};
+ EXPECT_THAT(Group{group}.MaskFull(),
+ ElementsAre(1, 3, 5, 7, 8, 9, 11, 12, 15));
+ } else if (Group::kWidth == 8) {
+ ctrl_t group[] = {ctrl_t::kEmpty, CtrlT(1), ctrl_t::kEmpty,
+ ctrl_t::kDeleted, CtrlT(2), ctrl_t::kSentinel,
+ ctrl_t::kSentinel, CtrlT(1)};
+ EXPECT_THAT(Group{group}.MaskFull(), ElementsAre(1, 4, 7));
+ } else {
+ FAIL() << "No test coverage for Group::kWidth==" << Group::kWidth;
+ }
+}
+
TEST(Group, MaskEmptyOrDeleted) {
if (Group::kWidth == 16) {
ctrl_t group[] = {ctrl_t::kEmpty, CtrlT(1), ctrl_t::kEmpty, CtrlT(3),
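These expectations work because `BitMask` models an input range: iteration repeatedly extracts the lowest set bit and yields its position shifted right by Shift, so gMock's ElementsAre sees the full-slot indices in order. For example, with the 16-wide mask value from the SSE2 sketch above (fragment, assuming the internal header):

    BitMask<uint16_t, 16> m(0x80aa);             // bits 1, 3, 5, 7, 15 set
    for (uint32_t i : m) std::printf("%u ", i);  // prints: 1 3 5 7 15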
@@ -294,8 +313,8 @@ TEST(Group, CountLeadingEmptyOrDeleted) {
std::vector<ctrl_t> f(Group::kWidth, empty);
f[Group::kWidth * 2 / 3] = full;
f[Group::kWidth / 2] = full;
- EXPECT_EQ(
- Group::kWidth / 2, Group{f.data()}.CountLeadingEmptyOrDeleted());
+ EXPECT_EQ(Group::kWidth / 2,
+ Group{f.data()}.CountLeadingEmptyOrDeleted());
}
}
}
@@ -433,7 +452,8 @@ struct CustomAlloc : std::allocator<T> {
template <typename U>
explicit CustomAlloc(const CustomAlloc<U>& /*other*/) {}
- template<class U> struct rebind {
+ template <class U>
+ struct rebind {
using other = CustomAlloc<U>;
};
};
@@ -1361,15 +1381,15 @@ ExpectedStats XorSeedExpectedStats() {
switch (container_internal::Group::kWidth) {
case 8:
if (kRandomizesInserts) {
- return {0.05,
- 1.0,
- {{0.95, 0.5}},
- {{0.95, 0}, {0.99, 2}, {0.999, 4}, {0.9999, 10}}};
+ return {0.05,
+ 1.0,
+ {{0.95, 0.5}},
+ {{0.95, 0}, {0.99, 2}, {0.999, 4}, {0.9999, 10}}};
} else {
- return {0.05,
- 2.0,
- {{0.95, 0.1}},
- {{0.95, 0}, {0.99, 2}, {0.999, 4}, {0.9999, 10}}};
+ return {0.05,
+ 2.0,
+ {{0.95, 0.1}},
+ {{0.95, 0}, {0.99, 2}, {0.999, 4}, {0.9999, 10}}};
}
case 16:
if (kRandomizesInserts) {
@@ -1415,7 +1435,7 @@ ProbeStats CollectProbeStatsOnLinearlyTransformedKeys(
std::random_device rd;
std::mt19937 rng(rd());
auto linear_transform = [](size_t x, size_t y) { return x * 17 + y * 13; };
- std::uniform_int_distribution<size_t> dist(0, keys.size()-1);
+ std::uniform_int_distribution<size_t> dist(0, keys.size() - 1);
while (num_iters--) {
IntTable t1;
size_t num_keys = keys.size() / 10;