Diffstat (limited to 'absl/container')
-rw-r--r-- | absl/container/internal/raw_hash_set.cc           |  10
-rw-r--r-- | absl/container/internal/raw_hash_set.h            | 106
-rw-r--r-- | absl/container/internal/raw_hash_set_benchmark.cc |  25
-rw-r--r-- | absl/container/internal/raw_hash_set_test.cc      |  57
4 files changed, 121 insertions, 77 deletions
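The headline change in this diff is the switch from `using ctrl_t = signed char` to `enum class ctrl_t : int8_t`. As the new header comment below puts it, a scoped enum is a distinct type, so under strict aliasing the compiler knows control bytes don't alias other `signed char` data. The cost is that every integer conversion becomes explicit, which drives most of the mechanical churn in the four files. A minimal standalone sketch of the call-site impact (demo code, not Abseil source; the `main` is illustrative):

#include <cstdint>

// New scheme (sketch): a scoped enum with a fixed 8-bit underlying type.
// The old scheme was: using ctrl_t = signed char; enum Ctrl : ctrl_t { ... };
enum class ctrl_t : int8_t {
  kEmpty = -128,
  kDeleted = -2,
  kSentinel = -1,
};

int main() {
  // Conversions in both directions are now explicit.
  ctrl_t c = static_cast<ctrl_t>(3);    // int -> ctrl_t (used to be implicit)
  int8_t raw = static_cast<int8_t>(c);  // ctrl_t -> int8_t (ditto)
  // Comparing two ctrl_t values still needs no cast, so predicates like
  // IsEmpty() below only had to qualify the enumerator name.
  bool is_empty = (c == ctrl_t::kEmpty);
  return (raw == 3 && !is_empty) ? 0 : 1;
}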
diff --git a/absl/container/internal/raw_hash_set.cc b/absl/container/internal/raw_hash_set.cc
index f5a9ce68..a5dee6aa 100644
--- a/absl/container/internal/raw_hash_set.cc
+++ b/absl/container/internal/raw_hash_set.cc
@@ -37,25 +37,23 @@ inline size_t RandomSeed() {
   return value ^ static_cast<size_t>(reinterpret_cast<uintptr_t>(&counter));
 }
 
-bool ShouldInsertBackwards(size_t hash, ctrl_t* ctrl) {
+bool ShouldInsertBackwards(size_t hash, const ctrl_t* ctrl) {
   // To avoid problems with weak hashes and single bit tests, we use % 13.
   // TODO(kfm,sbenza): revisit after we do unconditional mixing
   return (H1(hash, ctrl) ^ RandomSeed()) % 13 > 6;
 }
 
-void ConvertDeletedToEmptyAndFullToDeleted(
-    ctrl_t* ctrl, size_t capacity) {
-  assert(ctrl[capacity] == kSentinel);
+void ConvertDeletedToEmptyAndFullToDeleted(ctrl_t* ctrl, size_t capacity) {
+  assert(ctrl[capacity] == ctrl_t::kSentinel);
   assert(IsValidCapacity(capacity));
   for (ctrl_t* pos = ctrl; pos < ctrl + capacity; pos += Group::kWidth) {
     Group{pos}.ConvertSpecialToEmptyAndFullToDeleted(pos);
   }
   // Copy the cloned ctrl bytes.
   std::memcpy(ctrl + capacity + 1, ctrl, NumClonedBytes());
-  ctrl[capacity] = kSentinel;
+  ctrl[capacity] = ctrl_t::kSentinel;
 }
-
 }  // namespace container_internal
 ABSL_NAMESPACE_END
 }  // namespace absl
diff --git a/absl/container/internal/raw_hash_set.h b/absl/container/internal/raw_hash_set.h
index 675c9148..d7783263 100644
--- a/absl/container/internal/raw_hash_set.h
+++ b/absl/container/internal/raw_hash_set.h
@@ -252,48 +252,57 @@ class BitMask {
   T mask_;
 };
 
-using ctrl_t = signed char;
 using h2_t = uint8_t;
 
 // The values here are selected for maximum performance. See the static asserts
-// below for details.
-enum Ctrl : ctrl_t {
+// below for details. We use an enum class so that when strict aliasing is
+// enabled, the compiler knows ctrl_t doesn't alias other types.
+enum class ctrl_t : int8_t {
   kEmpty = -128,   // 0b10000000
   kDeleted = -2,   // 0b11111110
   kSentinel = -1,  // 0b11111111
 };
 static_assert(
-    kEmpty & kDeleted & kSentinel & 0x80,
+    (static_cast<int8_t>(ctrl_t::kEmpty) &
+     static_cast<int8_t>(ctrl_t::kDeleted) &
+     static_cast<int8_t>(ctrl_t::kSentinel) & 0x80) != 0,
     "Special markers need to have the MSB to make checking for them efficient");
-static_assert(kEmpty < kSentinel && kDeleted < kSentinel,
-              "kEmpty and kDeleted must be smaller than kSentinel to make the "
-              "SIMD test of IsEmptyOrDeleted() efficient");
-static_assert(kSentinel == -1,
-              "kSentinel must be -1 to elide loading it from memory into SIMD "
-              "registers (pcmpeqd xmm, xmm)");
-static_assert(kEmpty == -128,
-              "kEmpty must be -128 to make the SIMD check for its "
+static_assert(
+    ctrl_t::kEmpty < ctrl_t::kSentinel && ctrl_t::kDeleted < ctrl_t::kSentinel,
+    "ctrl_t::kEmpty and ctrl_t::kDeleted must be smaller than "
+    "ctrl_t::kSentinel to make the SIMD test of IsEmptyOrDeleted() efficient");
+static_assert(
+    ctrl_t::kSentinel == static_cast<ctrl_t>(-1),
+    "ctrl_t::kSentinel must be -1 to elide loading it from memory into SIMD "
+    "registers (pcmpeqd xmm, xmm)");
+static_assert(ctrl_t::kEmpty == static_cast<ctrl_t>(-128),
+              "ctrl_t::kEmpty must be -128 to make the SIMD check for its "
               "existence efficient (psignb xmm, xmm)");
-static_assert(~kEmpty & ~kDeleted & kSentinel & 0x7F,
-              "kEmpty and kDeleted must share an unset bit that is not shared "
-              "by kSentinel to make the scalar test for MatchEmptyOrDeleted() "
-              "efficient");
-static_assert(kDeleted == -2,
-              "kDeleted must be -2 to make the implementation of "
+static_assert(
+    (~static_cast<int8_t>(ctrl_t::kEmpty) &
+     ~static_cast<int8_t>(ctrl_t::kDeleted) &
+     static_cast<int8_t>(ctrl_t::kSentinel) & 0x7F) != 0,
+    "ctrl_t::kEmpty and ctrl_t::kDeleted must share an unset bit that is not "
+    "shared by ctrl_t::kSentinel to make the scalar test for "
+    "MatchEmptyOrDeleted() efficient");
+static_assert(ctrl_t::kDeleted == static_cast<ctrl_t>(-2),
+              "ctrl_t::kDeleted must be -2 to make the implementation of "
               "ConvertSpecialToEmptyAndFullToDeleted efficient");
 
 // A single block of empty control bytes for tables without any slots allocated.
 // This enables removing a branch in the hot path of find().
 inline ctrl_t* EmptyGroup() {
   alignas(16) static constexpr ctrl_t empty_group[] = {
-      kSentinel, kEmpty, kEmpty, kEmpty, kEmpty, kEmpty, kEmpty, kEmpty,
-      kEmpty,    kEmpty, kEmpty, kEmpty, kEmpty, kEmpty, kEmpty, kEmpty};
+      ctrl_t::kSentinel, ctrl_t::kEmpty, ctrl_t::kEmpty, ctrl_t::kEmpty,
+      ctrl_t::kEmpty,    ctrl_t::kEmpty, ctrl_t::kEmpty, ctrl_t::kEmpty,
+      ctrl_t::kEmpty,    ctrl_t::kEmpty, ctrl_t::kEmpty, ctrl_t::kEmpty,
+      ctrl_t::kEmpty,    ctrl_t::kEmpty, ctrl_t::kEmpty, ctrl_t::kEmpty};
   return const_cast<ctrl_t*>(empty_group);
 }
 
 // Mixes a randomly generated per-process seed with `hash` and `ctrl` to
 // randomize insertion order within groups.
-bool ShouldInsertBackwards(size_t hash, ctrl_t* ctrl);
+bool ShouldInsertBackwards(size_t hash, const ctrl_t* ctrl);
 
 // Returns a hash seed.
 //
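The relations those static_asserts encode can be checked in isolation. A small self-contained sketch (hypothetical demo code, not part of the commit) that evaluates the same three bit tests on the raw values:

#include <cstdint>
#include <cstdio>

int main() {
  const int8_t kEmpty = -128;   // 0b10000000
  const int8_t kDeleted = -2;   // 0b11111110
  const int8_t kSentinel = -1;  // 0b11111111
  // All three special markers share the MSB, so "is special" is one AND.
  std::printf("MSB shared: %d\n",
              (kEmpty & kDeleted & kSentinel & 0x80) != 0);
  // kEmpty and kDeleted share an unset bit (bit 0) that kSentinel has set,
  // which gives MatchEmptyOrDeleted() a cheap scalar form.
  std::printf("shared unset bit: %d\n",
              (~kEmpty & ~kDeleted & kSentinel & 0x7F) != 0);
  // Both sort below kSentinel, which the SIMD pcmpgt test relies on.
  std::printf("below sentinel: %d\n",
              kEmpty < kSentinel && kDeleted < kSentinel);
  return 0;
}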
@@ -309,12 +318,12 @@ inline size_t HashSeed(const ctrl_t* ctrl) {
 inline size_t H1(size_t hash, const ctrl_t* ctrl) {
   return (hash >> 7) ^ HashSeed(ctrl);
 }
-inline ctrl_t H2(size_t hash) { return hash & 0x7F; }
+inline h2_t H2(size_t hash) { return hash & 0x7F; }
 
-inline bool IsEmpty(ctrl_t c) { return c == kEmpty; }
-inline bool IsFull(ctrl_t c) { return c >= 0; }
-inline bool IsDeleted(ctrl_t c) { return c == kDeleted; }
-inline bool IsEmptyOrDeleted(ctrl_t c) { return c < kSentinel; }
+inline bool IsEmpty(ctrl_t c) { return c == ctrl_t::kEmpty; }
+inline bool IsFull(ctrl_t c) { return c >= static_cast<ctrl_t>(0); }
+inline bool IsDeleted(ctrl_t c) { return c == ctrl_t::kDeleted; }
+inline bool IsEmptyOrDeleted(ctrl_t c) { return c < ctrl_t::kSentinel; }
 
 #if ABSL_INTERNAL_RAW_HASH_SET_HAVE_SSE2
 
@@ -351,24 +360,24 @@ struct GroupSse2Impl {
   // Returns a bitmask representing the positions of empty slots.
   BitMask<uint32_t, kWidth> MatchEmpty() const {
 #if ABSL_INTERNAL_RAW_HASH_SET_HAVE_SSSE3
-    // This only works because kEmpty is -128.
+    // This only works because ctrl_t::kEmpty is -128.
     return BitMask<uint32_t, kWidth>(
         _mm_movemask_epi8(_mm_sign_epi8(ctrl, ctrl)));
 #else
-    return Match(static_cast<h2_t>(kEmpty));
+    return Match(static_cast<h2_t>(ctrl_t::kEmpty));
 #endif
   }
 
   // Returns a bitmask representing the positions of empty or deleted slots.
   BitMask<uint32_t, kWidth> MatchEmptyOrDeleted() const {
-    auto special = _mm_set1_epi8(kSentinel);
+    auto special = _mm_set1_epi8(static_cast<int8_t>(ctrl_t::kSentinel));
     return BitMask<uint32_t, kWidth>(
         _mm_movemask_epi8(_mm_cmpgt_epi8_fixed(special, ctrl)));
   }
 
   // Returns the number of trailing empty or deleted elements in the group.
   uint32_t CountLeadingEmptyOrDeleted() const {
-    auto special = _mm_set1_epi8(kSentinel);
+    auto special = _mm_set1_epi8(static_cast<int8_t>(ctrl_t::kSentinel));
     return TrailingZeros(static_cast<uint32_t>(
         _mm_movemask_epi8(_mm_cmpgt_epi8_fixed(special, ctrl)) + 1));
   }
@@ -403,7 +412,7 @@ struct GroupPortableImpl {
   //
   // Caveat: there are false positives but:
   // - they only occur if there is a real match
-  // - they never occur on kEmpty, kDeleted, kSentinel
+  // - they never occur on ctrl_t::kEmpty, ctrl_t::kDeleted, ctrl_t::kSentinel
   // - they will be handled gracefully by subsequent checks in code
   //
   // Example:
@@ -459,8 +468,8 @@ inline bool IsValidCapacity(size_t n) { return ((n + 1) & n) == 0 && n > 0; }
 
 // PRECONDITION:
 //   IsValidCapacity(capacity)
-//   ctrl[capacity] == kSentinel
-//   ctrl[i] != kSentinel for all i < capacity
+//   ctrl[capacity] == ctrl_t::kSentinel
+//   ctrl[i] != ctrl_t::kSentinel for all i < capacity
 // Applies mapping for every byte in ctrl:
 //   DELETED -> EMPTY
 //   EMPTY -> EMPTY
@@ -545,27 +554,28 @@ struct FindInfo {
 //   This is important to make 1 a valid capacity.
 //
 // - In small mode only the first `capacity()` control bytes after the
-//   sentinel are valid. The rest contain dummy kEmpty values that do not
+//   sentinel are valid. The rest contain dummy ctrl_t::kEmpty values that do not
 //   represent a real slot. This is important to take into account on
 //   find_first_non_full(), where we never try ShouldInsertBackwards() for
 //   small tables.
 inline bool is_small(size_t capacity) { return capacity < Group::kWidth - 1; }
 
-inline probe_seq<Group::kWidth> probe(ctrl_t* ctrl, size_t hash,
+inline probe_seq<Group::kWidth> probe(const ctrl_t* ctrl, size_t hash,
                                       size_t capacity) {
   return probe_seq<Group::kWidth>(H1(hash, ctrl), capacity);
 }
 
 // Probes the raw_hash_set with the probe sequence for hash and returns the
 // pointer to the first empty or deleted slot.
-// NOTE: this function must work with tables having both kEmpty and kDelete
-// in one group. Such tables appears during drop_deletes_without_resize.
+// NOTE: this function must work with tables having both ctrl_t::kEmpty and
+// ctrl_t::kDeleted in one group. Such tables appears during
+// drop_deletes_without_resize.
 //
 // This function is very useful when insertions happen and:
 // - the input is already a set
 // - there are enough slots
 // - the element with the hash is not in the table
-inline FindInfo find_first_non_full(ctrl_t* ctrl, size_t hash,
+inline FindInfo find_first_non_full(const ctrl_t* ctrl, size_t hash,
                                     size_t capacity) {
   auto seq = probe(ctrl, hash, capacity);
   while (true) {
@@ -588,11 +598,12 @@ inline FindInfo find_first_non_full(ctrl_t* ctrl, size_t hash,
   }
 }
 
-// Reset all ctrl bytes back to kEmpty, except the sentinel.
+// Reset all ctrl bytes back to ctrl_t::kEmpty, except the sentinel.
 inline void ResetCtrl(size_t capacity, ctrl_t* ctrl, const void* slot,
                       size_t slot_size) {
-  std::memset(ctrl, kEmpty, capacity + 1 + NumClonedBytes());
-  ctrl[capacity] = kSentinel;
+  std::memset(ctrl, static_cast<int8_t>(ctrl_t::kEmpty),
+              capacity + 1 + NumClonedBytes());
+  ctrl[capacity] = ctrl_t::kSentinel;
   SanitizerPoisonMemoryRegion(slot, slot_size * capacity);
 }
 
@@ -613,6 +624,11 @@ inline void SetCtrl(size_t i, ctrl_t h, size_t capacity, ctrl_t* ctrl,
   ctrl[((i - NumClonedBytes()) & capacity) + (NumClonedBytes() & capacity)] = h;
 }
 
+inline void SetCtrl(size_t i, h2_t h, size_t capacity, ctrl_t* ctrl,
+                    const void* slot, size_t slot_size) {
+  SetCtrl(i, static_cast<ctrl_t>(h), capacity, ctrl, slot, slot_size);
+}
+
 // Policy: a policy defines how to perform different operations on
 // the slots of the hashtable (see hash_policy_traits.h for the full interface
 // of policy).
@@ -780,7 +796,7 @@ class raw_hash_set {
         ctrl_ += shift;
         slot_ += shift;
       }
-      if (ABSL_PREDICT_FALSE(*ctrl_ == kSentinel)) ctrl_ = nullptr;
+      if (ABSL_PREDICT_FALSE(*ctrl_ == ctrl_t::kSentinel)) ctrl_ = nullptr;
     }
 
     ctrl_t* ctrl_ = nullptr;
@@ -1575,8 +1591,8 @@ class raw_hash_set {
         static_cast<size_t>(empty_after.TrailingZeros() +
                             empty_before.LeadingZeros()) < Group::kWidth;
 
-    SetCtrl(index, was_never_full ? kEmpty : kDeleted, capacity_, ctrl_, slots_,
-            sizeof(slot_type));
+    SetCtrl(index, was_never_full ? ctrl_t::kEmpty : ctrl_t::kDeleted,
+            capacity_, ctrl_, slots_, sizeof(slot_type));
     growth_left() += was_never_full;
     infoz().RecordErase();
   }
@@ -1706,7 +1722,7 @@ class raw_hash_set {
         // right time.
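Worth noting in the header changes above: `H2()` now returns `h2_t` rather than `ctrl_t`, so calls like `SetCtrl(i, H2(hash), ...)` would otherwise need a cast at every call site; the new forwarding overload absorbs it. A standalone sketch of the overload pair (simplified signatures and illustrative names; the real functions also mirror the byte into the cloned region and update sanitizer state):

#include <cstddef>
#include <cstdint>

enum class ctrl_t : int8_t { kEmpty = -128, kDeleted = -2, kSentinel = -1 };
using h2_t = uint8_t;

// ctrl_t overload: writes the control byte (cloned-byte logic omitted here).
void SetCtrl(std::size_t i, ctrl_t h, ctrl_t* ctrl) { ctrl[i] = h; }

// h2_t overload: forwards with an explicit cast, mirroring the diff.
void SetCtrl(std::size_t i, h2_t h, ctrl_t* ctrl) {
  SetCtrl(i, static_cast<ctrl_t>(h), ctrl);
}

int main() {
  ctrl_t ctrl[4] = {ctrl_t::kEmpty, ctrl_t::kEmpty, ctrl_t::kEmpty,
                    ctrl_t::kEmpty};
  h2_t h = 0x3a;                       // a 7-bit value, as H2() produces
  SetCtrl(1, h, ctrl);                 // selects the h2_t overload
  SetCtrl(2, ctrl_t::kDeleted, ctrl);  // selects the ctrl_t overload
  return ctrl[1] == static_cast<ctrl_t>(0x3a) ? 0 : 1;
}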
         SetCtrl(new_i, H2(hash), capacity_, ctrl_, slots_, sizeof(slot_type));
         PolicyTraits::transfer(&alloc_ref(), slots_ + new_i, slots_ + i);
-        SetCtrl(i, kEmpty, capacity_, ctrl_, slots_, sizeof(slot_type));
+        SetCtrl(i, ctrl_t::kEmpty, capacity_, ctrl_, slots_, sizeof(slot_type));
       } else {
         assert(IsDeleted(ctrl_[new_i]));
         SetCtrl(new_i, H2(hash), capacity_, ctrl_, slots_, sizeof(slot_type));
diff --git a/absl/container/internal/raw_hash_set_benchmark.cc b/absl/container/internal/raw_hash_set_benchmark.cc
index 977ff4c0..c886d3ad 100644
--- a/absl/container/internal/raw_hash_set_benchmark.cc
+++ b/absl/container/internal/raw_hash_set_benchmark.cc
@@ -315,9 +315,17 @@ void BM_ReserveStringTable(benchmark::State& state) {
 }
 BENCHMARK(BM_ReserveStringTable)->Range(128, 4096);
 
+// Like std::iota, except that ctrl_t doesn't support operator++.
+template <typename CtrlIter>
+void Iota(CtrlIter begin, CtrlIter end, int value) {
+  for (; begin != end; ++begin, ++value) {
+    *begin = static_cast<ctrl_t>(value);
+  }
+}
+
 void BM_Group_Match(benchmark::State& state) {
   std::array<ctrl_t, Group::kWidth> group;
-  std::iota(group.begin(), group.end(), -4);
+  Iota(group.begin(), group.end(), -4);
   Group g{group.data()};
   h2_t h = 1;
   for (auto _ : state) {
@@ -329,7 +337,7 @@ BENCHMARK(BM_Group_Match);
 
 void BM_Group_MatchEmpty(benchmark::State& state) {
   std::array<ctrl_t, Group::kWidth> group;
-  std::iota(group.begin(), group.end(), -4);
+  Iota(group.begin(), group.end(), -4);
   Group g{group.data()};
   for (auto _ : state) ::benchmark::DoNotOptimize(g.MatchEmpty());
 }
@@ -337,7 +345,7 @@ BENCHMARK(BM_Group_MatchEmpty);
 
 void BM_Group_MatchEmptyOrDeleted(benchmark::State& state) {
   std::array<ctrl_t, Group::kWidth> group;
-  std::iota(group.begin(), group.end(), -4);
+  Iota(group.begin(), group.end(), -4);
   Group g{group.data()};
   for (auto _ : state) ::benchmark::DoNotOptimize(g.MatchEmptyOrDeleted());
 }
@@ -345,7 +353,7 @@ BENCHMARK(BM_Group_MatchEmptyOrDeleted);
 
 void BM_Group_CountLeadingEmptyOrDeleted(benchmark::State& state) {
   std::array<ctrl_t, Group::kWidth> group;
-  std::iota(group.begin(), group.end(), -2);
+  Iota(group.begin(), group.end(), -2);
   Group g{group.data()};
   for (auto _ : state)
     ::benchmark::DoNotOptimize(g.CountLeadingEmptyOrDeleted());
 }
@@ -354,7 +362,7 @@ BENCHMARK(BM_Group_CountLeadingEmptyOrDeleted);
 
 void BM_Group_MatchFirstEmptyOrDeleted(benchmark::State& state) {
   std::array<ctrl_t, Group::kWidth> group;
-  std::iota(group.begin(), group.end(), -2);
+  Iota(group.begin(), group.end(), -2);
   Group g{group.data()};
   for (auto _ : state) ::benchmark::DoNotOptimize(*g.MatchEmptyOrDeleted());
 }
@@ -363,8 +371,11 @@ BENCHMARK(BM_Group_MatchFirstEmptyOrDeleted);
 
 void BM_DropDeletes(benchmark::State& state) {
   constexpr size_t capacity = (1 << 20) - 1;
   std::vector<ctrl_t> ctrl(capacity + 1 + Group::kWidth);
-  ctrl[capacity] = kSentinel;
-  std::vector<ctrl_t> pattern = {kEmpty, 2, kDeleted, 2, kEmpty, 1, kDeleted};
+  ctrl[capacity] = ctrl_t::kSentinel;
+  std::vector<ctrl_t> pattern = {ctrl_t::kEmpty,   static_cast<ctrl_t>(2),
+                                 ctrl_t::kDeleted, static_cast<ctrl_t>(2),
+                                 ctrl_t::kEmpty,   static_cast<ctrl_t>(1),
+                                 ctrl_t::kDeleted};
   for (size_t i = 0; i != capacity; ++i) {
     ctrl[i] = pattern[i % pattern.size()];
   }
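The benchmark's new `Iota` helper exists because `std::iota` requires `operator++` on the value type, which a scoped enum does not provide. A self-contained sketch of the same workaround (the `main` is illustrative demo code):

#include <array>
#include <cstdint>

enum class ctrl_t : int8_t { kEmpty = -128, kDeleted = -2, kSentinel = -1 };

// Like std::iota, but casts each value since ctrl_t has no operator++.
template <typename CtrlIter>
void Iota(CtrlIter begin, CtrlIter end, int value) {
  for (; begin != end; ++begin, ++value) {
    *begin = static_cast<ctrl_t>(value);
  }
}

int main() {
  std::array<ctrl_t, 8> group;
  Iota(group.begin(), group.end(), -4);  // -4, -3, ..., 3 as ctrl_t values
  // std::iota(group.begin(), group.end(), static_cast<ctrl_t>(-4));
  //   would fail to instantiate: no ++ for a scoped enum.
  return group[4] == static_cast<ctrl_t>(0) ? 0 : 1;
}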
diff --git a/absl/container/internal/raw_hash_set_test.cc b/absl/container/internal/raw_hash_set_test.cc
index a191bff9..4fb31fad 100644
--- a/absl/container/internal/raw_hash_set_test.cc
+++ b/absl/container/internal/raw_hash_set_test.cc
@@ -58,6 +58,9 @@ using ::testing::Lt;
 using ::testing::Pair;
 using ::testing::UnorderedElementsAre;
 
+// Convenience function to static cast to ctrl_t.
+ctrl_t CtrlT(int i) { return static_cast<ctrl_t>(i); }
+
 TEST(Util, NormalizeCapacity) {
   EXPECT_EQ(1, NormalizeCapacity(0));
   EXPECT_EQ(1, NormalizeCapacity(1));
@@ -170,15 +173,19 @@ TEST(Group, EmptyGroup) {
 
 TEST(Group, Match) {
   if (Group::kWidth == 16) {
-    ctrl_t group[] = {kEmpty, 1, kDeleted, 3, kEmpty, 5, kSentinel, 7,
-                      7,      5, 3,        1, 1,      1, 1,         1};
+    ctrl_t group[] = {ctrl_t::kEmpty, CtrlT(1), ctrl_t::kDeleted, CtrlT(3),
+                      ctrl_t::kEmpty, CtrlT(5), ctrl_t::kSentinel, CtrlT(7),
+                      CtrlT(7), CtrlT(5), CtrlT(3), CtrlT(1),
+                      CtrlT(1), CtrlT(1), CtrlT(1), CtrlT(1)};
     EXPECT_THAT(Group{group}.Match(0), ElementsAre());
     EXPECT_THAT(Group{group}.Match(1), ElementsAre(1, 11, 12, 13, 14, 15));
     EXPECT_THAT(Group{group}.Match(3), ElementsAre(3, 10));
     EXPECT_THAT(Group{group}.Match(5), ElementsAre(5, 9));
     EXPECT_THAT(Group{group}.Match(7), ElementsAre(7, 8));
   } else if (Group::kWidth == 8) {
-    ctrl_t group[] = {kEmpty, 1, 2, kDeleted, 2, 1, kSentinel, 1};
+    ctrl_t group[] = {ctrl_t::kEmpty,   CtrlT(1), CtrlT(2),
+                      ctrl_t::kDeleted, CtrlT(2), CtrlT(1),
+                      ctrl_t::kSentinel, CtrlT(1)};
     EXPECT_THAT(Group{group}.Match(0), ElementsAre());
     EXPECT_THAT(Group{group}.Match(1), ElementsAre(1, 5, 7));
     EXPECT_THAT(Group{group}.Match(2), ElementsAre(2, 4));
@@ -189,11 +196,15 @@ TEST(Group, Match) {
 
 TEST(Group, MatchEmpty) {
   if (Group::kWidth == 16) {
-    ctrl_t group[] = {kEmpty, 1, kDeleted, 3, kEmpty, 5, kSentinel, 7,
-                      7,      5, 3,        1, 1,      1, 1,         1};
+    ctrl_t group[] = {ctrl_t::kEmpty, CtrlT(1), ctrl_t::kDeleted, CtrlT(3),
+                      ctrl_t::kEmpty, CtrlT(5), ctrl_t::kSentinel, CtrlT(7),
+                      CtrlT(7), CtrlT(5), CtrlT(3), CtrlT(1),
+                      CtrlT(1), CtrlT(1), CtrlT(1), CtrlT(1)};
     EXPECT_THAT(Group{group}.MatchEmpty(), ElementsAre(0, 4));
   } else if (Group::kWidth == 8) {
-    ctrl_t group[] = {kEmpty, 1, 2, kDeleted, 2, 1, kSentinel, 1};
+    ctrl_t group[] = {ctrl_t::kEmpty,   CtrlT(1), CtrlT(2),
+                      ctrl_t::kDeleted, CtrlT(2), CtrlT(1),
+                      ctrl_t::kSentinel, CtrlT(1)};
     EXPECT_THAT(Group{group}.MatchEmpty(), ElementsAre(0));
   } else {
     FAIL() << "No test coverage for Group::kWidth==" << Group::kWidth;
@@ -202,11 +213,15 @@ TEST(Group, MatchEmpty) {
 
 TEST(Group, MatchEmptyOrDeleted) {
   if (Group::kWidth == 16) {
-    ctrl_t group[] = {kEmpty, 1, kDeleted, 3, kEmpty, 5, kSentinel, 7,
-                      7,      5, 3,        1, 1,      1, 1,         1};
+    ctrl_t group[] = {ctrl_t::kEmpty, CtrlT(1), ctrl_t::kDeleted, CtrlT(3),
+                      ctrl_t::kEmpty, CtrlT(5), ctrl_t::kSentinel, CtrlT(7),
+                      CtrlT(7), CtrlT(5), CtrlT(3), CtrlT(1),
+                      CtrlT(1), CtrlT(1), CtrlT(1), CtrlT(1)};
     EXPECT_THAT(Group{group}.MatchEmptyOrDeleted(), ElementsAre(0, 2, 4));
   } else if (Group::kWidth == 8) {
-    ctrl_t group[] = {kEmpty, 1, 2, kDeleted, 2, 1, kSentinel, 1};
+    ctrl_t group[] = {ctrl_t::kEmpty,   CtrlT(1), CtrlT(2),
+                      ctrl_t::kDeleted, CtrlT(2), CtrlT(1),
+                      ctrl_t::kSentinel, CtrlT(1)};
    EXPECT_THAT(Group{group}.MatchEmptyOrDeleted(), ElementsAre(0, 3));
   } else {
     FAIL() << "No test coverage for Group::kWidth==" << Group::kWidth;
@@ -217,28 +232,32 @@ TEST(Batch, DropDeletes) {
   constexpr size_t kCapacity = 63;
   constexpr size_t kGroupWidth = container_internal::Group::kWidth;
   std::vector<ctrl_t> ctrl(kCapacity + 1 + kGroupWidth);
-  ctrl[kCapacity] = kSentinel;
-  std::vector<ctrl_t> pattern = {kEmpty, 2, kDeleted, 2, kEmpty, 1, kDeleted};
+  ctrl[kCapacity] = ctrl_t::kSentinel;
+  std::vector<ctrl_t> pattern = {
+      ctrl_t::kEmpty, CtrlT(2), ctrl_t::kDeleted, CtrlT(2),
+      ctrl_t::kEmpty, CtrlT(1), ctrl_t::kDeleted};
   for (size_t i = 0; i != kCapacity; ++i) {
     ctrl[i] = pattern[i % pattern.size()];
     if (i < kGroupWidth - 1)
       ctrl[i + kCapacity + 1] = pattern[i % pattern.size()];
   }
   ConvertDeletedToEmptyAndFullToDeleted(ctrl.data(), kCapacity);
-  ASSERT_EQ(ctrl[kCapacity], kSentinel);
+  ASSERT_EQ(ctrl[kCapacity], ctrl_t::kSentinel);
   for (size_t i = 0; i < kCapacity + kGroupWidth; ++i) {
     ctrl_t expected = pattern[i % (kCapacity + 1) % pattern.size()];
-    if (i == kCapacity) expected = kSentinel;
-    if (expected == kDeleted) expected = kEmpty;
-    if (IsFull(expected)) expected = kDeleted;
+    if (i == kCapacity) expected = ctrl_t::kSentinel;
+    if (expected == ctrl_t::kDeleted) expected = ctrl_t::kEmpty;
+    if (IsFull(expected)) expected = ctrl_t::kDeleted;
     EXPECT_EQ(ctrl[i], expected)
-        << i << " " << int{pattern[i % pattern.size()]};
+        << i << " " << static_cast<int>(pattern[i % pattern.size()]);
   }
 }
 
 TEST(Group, CountLeadingEmptyOrDeleted) {
-  const std::vector<ctrl_t> empty_examples = {kEmpty, kDeleted};
-  const std::vector<ctrl_t> full_examples = {0, 1, 2, 3, 5, 9, 127, kSentinel};
+  const std::vector<ctrl_t> empty_examples = {ctrl_t::kEmpty, ctrl_t::kDeleted};
+  const std::vector<ctrl_t> full_examples = {
+      CtrlT(0), CtrlT(1), CtrlT(2), CtrlT(3),
+      CtrlT(5), CtrlT(9), CtrlT(127), ctrl_t::kSentinel};
 
   for (ctrl_t empty : empty_examples) {
     std::vector<ctrl_t> e(Group::kWidth, empty);
@@ -871,7 +890,7 @@ TEST(Table, RehashWithNoResize) {
   const size_t capacity = t.capacity();
 
   // Remove elements from all groups except the first and the last one.
-  // All elements removed from full groups will be marked as kDeleted.
+  // All elements removed from full groups will be marked as ctrl_t::kDeleted.
   const size_t erase_begin = Group::kWidth / 2;
   const size_t erase_end = (t.size() / Group::kWidth - 1) * Group::kWidth;
   for (size_t i = erase_begin; i < erase_end; ++i) {
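Finally, the comment "This only works because ctrl_t::kEmpty is -128" in the header diff refers to the SSSE3 trick in MatchEmpty(): `_mm_sign_epi8(c, c)` computes `c < 0 ? -c : c` per byte, and negating -128 wraps back to -128 in eight bits, so only kEmpty bytes keep their sign bit for the following movemask. A scalar model of the trick (a sketch under the usual two's-complement wrapping assumption, not the real SIMD code):

#include <cstdint>
#include <cstdio>

// Scalar model of psignb(c, c): c < 0 ? -c : c per byte. For every control
// byte except kEmpty (-128), this clears the sign bit; -(-128) wraps back to
// -128 in 8 bits, so only kEmpty survives with its MSB set.
int8_t psignb_model(int8_t c) {
  return c < 0 ? static_cast<int8_t>(-c) : c;  // wraps for c == -128
}

int main() {
  const int8_t kEmpty = -128, kDeleted = -2, kSentinel = -1, kFull = 0x3a;
  std::printf("kEmpty    -> MSB %d\n", (psignb_model(kEmpty) & 0x80) != 0);
  std::printf("kDeleted  -> MSB %d\n", (psignb_model(kDeleted) & 0x80) != 0);
  std::printf("kSentinel -> MSB %d\n", (psignb_model(kSentinel) & 0x80) != 0);
  std::printf("full byte -> MSB %d\n", (psignb_model(kFull) & 0x80) != 0);
  return 0;
}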