// Copyright 2018 The Abseil Authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//      https://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

#include "absl/container/internal/raw_hash_set.h"

#include <algorithm>
#include <array>
#include <cmath>
#include <cstdint>
#include <deque>
#include <numeric>
#include <random>
#include <string>
#include <tuple>
#include <utility>
#include <vector>

#include "absl/base/internal/raw_logging.h"
#include "absl/container/internal/hash_function_defaults.h"
#include "absl/strings/str_format.h"
#include "benchmark/benchmark.h"

namespace absl {
ABSL_NAMESPACE_BEGIN
namespace container_internal {

struct RawHashSetTestOnlyAccess {
  template <typename C>
  static auto GetSlots(const C& c) -> decltype(c.slots_) {
    return c.slots_;
  }
};

namespace {

struct IntPolicy {
  using slot_type = int64_t;
  using key_type = int64_t;
  using init_type = int64_t;

  static void construct(void*, int64_t* slot, int64_t v) { *slot = v; }
  static void destroy(void*, int64_t*) {}
  static void transfer(void*, int64_t* new_slot, int64_t* old_slot) {
    *new_slot = *old_slot;
  }

  static int64_t& element(slot_type* slot) { return *slot; }

  template <class F>
  static auto apply(F&& f, int64_t x) -> decltype(std::forward<F>(f)(x, x)) {
    return std::forward<F>(f)(x, x);
  }
};

class StringPolicy {
  template <class F, class K, class V,
            class = typename std::enable_if<
                std::is_convertible<const K&, absl::string_view>::value>::type>
  decltype(std::declval<F>()(
      std::declval<const absl::string_view&>(), std::piecewise_construct,
      std::declval<std::tuple<K>>(),
      std::declval<V>())) static apply_impl(F&& f,
                                            std::pair<std::tuple<K>, V> p) {
    const absl::string_view& key = std::get<0>(p.first);
    return std::forward<F>(f)(key, std::piecewise_construct, std::move(p.first),
                              std::move(p.second));
  }

 public:
  struct slot_type {
    struct ctor {};

    template <class... Ts>
    slot_type(ctor, Ts&&... ts) : pair(std::forward<Ts>(ts)...) {}

    std::pair<std::string, std::string> pair;
  };

  using key_type = std::string;
  using init_type = std::pair<std::string, std::string>;

  template <class allocator_type, class... Args>
  static void construct(allocator_type* alloc, slot_type* slot, Args... args) {
    std::allocator_traits<allocator_type>::construct(
        *alloc, slot, typename slot_type::ctor(), std::forward<Args>(args)...);
  }

  template <class allocator_type>
  static void destroy(allocator_type* alloc, slot_type* slot) {
    std::allocator_traits<allocator_type>::destroy(*alloc, slot);
  }

  template <class allocator_type>
  static void transfer(allocator_type* alloc, slot_type* new_slot,
                       slot_type* old_slot) {
    construct(alloc, new_slot, std::move(old_slot->pair));
    destroy(alloc, old_slot);
  }

  static std::pair<std::string, std::string>& element(slot_type* slot) {
    return slot->pair;
  }

  template <class F, class... Args>
  static auto apply(F&& f, Args&&... args)
      -> decltype(apply_impl(std::forward<F>(f),
                             PairArgs(std::forward<Args>(args)...))) {
    return apply_impl(std::forward<F>(f),
                      PairArgs(std::forward<Args>(args)...));
  }
};

struct StringHash : container_internal::hash_default_hash<std::string> {
  using is_transparent = void;
};
struct StringEq : std::equal_to<absl::string_view> {
  using is_transparent = void;
};

struct StringTable
    : raw_hash_set<StringPolicy, StringHash, StringEq, std::allocator<int>> {
  using Base = typename StringTable::raw_hash_set;
  StringTable() {}
  using Base::Base;
};

struct IntTable
    : raw_hash_set<IntPolicy, container_internal::hash_default_hash<int64_t>,
                   std::equal_to<int64_t>, std::allocator<int64_t>> {
  using Base = typename IntTable::raw_hash_set;
  IntTable() {}
  using Base::Base;
};

struct string_generator {
  template <class RNG>
  std::string operator()(RNG& rng) const {
    std::string res;
    res.resize(12);  // NB: the generated length is fixed; `size` is unused.
    std::uniform_int_distribution<uint32_t> printable_ascii(0x20, 0x7E);
    std::generate(res.begin(), res.end(),
                  [&] { return printable_ascii(rng); });
    return res;
  }

  size_t size;
};

// Model a cache in steady state.
//
// On a table of size N, keep deleting the LRU entry and add a random one.
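//
// This steady-state churn (one erase plus one insert per iteration at a
// fixed size) is what stresses tombstone handling: without reclamation of
// deleted slots, probe sequences keep growing as the table ages.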
void BM_CacheInSteadyState(benchmark::State& state) {
  std::random_device rd;
  std::mt19937 rng(rd());
  string_generator gen{12};
  StringTable t;
  std::deque<std::string> keys;
  while (t.size() < state.range(0)) {
    auto x = t.emplace(gen(rng), gen(rng));
    if (x.second) keys.push_back(x.first->first);
  }
  ABSL_RAW_CHECK(state.range(0) >= 10, "");
  while (state.KeepRunning()) {
    // Some cache hits.
    std::deque<std::string>::const_iterator it;
    for (int i = 0; i != 90; ++i) {
      if (i % 10 == 0) it = keys.end();
      ::benchmark::DoNotOptimize(t.find(*--it));
    }
    // Some cache misses.
    for (int i = 0; i != 10; ++i) ::benchmark::DoNotOptimize(t.find(gen(rng)));
    ABSL_RAW_CHECK(t.erase(keys.front()), keys.front().c_str());
    keys.pop_front();
    while (true) {
      auto x = t.emplace(gen(rng), gen(rng));
      if (x.second) {
        keys.push_back(x.first->first);
        break;
      }
    }
  }
  state.SetItemsProcessed(state.iterations());
  state.SetLabel(absl::StrFormat("load_factor=%.2f", t.load_factor()));
}

template <typename Benchmark>
void CacheInSteadyStateArgs(Benchmark* bm) {
  // The default.
  const float max_load_factor = 0.875;
  // When the cache is at the steady state, the probe sequence will equal
  // capacity if there is no reclamation of deleted slots. Pick a number large
  // enough to make the benchmark slow for that case.
  const size_t capacity = 1 << 10;

  // Check N data points to cover load factors in [0.4, 0.8).
  const size_t kNumPoints = 10;
  for (size_t i = 0; i != kNumPoints; ++i)
    bm->Arg(std::ceil(
        capacity * (max_load_factor + i * max_load_factor / kNumPoints) / 2));
}
BENCHMARK(BM_CacheInSteadyState)->Apply(CacheInSteadyStateArgs);

void BM_EndComparison(benchmark::State& state) {
  std::random_device rd;
  std::mt19937 rng(rd());
  string_generator gen{12};
  StringTable t;
  while (t.size() < state.range(0)) {
    t.emplace(gen(rng), gen(rng));
  }

  for (auto _ : state) {
    for (auto it = t.begin(); it != t.end(); ++it) {
      benchmark::DoNotOptimize(it);
      benchmark::DoNotOptimize(t);
      benchmark::DoNotOptimize(it != t.end());
    }
  }
}
BENCHMARK(BM_EndComparison)->Arg(400);

void BM_CopyCtor(benchmark::State& state) {
  std::random_device rd;
  std::mt19937 rng(rd());
  IntTable t;
  std::uniform_int_distribution<uint64_t> dist(0, ~uint64_t{});
  while (t.size() < state.range(0)) {
    t.emplace(dist(rng));
  }

  for (auto _ : state) {
    IntTable t2 = t;
    benchmark::DoNotOptimize(t2);
  }
}
BENCHMARK(BM_CopyCtor)->Range(128, 4096);

void BM_CopyAssign(benchmark::State& state) {
  std::random_device rd;
  std::mt19937 rng(rd());
  IntTable t;
  std::uniform_int_distribution<uint64_t> dist(0, ~uint64_t{});
  while (t.size() < state.range(0)) {
    t.emplace(dist(rng));
  }

  IntTable t2;
  for (auto _ : state) {
    t2 = t;
    benchmark::DoNotOptimize(t2);
  }
}
BENCHMARK(BM_CopyAssign)->Range(128, 4096);

void BM_NoOpReserveIntTable(benchmark::State& state) {
  IntTable t;
  t.reserve(100000);
  for (auto _ : state) {
    benchmark::DoNotOptimize(t);
    t.reserve(100000);
  }
}
BENCHMARK(BM_NoOpReserveIntTable);

void BM_NoOpReserveStringTable(benchmark::State& state) {
  StringTable t;
  t.reserve(100000);
  for (auto _ : state) {
    benchmark::DoNotOptimize(t);
    t.reserve(100000);
  }
}
BENCHMARK(BM_NoOpReserveStringTable);

void BM_ReserveIntTable(benchmark::State& state) {
  int reserve_size = state.range(0);
  for (auto _ : state) {
    state.PauseTiming();
    IntTable t;
    state.ResumeTiming();
    benchmark::DoNotOptimize(t);
    t.reserve(reserve_size);
  }
}
BENCHMARK(BM_ReserveIntTable)->Range(128, 4096);

void BM_ReserveStringTable(benchmark::State& state) {
  int reserve_size = state.range(0);
  for (auto _ : state) {
    state.PauseTiming();
    StringTable t;
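    // Table construction happens with timing paused; only the reserve() call
    // below is measured.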
    state.ResumeTiming();
    benchmark::DoNotOptimize(t);
    t.reserve(reserve_size);
  }
}
BENCHMARK(BM_ReserveStringTable)->Range(128, 4096);

void BM_Group_Match(benchmark::State& state) {
  std::array<ctrl_t, Group::kWidth> group;
  std::iota(group.begin(), group.end(), -4);
  Group g{group.data()};
  h2_t h = 1;
  for (auto _ : state) {
    ::benchmark::DoNotOptimize(h);
    ::benchmark::DoNotOptimize(g.Match(h));
  }
}
BENCHMARK(BM_Group_Match);

void BM_Group_MatchEmpty(benchmark::State& state) {
  std::array<ctrl_t, Group::kWidth> group;
  std::iota(group.begin(), group.end(), -4);
  Group g{group.data()};
  for (auto _ : state) ::benchmark::DoNotOptimize(g.MatchEmpty());
}
BENCHMARK(BM_Group_MatchEmpty);

void BM_Group_MatchEmptyOrDeleted(benchmark::State& state) {
  std::array<ctrl_t, Group::kWidth> group;
  std::iota(group.begin(), group.end(), -4);
  Group g{group.data()};
  for (auto _ : state) ::benchmark::DoNotOptimize(g.MatchEmptyOrDeleted());
}
BENCHMARK(BM_Group_MatchEmptyOrDeleted);

void BM_Group_CountLeadingEmptyOrDeleted(benchmark::State& state) {
  std::array<ctrl_t, Group::kWidth> group;
  std::iota(group.begin(), group.end(), -2);
  Group g{group.data()};
  for (auto _ : state)
    ::benchmark::DoNotOptimize(g.CountLeadingEmptyOrDeleted());
}
BENCHMARK(BM_Group_CountLeadingEmptyOrDeleted);

void BM_Group_MatchFirstEmptyOrDeleted(benchmark::State& state) {
  std::array<ctrl_t, Group::kWidth> group;
  std::iota(group.begin(), group.end(), -2);
  Group g{group.data()};
  for (auto _ : state) ::benchmark::DoNotOptimize(*g.MatchEmptyOrDeleted());
}
BENCHMARK(BM_Group_MatchFirstEmptyOrDeleted);

void BM_DropDeletes(benchmark::State& state) {
  constexpr size_t capacity = (1 << 20) - 1;
  std::vector<ctrl_t> ctrl(capacity + 1 + Group::kWidth);
  ctrl[capacity] = kSentinel;
  // Lay down a repeating mix of empty, full, and deleted control bytes.
  std::vector<ctrl_t> pattern = {kEmpty, 2, kDeleted, 2, kEmpty, 1, kDeleted};
  for (size_t i = 0; i != capacity; ++i) {
    ctrl[i] = pattern[i % pattern.size()];
  }
  while (state.KeepRunning()) {
    state.PauseTiming();
    std::vector<ctrl_t> ctrl_copy = ctrl;
    state.ResumeTiming();
    ConvertDeletedToEmptyAndFullToDeleted(ctrl_copy.data(), capacity);
    ::benchmark::DoNotOptimize(ctrl_copy[capacity]);
  }
}
BENCHMARK(BM_DropDeletes);

}  // namespace
}  // namespace container_internal
ABSL_NAMESPACE_END
}  // namespace absl

// These methods are here to make it easy to examine the assembly for targeted
// parts of the API.
auto CodegenAbslRawHashSetInt64Find(absl::container_internal::IntTable* table,
                                    int64_t key)
    -> decltype(table->find(key)) {
  return table->find(key);
}

bool CodegenAbslRawHashSetInt64FindNeEnd(
    absl::container_internal::IntTable* table, int64_t key) {
  return table->find(key) != table->end();
}

bool CodegenAbslRawHashSetInt64Contains(
    absl::container_internal::IntTable* table, int64_t key) {
  return table->contains(key);
}

void CodegenAbslRawHashSetInt64Iterate(
    absl::container_internal::IntTable* table) {
  for (auto x : *table) benchmark::DoNotOptimize(x);
}

int odr =
    (::benchmark::DoNotOptimize(std::make_tuple(
         &CodegenAbslRawHashSetInt64Find, &CodegenAbslRawHashSetInt64FindNeEnd,
         &CodegenAbslRawHashSetInt64Contains,
         &CodegenAbslRawHashSetInt64Iterate)),
     1);
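
// Note: `odr` above exists only to create a use of the Codegen* functions so
// the toolchain keeps their definitions around for inspection; DoNotOptimize
// hides the references from the optimizer.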