author    Evan Brown <ezb@google.com>    2022-12-08 14:15:12 -0800
committer Copybara-Service <copybara-worker@google.com>    2022-12-08 14:15:46 -0800
commit 522606b7fae37836c138e83f6eec0eabb9947dc0 (patch)
tree   a2db21893f057137686fdf460b2e192c3677d2d5
parent ec583f2df279c86a8e8ba123b8929e92d2ee00f2 (diff)
Fix some ClangTidy warnings in raw_hash_set code.
PiperOrigin-RevId: 493993005
Change-Id: I0705be8678022a9e08a1af9972687b7955593994
-rw-r--r--  absl/container/internal/raw_hash_set.cc       9
-rw-r--r--  absl/container/internal/raw_hash_set.h        7
-rw-r--r--  absl/container/internal/raw_hash_set_test.cc  29
3 files changed, 18 insertions, 27 deletions
diff --git a/absl/container/internal/raw_hash_set.cc b/absl/container/internal/raw_hash_set.cc
index 1beab92f..79220836 100644
--- a/absl/container/internal/raw_hash_set.cc
+++ b/absl/container/internal/raw_hash_set.cc
@@ -16,6 +16,7 @@
#include <atomic>
#include <cstddef>
+#include <cstring>
#include "absl/base/config.h"
@@ -182,10 +183,10 @@ void EraseMetaOnly(CommonFields& c, ctrl_t* it, size_t slot_size) {
// We count how many consecutive non empties we have to the right and to the
// left of `it`. If the sum is >= kWidth then there is at least one probe
// window that might have seen a full group.
- bool was_never_full =
- empty_before && empty_after &&
- static_cast<size_t>(empty_after.TrailingZeros() +
- empty_before.LeadingZeros()) < Group::kWidth;
+ bool was_never_full = empty_before && empty_after &&
+ static_cast<size_t>(empty_after.TrailingZeros()) +
+ empty_before.LeadingZeros() <
+ Group::kWidth;
SetCtrl(c, index, was_never_full ? ctrl_t::kEmpty : ctrl_t::kDeleted,
slot_size);
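The rewritten condition above keeps the probe-window reasoning from the comment while satisfying ClangTidy's arithmetic-conversion check: the trailing-zero count is widened to size_t before the two counts are added and compared against Group::kWidth. A minimal standalone sketch of the same check, using plain counts instead of the library's BitMask type (the names below are illustrative, not Abseil's):

#include <cstddef>

// Illustrative only: `kWidth` stands in for Group::kWidth, and the arguments
// stand in for the counts obtained from the empty-slot bit masks.
constexpr std::size_t kWidth = 16;

// Returns true when no probe window of width kWidth could have seen a full
// group around the erased slot, so it can be marked kEmpty instead of
// kDeleted (a tombstone).
bool WasNeverFull(std::size_t empty_before, std::size_t empty_after) {
  return empty_before != 0 && empty_after != 0 &&
         empty_before + empty_after < kWidth;
}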
diff --git a/absl/container/internal/raw_hash_set.h b/absl/container/internal/raw_hash_set.h
index ddb8f6be..8a33106f 100644
--- a/absl/container/internal/raw_hash_set.h
+++ b/absl/container/internal/raw_hash_set.h
@@ -1300,9 +1300,9 @@ class raw_hash_set {
using pointer = typename raw_hash_set::const_pointer;
using difference_type = typename raw_hash_set::difference_type;
- const_iterator() {}
+ const_iterator() = default;
// Implicit construction from iterator.
- const_iterator(iterator i) : inner_(std::move(i)) {}
+ const_iterator(iterator i) : inner_(std::move(i)) {} // NOLINT
reference operator*() const { return *inner_; }
pointer operator->() const { return inner_.operator->(); }
@@ -1330,6 +1330,8 @@ class raw_hash_set {
using node_type = node_handle<Policy, hash_policy_traits<Policy>, Alloc>;
using insert_return_type = InsertReturnType<iterator, node_type>;
+ // Note: can't use `= default` due to non-default noexcept (causes
+ // problems for some compilers). NOLINTNEXTLINE
raw_hash_set() noexcept(
std::is_nothrow_default_constructible<hasher>::value&&
std::is_nothrow_default_constructible<key_equal>::value&&
@@ -1494,6 +1496,7 @@ class raw_hash_set {
std::is_nothrow_move_assignable<key_equal>::value) {
// TODO(sbenza): We should only use the operations from the noexcept clause
// to make sure we actually adhere to that contract.
+ // NOLINTNEXTLINE: not returning *this for performance.
return move_assign(
std::move(that),
typename AllocTraits::propagate_on_container_move_assignment());
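The comment added to the default constructor records why the ClangTidy suggestion to write `= default` is rejected here: the constructor carries a computed noexcept-specification, and combining that with `= default` has been problematic on some compilers. A minimal sketch of the pattern, with a hypothetical Widget class standing in for raw_hash_set:

#include <type_traits>

// Hypothetical stand-in: the default constructor's noexcept-specification is
// computed from the members, so the body is written out instead of
// `= default`, and the "use = default" ClangTidy finding is suppressed.
template <class Hash, class Eq>
class Widget {
 public:
  Widget() noexcept(
      std::is_nothrow_default_constructible<Hash>::value &&
      std::is_nothrow_default_constructible<Eq>::value) {}  // NOLINT

 private:
  Hash hash_;
  Eq eq_;
};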
diff --git a/absl/container/internal/raw_hash_set_test.cc b/absl/container/internal/raw_hash_set_test.cc
index 74351c09..eb0757b2 100644
--- a/absl/container/internal/raw_hash_set_test.cc
+++ b/absl/container/internal/raw_hash_set_test.cc
@@ -399,7 +399,7 @@ struct StringEq : std::equal_to<absl::string_view> {
struct StringTable
: raw_hash_set<StringPolicy, StringHash, StringEq, std::allocator<int>> {
using Base = typename StringTable::raw_hash_set;
- StringTable() {}
+ StringTable() = default;
using Base::Base;
};
@@ -419,7 +419,7 @@ struct Uint8Table
template <typename T>
struct CustomAlloc : std::allocator<T> {
- CustomAlloc() {}
+ CustomAlloc() = default;
template <typename U>
explicit CustomAlloc(const CustomAlloc<U>& /*other*/) {}
@@ -446,7 +446,7 @@ struct BadFastHash {
struct BadTable : raw_hash_set<IntPolicy, BadFastHash, std::equal_to<int>,
std::allocator<int>> {
using Base = typename BadTable::raw_hash_set;
- BadTable() {}
+ BadTable() = default;
using Base::Base;
};
@@ -1003,7 +1003,7 @@ TEST(Table, ClearBug) {
// We are checking that original and second are close enough to each other
// that they are probably still in the same group. This is not strictly
// guaranteed.
- EXPECT_LT(std::abs(original - second),
+ EXPECT_LT(static_cast<size_t>(std::abs(original - second)),
capacity * sizeof(IntTable::value_type));
}
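The cast in the EXPECT_LT above addresses a signed/unsigned comparison: std::abs yields a signed value while the sizeof-based bound is a size_t. A standalone illustration of the same fix (the helper below is hypothetical, not part of the test):

#include <cstddef>
#include <cstdlib>

// Hypothetical helper: the difference is signed, the bound is unsigned, so
// the non-negative result of std::abs is cast to size_t before comparing.
bool CloseEnough(long original, long second, std::size_t bound) {
  return static_cast<std::size_t>(std::abs(original - second)) < bound;
}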
@@ -1080,19 +1080,6 @@ struct ProbeStats {
// Ratios total_probe_length/size for every tested table.
std::vector<double> single_table_ratios;
- friend ProbeStats operator+(const ProbeStats& a, const ProbeStats& b) {
- ProbeStats res = a;
- res.all_probes_histogram.resize(std::max(res.all_probes_histogram.size(),
- b.all_probes_histogram.size()));
- std::transform(b.all_probes_histogram.begin(), b.all_probes_histogram.end(),
- res.all_probes_histogram.begin(),
- res.all_probes_histogram.begin(), std::plus<size_t>());
- res.single_table_ratios.insert(res.single_table_ratios.end(),
- b.single_table_ratios.begin(),
- b.single_table_ratios.end());
- return res;
- }
-
// Average ratio total_probe_length/size over tables.
double AvgRatio() const {
return std::accumulate(single_table_ratios.begin(),
@@ -1555,7 +1542,7 @@ TEST(Table, CopyConstructWithAlloc) {
struct ExplicitAllocIntTable
: raw_hash_set<IntPolicy, container_internal::hash_default_hash<int64_t>,
std::equal_to<int64_t>, Alloc<int64_t>> {
- ExplicitAllocIntTable() {}
+ ExplicitAllocIntTable() = default;
};
TEST(Table, AllocWithExplicitCtor) {
@@ -1943,7 +1930,7 @@ TEST(Nodes, ExtractInsert) {
EXPECT_FALSE(res.inserted);
EXPECT_THAT(*res.position, Pair(k0, ""));
EXPECT_TRUE(res.node);
- EXPECT_FALSE(node);
+ EXPECT_FALSE(node); // NOLINT(bugprone-use-after-move)
}
TEST(Nodes, HintInsert) {
@@ -1953,7 +1940,7 @@ TEST(Nodes, HintInsert) {
auto it = t.insert(t.begin(), std::move(node));
EXPECT_THAT(t, UnorderedElementsAre(1, 2, 3));
EXPECT_EQ(*it, 1);
- EXPECT_FALSE(node);
+ EXPECT_FALSE(node); // NOLINT(bugprone-use-after-move)
node = t.extract(2);
EXPECT_THAT(t, UnorderedElementsAre(1, 3));
@@ -1963,7 +1950,7 @@ TEST(Nodes, HintInsert) {
it = t.insert(t.begin(), std::move(node));
EXPECT_EQ(*it, 2);
// The node was not emptied by the insert call.
- EXPECT_TRUE(node);
+ EXPECT_TRUE(node); // NOLINT(bugprone-use-after-move)
}
IntTable MakeSimpleTable(size_t size) {
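The NOLINT(bugprone-use-after-move) annotations above cover a pattern ClangTidy cannot see through: after a successful insert of a node handle, the container API guarantees the handle is left empty, so reading it afterwards is deliberate. A standalone illustration with std::set rather than the raw_hash_set test fixtures:

#include <cassert>
#include <set>
#include <utility>

int main() {
  std::set<int> s = {1, 2, 3};
  auto node = s.extract(1);   // `node` now owns the element 1.
  assert(!node.empty());
  s.insert(std::move(node));  // A successful insert empties the handle.
  assert(node.empty());       // NOLINT(bugprone-use-after-move)
  return 0;
}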