 absl/container/btree_test.cc              | 41
 absl/random/discrete_distribution_test.cc |  7
 absl/strings/cord.cc                      | 17
 3 files changed, 62 insertions(+), 3 deletions(-)
diff --git a/absl/container/btree_test.cc b/absl/container/btree_test.cc
index 74337df2..464dabac 100644
--- a/absl/container/btree_test.cc
+++ b/absl/container/btree_test.cc
@@ -2893,6 +2893,47 @@ TEST(Btree, AllocMoveConstructor_DifferentAlloc) {
   EXPECT_EQ(bytes_used2, original_bytes_used);
 }
 
+bool IntCmp(const int a, const int b) { return a < b; }
+
+TEST(Btree, SupportsFunctionPtrComparator) {
+  absl::btree_set<int, decltype(IntCmp) *> set(IntCmp);
+  set.insert({1, 2, 3});
+  EXPECT_THAT(set, ElementsAre(1, 2, 3));
+  EXPECT_TRUE(set.key_comp()(1, 2));
+  EXPECT_TRUE(set.value_comp()(1, 2));
+
+  absl::btree_map<int, int, decltype(IntCmp) *> map(&IntCmp);
+  map[1] = 1;
+  EXPECT_THAT(map, ElementsAre(Pair(1, 1)));
+  EXPECT_TRUE(map.key_comp()(1, 2));
+  // TODO(ezb): support value_comp() in this case and uncomment.
+  // EXPECT_TRUE(map.value_comp()(std::make_pair(1, 1), std::make_pair(2, 2)));
+}
+
+template <typename Compare>
+struct TransparentPassThroughComp {
+  using is_transparent = void;
+
+  // This will fail compilation if we attempt a comparison that Compare does not
+  // support, and the failure will happen inside the function implementation so
+  // it can't be avoided by using SFINAE on this comparator.
+  template <typename T, typename U>
+  bool operator()(const T &lhs, const U &rhs) const {
+    return Compare()(lhs, rhs);
+  }
+};
+
+TEST(Btree,
+     SupportsTransparentComparatorThatDoesNotImplementAllVisibleOperators) {
+  absl::btree_set<MultiKey, TransparentPassThroughComp<MultiKeyComp>> set;
+  set.insert(MultiKey{1, 2});
+  EXPECT_TRUE(set.contains(1));
+}
+
+TEST(Btree, ConstructImplicitlyWithUnadaptedComparator) {
+  absl::btree_set<MultiKey, MultiKeyComp> set = {{}, MultiKeyComp{}};
+}
+
 }  // namespace
 }  // namespace container_internal
 ABSL_NAMESPACE_END
diff --git a/absl/random/discrete_distribution_test.cc b/absl/random/discrete_distribution_test.cc
index 6d007006..415b14cc 100644
--- a/absl/random/discrete_distribution_test.cc
+++ b/absl/random/discrete_distribution_test.cc
@@ -99,6 +99,7 @@ TYPED_TEST(DiscreteDistributionTypeTest, Constructor) {
 }
 
 TEST(DiscreteDistributionTest, InitDiscreteDistribution) {
+  using testing::_;
   using testing::Pair;
 
   {
@@ -111,8 +112,8 @@ TEST(DiscreteDistributionTest, InitDiscreteDistribution) {
     // Each bucket is p=1/3, so bucket 0 will send half it's traffic
     // to bucket 2, while the rest will retain all of their traffic.
     EXPECT_THAT(q, testing::ElementsAre(Pair(0.5, 2),  //
-                                        Pair(1.0, 1),  //
-                                        Pair(1.0, 2)));
+                                        Pair(1.0, _),  //
+                                        Pair(1.0, _)));
   }
 
   {
@@ -135,7 +136,7 @@ TEST(DiscreteDistributionTest, InitDiscreteDistribution) {
 
     EXPECT_THAT(q, testing::ElementsAre(Pair(b0, 3),   //
                                         Pair(b1, 3),   //
-                                        Pair(1.0, 2),  //
+                                        Pair(1.0, _),  //
                                         Pair(b3, 2),   //
                                         Pair(b1, 3)));
   }
diff --git a/absl/strings/cord.cc b/absl/strings/cord.cc
index 238532f9..5dad781e 100644
--- a/absl/strings/cord.cc
+++ b/absl/strings/cord.cc
@@ -535,6 +535,23 @@ void Cord::InlineRep::AssignSlow(const Cord::InlineRep& src) {
     EmplaceTree(CordRep::Ref(src.as_tree()), src.data_, method);
     return;
   }
+
+  // See b/187581164: unsample the cord if it is already sampled.
+  // TODO(b/117940323): continuously 'assigned to' cords would reach 100%
+  // sampling probability. Imagine a cord x in some cache:
+  //   cache.SetCord(const Cord& foo) {
+  //     x = foo;
+  //   }
+  // CordzInfo::MaybeTrackCord does:
+  //   x.profiled = foo.profiled | x.profiled | random(cordz_mean_interval)
+  // which means it will in the long run converge to 'always samples'.
+  // The real fix is in CordzInfo::MaybeTrackCord, but the below is a
+  // low-risk forward fix for b/187581164 and similar BT benchmark regressions.
+  if (ABSL_PREDICT_FALSE(is_profiled())) {
+    cordz_info()->Untrack();
+    clear_cordz_info();
+  }
+
   CordRep* tree = as_tree();
   if (CordRep* src_tree = src.tree()) {
     data_.set_tree(CordRep::Ref(src_tree));
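Note on the btree_test.cc change: the new tests exercise support for a bare function pointer as the comparator type. A minimal standalone sketch of the resulting usage (hypothetical example, not part of the patch; only the public absl::btree_set API is assumed):

// usage_sketch.cc -- hypothetical example, not part of the patch.
#include <iostream>

#include "absl/container/btree_set.h"

// A plain function implementing a strict weak ordering.
bool Descending(int a, int b) { return a > b; }

int main() {
  // The comparator type is a raw function pointer, so a non-null comparator
  // must be passed at construction; a default-constructed container would
  // hold a null pointer and crash on the first comparison.
  absl::btree_set<int, bool (*)(int, int)> set(Descending);
  set.insert({1, 2, 3});
  for (int v : set) std::cout << v << ' ';  // prints: 3 2 1
  std::cout << '\n';
}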
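Note on the discrete_distribution_test.cc change: testing::_ is the GoogleMock wildcard matcher and matches any value. The relaxed expectations cover the alias index of buckets whose acceptance probability is exactly 1.0; the alias method never consults the alias of such a bucket, so its value is an implementation detail the test should not pin down.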
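Note on the cord.cc change: the convergence the TODO describes follows from the OR-accumulation. If every assignment to a long-lived cord independently turns sampling on with probability p and the sampled state is never cleared, the probability that the cord is sampled after n assignments is 1 - (1 - p)^n, which tends to 1. A small sketch of the arithmetic (p here is a made-up rate, not the actual cordz sampling probability):

// convergence_sketch.cc -- illustrative only.
#include <cmath>
#include <cstdio>

int main() {
  const double p = 0.01;  // assumed per-assignment sampling probability
  for (int n : {1, 10, 100, 1000}) {
    // A sticky flag OR-ed with an independent Bernoulli(p) draw on every
    // assignment is set after n assignments with probability 1 - (1 - p)^n.
    std::printf("n = %4d: P(sampled) = %.3f\n", n, 1.0 - std::pow(1.0 - p, n));
  }
}

The Untrack() call added in the patch breaks exactly this accumulation: it clears the destination's sampled state before the assignment re-rolls the sampling decision.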