Diffstat (limited to 'absl/strings/cord.cc')
-rw-r--r--  absl/strings/cord.cc  18
1 file changed, 2 insertions(+), 16 deletions(-)
diff --git a/absl/strings/cord.cc b/absl/strings/cord.cc
index 5dad781e..f5aa6e47 100644
--- a/absl/strings/cord.cc
+++ b/absl/strings/cord.cc
@@ -536,24 +536,10 @@ void Cord::InlineRep::AssignSlow(const Cord::InlineRep& src) {
return;
}
- // See b/187581164: unsample cord if already sampled
- // TODO(b/117940323): continuously 'assigned to' cords would reach 100%
- // sampling probability. Imagine a cord x in some cache:
- // cache.SetCord(const Cord& foo) {
- // x = foo;
- // }
- // CordzInfo::MaybeTrackCord does:
- // x.profiled = foo.profiled | x.profiled | random(cordz_mean_interval)
- // Which means it will on the long run converge to 'always samples'
- // The real fix is in CordzMaybeTrackCord, but the below is a low risk
- // forward fix for b/187581164 and similar BT benchmark regressions.
- if (ABSL_PREDICT_FALSE(is_profiled())) {
- cordz_info()->Untrack();
- clear_cordz_info();
- }
-
CordRep* tree = as_tree();
if (CordRep* src_tree = src.tree()) {
+ // Leave any existing `cordz_info` in place, and let MaybeTrackCord()
+ // decide if this cord should be (or remains to be) sampled or not.
data_.set_tree(CordRep::Ref(src_tree));
CordzInfo::MaybeTrackCord(data_, src.data_, method);
} else {
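
Note on the reasoning in the removed comment: each assignment's sampling decision is OR'ed
into the destination's existing `profiled` bit, so the bit can only ever be set, never
cleared. Treating each decision as an independent trial with probability
p = 1/cordz_mean_interval, after n assignments P(profiled) = 1 - (1 - p)^n, which tends to 1.
A minimal standalone sketch of that accumulation follows; `kMeanInterval` and the simulation
itself are hypothetical illustrations of the removed comment's pseudocode, not code from
cord.cc:

  #include <cstdio>
  #include <random>

  // Models the behavior described in the removed comment: every assignment
  // draws a fresh sampling decision and ORs it into the existing bit.
  int main() {
    constexpr int kMeanInterval = 50;  // stand-in for cordz_mean_interval
    std::mt19937 rng(42);
    std::bernoulli_distribution sample(1.0 / kMeanInterval);
    bool profiled = false;
    for (int n = 1; n <= 500; ++n) {
      profiled = profiled || sample(rng);  // bit is sticky: never cleared
      if (n % 100 == 0) {
        std::printf("after %d assignments: profiled=%d\n", n, profiled);
      }
    }
    return 0;
  }

With the comment's formula, a cord assigned to repeatedly (for example, one living in a
long-lived cache) converges to "always sampled", which is the regression the removed
forward fix worked around and the replacement comment now delegates to
CordzInfo::MaybeTrackCord().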