Diffstat (limited to 'absl/strings/internal/cord_internal.h')
-rw-r--r--  absl/strings/internal/cord_internal.h  28
1 file changed, 15 insertions(+), 13 deletions(-)
diff --git a/absl/strings/internal/cord_internal.h b/absl/strings/internal/cord_internal.h
index e6f0d544..20dd008c 100644
--- a/absl/strings/internal/cord_internal.h
+++ b/absl/strings/internal/cord_internal.h
@@ -69,12 +69,6 @@ enum CordFeatureDefaults {
extern std::atomic<bool> cord_ring_buffer_enabled;
extern std::atomic<bool> shallow_subcords_enabled;
-// `cord_btree_exhaustive_validation` can be set to force exhaustive validation
-// in debug assertions, and code that calls `IsValid()` explicitly. By default,
-// assertions should be relatively cheap and AssertValid() can easily lead to
-// O(n^2) complexity as recursive / full tree validation is O(n).
-extern std::atomic<bool> cord_btree_exhaustive_validation;
-
inline void enable_cord_ring_buffer(bool enable) {
cord_ring_buffer_enabled.store(enable, std::memory_order_relaxed);
}
@@ -163,20 +157,19 @@ class RefcountAndFlags {
// false will be visible to a thread that just observed this method returning
// false. Always returns false when the immortal bit is set.
inline bool Decrement() {
- int32_t refcount = count_.load(std::memory_order_acquire) & kRefcountMask;
- assert(refcount > 0 || refcount & kImmortalFlag);
+ int32_t refcount = count_.load(std::memory_order_acquire);
+ assert((refcount & kRefcountMask) > 0 || refcount & kImmortalFlag);
return refcount != kRefIncrement &&
(count_.fetch_sub(kRefIncrement, std::memory_order_acq_rel) &
- kRefcountMask) != kRefIncrement;
+ kHighRefcountMask) != 0;
}
// Same as Decrement but expect that refcount is greater than 1.
inline bool DecrementExpectHighRefcount() {
int32_t refcount =
- count_.fetch_sub(kRefIncrement, std::memory_order_acq_rel) &
- kRefcountMask;
- assert(refcount > 0 || refcount & kImmortalFlag);
- return refcount != kRefIncrement;
+ count_.fetch_sub(kRefIncrement, std::memory_order_acq_rel);
+ assert((refcount & kRefcountMask) > 0 || refcount & kImmortalFlag);
+ return (refcount & kHighRefcountMask) != 0;
}
// Returns the current reference count using acquire semantics.
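
For context on what the new return expressions compute, here is a minimal standalone sketch of the decrement path as changed above. It restates the logic outside the class; the kImmortalFlag, kReservedFlag and kRefIncrement values are assumptions mirroring the rest of the header (only kRefcountMask and kHighRefcountMask appear in this diff).

#include <atomic>
#include <cassert>
#include <cstdint>
#include <iostream>

// Assumed flag layout for illustration; not part of this diff.
constexpr int32_t kImmortalFlag = 0x1;
constexpr int32_t kReservedFlag = 0x2;
constexpr int32_t kRefIncrement = 0x4;  // refcount stored from bit 2 upward
constexpr int32_t kRefcountMask = ~kReservedFlag;
constexpr int32_t kHighRefcountMask = kRefcountMask & ~kRefIncrement;

// Simplified restatement of the new Decrement(): returns true while other
// references remain (or the object is immortal), false once the caller has
// dropped the last reference.
bool Decrement(std::atomic<int32_t>& count) {
  int32_t refcount = count.load(std::memory_order_acquire);
  assert((refcount & kRefcountMask) > 0 || (refcount & kImmortalFlag));
  return refcount != kRefIncrement &&
         (count.fetch_sub(kRefIncrement, std::memory_order_acq_rel) &
          kHighRefcountMask) != 0;
}

int main() {
  std::atomic<int32_t> two_refs{2 * kRefIncrement};
  std::cout << Decrement(two_refs) << "\n";  // 1: another reference remains
  std::cout << Decrement(two_refs) << "\n";  // 0: last reference dropped

  std::atomic<int32_t> immortal{kImmortalFlag};
  std::cout << Decrement(immortal) << "\n";  // 1: kImmortalFlag survives the mask
}

Note that the post-decrement check now compares the masked previous value against zero rather than against kRefIncrement, and that kImmortalFlag is deliberately left inside kHighRefcountMask so an immortal object never reports a final decrement.
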
@@ -220,6 +213,15 @@ class RefcountAndFlags {
// purposes of equality. (A refcount of 0 or 1 does not count as 0 or 1
// if the immortal bit is set.)
kRefcountMask = ~kReservedFlag,
+
+ // Bitmask to use when checking if refcount is equal to 1 and not
+ // immortal when decrementing the refcount. This masks out kRefIncrement and
+ // all flags except kImmortalFlag. If the masked RefcountAndFlags is 0, we
+ // assume the refcount is equal to 1, since we know it's not immortal and
+ // not greater than 1. If the masked RefcountAndFlags is not 0, we can
+ // assume the refcount is not equal to 1 since either a higher bit in the
+ // refcount is set, or kImmortal is set.
+ kHighRefcountMask = kRefcountMask & ~kRefIncrement,
};
std::atomic<int32_t> count_;
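
A small hedged sketch spelling out the bit patterns behind the new kHighRefcountMask comment; the kImmortalFlag, kReservedFlag and kRefIncrement values are assumed from the surrounding header and are not part of this diff. The static_asserts check the property the decrement paths rely on: the masked value is zero exactly when the refcount is 1 and the immortal bit is clear.

#include <cstdint>

// Assumed flag layout; only kRefcountMask and kHighRefcountMask appear in
// this diff.
constexpr int32_t kImmortalFlag = 0x1;
constexpr int32_t kReservedFlag = 0x2;
constexpr int32_t kRefIncrement = 0x4;
constexpr int32_t kRefcountMask = ~kReservedFlag;
constexpr int32_t kHighRefcountMask = kRefcountMask & ~kRefIncrement;

// refcount == 1, not immortal: masked value is 0 -> last reference.
static_assert((kRefIncrement & kHighRefcountMask) == 0, "");
// refcount == 2: a higher refcount bit survives the mask -> still referenced.
static_assert(((2 * kRefIncrement) & kHighRefcountMask) != 0, "");
// refcount == 1 but immortal: kImmortalFlag survives the mask -> never "last".
static_assert(((kRefIncrement | kImmortalFlag) & kHighRefcountMask) != 0, "");
// The reserved flag alone never influences the check.
static_assert((kReservedFlag & kHighRefcountMask) == 0, "");

int main() { return 0; }
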