Diffstat (limited to 'absl')
 absl/status/status.cc           |   7
 absl/strings/BUILD.bazel        |   2
 absl/strings/CMakeLists.txt     |   2
 absl/strings/cord.cc            | 114
 absl/strings/cord.h             |  36
 absl/strings/cord_analysis.cc   | 237
 absl/strings/cord_analysis.h    |  44
 absl/strings/cord_test.cc       | 236
 absl/strings/substitute.h       |   8
 absl/strings/substitute_test.cc |  48
 absl/synchronization/mutex.cc   |  18
 absl/synchronization/mutex.h    |   7
12 files changed, 562 insertions, 197 deletions
diff --git a/absl/status/status.cc b/absl/status/status.cc
index bcf3413e..6b316ac6 100644
--- a/absl/status/status.cc
+++ b/absl/status/status.cc
@@ -185,8 +185,11 @@ void Status::ForEachPayload(
}
const std::string* Status::EmptyString() {
- static std::string* empty_string = new std::string();
- return empty_string;
+ static union EmptyString {
+ std::string str;
+ ~EmptyString() {}
+ } empty = {{}};
+ return &empty.str;
}
constexpr const char Status::kMovedFromString[];
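The status.cc change above replaces a heap-allocated, never-deleted std::string with a function-local static union whose string member is constructed once and intentionally never destroyed, avoiding both the heap allocation and an exit-time destructor. A minimal standalone sketch of the same pattern (illustrative names, not the Abseil implementation):

#include <string>

// Returns a reference to a process-lifetime empty string. The union member is
// constructed on first use, but its destructor is deliberately never run, so
// the object stays valid during program shutdown and no atexit hook is
// registered for it.
const std::string& EmptyStringSingleton() {
  static union Holder {
    std::string str;
    Holder() : str() {}  // construct the string in place
    ~Holder() {}         // deliberately do NOT destroy `str`
  } holder;
  return holder.str;
}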
diff --git a/absl/strings/BUILD.bazel b/absl/strings/BUILD.bazel
index 5d433cce..0dc667e0 100644
--- a/absl/strings/BUILD.bazel
+++ b/absl/strings/BUILD.bazel
@@ -412,6 +412,8 @@ cc_library(
name = "cord",
srcs = [
"cord.cc",
+ "cord_analysis.cc",
+ "cord_analysis.h",
],
hdrs = [
"cord.h",
diff --git a/absl/strings/CMakeLists.txt b/absl/strings/CMakeLists.txt
index 0eef91df..4dd605ec 100644
--- a/absl/strings/CMakeLists.txt
+++ b/absl/strings/CMakeLists.txt
@@ -834,6 +834,8 @@ absl_cc_library(
"cord.h"
SRCS
"cord.cc"
+ "cord_analysis.cc"
+ "cord_analysis.h"
COPTS
${ABSL_DEFAULT_COPTS}
DEPS
diff --git a/absl/strings/cord.cc b/absl/strings/cord.cc
index 425b4bee..ddd14ef4 100644
--- a/absl/strings/cord.cc
+++ b/absl/strings/cord.cc
@@ -469,51 +469,6 @@ static inline bool PrepareAppendRegion(CordRep* root, char** region,
return true;
}
-// Computes the memory side of the provided edge which must be a valid data edge
-// for a btrtee, i.e., a FLAT, EXTERNAL or SUBSTRING of a FLAT or EXTERNAL node.
-static bool RepMemoryUsageDataEdge(const CordRep* rep,
- size_t* total_mem_usage) {
- size_t maybe_sub_size = 0;
- if (ABSL_PREDICT_FALSE(rep->IsSubstring())) {
- maybe_sub_size = sizeof(cord_internal::CordRepSubstring);
- rep = rep->substring()->child;
- }
- if (rep->IsFlat()) {
- *total_mem_usage += maybe_sub_size + rep->flat()->AllocatedSize();
- return true;
- }
- if (rep->IsExternal()) {
- // We don't know anything about the embedded / bound data, but we can safely
- // assume it is 'at least' a word / pointer to data. In the future we may
- // choose to use the 'data' byte as a tag to identify the types of some
- // well-known externals, such as a std::string instance.
- *total_mem_usage += maybe_sub_size +
- sizeof(cord_internal::CordRepExternalImpl<intptr_t>) +
- rep->length;
- return true;
- }
- return false;
-}
-
-// If the rep is a leaf, this will increment the value at total_mem_usage and
-// will return true.
-static bool RepMemoryUsageLeaf(const CordRep* rep, size_t* total_mem_usage) {
- if (rep->IsFlat()) {
- *total_mem_usage += rep->flat()->AllocatedSize();
- return true;
- }
- if (rep->IsExternal()) {
- // We don't know anything about the embedded / bound data, but we can safely
- // assume it is 'at least' a word / pointer to data. In the future we may
- // choose to use the 'data' byte as a tag to identify the types of some
- // well-known externals, such as a std::string instance.
- *total_mem_usage +=
- sizeof(cord_internal::CordRepExternalImpl<intptr_t>) + rep->length;
- return true;
- }
- return false;
-}
-
void Cord::InlineRep::AssignSlow(const Cord::InlineRep& src) {
assert(&src != this);
assert(is_tree() || src.is_tree());
@@ -1968,75 +1923,6 @@ static bool VerifyNode(CordRep* root, CordRep* start_node,
return true;
}
-// Traverses the tree and computes the total memory allocated.
-/* static */ size_t Cord::MemoryUsageAux(const CordRep* rep) {
- size_t total_mem_usage = 0;
-
- if (rep->IsCrc()) {
- total_mem_usage += sizeof(CordRepCrc);
- rep = rep->crc()->child;
- }
-
- // Allow a quick exit for the common case that the root is a leaf.
- if (RepMemoryUsageLeaf(rep, &total_mem_usage)) {
- return total_mem_usage;
- }
-
- // Iterate over the tree. cur_node is never a leaf node and leaf nodes will
- // never be appended to tree_stack. This reduces overhead from manipulating
- // tree_stack.
- absl::InlinedVector<const CordRep*, kInlinedVectorSize> tree_stack;
- const CordRep* cur_node = rep;
- while (true) {
- const CordRep* next_node = nullptr;
-
- if (cur_node->IsConcat()) {
- total_mem_usage += sizeof(CordRepConcat);
- const CordRep* left = cur_node->concat()->left;
- if (!RepMemoryUsageLeaf(left, &total_mem_usage)) {
- next_node = left;
- }
-
- const CordRep* right = cur_node->concat()->right;
- if (!RepMemoryUsageLeaf(right, &total_mem_usage)) {
- if (next_node) {
- tree_stack.push_back(next_node);
- }
- next_node = right;
- }
- } else if (cur_node->IsBtree()) {
- total_mem_usage += sizeof(CordRepBtree);
- const CordRepBtree* node = cur_node->btree();
- if (node->height() == 0) {
- for (const CordRep* edge : node->Edges()) {
- RepMemoryUsageDataEdge(edge, &total_mem_usage);
- }
- } else {
- for (const CordRep* edge : node->Edges()) {
- tree_stack.push_back(edge);
- }
- }
- } else {
- // Since cur_node is not a leaf or a concat node it must be a substring.
- assert(cur_node->IsSubstring());
- total_mem_usage += sizeof(CordRepSubstring);
- next_node = cur_node->substring()->child;
- if (RepMemoryUsageLeaf(next_node, &total_mem_usage)) {
- next_node = nullptr;
- }
- }
-
- if (!next_node) {
- if (tree_stack.empty()) {
- return total_mem_usage;
- }
- next_node = tree_stack.back();
- tree_stack.pop_back();
- }
- cur_node = next_node;
- }
-}
-
std::ostream& operator<<(std::ostream& out, const Cord& cord) {
for (absl::string_view chunk : cord.Chunks()) {
out.write(chunk.data(), chunk.size());
diff --git a/absl/strings/cord.h b/absl/strings/cord.h
index 3bbd763e..49d51da2 100644
--- a/absl/strings/cord.h
+++ b/absl/strings/cord.h
@@ -79,6 +79,7 @@
#include "absl/container/inlined_vector.h"
#include "absl/functional/function_ref.h"
#include "absl/meta/type_traits.h"
+#include "absl/strings/cord_analysis.h"
#include "absl/strings/internal/cord_internal.h"
#include "absl/strings/internal/cord_rep_btree.h"
#include "absl/strings/internal/cord_rep_btree_reader.h"
@@ -102,6 +103,20 @@ template <typename Releaser>
Cord MakeCordFromExternal(absl::string_view, Releaser&&);
void CopyCordToString(const Cord& src, std::string* dst);
+// Cord memory accounting modes
+enum class CordMemoryAccounting {
+ // Counts the *approximate* number of bytes held in full or in part by this
+ // Cord (which may not remain the same between invocations). Cords that share
+ // memory could each be "charged" independently for the same shared memory.
+ kTotal,
+
+ // Counts the *approximate* number of bytes held in full or in part by this
+ // Cord weighted by the sharing ratio of that data. For example, if some data
+ // edge is shared by 4 different Cords, then each cord is attributed 1/4th of
+ // the total memory usage as a 'fair share' of the total memory usage.
+ kFairShare,
+};
+
// Cord
//
// A Cord is a sequence of characters, designed to be more efficient than a
@@ -272,11 +287,10 @@ class Cord {
// Cord::EstimatedMemoryUsage()
//
- // Returns the *approximate* number of bytes held in full or in part by this
- // Cord (which may not remain the same between invocations). Note that Cords
- // that share memory could each be "charged" independently for the same shared
- // memory.
- size_t EstimatedMemoryUsage() const;
+ // Returns the *approximate* number of bytes held by this Cord. See
+ // CordMemoryAccounting for more information on the accounting method used.
+ size_t EstimatedMemoryUsage(CordMemoryAccounting accounting_method =
+ CordMemoryAccounting::kTotal) const;
// Cord::Compare()
//
@@ -890,9 +904,6 @@ class Cord {
};
InlineRep contents_;
- // Helper for MemoryUsage().
- static size_t MemoryUsageAux(const absl::cord_internal::CordRep* rep);
-
// Helper for GetFlat() and TryFlat().
static bool GetFlatAux(absl::cord_internal::CordRep* rep,
absl::string_view* fragment);
@@ -1235,10 +1246,15 @@ inline size_t Cord::size() const {
inline bool Cord::empty() const { return contents_.empty(); }
-inline size_t Cord::EstimatedMemoryUsage() const {
+inline size_t Cord::EstimatedMemoryUsage(
+ CordMemoryAccounting accounting_method) const {
size_t result = sizeof(Cord);
if (const absl::cord_internal::CordRep* rep = contents_.tree()) {
- result += MemoryUsageAux(rep);
+ if (accounting_method == CordMemoryAccounting::kFairShare) {
+ result += cord_internal::GetEstimatedFairShareMemoryUsage(rep);
+ } else {
+ result += cord_internal::GetEstimatedMemoryUsage(rep);
+ }
}
return result;
}
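The new accounting parameter defaults to kTotal, so existing callers keep their behavior; kFairShare instead weights shared nodes by their reference count. A hedged usage sketch of the public API introduced above (the printed values are illustrative and depend on flat sizes and build configuration):

#include <iostream>
#include <string>

#include "absl/strings/cord.h"

int main() {
  absl::Cord a(std::string(1000, 'x'));
  absl::Cord b = a;  // `b` shares the underlying tree with `a`

  // kTotal charges each cord the full size of every node it references.
  std::cout << "total:      " << a.EstimatedMemoryUsage() << "\n";

  // kFairShare weights shared nodes by 1/refcount, so `a` and `b` roughly
  // split the cost of the shared data between them.
  std::cout << "fair share: "
            << a.EstimatedMemoryUsage(absl::CordMemoryAccounting::kFairShare)
            << "\n";
  return 0;
}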
diff --git a/absl/strings/cord_analysis.cc b/absl/strings/cord_analysis.cc
new file mode 100644
index 00000000..435b4c94
--- /dev/null
+++ b/absl/strings/cord_analysis.cc
@@ -0,0 +1,237 @@
+// Copyright 2021 The Abseil Authors
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// https://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#include <cstddef>
+#include <cstdint>
+
+#include "absl/base/attributes.h"
+#include "absl/base/config.h"
+#include "absl/container/inlined_vector.h"
+#include "absl/strings/cord_analysis.h"
+#include "absl/strings/internal/cord_internal.h"
+#include "absl/strings/internal/cord_rep_btree.h"
+#include "absl/strings/internal/cord_rep_crc.h"
+#include "absl/strings/internal/cord_rep_flat.h"
+#include "absl/strings/internal/cord_rep_ring.h"
+//
+#include "absl/base/macros.h"
+#include "absl/base/port.h"
+#include "absl/functional/function_ref.h"
+
+namespace absl {
+ABSL_NAMESPACE_BEGIN
+namespace cord_internal {
+namespace {
+
+// Accounting mode for analyzing memory usage.
+enum class Mode { kTotal, kFairShare };
+
+// CordRepRef holds a `const CordRep*` reference in rep, and depending on mode,
+// holds a 'fraction' representing a cumulative inverse refcount weight.
+template <Mode mode>
+struct CordRepRef {
+ // Instantiates a CordRepRef instance.
+ explicit CordRepRef(const CordRep* r) : rep(r) {}
+
+ // Creates a child reference holding the provided child.
+ // Overloaded to add cumulative reference count for kFairShare.
+ CordRepRef Child(const CordRep* child) const { return CordRepRef(child); }
+
+ const CordRep* rep;
+};
+
+// RawUsage holds the computed total number of bytes.
+template <Mode mode>
+struct RawUsage {
+ size_t total = 0;
+
+ // Add 'size' to total, ignoring the CordRepRef argument.
+ void Add(size_t size, CordRepRef<mode>) { total += size; }
+};
+
+// Returns n / refcount avoiding a div for the common refcount == 1.
+template <typename refcount_t>
+double MaybeDiv(double d, refcount_t refcount) {
+ return refcount == 1 ? d : d / refcount;
+}
+
+// Overloaded 'kFairShare' specialization for CordRepRef. This class holds a
+// `fraction` value which represents a cumulative inverse refcount weight.
+// For example, a top node with a reference count of 2 will have a fraction
+// value of 1/2 = 0.5, representing the 'fair share' of memory it references.
+// A node below such a node with a reference count of 5 then has a fraction of
+// 0.5 / 5 = 0.1 representing the fair share of memory below that node, etc.
+template <>
+struct CordRepRef<Mode::kFairShare> {
+ // Creates a CordRepRef with the provided rep and top (parent) fraction.
+ explicit CordRepRef(const CordRep* r, double frac = 1.0)
+ : rep(r), fraction(MaybeDiv(frac, r->refcount.Get())) {}
+
+ // Returns a CordRepRef with a fraction of `this->fraction / child.refcount`
+ CordRepRef Child(const CordRep* child) const {
+ return CordRepRef(child, fraction);
+ }
+
+ const CordRep* rep;
+ double fraction;
+};
+
+// Overloaded 'kFairShare' specialization for RawUsage
+template <>
+struct RawUsage<Mode::kFairShare> {
+ double total = 0;
+
+ // Adds `size` multiplied by `rep.fraction` to the total size.
+ void Add(size_t size, CordRepRef<Mode::kFairShare> rep) {
+ total += static_cast<double>(size) * rep.fraction;
+ }
+};
+
+// Returns true if the provided rep is a valid data edge.
+bool IsDataEdge(const CordRep* rep) {
+ // The fast path is that `rep` is an EXTERNAL or FLAT node, making the below
+ // if a single, well predicted branch. We then repeat the FLAT or EXTERNAL
+ // check in the slow path after the SUBSTRING check to optimize for the hot path.
+ if (rep->tag == EXTERNAL || rep->tag >= FLAT) return true;
+ if (rep->tag == SUBSTRING) rep = rep->substring()->child;
+ return rep->tag == EXTERNAL || rep->tag >= FLAT;
+}
+
+// Computes the estimated memory size of the provided data edge.
+// External reps are assumed 'heap allocated at their exact size'.
+template <Mode mode>
+void AnalyzeDataEdge(CordRepRef<mode> rep, RawUsage<mode>& raw_usage) {
+ assert(IsDataEdge(rep.rep));
+
+ // Consume all substrings
+ if (rep.rep->tag == SUBSTRING) {
+ raw_usage.Add(sizeof(CordRepSubstring), rep);
+ rep = rep.Child(rep.rep->substring()->child);
+ }
+
+ // Consume FLAT / EXTERNAL
+ const size_t size =
+ rep.rep->tag >= FLAT
+ ? rep.rep->flat()->AllocatedSize()
+ : rep.rep->length + sizeof(CordRepExternalImpl<intptr_t>);
+ raw_usage.Add(size, rep);
+}
+
+// Computes the memory size of the provided Concat tree.
+template <Mode mode>
+void AnalyzeConcat(CordRepRef<mode> rep, RawUsage<mode>& raw_usage) {
+ absl::InlinedVector<CordRepRef<mode>, 47> pending;
+
+ while (rep.rep != nullptr) {
+ const CordRepConcat* concat = rep.rep->concat();
+ CordRepRef<mode> left = rep.Child(concat->left);
+ CordRepRef<mode> right = rep.Child(concat->right);
+
+ raw_usage.Add(sizeof(CordRepConcat), rep);
+
+ switch ((IsDataEdge(left.rep) ? 1 : 0) | (IsDataEdge(right.rep) ? 2 : 0)) {
+ case 0: // neither left nor right is a data edge
+ rep = left;
+ pending.push_back(right);
+ break;
+ case 1: // only left is a data edge
+ AnalyzeDataEdge(left, raw_usage);
+ rep = right;
+ break;
+ case 2: // only right is a data edge
+ AnalyzeDataEdge(right, raw_usage);
+ rep = left;
+ break;
+ case 3: // left and right are data edges
+ AnalyzeDataEdge(right, raw_usage);
+ AnalyzeDataEdge(left, raw_usage);
+ if (!pending.empty()) {
+ rep = pending.back();
+ pending.pop_back();
+ } else {
+ rep.rep = nullptr;
+ }
+ break;
+ }
+ }
+}
+
+// Computes the memory size of the provided Ring tree.
+template <Mode mode>
+void AnalyzeRing(CordRepRef<mode> rep, RawUsage<mode>& raw_usage) {
+ const CordRepRing* ring = rep.rep->ring();
+ raw_usage.Add(CordRepRing::AllocSize(ring->capacity()), rep);
+ ring->ForEach([&](CordRepRing::index_type pos) {
+ AnalyzeDataEdge(rep.Child(ring->entry_child(pos)), raw_usage);
+ });
+}
+
+// Computes the memory size of the provided Btree tree.
+template <Mode mode>
+void AnalyzeBtree(CordRepRef<mode> rep, RawUsage<mode>& raw_usage) {
+ raw_usage.Add(sizeof(CordRepBtree), rep);
+ const CordRepBtree* tree = rep.rep->btree();
+ if (tree->height() > 0) {
+ for (CordRep* edge : tree->Edges()) {
+ AnalyzeBtree(rep.Child(edge), raw_usage);
+ }
+ } else {
+ for (CordRep* edge : tree->Edges()) {
+ AnalyzeDataEdge(rep.Child(edge), raw_usage);
+ }
+ }
+}
+
+template <Mode mode>
+size_t GetEstimatedUsage(const CordRep* rep) {
+ // Zero initialized memory usage totals.
+ RawUsage<mode> raw_usage;
+
+ // Capture top level node and refcount into a CordRepRef.
+ CordRepRef<mode> repref(rep);
+
+ // Consume the top level CRC node if present.
+ if (repref.rep->tag == CRC) {
+ raw_usage.Add(sizeof(CordRepCrc), repref);
+ repref = repref.Child(repref.rep->crc()->child);
+ }
+
+ if (IsDataEdge(repref.rep)) {
+ AnalyzeDataEdge(repref, raw_usage);
+ } else if (repref.rep->tag == BTREE) {
+ AnalyzeBtree(repref, raw_usage);
+ } else if (repref.rep->tag == CONCAT) {
+ AnalyzeConcat(repref, raw_usage);
+ } else if (repref.rep->tag == RING) {
+ AnalyzeRing(repref, raw_usage);
+ } else {
+ assert(false);
+ }
+
+ return static_cast<size_t>(raw_usage.total);
+}
+
+} // namespace
+
+size_t GetEstimatedMemoryUsage(const CordRep* rep) {
+ return GetEstimatedUsage<Mode::kTotal>(rep);
+}
+
+size_t GetEstimatedFairShareMemoryUsage(const CordRep* rep) {
+ return GetEstimatedUsage<Mode::kFairShare>(rep);
+}
+
+} // namespace cord_internal
+ABSL_NAMESPACE_END
+} // namespace absl
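The kFairShare machinery above threads a cumulative `fraction` down the tree: each step divides by the refcount of the node being entered, and every node's size is charged as size * fraction. A small self-contained sketch of that weighting over a toy node type (this is only the arithmetic, not the CordRep machinery):

#include <cstddef>
#include <cstdio>
#include <vector>

// Toy tree node: `size` bytes owned by the node itself, `refcount` external
// references to it, and zero or more children.
struct Node {
  size_t size;
  int refcount;
  std::vector<const Node*> children;
};

// Charges each node size * (product of 1 / refcount along the path from the
// root), mirroring how CordRepRef<Mode::kFairShare> accumulates `fraction`.
double FairShareBytes(const Node* node, double fraction = 1.0) {
  fraction /= node->refcount;
  double total = static_cast<double>(node->size) * fraction;
  for (const Node* child : node->children) {
    total += FairShareBytes(child, fraction);
  }
  return total;
}

int main() {
  Node flat{4096, 4, {}};     // a data edge shared by 4 cords
  Node root{64, 1, {&flat}};  // a tree node owned by a single cord
  // The cord holding `root` is charged 64 + 4096 / 4 = 1088 bytes.
  std::printf("%.0f\n", FairShareBytes(&root));
  return 0;
}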
diff --git a/absl/strings/cord_analysis.h b/absl/strings/cord_analysis.h
new file mode 100644
index 00000000..7041ad1a
--- /dev/null
+++ b/absl/strings/cord_analysis.h
@@ -0,0 +1,44 @@
+// Copyright 2021 The Abseil Authors
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// https://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#ifndef ABSL_STRINGS_CORD_ANALYSIS_H_
+#define ABSL_STRINGS_CORD_ANALYSIS_H_
+
+#include <cstddef>
+#include <cstdint>
+
+#include "absl/base/config.h"
+#include "absl/strings/internal/cord_internal.h"
+
+namespace absl {
+ABSL_NAMESPACE_BEGIN
+namespace cord_internal {
+
+// Returns the *approximate* number of bytes held in full or in part by this
+// Cord (which may not remain the same between invocations). Cords that share
+// memory could each be "charged" independently for the same shared memory.
+size_t GetEstimatedMemoryUsage(const CordRep* rep);
+
+// Returns the *approximate* number of bytes held in full or in part by this
+// CordRep weighted by the sharing ratio of that data. For example, if some data
+// edge is shared by 4 different Cords, then each cord is attributed 1/4th of
+// the total memory usage as a 'fair share' of the total memory usage.
+size_t GetEstimatedFairShareMemoryUsage(const CordRep* rep);
+
+} // namespace cord_internal
+ABSL_NAMESPACE_END
+} // namespace absl
+
+
+#endif // ABSL_STRINGS_CORD_ANALYSIS_H_
diff --git a/absl/strings/cord_test.cc b/absl/strings/cord_test.cc
index a22db0bb..e499b55f 100644
--- a/absl/strings/cord_test.cc
+++ b/absl/strings/cord_test.cc
@@ -50,7 +50,12 @@ static constexpr auto MAX_FLAT_TAG = absl::cord_internal::MAX_FLAT_TAG;
typedef std::mt19937_64 RandomEngine;
using absl::cord_internal::CordRep;
+using absl::cord_internal::CordRepBtree;
+using absl::cord_internal::CordRepConcat;
+using absl::cord_internal::CordRepCrc;
+using absl::cord_internal::CordRepExternal;
using absl::cord_internal::CordRepFlat;
+using absl::cord_internal::CordRepSubstring;
using absl::cord_internal::kFlatOverhead;
using absl::cord_internal::kMaxFlatLength;
@@ -196,6 +201,7 @@ class CordTestPeer {
}
static bool IsTree(const Cord& c) { return c.contents_.is_tree(); }
+ static CordRep* Tree(const Cord& c) { return c.contents_.tree(); }
static cord_internal::CordzInfo* GetCordzInfo(const Cord& c) {
return c.contents_.cordz_info();
@@ -1474,76 +1480,186 @@ TEST_P(CordTest, ExternalMemoryGet) {
}
// CordMemoryUsage tests verify the correctness of the EstimatedMemoryUsage()
-// These tests take into account that the reported memory usage is approximate
-// and non-deterministic. For all tests, We verify that the reported memory
-// usage is larger than `size()`, and less than `size() * 1.5` as a cord should
-// never reserve more 'extra' capacity than half of its size as it grows.
-// Additionally we have some whiteboxed expectations based on our knowledge of
-// the layout and size of empty and inlined cords, and flat nodes.
+// We use whiteboxed expectations based on our knowledge of the layout and size
+// of empty and inlined cords, and flat nodes.
-TEST_P(CordTest, CordMemoryUsageEmpty) {
- EXPECT_EQ(sizeof(absl::Cord), absl::Cord().EstimatedMemoryUsage());
-}
-
-TEST_P(CordTest, CordMemoryUsageEmbedded) {
- absl::Cord a("hello");
- EXPECT_EQ(a.EstimatedMemoryUsage(), sizeof(absl::Cord));
-}
+constexpr auto kFairShare = absl::CordMemoryAccounting::kFairShare;
-TEST_P(CordTest, CordMemoryUsageEmbeddedAppend) {
- absl::Cord a("a");
- absl::Cord b("bcd");
- EXPECT_EQ(b.EstimatedMemoryUsage(), sizeof(absl::Cord));
- a.Append(b);
- EXPECT_EQ(a.EstimatedMemoryUsage(), sizeof(absl::Cord));
+// Creates a cord of `n` `c` values, making sure no string stealing occurs.
+absl::Cord MakeCord(size_t n, char c) {
+ const std::string s(n, c);
+ return absl::Cord(s);
}
-TEST_P(CordTest, CordMemoryUsageExternalMemory) {
- static const int kLength = 1000;
+TEST(CordTest, CordMemoryUsageEmpty) {
absl::Cord cord;
- AddExternalMemory(std::string(kLength, 'x'), &cord);
- EXPECT_GT(cord.EstimatedMemoryUsage(), kLength);
- EXPECT_LE(cord.EstimatedMemoryUsage(), kLength * 1.5);
-}
-
-TEST_P(CordTest, CordMemoryUsageFlat) {
- static const int kLength = 125;
- absl::Cord a(std::string(kLength, 'a'));
- EXPECT_GT(a.EstimatedMemoryUsage(), kLength);
- EXPECT_LE(a.EstimatedMemoryUsage(), kLength * 1.5);
+ EXPECT_EQ(sizeof(absl::Cord), cord.EstimatedMemoryUsage());
+ EXPECT_EQ(sizeof(absl::Cord), cord.EstimatedMemoryUsage(kFairShare));
}
-TEST_P(CordTest, CordMemoryUsageAppendFlat) {
- using absl::strings_internal::CordTestAccess;
- absl::Cord a(std::string(CordTestAccess::MaxFlatLength(), 'a'));
- size_t length = a.EstimatedMemoryUsage();
- a.Append(std::string(CordTestAccess::MaxFlatLength(), 'b'));
- size_t delta = a.EstimatedMemoryUsage() - length;
- EXPECT_GT(delta, CordTestAccess::MaxFlatLength());
- EXPECT_LE(delta, CordTestAccess::MaxFlatLength() * 1.5);
+TEST(CordTest, CordMemoryUsageInlined) {
+ absl::Cord a("hello");
+ EXPECT_EQ(a.EstimatedMemoryUsage(), sizeof(absl::Cord));
+ EXPECT_EQ(a.EstimatedMemoryUsage(kFairShare), sizeof(absl::Cord));
}
-TEST_P(CordTest, CordMemoryUsageAppendExternal) {
- static const int kLength = 1000;
- using absl::strings_internal::CordTestAccess;
- absl::Cord a(std::string(CordTestAccess::MaxFlatLength(), 'a'));
- size_t length = a.EstimatedMemoryUsage();
- AddExternalMemory(std::string(kLength, 'b'), &a);
- size_t delta = a.EstimatedMemoryUsage() - length;
- EXPECT_GT(delta, kLength);
- EXPECT_LE(delta, kLength * 1.5);
-}
+TEST(CordTest, CordMemoryUsageExternalMemory) {
+ absl::Cord cord;
+ AddExternalMemory(std::string(1000, 'x'), &cord);
+ const size_t expected =
+ sizeof(absl::Cord) + 1000 + sizeof(CordRepExternal) + sizeof(intptr_t);
+ EXPECT_EQ(cord.EstimatedMemoryUsage(), expected);
+ EXPECT_EQ(cord.EstimatedMemoryUsage(kFairShare), expected);
+}
+
+TEST(CordTest, CordMemoryUsageFlat) {
+ absl::Cord cord = MakeCord(1000, 'a');
+ const size_t flat_size =
+ absl::CordTestPeer::Tree(cord)->flat()->AllocatedSize();
+ EXPECT_EQ(cord.EstimatedMemoryUsage(), sizeof(absl::Cord) + flat_size);
+ EXPECT_EQ(cord.EstimatedMemoryUsage(kFairShare),
+ sizeof(absl::Cord) + flat_size);
+}
+
+TEST(CordTest, CordMemoryUsageSubStringSharedFlat) {
+ absl::Cord flat = MakeCord(2000, 'a');
+ const size_t flat_size =
+ absl::CordTestPeer::Tree(flat)->flat()->AllocatedSize();
+ absl::Cord cord = flat.Subcord(500, 1000);
+ EXPECT_EQ(cord.EstimatedMemoryUsage(),
+ sizeof(absl::Cord) + sizeof(CordRepSubstring) + flat_size);
+ EXPECT_EQ(cord.EstimatedMemoryUsage(kFairShare),
+ sizeof(absl::Cord) + sizeof(CordRepSubstring) + flat_size / 2);
+}
+
+TEST(CordTest, CordMemoryUsageFlatShared) {
+ absl::Cord shared = MakeCord(1000, 'a');
+ absl::Cord cord(shared);
+ const size_t flat_size =
+ absl::CordTestPeer::Tree(cord)->flat()->AllocatedSize();
+ EXPECT_EQ(cord.EstimatedMemoryUsage(), sizeof(absl::Cord) + flat_size);
+ EXPECT_EQ(cord.EstimatedMemoryUsage(kFairShare),
+ sizeof(absl::Cord) + flat_size / 2);
+}
+
+TEST(CordTest, CordMemoryUsageFlatHardenedAndShared) {
+ absl::Cord shared = MakeCord(1000, 'a');
+ absl::Cord cord(shared);
+ const size_t flat_size =
+ absl::CordTestPeer::Tree(cord)->flat()->AllocatedSize();
+ cord.SetExpectedChecksum(1);
+ EXPECT_EQ(cord.EstimatedMemoryUsage(),
+ sizeof(absl::Cord) + sizeof(CordRepCrc) + flat_size);
+ EXPECT_EQ(cord.EstimatedMemoryUsage(kFairShare),
+ sizeof(absl::Cord) + sizeof(CordRepCrc) + flat_size / 2);
+
+ absl::Cord cord2(cord);
+ EXPECT_EQ(cord2.EstimatedMemoryUsage(),
+ sizeof(absl::Cord) + sizeof(CordRepCrc) + flat_size);
+ EXPECT_EQ(cord2.EstimatedMemoryUsage(kFairShare),
+ sizeof(absl::Cord) + (sizeof(CordRepCrc) + flat_size / 2) / 2);
+}
+
+TEST(CordTest, CordMemoryUsageBTree) {
+ absl::cord_internal::enable_cord_btree(true);
+
+ absl::Cord cord1;
+ size_t flats1_size = 0;
+ absl::Cord flats1[4] = {MakeCord(1000, 'a'), MakeCord(1100, 'a'),
+ MakeCord(1200, 'a'), MakeCord(1300, 'a')};
+ for (absl::Cord flat : flats1) {
+ flats1_size += absl::CordTestPeer::Tree(flat)->flat()->AllocatedSize();
+ cord1.Append(std::move(flat));
+ }
+
+ // Make sure the created cord is a BTREE tree. Under some builds, such as
+ // Windows DLL builds, we may see ODR-like effects on the flag, meaning the
+ // DLL code will run with the picked-up default.
+ if (!absl::CordTestPeer::Tree(cord1)->IsBtree()) {
+ ABSL_RAW_LOG(WARNING, "Cord library code not respecting btree flag");
+ return;
+ }
+
+ size_t rep1_size = sizeof(CordRepBtree) + flats1_size;
+ size_t rep1_shared_size = sizeof(CordRepBtree) + flats1_size / 2;
+
+ EXPECT_EQ(cord1.EstimatedMemoryUsage(), sizeof(absl::Cord) + rep1_size);
+ EXPECT_EQ(cord1.EstimatedMemoryUsage(kFairShare),
+ sizeof(absl::Cord) + rep1_shared_size);
+
+ absl::Cord cord2;
+ size_t flats2_size = 0;
+ absl::Cord flats2[4] = {MakeCord(600, 'a'), MakeCord(700, 'a'),
+ MakeCord(800, 'a'), MakeCord(900, 'a')};
+ for (absl::Cord& flat : flats2) {
+ flats2_size += absl::CordTestPeer::Tree(flat)->flat()->AllocatedSize();
+ cord2.Append(std::move(flat));
+ }
+ size_t rep2_size = sizeof(CordRepBtree) + flats2_size;
+
+ EXPECT_EQ(cord2.EstimatedMemoryUsage(), sizeof(absl::Cord) + rep2_size);
+ EXPECT_EQ(cord2.EstimatedMemoryUsage(kFairShare),
+ sizeof(absl::Cord) + rep2_size);
+
+ absl::Cord cord(cord1);
+ cord.Append(std::move(cord2));
+
+ EXPECT_EQ(cord.EstimatedMemoryUsage(),
+ sizeof(absl::Cord) + sizeof(CordRepBtree) + rep1_size + rep2_size);
+ EXPECT_EQ(cord.EstimatedMemoryUsage(kFairShare),
+ sizeof(absl::Cord) + sizeof(CordRepBtree) + rep1_shared_size / 2 +
+ rep2_size);
+}
+
+TEST(CordTest, CordMemoryUsageConcat) {
+ absl::cord_internal::enable_cord_btree(false);
+
+ absl::Cord cord1;
+ size_t flats1_size = 0;
+ absl::Cord flats1[4] = {MakeCord(1000, 'a'), MakeCord(1100, 'a'),
+ MakeCord(1200, 'a'), MakeCord(1300, 'a')};
+ for (absl::Cord flat : flats1) {
+ flats1_size += absl::CordTestPeer::Tree(flat)->flat()->AllocatedSize();
+ cord1.Append(std::move(flat));
+ }
+
+ // Make sure the created cord is a CONCAT tree. Under some builds, such as
+ // Windows DLL builds, we may see ODR-like effects on the flag, meaning the
+ // DLL code will run with the picked-up default.
+ if (!absl::CordTestPeer::Tree(cord1)->IsConcat()) {
+ ABSL_RAW_LOG(WARNING, "Cord library code not respecting btree flag");
+ return;
+ }
+
+ size_t rep1_size = sizeof(CordRepConcat) * 3 + flats1_size;
+ size_t rep1_shared_size = sizeof(CordRepConcat) * 3 + flats1_size / 2;
+
+ EXPECT_EQ(cord1.EstimatedMemoryUsage(), sizeof(absl::Cord) + rep1_size);
+ EXPECT_EQ(cord1.EstimatedMemoryUsage(kFairShare),
+ sizeof(absl::Cord) + rep1_shared_size);
+
+ absl::Cord cord2;
+ size_t flats2_size = 0;
+ absl::Cord flats2[4] = {MakeCord(600, 'a'), MakeCord(700, 'a'),
+ MakeCord(800, 'a'), MakeCord(900, 'a')};
+ for (absl::Cord& flat : flats2) {
+ flats2_size += absl::CordTestPeer::Tree(flat)->flat()->AllocatedSize();
+ cord2.Append(std::move(flat));
+ }
+
+ size_t rep2_size = sizeof(CordRepConcat) * 3 + flats2_size;
+
+ EXPECT_EQ(cord2.EstimatedMemoryUsage(), sizeof(absl::Cord) + rep2_size);
+ EXPECT_EQ(cord2.EstimatedMemoryUsage(kFairShare),
+ sizeof(absl::Cord) + rep2_size);
-TEST_P(CordTest, CordMemoryUsageSubString) {
- static const int kLength = 2000;
- using absl::strings_internal::CordTestAccess;
- absl::Cord a(std::string(kLength, 'a'));
- size_t length = a.EstimatedMemoryUsage();
- AddExternalMemory(std::string(kLength, 'b'), &a);
- absl::Cord b = a.Subcord(0, kLength + kLength / 2);
- size_t delta = b.EstimatedMemoryUsage() - length;
- EXPECT_GT(delta, kLength);
- EXPECT_LE(delta, kLength * 1.5);
+ absl::Cord cord(cord1);
+ cord.Append(std::move(cord2));
+ EXPECT_EQ(cord.EstimatedMemoryUsage(),
+ sizeof(absl::Cord) + sizeof(CordRepConcat) + rep1_size + rep2_size);
+ EXPECT_EQ(cord.EstimatedMemoryUsage(kFairShare),
+ sizeof(absl::Cord) + sizeof(CordRepConcat) + rep1_shared_size / 2 +
+ rep2_size);
}
// Regtest for a change that had to be rolled back because it expanded out
diff --git a/absl/strings/substitute.h b/absl/strings/substitute.h
index dae4e63f..6d2b08ab 100644
--- a/absl/strings/substitute.h
+++ b/absl/strings/substitute.h
@@ -174,6 +174,14 @@ class Arg {
// "0x<hex value>". However, in the case of `nullptr`, "NULL" is printed.
Arg(const void* value); // NOLINT(runtime/explicit)
+ // Normal enums are already handled by the integer formatters.
+ // This overload matches only scoped enums.
+ template <typename T,
+ typename = typename std::enable_if<
+ std::is_enum<T>{} && !std::is_convertible<T, int>{}>::type>
+ Arg(T value) // NOLINT(google-explicit-constructor)
+ : Arg(static_cast<typename std::underlying_type<T>::type>(value)) {}
+
Arg(const Arg&) = delete;
Arg& operator=(const Arg&) = delete;
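The Arg overload added above participates in overload resolution only for scoped enums: std::is_enum is true, but a scoped enum has no implicit conversion to int, so the existing integer constructors keep handling unscoped enums. A standalone sketch of the same SFINAE dispatch, using a hypothetical Format() helper rather than the real Arg class:

#include <cstdint>
#include <iostream>
#include <type_traits>

// Stand-in for the non-template integer Arg constructors: integral values and
// unscoped enums reach this overload via the usual implicit conversions.
void Format(long long value) { std::cout << value << "\n"; }

// Matches scoped enums only, then forwards the value as its underlying type.
// Unscoped enums fail the !is_convertible<T, int> test and keep using the
// integral overload above.
template <typename T,
          typename = typename std::enable_if<
              std::is_enum<T>::value &&
              !std::is_convertible<T, int>::value>::type>
void Format(T value) {
  Format(static_cast<typename std::underlying_type<T>::type>(value));
}

enum Unscoped { kOld = 7 };
enum class Scoped : int64_t { kNew = 42 };

int main() {
  Format(kOld);          // implicit conversion to long long, prints 7
  Format(Scoped::kNew);  // scoped-enum overload, prints 42
  return 0;
}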
diff --git a/absl/strings/substitute_test.cc b/absl/strings/substitute_test.cc
index 442c9215..9e6b9403 100644
--- a/absl/strings/substitute_test.cc
+++ b/absl/strings/substitute_test.cc
@@ -184,6 +184,54 @@ TEST(SubstituteTest, VectorBoolRef) {
EXPECT_EQ("Logic be like: true false true false", str);
}
+TEST(SubstituteTest, Enums) {
+ enum UnscopedEnum { kEnum0 = 0, kEnum1 = 1 };
+ EXPECT_EQ("0 1", absl::Substitute("$0 $1", UnscopedEnum::kEnum0,
+ UnscopedEnum::kEnum1));
+
+ enum class ScopedEnum { kEnum0 = 0, kEnum1 = 1 };
+ EXPECT_EQ("0 1",
+ absl::Substitute("$0 $1", ScopedEnum::kEnum0, ScopedEnum::kEnum1));
+
+ enum class ScopedEnumInt32 : int32_t { kEnum0 = 989, kEnum1 = INT32_MIN };
+ EXPECT_EQ("989 -2147483648",
+ absl::Substitute("$0 $1", ScopedEnumInt32::kEnum0,
+ ScopedEnumInt32::kEnum1));
+
+ enum class ScopedEnumUInt32 : uint32_t { kEnum0 = 1, kEnum1 = UINT32_MAX };
+ EXPECT_EQ("1 4294967295", absl::Substitute("$0 $1", ScopedEnumUInt32::kEnum0,
+ ScopedEnumUInt32::kEnum1));
+
+ enum class ScopedEnumInt64 : int64_t { kEnum0 = -1, kEnum1 = 42949672950 };
+ EXPECT_EQ("-1 42949672950", absl::Substitute("$0 $1", ScopedEnumInt64::kEnum0,
+ ScopedEnumInt64::kEnum1));
+
+ enum class ScopedEnumUInt64 : uint64_t { kEnum0 = 1, kEnum1 = 42949672950 };
+ EXPECT_EQ("1 42949672950", absl::Substitute("$0 $1", ScopedEnumUInt64::kEnum0,
+ ScopedEnumUInt64::kEnum1));
+
+ enum class ScopedEnumChar : signed char { kEnum0 = -1, kEnum1 = 1 };
+ EXPECT_EQ("-1 1", absl::Substitute("$0 $1", ScopedEnumChar::kEnum0,
+ ScopedEnumChar::kEnum1));
+
+ enum class ScopedEnumUChar : unsigned char {
+ kEnum0 = 0,
+ kEnum1 = 1,
+ kEnumMax = 255
+ };
+ EXPECT_EQ("0 1 255", absl::Substitute("$0 $1 $2", ScopedEnumUChar::kEnum0,
+ ScopedEnumUChar::kEnum1,
+ ScopedEnumUChar::kEnumMax));
+
+ enum class ScopedEnumInt16 : int16_t { kEnum0 = -100, kEnum1 = 10000 };
+ EXPECT_EQ("-100 10000", absl::Substitute("$0 $1", ScopedEnumInt16::kEnum0,
+ ScopedEnumInt16::kEnum1));
+
+ enum class ScopedEnumUInt16 : uint16_t { kEnum0 = 0, kEnum1 = 10000 };
+ EXPECT_EQ("0 10000", absl::Substitute("$0 $1", ScopedEnumUInt16::kEnum0,
+ ScopedEnumUInt16::kEnum1));
+}
+
#ifdef GTEST_HAS_DEATH_TEST
TEST(SubstituteDeathTest, SubstituteDeath) {
diff --git a/absl/synchronization/mutex.cc b/absl/synchronization/mutex.cc
index 3af4cda9..376ea794 100644
--- a/absl/synchronization/mutex.cc
+++ b/absl/synchronization/mutex.cc
@@ -109,7 +109,7 @@ static inline bool EvalConditionAnnotated(const Condition *cond, Mutex *mu,
bool locking, bool trylock,
bool read_lock);
-void RegisterMutexProfiler(void (*fn)(int64_t wait_timestamp)) {
+void RegisterMutexProfiler(void (*fn)(int64_t wait_cycles)) {
submit_profile_data.Store(fn);
}
@@ -2315,16 +2315,18 @@ ABSL_ATTRIBUTE_NOINLINE void Mutex::UnlockSlow(SynchWaitParams *waitp) {
} // end of for(;;)-loop
if (wake_list != kPerThreadSynchNull) {
- int64_t enqueue_timestamp = wake_list->waitp->contention_start_cycles;
- bool cond_waiter = wake_list->cond_waiter;
+ int64_t wait_cycles = 0;
+ int64_t now = base_internal::CycleClock::Now();
do {
+ // Sample lock contention events only if the waiter was trying to acquire
+ // the lock, not waiting on a condition variable or Condition.
+ if (!wake_list->cond_waiter) {
+ wait_cycles += (now - wake_list->waitp->contention_start_cycles);
+ wake_list->waitp->contention_start_cycles = now;
+ }
wake_list = Wakeup(wake_list); // wake waiters
} while (wake_list != kPerThreadSynchNull);
- if (!cond_waiter) {
- // Sample lock contention events only if the (first) waiter was trying to
- // acquire the lock, not waiting on a condition variable or Condition.
- int64_t wait_cycles =
- base_internal::CycleClock::Now() - enqueue_timestamp;
+ if (wait_cycles > 0) {
mutex_tracer("slow release", this, wait_cycles);
ABSL_TSAN_MUTEX_PRE_DIVERT(this, 0);
submit_profile_data(wait_cycles);
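With this change the profiler hook receives wait cycles summed over the batch of waiters woken by a single contended unlock, and each waiter's contention_start_cycles is reset so later wakeups are not double counted. A hedged sketch of registering such a hook (the aggregation and printing are illustrative; the callback must not block or acquire locks):

#include <atomic>
#include <cstdint>
#include <cstdio>

#include "absl/synchronization/mutex.h"

// Accumulates contention reported by Mutex::UnlockSlow. The argument is a
// number of CycleClock ticks spent waiting, summed over the woken waiters of
// one contended unlock; it is not a timestamp.
static std::atomic<int64_t> total_wait_cycles{0};

static void MutexProfiler(int64_t wait_cycles) {
  total_wait_cycles.fetch_add(wait_cycles, std::memory_order_relaxed);
}

int main() {
  absl::RegisterMutexProfiler(&MutexProfiler);
  // ... run the contended workload here ...
  std::printf("contended wait cycles: %lld\n",
              static_cast<long long>(total_wait_cycles.load()));
  return 0;
}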
diff --git a/absl/synchronization/mutex.h b/absl/synchronization/mutex.h
index 38338f24..9a3e438f 100644
--- a/absl/synchronization/mutex.h
+++ b/absl/synchronization/mutex.h
@@ -984,14 +984,15 @@ inline Condition::Condition(const T *object,
// Register a hook for profiling support.
//
// The function pointer registered here will be called whenever a mutex is
-// contended. The callback is given the absl/base/cycleclock.h timestamp when
-// waiting began.
+// contended. The callback is given the cycles for which waiting happened (as
+// measured by //absl/base/internal/cycleclock.h, and which may not
+// be real "cycle" counts).
//
// Calls to this function do not race or block, but there is no ordering
// guaranteed between calls to this function and call to the provided hook.
// In particular, the previously registered hook may still be called for some
// time after this function returns.
-void RegisterMutexProfiler(void (*fn)(int64_t wait_timestamp));
+void RegisterMutexProfiler(void (*fn)(int64_t wait_cycles));
// Register a hook for Mutex tracing.
//