author     Aaron Jacobs <jacobsa@google.com>    2023-03-31 21:57:32 -0700
committer  Copybara-Service <copybara-worker@google.com>    2023-03-31 21:58:26 -0700
commit  ea980d1963457526f8094e86f8b3f59144ea3f31 (patch)
tree    779c7888e269c0f6bd39a8cc950569ff9b117855 /absl/container
parent  acfd33824aacbb6d0467fdbc2bd4900582966933 (diff)
inlined_vector: stop sharing the memcpy-based move-assignment path.
The fact that this is called from paths where the element type may not have a trivial destructor is preventing an optimization (see the TODO). Stop calling from those paths so that the optimization can be made in an upcoming CL.

PiperOrigin-RevId: 521087730
Change-Id: Id2b66d8f36bb0d294784d0793fdd8f07e315739f
Diffstat (limited to 'absl/container')
-rw-r--r--  absl/container/inlined_vector.h  26
1 file changed, 20 insertions(+), 6 deletions(-)
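For context, the optimization the TODO refers to is possible because the MemcpyPolicy overload is only ever selected for element types whose destruction is a no-op; once the other overloads stop routing through it, its explicit destructor calls can be dropped. Below is a minimal sketch of that kind of compile-time policy choice. It is illustrative only: the trait name and the exact set of conditions are assumptions, not Abseil's actual dispatch.

#include <memory>
#include <type_traits>

struct MemcpyPolicy {};
struct ElementwiseAssignPolicy {};

// Hypothetical trait: take the memcpy-based move-assignment path only when
// moving T is equivalent to copying its bytes and destroying T does nothing,
// so the destination's old elements never need individual destructor calls.
template <typename T>
using MoveAssignPolicy =
    std::conditional_t<std::is_trivially_destructible_v<T> &&
                           std::is_trivially_move_assignable_v<T> &&
                           std::is_trivially_move_constructible_v<T>,
                       MemcpyPolicy, ElementwiseAssignPolicy>;

static_assert(std::is_same_v<MoveAssignPolicy<int>, MemcpyPolicy>);
static_assert(std::is_same_v<MoveAssignPolicy<std::unique_ptr<int>>,
                             ElementwiseAssignPolicy>);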
diff --git a/absl/container/inlined_vector.h b/absl/container/inlined_vector.h
index 5674ce99..1d4878b0 100644
--- a/absl/container/inlined_vector.h
+++ b/absl/container/inlined_vector.h
@@ -831,10 +831,9 @@ class InlinedVector {
   friend H AbslHashValue(H h, const absl::InlinedVector<TheT, TheN, TheA>& a);
 
   void MoveAssignment(MemcpyPolicy, InlinedVector&& other) {
-    // TODO(b/274984172): we shouldn't need to do this. We already know the
-    // elements are trivially destructible when our move-assignment policy is
-    // MemcpyPolicy. Except the other overloads of MoveAssignment call this one.
-    // Make them not.
+    // TODO(b/274984172): we shouldn't need to do this, since we already know
+    // the elements are trivially destructible when our move-assignment policy
+    // is MemcpyPolicy.
     inlined_vector_internal::DestroyAdapter<A>::DestroyElements(
         storage_.GetAllocator(), data(), size());
     storage_.DeallocateIfAllocated();
@@ -843,12 +842,27 @@
     other.storage_.SetInlinedSize(0);
   }
 
+  // Destroy our existing elements, if any, and adopt the heap-allocated
+  // elements of the other vector.
+  //
+  // REQUIRES: other.storage_.GetIsAllocated()
+  void DestroyExistingAndAdopt(InlinedVector&& other) {
+    ABSL_HARDENING_ASSERT(other.storage_.GetIsAllocated());
+
+    inlined_vector_internal::DestroyAdapter<A>::DestroyElements(
+        storage_.GetAllocator(), data(), size());
+    storage_.DeallocateIfAllocated();
+
+    storage_.MemcpyFrom(other.storage_);
+    other.storage_.SetInlinedSize(0);
+  }
+
   void MoveAssignment(ElementwiseAssignPolicy, InlinedVector&& other) {
     // Fast path: if the other vector is on the heap then we don't worry about
     // actually move-assigning each element. Instead we only throw away our own
     // existing elements and adopt the heap allocation of the other vector.
     if (other.storage_.GetIsAllocated()) {
-      MoveAssignment(MemcpyPolicy{}, std::move(other));
+      DestroyExistingAndAdopt(std::move(other));
       return;
     }
 
@@ -862,7 +876,7 @@
     // actually move-assigning each element. Instead we only throw away our own
     // existing elements and adopt the heap allocation of the other vector.
     if (other.storage_.GetIsAllocated()) {
-      MoveAssignment(MemcpyPolicy{}, std::move(other));
+      DestroyExistingAndAdopt(std::move(other));
       return;
     }
 
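A small usage example of the situation the fast path above covers; the element counts and inline capacity are arbitrary choices for illustration. Because std::string is not trivially destructible, move assignment goes through the ElementwiseAssignPolicy overload, and since the source vector is heap-allocated it now takes the DestroyExistingAndAdopt route rather than the MemcpyPolicy overload.

#include <string>
#include <utility>

#include "absl/container/inlined_vector.h"

int main() {
  // Inline capacity of 2; four elements force `src` onto the heap.
  absl::InlinedVector<std::string, 2> src = {"a", "b", "c", "d"};
  absl::InlinedVector<std::string, 2> dst = {"x", "y"};

  // `dst`'s existing elements are destroyed and `src`'s heap allocation is
  // adopted wholesale; no element-by-element move assignment happens.
  dst = std::move(src);
  return 0;
}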