author     Abseil Team <absl-team@google.com>  2019-07-23 12:42:09 -0700
committer  Andy Getz <durandal@google.com>     2019-07-23 19:58:12 -0400
commit     ad1485c8986246b2ae9105e512738d0e97aec887 (patch)
tree       d4e21608f23fe8328fd8058aaa4269be44e8b810 /absl/container/internal/inlined_vector.h
parent     f3840bc5e33ce4932e35986cf3718450c6f02af2 (diff)
Export of internal Abseil changes.
-- 1f44f8f487aa3afe8248132e4081519e85671965 by CJ Johnson <johnsoncj@google.com>:

Updates the ScopedAllocatorWorks test for InlinedVector so that it no longer depends on the specific byte counts of standard library vectors. Those counts are too brittle in the face of capacity-changing modifications to InlinedVector and provide no useful signal when such changes break the test.

PiperOrigin-RevId: 259590332

-- fef7589547e9cdd04a254f6ae06e2bd9ec2b35f0 by CJ Johnson <johnsoncj@google.com>:

Updates the implementation of InlinedVector::insert(...) to be exception safe and adds exception safety tests for insert(...).

PiperOrigin-RevId: 259542968
GitOrigin-RevId: 1f44f8f487aa3afe8248132e4081519e85671965
Change-Id: I514beff56159c9c717f8d29197728011af1fecd7
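For reference, the second change above is the one shown in the diff below: a new Storage::Insert(...) routine in the internal header, intended to back the public insert(...) overloads of absl::InlinedVector. A minimal usage sketch of that public API follows; the element type and values are illustrative only and are not taken from the commit.

#include <string>

#include "absl/container/inlined_vector.h"

int main() {
  // Four elements fill the inline capacity of 4, so the insert below
  // pushes the vector onto the heap and exercises the reallocating path.
  absl::InlinedVector<std::string, 4> v = {"a", "b", "d", "e"};

  // Insert two copies of "c" before index 2. With an exception-safe
  // insert, a throwing element constructor leaves v in a valid state.
  v.insert(v.begin() + 2, 2, "c");  // v == {"a", "b", "c", "c", "d", "e"}

  return 0;
}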
Diffstat (limited to 'absl/container/internal/inlined_vector.h')
-rw-r--r--  absl/container/internal/inlined_vector.h  97
1 file changed, 97 insertions, 0 deletions
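The exception safety tests mentioned in the commit message are not part of this diff (the diffstat is limited to the internal header). For illustration only, a self-contained probe of the basic guarantee might look like the sketch below; the Thrower type and the scenario are hypothetical and are not the commit's actual test code.

#include <stdexcept>

#include "absl/container/inlined_vector.h"

// Hypothetical element type whose copy constructor can be made to throw,
// used here to exercise insert(...) under an exception.
struct Thrower {
  static bool should_throw;
  int value = 0;
  Thrower() = default;
  explicit Thrower(int v) : value(v) {}
  Thrower(const Thrower& other) : value(other.value) {
    if (should_throw) throw std::runtime_error("copy failed");
  }
  Thrower& operator=(const Thrower&) = default;
};
bool Thrower::should_throw = false;

int main() {
  absl::InlinedVector<Thrower, 2> v(4);  // already past the inline capacity

  Thrower::should_throw = true;
  try {
    v.insert(v.begin() + 1, Thrower(42));  // copy construction during insert throws
  } catch (const std::runtime_error&) {
    // With an exception-safe insert, v must still be a valid container
    // here: it can be read, resized, and destroyed normally.
  }
  Thrower::should_throw = false;
  return 0;
}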
diff --git a/absl/container/internal/inlined_vector.h b/absl/container/internal/inlined_vector.h
index 0ab2d7d..7954b2b 100644
--- a/absl/container/internal/inlined_vector.h
+++ b/absl/container/internal/inlined_vector.h
@@ -380,6 +380,10 @@ class Storage {
template <typename ValueAdapter>
void Resize(ValueAdapter values, size_type new_size);
+ template <typename ValueAdapter>
+ iterator Insert(const_iterator pos, ValueAdapter values,
+ size_type insert_count);
+
template <typename... Args>
reference EmplaceBack(Args&&... args);
@@ -564,6 +568,99 @@ auto Storage<T, N, A>::Resize(ValueAdapter values, size_type new_size) -> void {
}
template <typename T, size_t N, typename A>
+template <typename ValueAdapter>
+auto Storage<T, N, A>::Insert(const_iterator pos, ValueAdapter values,
+ size_type insert_count) -> iterator {
+ StorageView storage_view = MakeStorageView();
+
+ size_type insert_index =
+ std::distance(const_iterator(storage_view.data), pos);
+ size_type insert_end_index = insert_index + insert_count;
+ size_type new_size = storage_view.size + insert_count;
+
+ if (new_size > storage_view.capacity) {
+ AllocationTransaction allocation_tx(GetAllocPtr());
+ ConstructionTransaction construction_tx(GetAllocPtr());
+ ConstructionTransaction move_construction_tx(GetAllocPtr());
+
+ IteratorValueAdapter<MoveIterator> move_values(
+ MoveIterator(storage_view.data));
+
+ pointer new_data = allocation_tx.Allocate(
+ LegacyNextCapacityFrom(storage_view.capacity, new_size));
+
+ construction_tx.Construct(new_data + insert_index, &values, insert_count);
+
+ move_construction_tx.Construct(new_data, &move_values, insert_index);
+
+ inlined_vector_internal::ConstructElements(
+ GetAllocPtr(), new_data + insert_end_index, &move_values,
+ storage_view.size - insert_index);
+
+ inlined_vector_internal::DestroyElements(GetAllocPtr(), storage_view.data,
+ storage_view.size);
+
+ construction_tx.Commit();
+ move_construction_tx.Commit();
+ DeallocateIfAllocated();
+ AcquireAllocation(&allocation_tx);
+
+ SetAllocatedSize(new_size);
+ return iterator(new_data + insert_index);
+ } else {
+ size_type move_construction_destination_index =
+ (std::max)(insert_end_index, storage_view.size);
+
+ ConstructionTransaction move_construction_tx(GetAllocPtr());
+
+ IteratorValueAdapter<MoveIterator> move_construction_values(
+ MoveIterator(storage_view.data +
+ (move_construction_destination_index - insert_count)));
+ absl::Span<value_type> move_construction = {
+ storage_view.data + move_construction_destination_index,
+ new_size - move_construction_destination_index};
+
+ pointer move_assignment_values = storage_view.data + insert_index;
+ absl::Span<value_type> move_assignment = {
+ storage_view.data + insert_end_index,
+ move_construction_destination_index - insert_end_index};
+
+ absl::Span<value_type> insert_assignment = {move_assignment_values,
+ move_construction.size()};
+
+ absl::Span<value_type> insert_construction = {
+ insert_assignment.data() + insert_assignment.size(),
+ insert_count - insert_assignment.size()};
+
+ move_construction_tx.Construct(move_construction.data(),
+ &move_construction_values,
+ move_construction.size());
+
+ for (pointer destination = move_assignment.data() + move_assignment.size(),
+ last_destination = move_assignment.data(),
+ source = move_assignment_values + move_assignment.size();
+ ;) {
+ --destination;
+ --source;
+ if (destination < last_destination) break;
+ *destination = std::move(*source);
+ }
+
+ inlined_vector_internal::AssignElements(insert_assignment.data(), &values,
+ insert_assignment.size());
+
+ inlined_vector_internal::ConstructElements(
+ GetAllocPtr(), insert_construction.data(), &values,
+ insert_construction.size());
+
+ move_construction_tx.Commit();
+
+ AddSize(insert_count);
+ return iterator(storage_view.data + insert_index);
+ }
+}
+
+template <typename T, size_t N, typename A>
template <typename... Args>
auto Storage<T, N, A>::EmplaceBack(Args&&... args) -> reference {
StorageView storage_view = MakeStorageView();