| author | Abseil Team <absl-team@google.com> | 2023-03-24 16:52:03 -0700 |
|---|---|---|
| committer | Copybara-Service <copybara-worker@google.com> | 2023-03-24 16:52:57 -0700 |
| commit | c92b6ce052ec4507e6f7ae44b435ab57e3e0fba6 (patch) | |
| tree | d50111b80096a31ace6e990ff0a902853e26647e /absl | |
| parent | 700e786e607788fb514a9021375191e47484fcb1 (diff) | |
inlined_vector_test: add coverage of moving vectors of unique pointers.
std::unique_ptr is trivially relocatable, but not trivially destructible. This provides important coverage for ensuring the correctness of upcoming commits that expand the use of memcpy to more trivially relocatable types.
PiperOrigin-RevId: 519270234
Change-Id: I8e584a405633dac89bf1f67eab8145971d2ddab2
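For readers unfamiliar with the term: a type is trivially relocatable if an object can be moved to a new address by copying its raw bytes, after which the original storage is simply abandoned rather than destroyed. std::unique_ptr fits that description, but because it is not trivially destructible, a byte-wise relocation is only correct if the source slots never have their destructors run; otherwise the pointees would be deleted twice. The sketch below is not Abseil's implementation — the helper name RelocateByMemcpy and the buffer setup are made up for illustration — and it deliberately steps outside the strict C++ object model in the way such a relocation optimization does.

```cpp
// Standalone sketch (not Abseil code): byte-wise relocation of unique_ptrs.
#include <cstddef>
#include <cstring>
#include <memory>
#include <new>

// Relocates `n` unique_ptr<int> objects from `src` into the uninitialized
// storage at `dst` by copying their bytes. After this call the bytes at `src`
// must be treated as raw storage again: running ~unique_ptr on them would
// double-delete the pointees.
void RelocateByMemcpy(std::unique_ptr<int>* src, void* dst, std::size_t n) {
  std::memcpy(dst, static_cast<const void*>(src),
              n * sizeof(std::unique_ptr<int>));
}

int main() {
  constexpr std::size_t kCount = 4;
  using Ptr = std::unique_ptr<int>;

  // Source elements, constructed in raw storage so we control their lifetime.
  alignas(Ptr) unsigned char src_buf[sizeof(Ptr) * kCount];
  Ptr* src = reinterpret_cast<Ptr*>(src_buf);
  for (std::size_t i = 0; i < kCount; ++i) {
    new (&src[i]) Ptr(new int(static_cast<int>(i)));
  }

  // Destination raw storage, e.g. a freshly allocated, larger backing array.
  alignas(Ptr) unsigned char dst_buf[sizeof(Ptr) * kCount];
  Ptr* dst = reinterpret_cast<Ptr*>(dst_buf);

  RelocateByMemcpy(src, dst_buf, kCount);
  // Note: no destructor calls on src[i] here; ownership now lives in dst[i].

  // Destroy only the destination objects, freeing each int exactly once.
  for (std::size_t i = 0; i < kCount; ++i) {
    dst[i].~Ptr();
  }
  return 0;
}
```

Abseil's actual implementation details may differ; the point is only the pairing of memcpy with suppressed source destruction, which is exactly the invariant the new tests below are designed to catch if it is ever violated.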
Diffstat (limited to 'absl')
-rw-r--r-- | absl/container/inlined_vector_test.cc | 50
1 file changed, 46 insertions, 4 deletions
```diff
diff --git a/absl/container/inlined_vector_test.cc b/absl/container/inlined_vector_test.cc
index 898b40db..808f97cc 100644
--- a/absl/container/inlined_vector_test.cc
+++ b/absl/container/inlined_vector_test.cc
@@ -15,6 +15,7 @@
 #include "absl/container/inlined_vector.h"
 
 #include <algorithm>
+#include <cstddef>
 #include <forward_list>
 #include <iterator>
 #include <list>
@@ -51,15 +52,13 @@ using testing::ElementsAre;
 using testing::ElementsAreArray;
 using testing::Eq;
 using testing::Gt;
+using testing::Pointee;
 using testing::Pointwise;
 using testing::PrintToString;
+using testing::SizeIs;
 
 using IntVec = absl::InlinedVector<int, 8>;
 
-MATCHER_P(SizeIs, n, "") {
-  return testing::ExplainMatchResult(n, arg.size(), result_listener);
-}
-
 MATCHER_P(CapacityIs, n, "") {
   return testing::ExplainMatchResult(n, arg.capacity(), result_listener);
 }
@@ -262,6 +261,49 @@ TEST(IntVec, Hardened) {
 #endif
 }
 
+// Move construction of a container of unique pointers should work fine, with no
+// leaks, despite the fact that unique pointers are trivially relocatable but
+// not trivially destructible.
+TEST(UniquePtr, MoveConstruct) {
+  for (size_t size = 0; size < 16; ++size) {
+    SCOPED_TRACE(size);
+
+    absl::InlinedVector<std::unique_ptr<size_t>, 2> a;
+    for (size_t i = 0; i < size; ++i) {
+      a.push_back(std::make_unique<size_t>(i));
+    }
+
+    absl::InlinedVector<std::unique_ptr<size_t>, 2> b(std::move(a));
+
+    ASSERT_THAT(b, SizeIs(size));
+    for (size_t i = 0; i < size; ++i) {
+      ASSERT_THAT(b[i], Pointee(i));
+    }
+  }
+}
+
+// Move assignment of a container of unique pointers should work fine, with no
+// leaks, despite the fact that unique pointers are trivially relocatable but
+// not trivially destructible.
+TEST(UniquePtr, MoveAssign) {
+  for (size_t size = 0; size < 16; ++size) {
+    SCOPED_TRACE(size);
+
+    absl::InlinedVector<std::unique_ptr<size_t>, 2> a;
+    for (size_t i = 0; i < size; ++i) {
+      a.push_back(std::make_unique<size_t>(i));
+    }
+
+    absl::InlinedVector<std::unique_ptr<size_t>, 2> b;
+    b = std::move(a);
+
+    ASSERT_THAT(b, SizeIs(size));
+    for (size_t i = 0; i < size; ++i) {
+      ASSERT_THAT(b[i], Pointee(i));
+    }
+  }
+}
+
 // At the end of this test loop, the elements between [erase_begin, erase_end)
 // should have reference counts == 0, and all others elements should have
 // reference counts == 1.
```
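The "no leaks" expectation stated in the test comments is enforced by the sanitizers and heap checkers these tests are typically run under rather than by an explicit assertion. As a rough standalone analogue (hypothetical, not part of the Abseil tests), one could track live pointees with a counting deleter:

```cpp
// Standalone sketch (not Abseil code): check the "no leaks, no double frees"
// property with a counting deleter instead of relying on sanitizers.
#include <cassert>
#include <memory>
#include <utility>

#include "absl/container/inlined_vector.h"

namespace {

int live_objects = 0;  // Number of ints currently owned somewhere.

struct CountingDeleter {
  void operator()(int* p) const {
    --live_objects;
    delete p;
  }
};

using CountedPtr = std::unique_ptr<int, CountingDeleter>;

CountedPtr MakeCounted(int value) {
  ++live_objects;
  return CountedPtr(new int(value));
}

}  // namespace

int main() {
  {
    absl::InlinedVector<CountedPtr, 2> a;
    for (int i = 0; i < 8; ++i) a.push_back(MakeCounted(i));

    // Move construction: ownership of all eight ints transfers to `b`.
    absl::InlinedVector<CountedPtr, 2> b(std::move(a));
    assert(live_objects == 8);  // Nothing was leaked or freed during the move.
    assert(*b[3] == 3);         // Values arrived intact.
  }
  // Both vectors are out of scope; every int was deleted exactly once.
  assert(live_objects == 0);
  return 0;
}
```

The tests' choice of an inline capacity of 2 with sizes 0 through 15 exercises both the inlined and the heap-allocated representations, including the transitions where element relocation actually occurs.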