Diffstat (limited to 'absl/container/internal/compressed_tuple_test.cc')
-rw-r--r--  absl/container/internal/compressed_tuple_test.cc  77
1 file changed, 70 insertions(+), 7 deletions(-)
diff --git a/absl/container/internal/compressed_tuple_test.cc b/absl/container/internal/compressed_tuple_test.cc
index 74111f97..c3edf542 100644
--- a/absl/container/internal/compressed_tuple_test.cc
+++ b/absl/container/internal/compressed_tuple_test.cc
@@ -15,7 +15,11 @@
#include "absl/container/internal/compressed_tuple.h"
#include <memory>
+#include <set>
#include <string>
+#include <type_traits>
+#include <utility>
+#include <vector>
#include "gmock/gmock.h"
#include "gtest/gtest.h"
@@ -27,14 +31,22 @@
// These are declared at global scope purely so that error messages
// are smaller and easier to understand.
-enum class CallType { kConstRef, kConstMove };
+enum class CallType { kMutableRef, kConstRef, kMutableMove, kConstMove };
template <int>
struct Empty {
+ constexpr CallType value() & { return CallType::kMutableRef; }
constexpr CallType value() const& { return CallType::kConstRef; }
+ constexpr CallType value() && { return CallType::kMutableMove; }
constexpr CallType value() const&& { return CallType::kConstMove; }
};
+// Unconditionally return an lvalue reference to `t`.
+template <typename T>
+constexpr T& AsLValue(T&& t) {
+ return t;
+}
+
template <typename T>
struct NotEmpty {
T value;
@@ -54,6 +66,7 @@ namespace {
using absl::test_internal::CopyableMovableInstance;
using absl::test_internal::InstanceTracker;
+using ::testing::Each;
TEST(CompressedTupleTest, Sizeof) {
EXPECT_EQ(sizeof(int), sizeof(CompressedTuple<int>));
@@ -70,6 +83,30 @@ TEST(CompressedTupleTest, Sizeof) {
sizeof(CompressedTuple<int, Empty<0>, NotEmpty<double>, Empty<1>>));
}
+TEST(CompressedTupleTest, PointerToEmpty) {
+ auto to_void_ptrs = [](const auto&... objs) {
+ return std::vector<const void*>{static_cast<const void*>(&objs)...};
+ };
+ {
+ using Tuple = CompressedTuple<int, Empty<0>>;
+ EXPECT_EQ(sizeof(int), sizeof(Tuple));
+ Tuple t;
+ EXPECT_THAT(to_void_ptrs(t.get<1>()), Each(&t));
+ }
+ {
+ using Tuple = CompressedTuple<int, Empty<0>, Empty<1>>;
+ EXPECT_EQ(sizeof(int), sizeof(Tuple));
+ Tuple t;
+ EXPECT_THAT(to_void_ptrs(t.get<1>(), t.get<2>()), Each(&t));
+ }
+ {
+ using Tuple = CompressedTuple<int, Empty<0>, Empty<1>, Empty<2>>;
+ EXPECT_EQ(sizeof(int), sizeof(Tuple));
+ Tuple t;
+ EXPECT_THAT(to_void_ptrs(t.get<1>(), t.get<2>(), t.get<3>()), Each(&t));
+ }
+}
+
TEST(CompressedTupleTest, OneMoveOnRValueConstructionTemp) {
InstanceTracker tracker;
CompressedTuple<CopyableMovableInstance> x1(CopyableMovableInstance(1));
@@ -346,8 +383,24 @@ TEST(CompressedTupleTest, Constexpr) {
constexpr int value() const { return v; }
int v;
};
- constexpr CompressedTuple<int, double, CompressedTuple<int>, Empty<0>> x(
- 7, 1.25, CompressedTuple<int>(5), {});
+
+ using Tuple = CompressedTuple<int, double, CompressedTuple<int>, Empty<0>>;
+
+ constexpr int r0 =
+ AsLValue(Tuple(1, 0.75, CompressedTuple<int>(9), {})).get<0>();
+ constexpr double r1 =
+ AsLValue(Tuple(1, 0.75, CompressedTuple<int>(9), {})).get<1>();
+ constexpr int r2 =
+ AsLValue(Tuple(1, 0.75, CompressedTuple<int>(9), {})).get<2>().get<0>();
+ constexpr CallType r3 =
+ AsLValue(Tuple(1, 0.75, CompressedTuple<int>(9), {})).get<3>().value();
+
+ EXPECT_EQ(r0, 1);
+ EXPECT_EQ(r1, 0.75);
+ EXPECT_EQ(r2, 9);
+ EXPECT_EQ(r3, CallType::kMutableRef);
+
+ constexpr Tuple x(7, 1.25, CompressedTuple<int>(5), {});
constexpr int x0 = x.get<0>();
constexpr double x1 = x.get<1>();
constexpr int x2 = x.get<2>().get<0>();
@@ -358,7 +411,18 @@ TEST(CompressedTupleTest, Constexpr) {
EXPECT_EQ(x2, 5);
EXPECT_EQ(x3, CallType::kConstRef);
-#if !defined(__GNUC__) || defined(__clang__) || __GNUC__ > 4
+ constexpr int m0 = Tuple(5, 0.25, CompressedTuple<int>(3), {}).get<0>();
+ constexpr double m1 = Tuple(5, 0.25, CompressedTuple<int>(3), {}).get<1>();
+ constexpr int m2 =
+ Tuple(5, 0.25, CompressedTuple<int>(3), {}).get<2>().get<0>();
+ constexpr CallType m3 =
+ Tuple(5, 0.25, CompressedTuple<int>(3), {}).get<3>().value();
+
+ EXPECT_EQ(m0, 5);
+ EXPECT_EQ(m1, 0.25);
+ EXPECT_EQ(m2, 3);
+ EXPECT_EQ(m3, CallType::kMutableMove);
+
constexpr CompressedTuple<Empty<0>, TrivialStruct, int> trivial = {};
constexpr CallType trivial0 = trivial.get<0>().value();
constexpr int trivial1 = trivial.get<1>().value();
@@ -367,7 +431,6 @@ TEST(CompressedTupleTest, Constexpr) {
EXPECT_EQ(trivial0, CallType::kConstRef);
EXPECT_EQ(trivial1, 0);
EXPECT_EQ(trivial2, 0);
-#endif
constexpr CompressedTuple<Empty<0>, NonTrivialStruct, absl::optional<int>>
non_trivial = {};
@@ -386,8 +449,8 @@ TEST(CompressedTupleTest, Constexpr) {
#if defined(__clang__)
// An apparent bug in earlier versions of gcc claims these are ambiguous.
- constexpr int x2m = absl::move(x.get<2>()).get<0>();
- constexpr CallType x3m = absl::move(x).get<3>().value();
+ constexpr int x2m = std::move(x.get<2>()).get<0>();
+ constexpr CallType x3m = std::move(x).get<3>().value();
EXPECT_EQ(x2m, 5);
EXPECT_EQ(x3m, CallType::kConstMove);
#endif
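
Taken together, the new cases pin down two behaviors: empty elements are compressed away entirely (PointerToEmpty), and element access stays usable in constant expressions on both lvalues and temporaries (the r0..r3 and m0..m3 cases). Below is a minimal standalone sketch of those guarantees, using only the API already exercised in this test; the names Tag, IntAndTag, and EmptySharesAddress are illustrative and not part of the change.

#include "absl/container/internal/compressed_tuple.h"

namespace {

// Illustrative sketch only; Tag and IntAndTag are made-up names.
using absl::container_internal::CompressedTuple;

struct Tag {};  // An empty type, analogous to Empty<N> in the test.

using IntAndTag = CompressedTuple<int, Tag>;

// The empty element is stored via the empty base class optimization, so it
// adds no storage and shares the tuple's own address.
static_assert(sizeof(IntAndTag) == sizeof(int), "empty element adds no size");

bool EmptySharesAddress() {
  IntAndTag t(42, Tag{});
  return static_cast<const void*>(&t.get<1>()) == static_cast<const void*>(&t);
}

// Element access on a temporary works in a constant expression, which is
// what the m0..m3 cases above check.
static_assert(CompressedTuple<int>(9).get<0>() == 9, "constexpr rvalue get");

}  // namespace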