author    Abseil Team <absl-team@google.com>    2019-08-20 11:39:40 -0700
committer Xiaoyi Zhang <zhangxy@google.com>     2019-08-20 15:59:49 -0400
commit    f0afae0d49af3e15a7169e019634d7719143d94d (patch)
tree      25923b3b33eb9c5a644fab14193195df7225401b /absl/container
parent    0e7afdcbd24c7e5b7cab4e0217d8886f1525b520 (diff)
Export of internal Abseil changes
-- 0f6565955231dc74ebad62ef32a18c457afa2dc7 by Abseil Team <absl-team@google.com>:

Document guarantee that we do not move from rvalue arguments if no insertion
happens with absl::raw_hash_map::try_emplace, as done with
std::unordered_map::try_emplace.

PiperOrigin-RevId: 264430409

-- 292e6b9e08fa689e8400d7f2db94cbcab29d5889 by CJ Johnson <johnsoncj@google.com>:

Removes use of aligned_storage in FixedArray and InlinedVector in favor of
aligned char buffers.

PiperOrigin-RevId: 264385559

-- aa0b19ad11ae5702022feee0e2e6434cfb28c9e9 by Derek Mauro <dmauro@google.com>:

Make the unit tests for absl::any, absl::optional, and absl::variant no-ops
when these types are just aliases for the corresponding std:: types. We have
no way to fix standard library implementation bugs, so don't bother working
around them. Also disable the corresponding exception-safety tests when
exceptions are not enabled.

Fixes https://github.com/abseil/abseil-cpp/pull/360

PiperOrigin-RevId: 264382050

-- 65896a911f36481b89b4712c83b91c90a76b64e8 by Abseil Team <absl-team@google.com>:

Improve documentation on erase

PiperOrigin-RevId: 264381266

GitOrigin-RevId: 0f6565955231dc74ebad62ef32a18c457afa2dc7
Change-Id: I74b9bd2ddf84526014104f17e87de70bd3fe65fa
Diffstat (limited to 'absl/container')
-rw-r--r--  absl/container/BUILD.bazel                              |  1
-rw-r--r--  absl/container/CMakeLists.txt                           |  1
-rw-r--r--  absl/container/fixed_array.h                            | 28
-rw-r--r--  absl/container/flat_hash_map.h                          |  4
-rw-r--r--  absl/container/flat_hash_map_test.cc                    |  7
-rw-r--r--  absl/container/internal/hash_generator_testing.h        |  9
-rw-r--r--  absl/container/internal/inlined_vector.h                |  4
-rw-r--r--  absl/container/internal/raw_hash_map.h                  |  3
-rw-r--r--  absl/container/internal/raw_hash_set.h                  | 13
-rw-r--r--  absl/container/internal/unordered_map_modifiers_test.h  | 41
-rw-r--r--  absl/container/internal/unordered_map_test.cc           |  8
-rw-r--r--  absl/container/node_hash_map.h                          |  4
12 files changed, 97 insertions, 26 deletions
diff --git a/absl/container/BUILD.bazel b/absl/container/BUILD.bazel
index 45c90528..ec890190 100644
--- a/absl/container/BUILD.bazel
+++ b/absl/container/BUILD.bazel
@@ -448,6 +448,7 @@ cc_library(
linkopts = ABSL_DEFAULT_LINKOPTS,
deps = [
":hash_policy_testing",
+ "//absl/memory",
"//absl/meta:type_traits",
"//absl/strings",
],
diff --git a/absl/container/CMakeLists.txt b/absl/container/CMakeLists.txt
index fb966ec7..638c2759 100644
--- a/absl/container/CMakeLists.txt
+++ b/absl/container/CMakeLists.txt
@@ -510,6 +510,7 @@ absl_cc_library(
${ABSL_TEST_COPTS}
DEPS
absl::hash_policy_testing
+ absl::memory
absl::meta
absl::strings
TESTONLY
diff --git a/absl/container/fixed_array.h b/absl/container/fixed_array.h
index 2a8240ae..70e94ad5 100644
--- a/absl/container/fixed_array.h
+++ b/absl/container/fixed_array.h
@@ -31,7 +31,6 @@
#define ABSL_CONTAINER_FIXED_ARRAY_H_
#include <algorithm>
-#include <array>
#include <cassert>
#include <cstddef>
#include <initializer_list>
@@ -386,8 +385,7 @@ class FixedArray {
// error: call to int __builtin___sprintf_chk(etc...)
// will always overflow destination buffer [-Werror]
//
- template <typename OuterT = value_type,
- typename InnerT = absl::remove_extent_t<OuterT>,
+ template <typename OuterT, typename InnerT = absl::remove_extent_t<OuterT>,
size_t InnerN = std::extent<OuterT>::value>
struct StorageElementWrapper {
InnerT array[InnerN];
@@ -396,8 +394,6 @@ class FixedArray {
using StorageElement =
absl::conditional_t<std::is_array<value_type>::value,
StorageElementWrapper<value_type>, value_type>;
- using StorageElementBuffer =
- absl::aligned_storage_t<sizeof(StorageElement), alignof(StorageElement)>;
static pointer AsValueType(pointer ptr) { return ptr; }
static pointer AsValueType(StorageElementWrapper<value_type>* ptr) {
@@ -407,25 +403,25 @@ class FixedArray {
static_assert(sizeof(StorageElement) == sizeof(value_type), "");
static_assert(alignof(StorageElement) == alignof(value_type), "");
- struct NonEmptyInlinedStorage {
- StorageElement* data() {
- return reinterpret_cast<StorageElement*>(inlined_storage_.data());
- }
+ class NonEmptyInlinedStorage {
+ public:
+ StorageElement* data() { return reinterpret_cast<StorageElement*>(buff_); }
+ void AnnotateConstruct(size_type n);
+ void AnnotateDestruct(size_type n);
#ifdef ADDRESS_SANITIZER
void* RedzoneBegin() { return &redzone_begin_; }
void* RedzoneEnd() { return &redzone_end_ + 1; }
#endif // ADDRESS_SANITIZER
- void AnnotateConstruct(size_type);
- void AnnotateDestruct(size_type);
-
+ private:
ADDRESS_SANITIZER_REDZONE(redzone_begin_);
- std::array<StorageElementBuffer, inline_elements> inlined_storage_;
+ alignas(StorageElement) char buff_[sizeof(StorageElement[inline_elements])];
ADDRESS_SANITIZER_REDZONE(redzone_end_);
};
- struct EmptyInlinedStorage {
+ class EmptyInlinedStorage {
+ public:
StorageElement* data() { return nullptr; }
void AnnotateConstruct(size_type) {}
void AnnotateDestruct(size_type) {}
@@ -459,9 +455,7 @@ class FixedArray {
size_type size() const { return size_alloc_.template get<0>(); }
StorageElement* begin() const { return data_; }
StorageElement* end() const { return begin() + size(); }
- allocator_type& alloc() {
- return size_alloc_.template get<1>();
- }
+ allocator_type& alloc() { return size_alloc_.template get<1>(); }
private:
static bool UsingInlinedStorage(size_type n) {
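
For context on the change above: an `alignas(StorageElement) char` buffer of `sizeof(StorageElement[inline_elements])` bytes provides the same suitably aligned raw storage that the removed `absl::aligned_storage_t` array did, with elements created in it via placement new and read back through a pointer cast, just as `data()` does. Below is a minimal, self-contained sketch of that pattern; the `TinyBuffer` type is hypothetical and not part of this patch.

#include <cassert>
#include <cstddef>
#include <new>
#include <utility>

// Hypothetical TinyBuffer: raw, correctly aligned bytes for up to N objects of
// type T, constructed lazily with placement new -- the same storage pattern as
// NonEmptyInlinedStorage above, without absl::aligned_storage_t.
template <typename T, std::size_t N>
class TinyBuffer {
 public:
  ~TinyBuffer() {
    for (std::size_t i = 0; i != size_; ++i) data()[i].~T();
  }

  template <typename... Args>
  void emplace_back(Args&&... args) {
    assert(size_ < N);
    // Construct directly inside the raw buffer; no T is default-constructed
    // up front, unlike a plain T member array.
    ::new (static_cast<void*>(data() + size_)) T(std::forward<Args>(args)...);
    ++size_;
  }

  T* data() { return reinterpret_cast<T*>(buff_); }
  std::size_t size() const { return size_; }

 private:
  alignas(T) char buff_[sizeof(T[N])];
  std::size_t size_ = 0;
};

int main() {
  TinyBuffer<int, 4> b;
  b.emplace_back(42);
  return b.data()[0] == 42 ? 0 : 1;
}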
diff --git a/absl/container/flat_hash_map.h b/absl/container/flat_hash_map.h
index 0bc501b1..5c16ac88 100644
--- a/absl/container/flat_hash_map.h
+++ b/absl/container/flat_hash_map.h
@@ -360,6 +360,10 @@ class flat_hash_map : public absl::container_internal::raw_hash_map<
// Inserts (via copy or move) the element of the specified key into the
// `flat_hash_map` using the position of `hint` as a non-binding suggestion
// for where to begin the insertion search.
+ //
+ // All `try_emplace()` overloads make the same guarantees regarding rvalue
+ // arguments as `std::unordered_map::try_emplace()`, namely that these
+ // functions will not move from rvalue arguments if insertions do not happen.
using Base::try_emplace;
// flat_hash_map::extract()
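
A usage sketch of the guarantee documented above (a hypothetical example, not part of the patch): when the key is already present, `try_emplace` performs no insertion and leaves its rvalue argument unmoved, which is observable with a `std::unique_ptr` mapped type.

#include <cassert>
#include <memory>
#include <utility>

#include "absl/container/flat_hash_map.h"

int main() {
  absl::flat_hash_map<int, std::unique_ptr<int>> m;
  m.try_emplace(1, std::make_unique<int>(10));  // inserted; argument is moved from

  auto v = std::make_unique<int>(20);
  auto result = m.try_emplace(1, std::move(v));  // key 1 is already present
  assert(!result.second);   // no insertion happened...
  assert(v != nullptr);     // ...so `v` was not moved from
  assert(*m.at(1) == 10);   // the existing mapped value is untouched
  return 0;
}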
diff --git a/absl/container/flat_hash_map_test.cc b/absl/container/flat_hash_map_test.cc
index ebcb560f..6cff1a25 100644
--- a/absl/container/flat_hash_map_test.cc
+++ b/absl/container/flat_hash_map_test.cc
@@ -14,6 +14,8 @@
#include "absl/container/flat_hash_map.h"
+#include <memory>
+
#include "absl/container/internal/hash_generator_testing.h"
#include "absl/container/internal/unordered_map_constructor_test.h"
#include "absl/container/internal/unordered_map_lookup_test.h"
@@ -46,6 +48,11 @@ INSTANTIATE_TYPED_TEST_SUITE_P(FlatHashMap, LookupTest, MapTypes);
INSTANTIATE_TYPED_TEST_SUITE_P(FlatHashMap, MembersTest, MapTypes);
INSTANTIATE_TYPED_TEST_SUITE_P(FlatHashMap, ModifiersTest, MapTypes);
+using UniquePtrMapTypes = ::testing::Types<Map<int, std::unique_ptr<int>>>;
+
+INSTANTIATE_TYPED_TEST_SUITE_P(FlatHashMap, UniquePtrModifiersTest,
+ UniquePtrMapTypes);
+
TEST(FlatHashMap, StandardLayout) {
struct Int {
explicit Int(size_t value) : value(value) {}
diff --git a/absl/container/internal/hash_generator_testing.h b/absl/container/internal/hash_generator_testing.h
index 27fb84f5..477215cd 100644
--- a/absl/container/internal/hash_generator_testing.h
+++ b/absl/container/internal/hash_generator_testing.h
@@ -19,6 +19,7 @@
#define ABSL_CONTAINER_INTERNAL_HASH_GENERATOR_TESTING_H_
#include <stdint.h>
+
#include <algorithm>
#include <iosfwd>
#include <random>
@@ -27,6 +28,7 @@
#include <utility>
#include "absl/container/internal/hash_policy_testing.h"
+#include "absl/memory/memory.h"
#include "absl/meta/type_traits.h"
#include "absl/strings/string_view.h"
@@ -129,6 +131,13 @@ struct Generator<std::tuple<Ts...>> {
}
};
+template <class T>
+struct Generator<std::unique_ptr<T>> {
+ std::unique_ptr<T> operator()() const {
+ return absl::make_unique<T>(Generator<T>()());
+ }
+};
+
template <class U>
struct Generator<U, absl::void_t<decltype(std::declval<U&>().key()),
decltype(std::declval<U&>().value())>>
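
The new specialization above plugs into the test-only `Generator` machinery. Here is a simplified, self-contained sketch of the same partial-specialization pattern; the primary template is a stand-in (the real Abseil generator produces random values), not the library's implementation.

#include <memory>

// Simplified stand-in for the test-only generator: the primary template just
// default-constructs a T.
template <class T, class E = void>
struct Generator {
  T operator()() const { return T{}; }
};

// Partial specialization mirroring the one added above: generate the pointee,
// then wrap it, so maps with std::unique_ptr mapped types can be exercised.
template <class T>
struct Generator<std::unique_ptr<T>> {
  std::unique_ptr<T> operator()() const {
    return std::make_unique<T>(Generator<T>()());
  }
};

int main() {
  std::unique_ptr<int> p = Generator<std::unique_ptr<int>>()();
  return (p != nullptr && *p == 0) ? 0 : 1;
}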
diff --git a/absl/container/internal/inlined_vector.h b/absl/container/internal/inlined_vector.h
index d7c616cf..54369c85 100644
--- a/absl/container/internal/inlined_vector.h
+++ b/absl/container/internal/inlined_vector.h
@@ -445,9 +445,7 @@ class Storage {
};
struct Inlined {
- using InlinedDataElement =
- absl::aligned_storage_t<sizeof(value_type), alignof(value_type)>;
- InlinedDataElement inlined_data[N];
+ alignas(value_type) char inlined_data[sizeof(value_type[N])];
};
union Data {
diff --git a/absl/container/internal/raw_hash_map.h b/absl/container/internal/raw_hash_map.h
index 6a9c730c..7dad120a 100644
--- a/absl/container/internal/raw_hash_map.h
+++ b/absl/container/internal/raw_hash_map.h
@@ -109,6 +109,9 @@ class raw_hash_map : public raw_hash_set<Policy, Hash, Eq, Alloc> {
return insert_or_assign(k, v).first;
}
+ // All `try_emplace()` overloads make the same guarantees regarding rvalue
+ // arguments as `std::unordered_map::try_emplace()`, namely that these
+ // functions will not move from rvalue arguments if insertions do not happen.
template <class K = key_type, class... Args,
typename std::enable_if<
!std::is_convertible<K, const_iterator>::value, int>::type = 0,
diff --git a/absl/container/internal/raw_hash_set.h b/absl/container/internal/raw_hash_set.h
index 656e9806..2e6f4dd3 100644
--- a/absl/container/internal/raw_hash_set.h
+++ b/absl/container/internal/raw_hash_set.h
@@ -1133,15 +1133,16 @@ class raw_hash_set {
}
// Erases the element pointed to by `it`. Unlike `std::unordered_set::erase`,
- // this method returns void to reduce algorithmic complexity to O(1). In
- // order to erase while iterating across a map, use the following idiom (which
- // also works for standard containers):
+ // this method returns void to reduce algorithmic complexity to O(1). The
+ // iterator is invalidated, so any increment should be done before calling
+ // erase. In order to erase while iterating across a map, use the following
+ // idiom (which also works for standard containers):
//
// for (auto it = m.begin(), end = m.end(); it != end;) {
+ // // `erase()` will invalidate `it`, so advance `it` first.
+ // auto copy_it = it++;
// if (<pred>) {
- // m.erase(it++);
- // } else {
- // ++it;
+ // m.erase(copy_it);
// }
// }
void erase(const_iterator cit) { erase(cit.inner_); }
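
A standalone usage sketch of the idiom documented above (a hypothetical example, not part of the patch), erasing the odd keys from an `absl::flat_hash_map` while iterating:

#include <cassert>

#include "absl/container/flat_hash_map.h"

int main() {
  absl::flat_hash_map<int, int> m = {{1, 10}, {2, 20}, {3, 30}, {4, 40}};

  // erase() invalidates the iterator it receives, so advance `it` with a
  // post-increment before erasing the element it used to point at.
  for (auto it = m.begin(), end = m.end(); it != end;) {
    auto copy_it = it++;
    if (copy_it->first % 2 != 0) {
      m.erase(copy_it);
    }
  }

  assert(m.size() == 2);
  assert(m.count(2) == 1 && m.count(4) == 1);
  return 0;
}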
diff --git a/absl/container/internal/unordered_map_modifiers_test.h b/absl/container/internal/unordered_map_modifiers_test.h
index 52a1092e..f6aff542 100644
--- a/absl/container/internal/unordered_map_modifiers_test.h
+++ b/absl/container/internal/unordered_map_modifiers_test.h
@@ -15,6 +15,8 @@
#ifndef ABSL_CONTAINER_INTERNAL_UNORDERED_MAP_MODIFIERS_TEST_H_
#define ABSL_CONTAINER_INTERNAL_UNORDERED_MAP_MODIFIERS_TEST_H_
+#include <memory>
+
#include "gmock/gmock.h"
#include "gtest/gtest.h"
#include "absl/container/internal/hash_generator_testing.h"
@@ -267,6 +269,45 @@ REGISTER_TYPED_TEST_CASE_P(ModifiersTest, Clear, Insert, InsertHint,
Emplace, EmplaceHint, TryEmplace, TryEmplaceHint,
Erase, EraseRange, EraseKey, Swap);
+template <typename Type>
+struct is_unique_ptr : std::false_type {};
+
+template <typename Type>
+struct is_unique_ptr<std::unique_ptr<Type>> : std::true_type {};
+
+template <class UnordMap>
+class UniquePtrModifiersTest : public ::testing::Test {
+ protected:
+ UniquePtrModifiersTest() {
+ static_assert(is_unique_ptr<typename UnordMap::mapped_type>::value,
+ "UniquePtrModifiersTyest may only be called with a "
+ "std::unique_ptr value type.");
+ }
+};
+
+TYPED_TEST_SUITE_P(UniquePtrModifiersTest);
+
+// Test that we do not move from rvalue arguments if an insertion does not
+// happen.
+TYPED_TEST_P(UniquePtrModifiersTest, TryEmplace) {
+#ifdef UNORDERED_MAP_CXX17
+ using T = hash_internal::GeneratedType<TypeParam>;
+ using V = typename TypeParam::mapped_type;
+ T val = hash_internal::Generator<T>()();
+ TypeParam m;
+ auto p = m.try_emplace(val.first, std::move(val.second));
+ EXPECT_TRUE(p.second);
+ // A moved-from std::unique_ptr is guaranteed to be nullptr.
+ EXPECT_EQ(val.second, nullptr);
+ T val2 = {val.first, hash_internal::Generator<V>()()};
+ p = m.try_emplace(val2.first, std::move(val2.second));
+ EXPECT_FALSE(p.second);
+ EXPECT_NE(val2.second, nullptr);
+#endif
+}
+
+REGISTER_TYPED_TEST_SUITE_P(UniquePtrModifiersTest, TryEmplace);
+
} // namespace container_internal
} // namespace absl
diff --git a/absl/container/internal/unordered_map_test.cc b/absl/container/internal/unordered_map_test.cc
index 72567eac..114b342d 100644
--- a/absl/container/internal/unordered_map_test.cc
+++ b/absl/container/internal/unordered_map_test.cc
@@ -12,6 +12,7 @@
// See the License for the specific language governing permissions and
// limitations under the License.
+#include <memory>
#include <unordered_map>
#include "absl/container/internal/unordered_map_constructor_test.h"
@@ -35,6 +36,13 @@ INSTANTIATE_TYPED_TEST_SUITE_P(UnorderedMap, LookupTest, MapTypes);
INSTANTIATE_TYPED_TEST_SUITE_P(UnorderedMap, MembersTest, MapTypes);
INSTANTIATE_TYPED_TEST_SUITE_P(UnorderedMap, ModifiersTest, MapTypes);
+using UniquePtrMapTypes = ::testing::Types<std::unordered_map<
+ int, std::unique_ptr<int>, StatefulTestingHash, StatefulTestingEqual,
+ Alloc<std::pair<const int, std::unique_ptr<int>>>>>;
+
+INSTANTIATE_TYPED_TEST_SUITE_P(UnorderedMap, UniquePtrModifiersTest,
+ UniquePtrMapTypes);
+
} // namespace
} // namespace container_internal
} // namespace absl
diff --git a/absl/container/node_hash_map.h b/absl/container/node_hash_map.h
index a841f5ab..a718842b 100644
--- a/absl/container/node_hash_map.h
+++ b/absl/container/node_hash_map.h
@@ -351,6 +351,10 @@ class node_hash_map
// Inserts (via copy or move) the element of the specified key into the
// `node_hash_map` using the position of `hint` as a non-binding suggestion
// for where to begin the insertion search.
+ //
+ // All `try_emplace()` overloads make the same guarantees regarding rvalue
+ // arguments as `std::unordered_map::try_emplace()`, namely that these
+ // functions will not move from rvalue arguments if insertions do not happen.
using Base::try_emplace;
// node_hash_map::extract()