Diffstat (limited to 'absl/container/internal')
-rw-r--r--  absl/container/internal/btree.h                52
-rw-r--r--  absl/container/internal/btree_container.h        4
-rw-r--r--  absl/container/internal/raw_hash_set.h          31
-rw-r--r--  absl/container/internal/raw_hash_set_test.cc     2
4 files changed, 59 insertions, 30 deletions
diff --git a/absl/container/internal/btree.h b/absl/container/internal/btree.h
index f52fe235..9fe1b27f 100644
--- a/absl/container/internal/btree.h
+++ b/absl/container/internal/btree.h
@@ -290,9 +290,12 @@ struct map_params : common_params<Key, Compare, Alloc, TargetNodeSize, Multi,
};
using is_map_container = std::true_type;
- static const Key &key(const value_type &value) { return value.first; }
- static const Key &key(const init_type &init) { return init.first; }
+ template <typename V>
+ static auto key(const V &value) -> decltype(value.first) {
+ return value.first;
+ }
static const Key &key(const slot_type *s) { return slot_policy::key(s); }
+ static const Key &key(slot_type *s) { return slot_policy::key(s); }
static mapped_type &value(value_type *value) { return value->second; }
};
@@ -353,8 +356,10 @@ struct set_params : common_params<Key, Compare, Alloc, TargetNodeSize, Multi,
using value_compare = typename set_params::common_params::key_compare;
using is_map_container = std::false_type;
- static const Key &key(const value_type &value) { return value; }
+ template <typename V>
+ static const V &key(const V &value) { return value; }
static const Key &key(const slot_type *slot) { return *slot; }
+ static const Key &key(slot_type *slot) { return *slot; }
};
// An adapter class that converts a lower-bound compare into an upper-bound
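
Both hunks above replace fixed-signature key() accessors with templates, so a key can be extracted from whatever an input iterator yields (value_type, init_type, or a compatible type) without converting it first. A standalone sketch of the same idea, using illustrative names rather than the real params types:

    #include <string>
    #include <utility>

    // Map-style sketch: one template deduces the key member from both
    // pair<const K, V> (value_type) and pair<K, V> (init_type).
    struct MapParamsSketch {
      template <typename V>
      static const auto &key(const V &value) { return value.first; }
    };

    // Set-style sketch: the element itself is the key, whatever its type.
    struct SetParamsSketch {
      template <typename V>
      static const V &key(const V &value) { return value; }
    };

    int main() {
      std::pair<const std::string, int> value{"a", 1};  // like value_type
      std::pair<std::string, int> init{"b", 2};         // like init_type
      std::string element = "c";                        // like a set element
      return static_cast<int>(MapParamsSketch::key(value).size() +
                              MapParamsSketch::key(init).size() +
                              SetParamsSketch::key(element).size());
    }
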
@@ -1020,6 +1025,7 @@ template <typename Params>
class btree {
using node_type = btree_node<Params>;
using is_key_compare_to = typename Params::is_key_compare_to;
+ using init_type = typename Params::init_type;
// We use a static empty node for the root/leftmost/rightmost of empty btrees
// in order to avoid branching in begin()/end().
@@ -1184,8 +1190,8 @@ class btree {
// boolean return value indicates whether insertion succeeded or failed.
// Requirement: if `key` already exists in the btree, does not consume `args`.
// Requirement: `key` is never referenced after consuming `args`.
- template <typename... Args>
- std::pair<iterator, bool> insert_unique(const key_type &key, Args &&... args);
+ template <typename K, typename... Args>
+ std::pair<iterator, bool> insert_unique(const K &key, Args &&... args);
// Inserts with hint. Checks to see if the value should be placed immediately
// before `position` in the tree. If so, then the insertion will take
@@ -1193,14 +1199,23 @@ class btree {
// logarithmic time as if a call to insert_unique() were made.
// Requirement: if `key` already exists in the btree, does not consume `args`.
// Requirement: `key` is never referenced after consuming `args`.
- template <typename... Args>
+ template <typename K, typename... Args>
std::pair<iterator, bool> insert_hint_unique(iterator position,
- const key_type &key,
+ const K &key,
Args &&... args);
// Insert a range of values into the btree.
+ // Note: the first overload avoids constructing a value_type if the key
+ // already exists in the btree.
+ template <typename InputIterator,
+ typename = decltype(
+ compare_keys(params_type::key(*std::declval<InputIterator>()),
+ std::declval<const key_type &>()))>
+ void insert_iterator_unique(InputIterator b, InputIterator e, int);
+ // We need the second overload for cases in which we need to construct a
+ // value_type in order to compare it with the keys already in the btree.
template <typename InputIterator>
- void insert_iterator_unique(InputIterator b, InputIterator e);
+ void insert_iterator_unique(InputIterator b, InputIterator e, char);
// Inserts a value into the btree.
template <typename ValueType>
@@ -1855,8 +1870,8 @@ btree<P>::btree(const btree &other)
}
template <typename P>
-template <typename... Args>
-auto btree<P>::insert_unique(const key_type &key, Args &&... args)
+template <typename K, typename... Args>
+auto btree<P>::insert_unique(const K &key, Args &&... args)
-> std::pair<iterator, bool> {
if (empty()) {
mutable_root() = rightmost_ = new_leaf_root_node(1);
@@ -1881,8 +1896,8 @@ auto btree<P>::insert_unique(const key_type &key, Args &&... args)
}
template <typename P>
-template <typename... Args>
-inline auto btree<P>::insert_hint_unique(iterator position, const key_type &key,
+template <typename K, typename... Args>
+inline auto btree<P>::insert_hint_unique(iterator position, const K &key,
Args &&... args)
-> std::pair<iterator, bool> {
if (!empty()) {
@@ -1906,14 +1921,23 @@ inline auto btree<P>::insert_hint_unique(iterator position, const key_type &key,
}
template <typename P>
-template <typename InputIterator>
-void btree<P>::insert_iterator_unique(InputIterator b, InputIterator e) {
+template <typename InputIterator, typename>
+void btree<P>::insert_iterator_unique(InputIterator b, InputIterator e, int) {
for (; b != e; ++b) {
insert_hint_unique(end(), params_type::key(*b), *b);
}
}
template <typename P>
+template <typename InputIterator>
+void btree<P>::insert_iterator_unique(InputIterator b, InputIterator e, char) {
+ for (; b != e; ++b) {
+ init_type value(*b);
+ insert_hint_unique(end(), params_type::key(value), std::move(value));
+ }
+}
+
+template <typename P>
template <typename ValueType>
auto btree<P>::insert_multi(const key_type &key, ValueType &&v) -> iterator {
if (empty()) {
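
The two insert_iterator_unique() overloads use a small dispatch idiom: callers pass a literal 0, which prefers the int parameter; when the defaulted template argument fails to compile (the dereferenced iterator cannot be fed to compare_keys() as a key), SFINAE removes that overload and the char fallback is chosen. A self-contained sketch of the idiom with hypothetical names:

    #include <iostream>
    #include <utility>

    // Hypothetical key comparison that only accepts something key-like (an int).
    inline int compare_keys(int a, int b) { return a < b ? -1 : a > b; }

    // Preferred overload: enabled only if *it is usable as a key directly.
    template <typename It,
              typename = decltype(compare_keys(*std::declval<It>(), 0))>
    void insert_range(It b, It e, int) {
      std::cout << "fast path: compare the key before constructing a value\n";
    }

    // Fallback overload: a value must be constructed before comparing.
    template <typename It>
    void insert_range(It b, It e, char) {
      std::cout << "slow path: construct a value, then compare\n";
    }

    int main() {
      int keys[] = {1, 2, 3};
      std::pair<int, int> pairs[] = {{1, 1}};
      insert_range(keys, keys + 3, 0);    // int overload: exact match for 0
      insert_range(pairs, pairs + 1, 0);  // SFINAE fails, char overload used
    }
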
diff --git a/absl/container/internal/btree_container.h b/absl/container/internal/btree_container.h
index 734c90ef..d0fc6451 100644
--- a/absl/container/internal/btree_container.h
+++ b/absl/container/internal/btree_container.h
@@ -289,10 +289,10 @@ class btree_set_container : public btree_container<Tree> {
}
template <typename InputIterator>
void insert(InputIterator b, InputIterator e) {
- this->tree_.insert_iterator_unique(b, e);
+ this->tree_.insert_iterator_unique(b, e, 0);
}
void insert(std::initializer_list<init_type> init) {
- this->tree_.insert_iterator_unique(init.begin(), init.end());
+ this->tree_.insert_iterator_unique(init.begin(), init.end(), 0);
}
insert_return_type insert(node_type &&node) {
if (!node) return {this->end(), false, node_type()};
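
From the container side, the extra 0 argument is all that changes; the observable benefit is that a range insert no longer constructs a value_type for elements whose keys are already present. A usage sketch (assuming absl/container/btree_map.h):

    #include <utility>
    #include <vector>
    #include "absl/container/btree_map.h"

    int main() {
      absl::btree_map<int, int> m = {{1, 10}, {2, 20}};
      std::vector<std::pair<int, int>> src = {{2, 99}, {3, 30}};
      // Key 2 already exists, so {2, 99} is skipped; with this change the
      // duplicate is detected by comparing keys, without building a value_type.
      m.insert(src.begin(), src.end());
      return static_cast<int>(m.size());  // 3 elements: {1,10}, {2,20}, {3,30}
    }
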
diff --git a/absl/container/internal/raw_hash_set.h b/absl/container/internal/raw_hash_set.h
index df0f2b2b..1f304b61 100644
--- a/absl/container/internal/raw_hash_set.h
+++ b/absl/container/internal/raw_hash_set.h
@@ -497,6 +497,18 @@ inline size_t GrowthToLowerboundCapacity(size_t growth) {
return growth + static_cast<size_t>((static_cast<int64_t>(growth) - 1) / 7);
}
+inline void AssertIsFull(ctrl_t* ctrl) {
+ ABSL_HARDENING_ASSERT((ctrl != nullptr && IsFull(*ctrl)) &&
+ "Invalid operation on iterator. The element might have "
+ "been erased, or the table might have rehashed.");
+}
+
+inline void AssertIsValid(ctrl_t* ctrl) {
+ ABSL_HARDENING_ASSERT((ctrl == nullptr || IsFull(*ctrl)) &&
+ "Invalid operation on iterator. The element might have "
+ "been erased, or the table might have rehashed.");
+}
+
// Policy: a policy defines how to perform different operations on
// the slots of the hashtable (see hash_policy_traits.h for the full interface
// of policy).
@@ -617,7 +629,7 @@ class raw_hash_set {
// PRECONDITION: not an end() iterator.
reference operator*() const {
- assert_is_full();
+ AssertIsFull(ctrl_);
return PolicyTraits::element(slot_);
}
@@ -626,7 +638,7 @@ class raw_hash_set {
// PRECONDITION: not an end() iterator.
iterator& operator++() {
- assert_is_full();
+ AssertIsFull(ctrl_);
++ctrl_;
++slot_;
skip_empty_or_deleted();
@@ -640,8 +652,8 @@ class raw_hash_set {
}
friend bool operator==(const iterator& a, const iterator& b) {
- a.assert_is_valid();
- b.assert_is_valid();
+ AssertIsValid(a.ctrl_);
+ AssertIsValid(b.ctrl_);
return a.ctrl_ == b.ctrl_;
}
friend bool operator!=(const iterator& a, const iterator& b) {
@@ -655,13 +667,6 @@ class raw_hash_set {
ABSL_INTERNAL_ASSUME(ctrl != nullptr);
}
- void assert_is_full() const {
- ABSL_HARDENING_ASSERT(ctrl_ != nullptr && IsFull(*ctrl_));
- }
- void assert_is_valid() const {
- ABSL_HARDENING_ASSERT(ctrl_ == nullptr || IsFull(*ctrl_));
- }
-
void skip_empty_or_deleted() {
while (IsEmptyOrDeleted(*ctrl_)) {
uint32_t shift = Group{ctrl_}.CountLeadingEmptyOrDeleted();
@@ -1174,7 +1179,7 @@ class raw_hash_set {
// This overload is necessary because otherwise erase<K>(const K&) would be
// a better match if non-const iterator is passed as an argument.
void erase(iterator it) {
- it.assert_is_full();
+ AssertIsFull(it.ctrl_);
PolicyTraits::destroy(&alloc_ref(), it.slot_);
erase_meta_only(it);
}
@@ -1208,7 +1213,7 @@ class raw_hash_set {
}
node_type extract(const_iterator position) {
- position.inner_.assert_is_full();
+ AssertIsFull(position.inner_.ctrl_);
auto node =
CommonAccess::Transfer<node_type>(alloc_ref(), position.inner_.slot_);
erase_meta_only(position);
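
The member assert_is_full()/assert_is_valid() checks are folded into the free AssertIsFull()/AssertIsValid() helpers so every call site shares one descriptive message. In a hardened build (for example with ABSL_OPTION_HARDENED enabled), misuse of an invalidated iterator now aborts with that message rather than the bare "IsFull" assert. A behavioral sketch:

    #include "absl/container/flat_hash_set.h"

    int main() {
      absl::flat_hash_set<int> s = {1, 2, 3};
      auto it = s.find(2);
      s.erase(it);
      // In a hardened build, dereferencing or incrementing `it` now trips
      // AssertIsFull(ctrl_) and aborts with "Invalid operation on iterator...".
      // Without hardening it remains undefined behavior, as before.
      // *it;  // intentionally commented out
      return 0;
    }
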
diff --git a/absl/container/internal/raw_hash_set_test.cc b/absl/container/internal/raw_hash_set_test.cc
index 2fc85591..6befa960 100644
--- a/absl/container/internal/raw_hash_set_test.cc
+++ b/absl/container/internal/raw_hash_set_test.cc
@@ -1791,7 +1791,7 @@ TEST(TableDeathTest, EraseOfEndAsserts) {
IntTable t;
// Extra simple "regexp" as regexp support is highly varied across platforms.
- constexpr char kDeathMsg[] = "IsFull";
+ constexpr char kDeathMsg[] = "Invalid operation on iterator";
EXPECT_DEATH_IF_SUPPORTED(t.erase(t.end()), kDeathMsg);
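
A hypothetical companion test, not part of this change, could cover the same message through extract(), which now also funnels through AssertIsFull():

    TEST(TableDeathTest, ExtractOfEndAsserts) {
      IntTable t;
      // Extra simple "regexp" as regexp support is highly varied across platforms.
      constexpr char kDeathMsg[] = "Invalid operation on iterator";
      EXPECT_DEATH_IF_SUPPORTED(t.extract(t.end()), kDeathMsg);
    }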
}