Diffstat (limited to 'absl/container/internal/btree.h')
 absl/container/internal/btree.h | 472 ++++++++++----------
 1 file changed, 234 insertions(+), 238 deletions(-)
diff --git a/absl/container/internal/btree.h b/absl/container/internal/btree.h
index 002ccc1e..0bb38366 100644
--- a/absl/container/internal/btree.h
+++ b/absl/container/internal/btree.h
@@ -182,15 +182,44 @@ struct key_compare_to_adapter<std::greater<absl::Cord>> {
using type = StringBtreeDefaultGreater;
};
+// Detects an 'absl_btree_prefer_linear_node_search' member. This is
+// a protocol for opting in to, or out of, linear node search.
+//
+// For example, this would be useful for key types that wrap an integer
+// and define their own cheap operator<():
+//
+// class K {
+// public:
+// using absl_btree_prefer_linear_node_search = std::true_type;
+// ...
+// private:
+// friend bool operator<(K a, K b) { return a.k_ < b.k_; }
+// int k_;
+// };
+//
+// btree_map<K, V> m; // Uses linear search
+//
+// If T has the preference tag, then it has a preference.
+// Btree will use the tag's truth value.
+template <typename T, typename = void>
+struct has_linear_node_search_preference : std::false_type {};
+template <typename T, typename = void>
+struct prefers_linear_node_search : std::false_type {};
+template <typename T>
+struct has_linear_node_search_preference<
+ T, absl::void_t<typename T::absl_btree_prefer_linear_node_search>>
+ : std::true_type {};
+template <typename T>
+struct prefers_linear_node_search<
+ T, absl::void_t<typename T::absl_btree_prefer_linear_node_search>>
+ : T::absl_btree_prefer_linear_node_search {};
+
template <typename Key, typename Compare, typename Alloc, int TargetNodeSize,
bool Multi, typename SlotPolicy>
struct common_params {
// If Compare is a common comparator for a string-like type, then we adapt it
// to use heterogeneous lookup and to be a key-compare-to comparator.
using key_compare = typename key_compare_to_adapter<Compare>::type;
- // True when key_compare has been adapted to StringBtreeDefault{Less,Greater}.
- using is_key_compare_adapted =
- absl::negation<std::is_same<key_compare, Compare>>;
// A type which indicates if we have a key-compare-to functor or a plain old
// key-compare functor.
using is_key_compare_to = btree_is_key_compare_to<key_compare, Key>;
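
As a minimal sketch of the opt-in protocol above (the key type here is
illustrative, not part of the patch), an integer-wrapping key with a cheap
operator<() can request linear node search like so:

    #include <type_traits>
    #include "absl/container/btree_map.h"

    class CheapKey {
     public:
      // Opt in: comparing CheapKeys is as cheap as comparing ints, so a
      // linear scan of a node tends to beat binary search.
      using absl_btree_prefer_linear_node_search = std::true_type;
      explicit CheapKey(int k) : k_(k) {}

     private:
      friend bool operator<(CheapKey a, CheapKey b) { return a.k_ < b.k_; }
      int k_;
    };

    absl::btree_map<CheapKey, int> m;  // Nodes are searched linearly.
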
@@ -200,9 +229,6 @@ struct common_params {
using size_type = std::make_signed<size_t>::type;
using difference_type = ptrdiff_t;
- // True if this is a multiset or multimap.
- using is_multi_container = std::integral_constant<bool, Multi>;
-
using slot_policy = SlotPolicy;
using slot_type = typename slot_policy::slot_type;
using value_type = typename slot_policy::value_type;
@@ -212,6 +238,23 @@ struct common_params {
using reference = value_type &;
using const_reference = const value_type &;
+ // For the given lookup key type, returns whether we can have multiple
+ // equivalent keys in the btree. If this is a multi-container, then we can.
+ // Otherwise, we can have multiple equivalent keys only if all of the
+ // following conditions are met:
+ // - The comparator is transparent.
+ // - The lookup key type is not the same as key_type.
+ // - The comparator is not a StringBtreeDefault{Less,Greater} comparator
+ // that we know has the same equivalence classes for all lookup types.
+ template <typename LookupKey>
+ constexpr static bool can_have_multiple_equivalent_keys() {
+ return Multi ||
+ (IsTransparent<key_compare>::value &&
+ !std::is_same<LookupKey, Key>::value &&
+ !std::is_same<key_compare, StringBtreeDefaultLess>::value &&
+ !std::is_same<key_compare, StringBtreeDefaultGreater>::value);
+ }
+
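
To see why a transparent comparator can make one lookup key equivalent to
several distinct stored keys even in a unique container, consider this
illustrative sketch (the comparator and keys are hypothetical, not from the
patch):

    #include <string>
    #include "absl/container/btree_set.h"

    struct FirstCharLess {
      using is_transparent = void;
      bool operator()(const std::string &a, const std::string &b) const {
        return a < b;
      }
      // Heterogeneous lookups compare only the first character, so the
      // distinct keys "apple" and "avocado" are both equivalent to 'a'.
      bool operator()(const std::string &a, char b) const { return a[0] < b; }
      bool operator()(char a, const std::string &b) const { return a < b[0]; }
    };

    absl::btree_set<std::string, FirstCharLess> s{"apple", "avocado", "beet"};
    // equal_range('a') spans two elements even though `s` is a unique
    // container; this is the case can_have_multiple_equivalent_keys<char>()
    // must account for.
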
enum {
kTargetNodeSize = TargetNodeSize,
@@ -391,6 +434,10 @@ struct SearchResult {
// useful information.
template <typename V>
struct SearchResult<V, false> {
+ SearchResult() {}
+ explicit SearchResult(V value) : value(value) {}
+ SearchResult(V value, MatchKind /*match*/) : value(value) {}
+
V value;
static constexpr bool HasMatch() { return false; }
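
With these constructors, shared search code can build either SearchResult
specialization from the same expressions. Note that because SearchResult(V)
is explicit, a bare `return {s};` no longer compiles, which is why call
sites later in this patch spell out the type:

    // With a plain less-than comparator (no match information tracked):
    return SearchResult<int, false>{s};
    // With a three-way comparator (match information tracked):
    return SearchResult<int, true>{s, MatchKind::kEq};
    // The (V, MatchKind) constructor of the false specialization ignores
    // its MatchKind argument, so generic code may pass one unconditionally.
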
@@ -403,7 +450,6 @@ struct SearchResult<V, false> {
template <typename Params>
class btree_node {
using is_key_compare_to = typename Params::is_key_compare_to;
- using is_multi_container = typename Params::is_multi_container;
using field_type = typename Params::node_count_type;
using allocator_type = typename Params::allocator_type;
using slot_type = typename Params::slot_type;
@@ -421,15 +467,22 @@ class btree_node {
using difference_type = typename Params::difference_type;
// Btree decides whether to use linear node search as follows:
+ // - If the comparator expresses a preference, use that.
+ // - If the key expresses a preference, use that.
// - If the key is arithmetic and the comparator is std::less or
// std::greater, choose linear.
// - Otherwise, choose binary.
// TODO(ezb): Might make sense to add condition(s) based on node-size.
using use_linear_search = std::integral_constant<
bool,
- std::is_arithmetic<key_type>::value &&
- (std::is_same<std::less<key_type>, key_compare>::value ||
- std::is_same<std::greater<key_type>, key_compare>::value)>;
+ has_linear_node_search_preference<key_compare>::value
+ ? prefers_linear_node_search<key_compare>::value
+ : has_linear_node_search_preference<key_type>::value
+ ? prefers_linear_node_search<key_type>::value
+ : std::is_arithmetic<key_type>::value &&
+ (std::is_same<std::less<key_type>, key_compare>::value ||
+ std::is_same<std::greater<key_type>,
+ key_compare>::value)>;
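
A sketch of the precedence rules above (the identifiers are illustrative):
when both the comparator and the key express a preference, the comparator's
tag wins.

    struct TaggedKey {
      using absl_btree_prefer_linear_node_search = std::true_type;  // opt in
      int v;
      friend bool operator<(TaggedKey a, TaggedKey b) { return a.v < b.v; }
    };

    struct TaggedLess {
      using absl_btree_prefer_linear_node_search = std::false_type;  // opt out
      bool operator()(TaggedKey a, TaggedKey b) const { return a.v < b.v; }
    };

    // absl::btree_set<TaggedKey>             -> key tag wins    -> linear.
    // absl::btree_set<TaggedKey, TaggedLess> -> comparator wins -> binary.
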
// This class is organized by gtl::Layout as if it had the following
// structure:
@@ -446,23 +499,23 @@ class btree_node {
// // is the same as the count of values.
// field_type finish;
// // The maximum number of values the node can hold. This is an integer in
- // // [1, kNodeValues] for root leaf nodes, kNodeValues for non-root leaf
+ // // [1, kNodeSlots] for root leaf nodes, kNodeSlots for non-root leaf
// // nodes, and kInternalNodeMaxCount (as a sentinel value) for internal
- // // nodes (even though there are still kNodeValues values in the node).
+ // // nodes (even though there are still kNodeSlots values in the node).
// // TODO(ezb): make max_count use only 4 bits and record log2(capacity)
// // to free extra bits for is_root, etc.
// field_type max_count;
//
// // The array of values. The capacity is `max_count` for leaf nodes and
- // // kNodeValues for internal nodes. Only the values in
+ // // kNodeSlots for internal nodes. Only the values in
// // [start, finish) have been initialized and are valid.
// slot_type values[max_count];
//
// // The array of child pointers. The keys in children[i] are all less
// // than key(i). The keys in children[i + 1] are all greater than key(i).
- // // There are 0 children for leaf nodes and kNodeValues + 1 children for
+ // // There are 0 children for leaf nodes and kNodeSlots + 1 children for
// // internal nodes.
- // btree_node *children[kNodeValues + 1];
+ // btree_node *children[kNodeSlots + 1];
//
// This class is only constructed by EmptyNodeType. Normally, pointers to the
// layout above are allocated, cast to btree_node*, and de-allocated within
@@ -484,57 +537,62 @@ class btree_node {
private:
using layout_type = absl::container_internal::Layout<btree_node *, field_type,
slot_type, btree_node *>;
- constexpr static size_type SizeWithNValues(size_type n) {
+ constexpr static size_type SizeWithNSlots(size_type n) {
return layout_type(/*parent*/ 1,
/*position, start, finish, max_count*/ 4,
- /*values*/ n,
+ /*slots*/ n,
/*children*/ 0)
.AllocSize();
}
// A lower bound for the overhead of fields other than values in a leaf node.
constexpr static size_type MinimumOverhead() {
- return SizeWithNValues(1) - sizeof(value_type);
+ return SizeWithNSlots(1) - sizeof(value_type);
}
// Compute how many values we can fit onto a leaf node taking into account
// padding.
- constexpr static size_type NodeTargetValues(const int begin, const int end) {
+ constexpr static size_type NodeTargetSlots(const int begin, const int end) {
return begin == end ? begin
- : SizeWithNValues((begin + end) / 2 + 1) >
+ : SizeWithNSlots((begin + end) / 2 + 1) >
params_type::kTargetNodeSize
- ? NodeTargetValues(begin, (begin + end) / 2)
- : NodeTargetValues((begin + end) / 2 + 1, end);
+ ? NodeTargetSlots(begin, (begin + end) / 2)
+ : NodeTargetSlots((begin + end) / 2 + 1, end);
}
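
NodeTargetSlots is a compile-time binary search for the largest slot count
whose leaf layout still fits in kTargetNodeSize. A standalone sketch with
made-up sizes (the real code computes sizes via layout_type::AllocSize()):

    #include <cstddef>

    constexpr std::size_t kTargetNodeSize = 256;
    constexpr std::size_t kOverhead = 24;  // parent pointer + field bytes
    constexpr std::size_t kSlotSize = 8;   // e.g. an int64 key-only slot

    constexpr std::size_t SizeWithNSlots(std::size_t n) {
      return kOverhead + n * kSlotSize;
    }

    constexpr std::size_t NodeTargetSlots(std::size_t begin, std::size_t end) {
      return begin == end ? begin
             : SizeWithNSlots((begin + end) / 2 + 1) > kTargetNodeSize
                 ? NodeTargetSlots(begin, (begin + end) / 2)
                 : NodeTargetSlots((begin + end) / 2 + 1, end);
    }

    // (256 - 24) / 8 = 29 slots fit; 30 slots would need 264 bytes.
    static_assert(NodeTargetSlots(0, kTargetNodeSize) == 29, "");
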
enum {
kTargetNodeSize = params_type::kTargetNodeSize,
- kNodeTargetValues = NodeTargetValues(0, params_type::kTargetNodeSize),
+ kNodeTargetSlots = NodeTargetSlots(0, params_type::kTargetNodeSize),
- // We need a minimum of 3 values per internal node in order to perform
+ // We need a minimum of 3 slots per internal node in order to perform
// splitting (1 value for the two nodes involved in the split and 1 value
- // propagated to the parent as the delimiter for the split).
- kNodeValues = kNodeTargetValues >= 3 ? kNodeTargetValues : 3,
+ // propagated to the parent as the delimiter for the split). For performance
+ // reasons, we don't allow 3 slots per node due to bad worst-case occupancy
+ // of 1/3 (for a node, not a b-tree).
+ kMinNodeSlots = 4,
+
+ kNodeSlots =
+ kNodeTargetSlots >= kMinNodeSlots ? kNodeTargetSlots : kMinNodeSlots,
// The node is internal (i.e. is not a leaf node) if and only if `max_count`
// has this value.
kInternalNodeMaxCount = 0,
};
- // Leaves can have less than kNodeValues values.
- constexpr static layout_type LeafLayout(const int max_values = kNodeValues) {
+ // Leaves can have fewer than kNodeSlots values.
+ constexpr static layout_type LeafLayout(const int slot_count = kNodeSlots) {
return layout_type(/*parent*/ 1,
/*position, start, finish, max_count*/ 4,
- /*values*/ max_values,
+ /*slots*/ slot_count,
/*children*/ 0);
}
constexpr static layout_type InternalLayout() {
return layout_type(/*parent*/ 1,
/*position, start, finish, max_count*/ 4,
- /*values*/ kNodeValues,
- /*children*/ kNodeValues + 1);
+ /*slots*/ kNodeSlots,
+ /*children*/ kNodeSlots + 1);
}
- constexpr static size_type LeafSize(const int max_values = kNodeValues) {
- return LeafLayout(max_values).AllocSize();
+ constexpr static size_type LeafSize(const int slot_count = kNodeSlots) {
+ return LeafLayout(slot_count).AllocSize();
}
constexpr static size_type InternalSize() {
return InternalLayout().AllocSize();
@@ -591,10 +649,10 @@ class btree_node {
}
field_type max_count() const {
// Internal nodes have max_count==kInternalNodeMaxCount.
- // Leaf nodes have max_count in [1, kNodeValues].
+ // Leaf nodes have max_count in [1, kNodeSlots].
const field_type max_count = GetField<1>()[3];
return max_count == field_type{kInternalNodeMaxCount}
- ? field_type{kNodeValues}
+ ? field_type{kNodeSlots}
: max_count;
}
@@ -672,7 +730,7 @@ class btree_node {
}
++s;
}
- return {s};
+ return SearchResult<int, false>{s};
}
// Returns the position of the first value whose key is not less than k using
@@ -707,7 +765,7 @@ class btree_node {
e = mid;
}
}
- return {s};
+ return SearchResult<int, false>{s};
}
// Returns the position of the first value whose key is not less than k using
@@ -716,7 +774,7 @@ class btree_node {
SearchResult<int, true> binary_search_impl(
const K &k, int s, int e, const CompareTo &comp,
std::true_type /* IsCompareTo */) const {
- if (is_multi_container::value) {
+ if (params_type::template can_have_multiple_equivalent_keys<K>()) {
MatchKind exact_match = MatchKind::kNe;
while (s != e) {
const int mid = (s + e) >> 1;
@@ -727,14 +785,14 @@ class btree_node {
e = mid;
if (c == 0) {
// Need to return the first value whose key is not less than k,
- // which requires continuing the binary search if this is a
- // multi-container.
+ // which requires continuing the binary search if there could be
+ // multiple equivalent keys.
exact_match = MatchKind::kEq;
}
}
}
return {s, exact_match};
- } else { // Not a multi-container.
+ } else { // Can't have multiple equivalent keys.
while (s != e) {
const int mid = (s + e) >> 1;
const absl::weak_ordering c = comp(key(mid), k);
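
For reference, a "key-compare-to" comparator is one whose call operator
returns an absl::weak_ordering instead of bool. A hedged sketch (this exact
functor is illustrative, not part of the patch):

    #include "absl/strings/string_view.h"
    #include "absl/types/compare.h"

    struct ThreeWayStringCmp {
      absl::weak_ordering operator()(absl::string_view a,
                                     absl::string_view b) const {
        const int c = a.compare(b);
        return c < 0    ? absl::weak_ordering::less
               : c == 0 ? absl::weak_ordering::equivalent
                        : absl::weak_ordering::greater;
      }
    };

    // With such a comparator, binary_search_impl can record MatchKind::kEq
    // during the descent and spare the caller a second comparison.
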
@@ -784,12 +842,12 @@ class btree_node {
start_slot(), max_count * sizeof(slot_type));
}
void init_internal(btree_node *parent) {
- init_leaf(parent, kNodeValues);
+ init_leaf(parent, kNodeSlots);
// Set `max_count` to a sentinel value to indicate that this node is
// internal.
set_max_count(kInternalNodeMaxCount);
absl::container_internal::SanitizerPoisonMemoryRegion(
- &mutable_child(start()), (kNodeValues + 1) * sizeof(btree_node *));
+ &mutable_child(start()), (kNodeSlots + 1) * sizeof(btree_node *));
}
static void deallocate(const size_type size, btree_node *node,
@@ -800,12 +858,6 @@ class btree_node {
// Deletes a node and all of its children.
static void clear_and_delete(btree_node *node, allocator_type *alloc);
- public:
- // Exposed only for tests.
- static bool testonly_uses_linear_node_search() {
- return use_linear_search::value;
- }
-
private:
template <typename... Args>
void value_init(const field_type i, allocator_type *alloc, Args &&... args) {
@@ -873,6 +925,7 @@ struct btree_iterator {
using key_type = typename Node::key_type;
using size_type = typename Node::size_type;
using params_type = typename Node::params_type;
+ using is_map_container = typename params_type::is_map_container;
using node_type = Node;
using normal_node = typename std::remove_const<Node>::type;
@@ -884,7 +937,7 @@ struct btree_iterator {
using slot_type = typename params_type::slot_type;
using iterator =
- btree_iterator<normal_node, normal_reference, normal_pointer>;
+ btree_iterator<normal_node, normal_reference, normal_pointer>;
using const_iterator =
btree_iterator<const_node, const_reference, const_pointer>;
@@ -901,20 +954,19 @@ struct btree_iterator {
btree_iterator(Node *n, int p) : node(n), position(p) {}
// NOTE: this SFINAE allows for implicit conversions from iterator to
- // const_iterator, but it specifically avoids defining copy constructors so
- // that btree_iterator can be trivially copyable. This is for performance and
- // binary size reasons.
+ // const_iterator, but it specifically avoids hiding the copy constructor so
+ // that the trivial one will be used when possible.
template <typename N, typename R, typename P,
absl::enable_if_t<
std::is_same<btree_iterator<N, R, P>, iterator>::value &&
std::is_same<btree_iterator, const_iterator>::value,
int> = 0>
- btree_iterator(const btree_iterator<N, R, P> &other) // NOLINT
+ btree_iterator(const btree_iterator<N, R, P> other) // NOLINT
: node(other.node), position(other.position) {}
private:
// This SFINAE allows explicit conversions from const_iterator to
- // iterator, but also avoids defining a copy constructor.
+ // iterator, but also avoids hiding the copy constructor.
// NOTE: the const_cast is safe because this constructor is only called by
// non-const methods and the container owns the nodes.
template <typename N, typename R, typename P,
@@ -922,7 +974,7 @@ struct btree_iterator {
std::is_same<btree_iterator<N, R, P>, const_iterator>::value &&
std::is_same<btree_iterator, iterator>::value,
int> = 0>
- explicit btree_iterator(const btree_iterator<N, R, P> &other)
+ explicit btree_iterator(const btree_iterator<N, R, P> other)
: node(const_cast<node_type *>(other.node)), position(other.position) {}
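
A note on these conversion constructors: a constructor template is never a
copy constructor, so it cannot suppress the trivial copy operations, and
taking a trivially copyable iterator by value is as cheap as by reference.
A minimal sketch (with a hypothetical Iter type) of the property being
preserved:

    #include <type_traits>

    template <bool IsConst>
    struct Iter {
      Iter() = default;
      // Converting constructor (mutable -> const); not a copy constructor,
      // so the implicitly declared trivial one is still used for copies.
      template <bool C = IsConst, typename = typename std::enable_if<C>::type>
      Iter(const Iter<false> other) : pos(other.pos) {}  // NOLINT
      int pos = 0;
    };

    static_assert(std::is_trivially_copyable<Iter<false>>::value, "");
    static_assert(std::is_trivially_copyable<Iter<true>>::value, "");
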
// Increment/decrement the iterator.
@@ -985,6 +1037,8 @@ struct btree_iterator {
}
private:
+ friend iterator;
+ friend const_iterator;
template <typename Params>
friend class btree;
template <typename Tree>
@@ -995,8 +1049,6 @@ struct btree_iterator {
friend class btree_map_container;
template <typename Tree>
friend class btree_multiset_container;
- template <typename N, typename R, typename P>
- friend struct btree_iterator;
template <typename TreeType, typename CheckerType>
friend class base_checker;
@@ -1017,8 +1069,6 @@ class btree {
using is_key_compare_to = typename Params::is_key_compare_to;
using init_type = typename Params::init_type;
using field_type = typename node_type::field_type;
- using is_multi_container = typename Params::is_multi_container;
- using is_key_compare_adapted = typename Params::is_key_compare_adapted;
// We use a static empty node for the root/leftmost/rightmost of empty btrees
// in order to avoid branching in begin()/end().
@@ -1054,8 +1104,8 @@ class btree {
}
enum : uint32_t {
- kNodeValues = node_type::kNodeValues,
- kMinNodeValues = kNodeValues / 2,
+ kNodeSlots = node_type::kNodeSlots,
+ kMinNodeValues = kNodeSlots / 2,
};
struct node_stats {
@@ -1085,7 +1135,8 @@ class btree {
using const_reference = typename Params::const_reference;
using pointer = typename Params::pointer;
using const_pointer = typename Params::const_pointer;
- using iterator = btree_iterator<node_type, reference, pointer>;
+ using iterator =
+ typename btree_iterator<node_type, reference, pointer>::iterator;
using const_iterator = typename iterator::const_iterator;
using reverse_iterator = std::reverse_iterator<iterator>;
using const_reverse_iterator = std::reverse_iterator<const_iterator>;
@@ -1098,28 +1149,46 @@ class btree {
private:
// For use in copy_or_move_values_in_order.
const value_type &maybe_move_from_iterator(const_iterator it) { return *it; }
- value_type &&maybe_move_from_iterator(iterator it) { return std::move(*it); }
+ value_type &&maybe_move_from_iterator(iterator it) {
+ // This is a destructive operation on the other container so it's safe for
+ // us to const_cast and move from the keys here even if it's a set.
+ return std::move(const_cast<value_type &>(*it));
+ }
// Copies or moves (depending on the template parameter) the values in
// other into this btree in their order in other. This btree must be empty
// before this method is called. This method is used in copy construction,
// copy assignment, and move assignment.
template <typename Btree>
- void copy_or_move_values_in_order(Btree *other);
+ void copy_or_move_values_in_order(Btree &other);
// Validates that various assumptions/requirements are true at compile time.
constexpr static bool static_assert_validation();
public:
- btree(const key_compare &comp, const allocator_type &alloc);
+ btree(const key_compare &comp, const allocator_type &alloc)
+ : root_(comp, alloc, EmptyNode()), rightmost_(EmptyNode()), size_(0) {}
- btree(const btree &other);
+ btree(const btree &other) : btree(other, other.allocator()) {}
+ btree(const btree &other, const allocator_type &alloc)
+ : btree(other.key_comp(), alloc) {
+ copy_or_move_values_in_order(other);
+ }
btree(btree &&other) noexcept
: root_(std::move(other.root_)),
rightmost_(absl::exchange(other.rightmost_, EmptyNode())),
size_(absl::exchange(other.size_, 0)) {
other.mutable_root() = EmptyNode();
}
+ btree(btree &&other, const allocator_type &alloc)
+ : btree(other.key_comp(), alloc) {
+ if (alloc == other.allocator()) {
+ swap(other);
+ } else {
+ // Move values from `other` one at a time when allocators are different.
+ copy_or_move_values_in_order(other);
+ }
+ }
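
The allocator-extended constructors follow the usual allocator-aware
container pattern: steal the whole tree only when the allocators compare
equal. A hedged usage sketch (the map type and helper are illustrative):

    #include <string>
    #include <utility>
    #include "absl/container/btree_map.h"

    using Map = absl::btree_map<int, std::string>;

    Map MoveWithAlloc(Map &&other, const Map::allocator_type &alloc) {
      Map result(Map::key_compare(), alloc);  // empty tree bound to `alloc`
      if (alloc == other.get_allocator()) {
        result.swap(other);  // equal allocators: O(1) steal of the nodes
      } else {
        // Unequal allocators: element-wise move, mirroring
        // copy_or_move_values_in_order.
        for (auto &kv : other) result.insert(result.end(), std::move(kv));
      }
      return result;
    }
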
~btree() {
// Put static_asserts in destructor to avoid triggering them before the type
@@ -1147,17 +1216,22 @@ class btree {
return const_reverse_iterator(begin());
}
- // Finds the first element whose key is not less than key.
+ // Finds the first element whose key is not less than `key`.
template <typename K>
iterator lower_bound(const K &key) {
- return internal_end(internal_lower_bound(key));
+ return internal_end(internal_lower_bound(key).value);
}
template <typename K>
const_iterator lower_bound(const K &key) const {
- return internal_end(internal_lower_bound(key));
+ return internal_end(internal_lower_bound(key).value);
}
- // Finds the first element whose key is greater than key.
+ // Finds the first element whose key is not less than `key` and also returns
+ // whether that element is equal to `key`.
+ template <typename K>
+ std::pair<iterator, bool> lower_bound_equal(const K &key) const;
+
+ // Finds the first element whose key is greater than `key`.
template <typename K>
iterator upper_bound(const K &key) {
return internal_end(internal_upper_bound(key));
@@ -1239,18 +1313,8 @@ class btree {
// to the element after the last erased element.
std::pair<size_type, iterator> erase_range(iterator begin, iterator end);
- // Erases the specified key from the btree. Returns 1 if an element was
- // erased and 0 otherwise.
- template <typename K>
- size_type erase_unique(const K &key);
-
- // Erases all of the entries matching the specified key from the
- // btree. Returns the number of elements erased.
- template <typename K>
- size_type erase_multi(const K &key);
-
- // Finds the iterator corresponding to a key or returns end() if the key is
- // not present.
+ // Finds an element with key equivalent to `key` or returns `end()` if `key`
+ // is not present.
template <typename K>
iterator find(const K &key) {
return internal_end(internal_find(key));
@@ -1260,23 +1324,6 @@ class btree {
return internal_end(internal_find(key));
}
- // Returns a count of the number of times the key appears in the btree.
- template <typename K>
- size_type count_unique(const K &key) const {
- const iterator begin = internal_find(key);
- if (begin.node == nullptr) {
- // The key doesn't exist in the tree.
- return 0;
- }
- return 1;
- }
- // Returns a count of the number of times the key appears in the btree.
- template <typename K>
- size_type count_multi(const K &key) const {
- const auto range = equal_range(key);
- return std::distance(range.first, range.second);
- }
-
// Clear the btree, deleting all of the values it contains.
void clear();
@@ -1339,12 +1386,14 @@ class btree {
}
}
- // The average number of bytes used per value stored in the btree.
+ // The average number of bytes used per value stored in the btree assuming
+ // random insertion order.
static double average_bytes_per_value() {
- // Returns the number of bytes per value on a leaf node that is 75%
- // full. Experimentally, this matches up nicely with the computed number of
- // bytes per value in trees that had their values inserted in random order.
- return node_type::LeafSize() / (kNodeValues * 0.75);
+ // The expected number of values per node with random insertion order is the
+ // average of the maximum and minimum numbers of values per node.
+ const double expected_values_per_node =
+ (kNodeSlots + kMinNodeValues) / 2.0;
+ return node_type::LeafSize() / expected_values_per_node;
}
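
A worked example with assumed numbers (not taken from the patch): for a leaf
with kNodeSlots = 62 and kMinNodeValues = 31 whose LeafSize() is 1024 bytes,
expected_values_per_node = (62 + 31) / 2.0 = 46.5, so the estimate is
1024 / 46.5, roughly 22 bytes per value.
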
// The fullness of the btree. Computed as the number of elements in the btree
@@ -1354,7 +1403,7 @@ class btree {
// Returns 0 for empty trees.
double fullness() const {
if (empty()) return 0.0;
- return static_cast<double>(size()) / (nodes() * kNodeValues);
+ return static_cast<double>(size()) / (nodes() * kNodeSlots);
}
// The overhead of the btree structure in bytes per node. Computed as the
// total number of bytes used by the btree minus the number of bytes used for
@@ -1404,7 +1453,7 @@ class btree {
}
node_type *new_leaf_node(node_type *parent) {
node_type *n = allocate(node_type::LeafSize());
- n->init_leaf(parent, kNodeValues);
+ n->init_leaf(parent, kNodeSlots);
return n;
}
node_type *new_leaf_root_node(const int max_count) {
@@ -1453,28 +1502,19 @@ class btree {
static IterType internal_last(IterType iter);
// Returns an iterator pointing to the leaf position at which key would
- // reside in the tree. We provide 2 versions of internal_locate. The first
- // version uses a less-than comparator and is incapable of distinguishing when
- // there is an exact match. The second version is for the key-compare-to
- // specialization and distinguishes exact matches. The key-compare-to
- // specialization allows the caller to avoid a subsequent comparison to
- // determine if an exact match was made, which is important for keys with
- // expensive comparison, such as strings.
+ // reside in the tree, unless there is an exact match - in which case, the
+ // result may not be on a leaf. When there's a three-way comparator, we can
+ // return whether there was an exact match. This allows the caller to avoid a
+ // subsequent comparison to determine if an exact match was made, which is
+ // important for keys with expensive comparison, such as strings.
template <typename K>
SearchResult<iterator, is_key_compare_to::value> internal_locate(
const K &key) const;
- template <typename K>
- SearchResult<iterator, false> internal_locate_impl(
- const K &key, std::false_type /* IsCompareTo */) const;
-
- template <typename K>
- SearchResult<iterator, true> internal_locate_impl(
- const K &key, std::true_type /* IsCompareTo */) const;
-
// Internal routine which implements lower_bound().
template <typename K>
- iterator internal_lower_bound(const K &key) const;
+ SearchResult<iterator, is_key_compare_to::value> internal_lower_bound(
+ const K &key) const;
// Internal routine which implements upper_bound().
template <typename K>
@@ -1503,13 +1543,6 @@ class btree {
return res;
}
- public:
- // Exposed only for tests.
- static bool testonly_uses_linear_node_search() {
- return node_type::testonly_uses_linear_node_search();
- }
-
- private:
// We use compressed tuple in order to save space because key_compare and
// allocator_type are usually empty.
absl::container_internal::CompressedTuple<key_compare, allocator_type,
@@ -1665,7 +1698,7 @@ template <typename P>
void btree_node<P>::split(const int insert_position, btree_node *dest,
allocator_type *alloc) {
assert(dest->count() == 0);
- assert(max_count() == kNodeValues);
+ assert(max_count() == kNodeSlots);
// We bias the split based on the position being inserted. If we're
// inserting at the beginning of the left node then bias the split to put
@@ -1673,7 +1706,7 @@ void btree_node<P>::split(const int insert_position, btree_node *dest,
// right node then bias the split to put more values on the left node.
if (insert_position == start()) {
dest->set_finish(dest->start() + finish() - 1);
- } else if (insert_position == kNodeValues) {
+ } else if (insert_position == kNodeSlots) {
dest->set_finish(dest->start());
} else {
dest->set_finish(dest->start() + count() / 2);
@@ -1744,7 +1777,7 @@ void btree_node<P>::clear_and_delete(btree_node *node, allocator_type *alloc) {
// Navigate to the leftmost leaf under node, and then delete upwards.
while (!node->leaf()) node = node->start_child();
- // Use `int` because `pos` needs to be able to hold `kNodeValues+1`, which
+ // Use `int` because `pos` needs to be able to hold `kNodeSlots+1`, which
// isn't guaranteed to be a valid `field_type`.
int pos = node->position();
btree_node *parent = node->parent();
@@ -1832,7 +1865,7 @@ void btree_iterator<N, R, P>::decrement_slow() {
// btree methods
template <typename P>
template <typename Btree>
-void btree<P>::copy_or_move_values_in_order(Btree *other) {
+void btree<P>::copy_or_move_values_in_order(Btree &other) {
static_assert(std::is_same<btree, Btree>::value ||
std::is_same<const btree, Btree>::value,
"Btree type must be same or const.");
@@ -1840,11 +1873,11 @@ void btree<P>::copy_or_move_values_in_order(Btree *other) {
// We can avoid key comparisons because we know the order of the
// values is the same order we'll store them in.
- auto iter = other->begin();
- if (iter == other->end()) return;
+ auto iter = other.begin();
+ if (iter == other.end()) return;
insert_multi(maybe_move_from_iterator(iter));
++iter;
- for (; iter != other->end(); ++iter) {
+ for (; iter != other.end(); ++iter) {
// If the btree is not empty, we can just insert the new value at the end
// of the tree.
internal_emplace(end(), maybe_move_from_iterator(iter));
@@ -1863,7 +1896,7 @@ constexpr bool btree<P>::static_assert_validation() {
// Note: We assert that kTargetValues, which is computed from
// Params::kTargetNodeSize, must fit the node_type::field_type.
static_assert(
- kNodeValues < (1 << (8 * sizeof(typename node_type::field_type))),
+ kNodeSlots < (1 << (8 * sizeof(typename node_type::field_type))),
"target node size too large");
// Verify that key_compare returns an absl::{weak,strong}_ordering or bool.
@@ -1883,31 +1916,29 @@ constexpr bool btree<P>::static_assert_validation() {
}
template <typename P>
-btree<P>::btree(const key_compare &comp, const allocator_type &alloc)
- : root_(comp, alloc, EmptyNode()), rightmost_(EmptyNode()), size_(0) {}
-
-template <typename P>
-btree<P>::btree(const btree &other)
- : btree(other.key_comp(), other.allocator()) {
- copy_or_move_values_in_order(&other);
+template <typename K>
+auto btree<P>::lower_bound_equal(const K &key) const
+ -> std::pair<iterator, bool> {
+ const SearchResult<iterator, is_key_compare_to::value> res =
+ internal_lower_bound(key);
+ const iterator lower = iterator(internal_end(res.value));
+ const bool equal = res.HasMatch()
+ ? res.IsEq()
+ : lower != end() && !compare_keys(key, lower.key());
+ return {lower, equal};
}
template <typename P>
template <typename K>
auto btree<P>::equal_range(const K &key) -> std::pair<iterator, iterator> {
- const iterator lower = lower_bound(key);
- // TODO(ezb): we should be able to avoid this comparison when there's a
- // three-way comparator.
- if (lower == end() || compare_keys(key, lower.key())) return {lower, lower};
+ const std::pair<iterator, bool> lower_and_equal = lower_bound_equal(key);
+ const iterator lower = lower_and_equal.first;
+ if (!lower_and_equal.second) {
+ return {lower, lower};
+ }
const iterator next = std::next(lower);
- // When the comparator is heterogeneous, we can't assume that comparison with
- // non-`key_type` will be equivalent to `key_type` comparisons so there
- // could be multiple equivalent keys even in a unique-container. But for
- // heterogeneous comparisons from the default string adapted comparators, we
- // don't need to worry about this.
- if (!is_multi_container::value &&
- (std::is_same<K, key_type>::value || is_key_compare_adapted::value)) {
+ if (!params_type::template can_have_multiple_equivalent_keys<K>()) {
// The next iterator after lower must point to a key greater than `key`.
// Note: if this assert fails, then it may indicate that the comparator does
// not meet the equivalence requirements for Compare
@@ -1918,7 +1949,7 @@ auto btree<P>::equal_range(const K &key) -> std::pair<iterator, iterator> {
// Try once more to avoid the call to upper_bound() if there's only one
// equivalent key. This should prevent all calls to upper_bound() in cases of
// unique-containers with heterogeneous comparators in which all comparison
- // operators are equivalent.
+ // operators have the same equivalence classes.
if (next == end() || compare_keys(key, next.key())) return {lower, next};
// In this case, we need to call upper_bound() to avoid worst case O(N)
@@ -1934,8 +1965,8 @@ auto btree<P>::insert_unique(const K &key, Args &&... args)
mutable_root() = rightmost_ = new_leaf_root_node(1);
}
- auto res = internal_locate(key);
- iterator &iter = res.value;
+ SearchResult<iterator, is_key_compare_to::value> res = internal_locate(key);
+ iterator iter = res.value;
if (res.HasMatch()) {
if (res.IsEq()) {
@@ -2049,7 +2080,7 @@ auto btree<P>::operator=(const btree &other) -> btree & {
*mutable_allocator() = other.allocator();
}
- copy_or_move_values_in_order(&other);
+ copy_or_move_values_in_order(other);
}
return *this;
}
@@ -2079,7 +2110,7 @@ auto btree<P>::operator=(btree &&other) noexcept -> btree & {
// comparator while moving the values so we can't swap the key
// comparators.
*mutable_key_comp() = other.key_comp();
- copy_or_move_values_in_order(&other);
+ copy_or_move_values_in_order(other);
}
}
}
@@ -2203,31 +2234,6 @@ auto btree<P>::erase_range(iterator begin, iterator end)
}
template <typename P>
-template <typename K>
-auto btree<P>::erase_unique(const K &key) -> size_type {
- const iterator iter = internal_find(key);
- if (iter.node == nullptr) {
- // The key doesn't exist in the tree, return nothing done.
- return 0;
- }
- erase(iter);
- return 1;
-}
-
-template <typename P>
-template <typename K>
-auto btree<P>::erase_multi(const K &key) -> size_type {
- const iterator begin = internal_lower_bound(key);
- if (begin.node == nullptr) {
- // The key doesn't exist in the tree, return nothing done.
- return 0;
- }
- // Delete all of the keys between begin and upper_bound(key).
- const iterator end = internal_end(internal_upper_bound(key));
- return erase_range(begin, end).first;
-}
-
-template <typename P>
void btree<P>::clear() {
if (!empty()) {
node_type::clear_and_delete(root(), mutable_allocator());
@@ -2271,7 +2277,7 @@ void btree<P>::rebalance_or_split(iterator *iter) {
node_type *&node = iter->node;
int &insert_position = iter->position;
assert(node->count() == node->max_count());
- assert(kNodeValues == node->max_count());
+ assert(kNodeSlots == node->max_count());
// First try to make room on the node by rebalancing.
node_type *parent = node->parent();
@@ -2279,17 +2285,17 @@ void btree<P>::rebalance_or_split(iterator *iter) {
if (node->position() > parent->start()) {
// Try rebalancing with our left sibling.
node_type *left = parent->child(node->position() - 1);
- assert(left->max_count() == kNodeValues);
- if (left->count() < kNodeValues) {
+ assert(left->max_count() == kNodeSlots);
+ if (left->count() < kNodeSlots) {
// We bias rebalancing based on the position being inserted. If we're
// inserting at the end of the right node then we bias rebalancing to
// fill up the left node.
- int to_move = (kNodeValues - left->count()) /
- (1 + (insert_position < kNodeValues));
+ int to_move = (kNodeSlots - left->count()) /
+ (1 + (insert_position < static_cast<int>(kNodeSlots)));
to_move = (std::max)(1, to_move);
if (insert_position - to_move >= node->start() ||
- left->count() + to_move < kNodeValues) {
+ left->count() + to_move < static_cast<int>(kNodeSlots)) {
left->rebalance_right_to_left(to_move, node, mutable_allocator());
assert(node->max_count() - node->count() == to_move);
@@ -2308,17 +2314,17 @@ void btree<P>::rebalance_or_split(iterator *iter) {
if (node->position() < parent->finish()) {
// Try rebalancing with our right sibling.
node_type *right = parent->child(node->position() + 1);
- assert(right->max_count() == kNodeValues);
- if (right->count() < kNodeValues) {
+ assert(right->max_count() == kNodeSlots);
+ if (right->count() < kNodeSlots) {
// We bias rebalancing based on the position being inserted. If we're
// inserting at the beginning of the left node then we bias rebalancing
// to fill up the right node.
- int to_move = (kNodeValues - right->count()) /
+ int to_move = (static_cast<int>(kNodeSlots) - right->count()) /
(1 + (insert_position > node->start()));
to_move = (std::max)(1, to_move);
if (insert_position <= node->finish() - to_move ||
- right->count() + to_move < kNodeValues) {
+ right->count() + to_move < static_cast<int>(kNodeSlots)) {
node->rebalance_left_to_right(to_move, right, mutable_allocator());
if (insert_position > node->finish()) {
@@ -2334,8 +2340,8 @@ void btree<P>::rebalance_or_split(iterator *iter) {
// Rebalancing failed, make sure there is room on the parent node for a new
// value.
- assert(parent->max_count() == kNodeValues);
- if (parent->count() == kNodeValues) {
+ assert(parent->max_count() == kNodeSlots);
+ if (parent->count() == kNodeSlots) {
iterator parent_iter(node->parent(), node->position());
rebalance_or_split(&parent_iter);
}
@@ -2380,8 +2386,8 @@ bool btree<P>::try_merge_or_rebalance(iterator *iter) {
if (iter->node->position() > parent->start()) {
// Try merging with our left sibling.
node_type *left = parent->child(iter->node->position() - 1);
- assert(left->max_count() == kNodeValues);
- if (1 + left->count() + iter->node->count() <= kNodeValues) {
+ assert(left->max_count() == kNodeSlots);
+ if (1U + left->count() + iter->node->count() <= kNodeSlots) {
iter->position += 1 + left->count();
merge_nodes(left, iter->node);
iter->node = left;
@@ -2391,8 +2397,8 @@ bool btree<P>::try_merge_or_rebalance(iterator *iter) {
if (iter->node->position() < parent->finish()) {
// Try merging with our right sibling.
node_type *right = parent->child(iter->node->position() + 1);
- assert(right->max_count() == kNodeValues);
- if (1 + iter->node->count() + right->count() <= kNodeValues) {
+ assert(right->max_count() == kNodeSlots);
+ if (1U + iter->node->count() + right->count() <= kNodeSlots) {
merge_nodes(iter->node, right);
return true;
}
@@ -2473,12 +2479,12 @@ inline auto btree<P>::internal_emplace(iterator iter, Args &&... args)
allocator_type *alloc = mutable_allocator();
if (iter.node->count() == max_count) {
// Make room in the leaf for the new item.
- if (max_count < kNodeValues) {
+ if (max_count < kNodeSlots) {
// Insertion into the root where the root is smaller than the full node
// size. Simply grow the size of the root node.
assert(iter.node == root());
iter.node =
- new_leaf_root_node((std::min<int>)(kNodeValues, 2 * max_count));
+ new_leaf_root_node((std::min<int>)(kNodeSlots, 2 * max_count));
// Transfer the values from the old root to the new root.
node_type *old_root = root();
node_type *new_root = iter.node;
@@ -2501,61 +2507,51 @@ template <typename P>
template <typename K>
inline auto btree<P>::internal_locate(const K &key) const
-> SearchResult<iterator, is_key_compare_to::value> {
- return internal_locate_impl(key, is_key_compare_to());
-}
-
-template <typename P>
-template <typename K>
-inline auto btree<P>::internal_locate_impl(
- const K &key, std::false_type /* IsCompareTo */) const
- -> SearchResult<iterator, false> {
- iterator iter(const_cast<node_type *>(root()));
- for (;;) {
- iter.position = iter.node->lower_bound(key, key_comp()).value;
- // NOTE: we don't need to walk all the way down the tree if the keys are
- // equal, but determining equality would require doing an extra comparison
- // on each node on the way down, and we will need to go all the way to the
- // leaf node in the expected case.
- if (iter.node->leaf()) {
- break;
- }
- iter.node = iter.node->child(iter.position);
- }
- return {iter};
-}
-
-template <typename P>
-template <typename K>
-inline auto btree<P>::internal_locate_impl(
- const K &key, std::true_type /* IsCompareTo */) const
- -> SearchResult<iterator, true> {
iterator iter(const_cast<node_type *>(root()));
for (;;) {
- SearchResult<int, true> res = iter.node->lower_bound(key, key_comp());
+ SearchResult<int, is_key_compare_to::value> res =
+ iter.node->lower_bound(key, key_comp());
iter.position = res.value;
- if (res.match == MatchKind::kEq) {
+ if (res.IsEq()) {
return {iter, MatchKind::kEq};
}
+ // Note: in the non-key-compare-to case, we don't need to walk all the way
+ // down the tree if the keys are equal, but determining equality would
+ // require doing an extra comparison on each node on the way down, and we
+ // will need to go all the way to the leaf node in the expected case.
if (iter.node->leaf()) {
break;
}
iter.node = iter.node->child(iter.position);
}
+ // Note: in the non-key-compare-to case, the key may actually be equivalent
+ // here (and the MatchKind::kNe is ignored).
return {iter, MatchKind::kNe};
}
template <typename P>
template <typename K>
-auto btree<P>::internal_lower_bound(const K &key) const -> iterator {
+auto btree<P>::internal_lower_bound(const K &key) const
+ -> SearchResult<iterator, is_key_compare_to::value> {
+ if (!params_type::template can_have_multiple_equivalent_keys<K>()) {
+ SearchResult<iterator, is_key_compare_to::value> ret = internal_locate(key);
+ ret.value = internal_last(ret.value);
+ return ret;
+ }
iterator iter(const_cast<node_type *>(root()));
+ SearchResult<int, is_key_compare_to::value> res;
+ bool seen_eq = false;
for (;;) {
- iter.position = iter.node->lower_bound(key, key_comp()).value;
+ res = iter.node->lower_bound(key, key_comp());
+ iter.position = res.value;
if (iter.node->leaf()) {
break;
}
+ seen_eq = seen_eq || res.IsEq();
iter.node = iter.node->child(iter.position);
}
- return internal_last(iter);
+ if (res.IsEq()) return {iter, MatchKind::kEq};
+ return {internal_last(iter), seen_eq ? MatchKind::kEq : MatchKind::kNe};
}
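
The seen_eq flag handles a subtle case: equality can be observed at an
internal node even though the final leaf-level comparison finds no match. A
hypothetical trace, assuming a multiset {1, 2, 3} stored as a root [2] with
leaf children [1] and [3], looking up key 2:

    // root [2]: lower_bound(2) -> position 0, comparison == 0, seen_eq set;
    //           descend into child(0) = [1].
    // leaf [1]: lower_bound(2) -> position 1 (past the end), no match.
    // internal_last() then steps up to the root's element 2, and seen_eq
    // lets us still report MatchKind::kEq for that exact match.
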
template <typename P>
@@ -2575,7 +2571,7 @@ auto btree<P>::internal_upper_bound(const K &key) const -> iterator {
template <typename P>
template <typename K>
auto btree<P>::internal_find(const K &key) const -> iterator {
- auto res = internal_locate(key);
+ SearchResult<iterator, is_key_compare_to::value> res = internal_locate(key);
if (res.HasMatch()) {
if (res.IsEq()) {
return res.value;