author    Vitaly Goldshteyn <goldvitaly@google.com>       2024-03-04 13:12:48 -0800
committer Copybara-Service <copybara-worker@google.com>   2024-03-04 13:13:36 -0800
commit    8dc90ff07402cd027daec520bb77f46e51855889 (patch)
tree      bd9f651e2316b649dd24f4be2206af5590251e6a /absl/container/internal
parent    59daf188bcced05bbac02d71894e5ba4021597a6 (diff)
Extract `InsertPosition` function to be able to reuse it.

PiperOrigin-RevId: 612560213
Change-Id: Id75dfd1222a0bed8ec72ce21e4a97b1d09fc9eaa
Diffstat (limited to 'absl/container/internal')
-rw-r--r--  absl/container/internal/raw_hash_set.cc |  5
-rw-r--r--  absl/container/internal/raw_hash_set.h  | 35
2 files changed, 27 insertions(+), 13 deletions(-)
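For context on the hunks below: a SwissTable probe step examines one group of control bytes at a time and gets back a bitmask with one set bit per empty-or-deleted slot, so an insertion has several candidate positions inside the group. The following standalone sketch (a toy mask type built on C++20 <bit>, not Abseil's actual BitMask) shows the two candidates the extracted helper chooses between.

#include <bit>
#include <cstdint>
#include <cstdio>

// Toy stand-in for the group bitmask used in raw_hash_set.h: one set bit per
// slot in the group that could accept the new element.
struct ToyMask {
  uint32_t bits;
  int LowestBitSet() const { return std::countr_zero(bits); }
  int HighestBitSet() const { return std::bit_width(bits) - 1; }
};

int main() {
  ToyMask mask{0b0010'0110};  // slots 1, 2 and 5 are empty or deleted
  // Release builds always take slot 1; debug builds may take slot 5 instead.
  std::printf("front candidate: %d, back candidate: %d\n",
              mask.LowestBitSet(), mask.HighestBitSet());
}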
diff --git a/absl/container/internal/raw_hash_set.cc b/absl/container/internal/raw_hash_set.cc
index 02301e19..efb3d6f3 100644
--- a/absl/container/internal/raw_hash_set.cc
+++ b/absl/container/internal/raw_hash_set.cc
@@ -104,10 +104,11 @@ bool CommonFieldsGenerationInfoEnabled::should_rehash_for_bug_detection_on_move(
return ShouldRehashForBugDetection(ctrl, capacity);
}
-bool ShouldInsertBackwards(size_t hash, const ctrl_t* ctrl) {
+bool ShouldInsertBackwardsForDebug(size_t capacity, size_t hash,
+ const ctrl_t* ctrl) {
// To avoid problems with weak hashes and single bit tests, we use % 13.
// TODO(kfm,sbenza): revisit after we do unconditional mixing
- return (H1(hash, ctrl) ^ RandomSeed()) % 13 > 6;
+ return !is_small(capacity) && (H1(hash, ctrl) ^ RandomSeed()) % 13 > 6;
}
void ConvertDeletedToEmptyAndFullToDeleted(ctrl_t* ctrl, size_t capacity) {
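The renamed predicate above keeps the existing `% 13` trick: the seeded hash is reduced modulo a small prime, and residues 7 through 12 select backward insertion, so roughly 6 of 13 (about 46%) of debug-build insertions go to the back of the group, and the decision depends on several hash bits rather than a single possibly-biased bit. Below is a standalone model of that decision only; the real code mixes `H1(hash, ctrl)` with `RandomSeed()` and uses `is_small(capacity)`, which are replaced here by hypothetical plain parameters.

#include <cstddef>

// Model of the ShouldInsertBackwardsForDebug decision, with the hash mixing
// and the small-table check replaced by plain parameters.
inline bool backwards_for_debug_model(size_t mixed_hash, bool table_is_small) {
  // Residues 7..12 out of 0..12 return true: 6 of 13 cases, ~46%.
  return !table_is_small && mixed_hash % 13 > 6;
}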
diff --git a/absl/container/internal/raw_hash_set.h b/absl/container/internal/raw_hash_set.h
index 07ff79af..514f21ae 100644
--- a/absl/container/internal/raw_hash_set.h
+++ b/absl/container/internal/raw_hash_set.h
@@ -546,7 +546,27 @@ inline bool IsEmptyGeneration(const GenerationType* generation) {
// Mixes a randomly generated per-process seed with `hash` and `ctrl` to
// randomize insertion order within groups.
-bool ShouldInsertBackwards(size_t hash, const ctrl_t* ctrl);
+bool ShouldInsertBackwardsForDebug(size_t capacity, size_t hash,
+ const ctrl_t* ctrl);
+
+// Returns insert position for the given mask.
+// We want to add entropy even when ASLR is not enabled.
+// In debug build we will randomly insert in either the front or back of
+// the group.
+// TODO(kfm,sbenza): revisit after we do unconditional mixing
+template <class Mask>
+ABSL_ATTRIBUTE_ALWAYS_INLINE inline auto GetInsertionOffset(
+ Mask mask, ABSL_ATTRIBUTE_UNUSED size_t capacity,
+ ABSL_ATTRIBUTE_UNUSED size_t hash,
+ ABSL_ATTRIBUTE_UNUSED const ctrl_t* ctrl) {
+#if defined(NDEBUG)
+ return mask.LowestBitSet();
+#else
+ return ShouldInsertBackwardsForDebug(capacity, hash, ctrl)
+ ? mask.HighestBitSet()
+ : mask.LowestBitSet();
+#endif
+}
// Returns a per-table, hash salt, which changes on resize. This gets mixed into
// H1 to randomize iteration order per-table.
@@ -1495,16 +1515,9 @@ inline FindInfo find_first_non_full(const CommonFields& common, size_t hash) {
GroupFullEmptyOrDeleted g{ctrl + seq.offset()};
auto mask = g.MaskEmptyOrDeleted();
if (mask) {
-#if !defined(NDEBUG)
- // We want to add entropy even when ASLR is not enabled.
- // In debug build we will randomly insert in either the front or back of
- // the group.
- // TODO(kfm,sbenza): revisit after we do unconditional mixing
- if (!is_small(common.capacity()) && ShouldInsertBackwards(hash, ctrl)) {
- return {seq.offset(mask.HighestBitSet()), seq.index()};
- }
-#endif
- return {seq.offset(mask.LowestBitSet()), seq.index()};
+ return {
+ seq.offset(GetInsertionOffset(mask, common.capacity(), hash, ctrl)),
+ seq.index()};
}
seq.next();
assert(seq.index() <= common.capacity() && "full table!");
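Taken together, the header change moves the per-build `#if` out of `find_first_non_full` and into one always-inline helper, so each call site reduces to a single expression. A hedged sketch of that shape follows; ToyMask and the function names here are illustrative stand-ins, not the actual raw_hash_set.h API.

#include <bit>
#include <cstdint>

struct ToyMask {
  uint32_t bits;
  explicit operator bool() const { return bits != 0; }
  int LowestBitSet() const { return std::countr_zero(bits); }
  int HighestBitSet() const { return std::bit_width(bits) - 1; }
};

// Build-dependent slot selection, mirroring the structure of the new
// GetInsertionOffset: deterministic front slot in release builds, an
// occasional back slot in debug builds for extra entropy.
template <class Mask>
inline int PickInsertionOffset(Mask mask, bool insert_backwards) {
#if defined(NDEBUG)
  (void)insert_backwards;
  return mask.LowestBitSet();
#else
  return insert_backwards ? mask.HighestBitSet() : mask.LowestBitSet();
#endif
}

// A caller then stays branch-free at the source level, e.g.:
//   if (mask) return seq_offset(PickInsertionOffset(mask, coin_flip));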