Diffstat (limited to 'absl/hash/internal/low_level_hash.cc')
-rw-r--r--  absl/hash/internal/low_level_hash.cc | 12 ++++++++++++
1 file changed, 12 insertions(+), 0 deletions(-)
diff --git a/absl/hash/internal/low_level_hash.cc b/absl/hash/internal/low_level_hash.cc
index 856bbd9b..6f9cb9c7 100644
--- a/absl/hash/internal/low_level_hash.cc
+++ b/absl/hash/internal/low_level_hash.cc
@@ -15,6 +15,7 @@
 #include "absl/hash/internal/low_level_hash.h"
 
 #include "absl/base/internal/unaligned_access.h"
+#include "absl/numeric/bits.h"
 #include "absl/numeric/int128.h"
 
 namespace absl {
@@ -22,9 +23,20 @@ ABSL_NAMESPACE_BEGIN
 namespace hash_internal {
 
 static uint64_t Mix(uint64_t v0, uint64_t v1) {
+#if !defined(__aarch64__)
+  // The default bit-mixer uses 64x64->128-bit multiplication.
   absl::uint128 p = v0;
   p *= v1;
   return absl::Uint128Low64(p) ^ absl::Uint128High64(p);
+#else
+  // The default bit-mixer above would perform poorly on some ARM microarchs,
+  // where calculating a 128-bit product requires a sequence of two
+  // instructions with a high combined latency and poor throughput.
+  // Instead, we mix bits using only 64-bit arithmetic, which is faster.
+  uint64_t p = v0 ^ absl::rotl(v1, 40);
+  p *= v1 ^ absl::rotl(v0, 39);
+  return p ^ (p >> 11);
+#endif
 }
 
 uint64_t LowLevelHash(const void* data, size_t len, uint64_t seed,
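
For context, below is a self-contained C++ sketch of the two mixing strategies
this diff switches between. It is an illustration under stated assumptions, not
the library code: Rotl64 is a hypothetical stand-in for absl::rotl, and the
128-bit path uses the GCC/Clang extension unsigned __int128 where the real code
uses absl::uint128.

#include <cstdint>
#include <cstdio>

// Hypothetical stand-in for absl::rotl; valid here for s in [1, 63].
static inline uint64_t Rotl64(uint64_t x, int s) {
  return (x << s) | (x >> (64 - s));
}

// Default mixer: fold a 64x64->128-bit product down to 64 bits by
// XORing its low and high halves. unsigned __int128 substitutes for
// absl::uint128 in this sketch.
static uint64_t Mix128(uint64_t v0, uint64_t v1) {
  unsigned __int128 p = static_cast<unsigned __int128>(v0) * v1;
  return static_cast<uint64_t>(p) ^ static_cast<uint64_t>(p >> 64);
}

// AArch64 variant from the diff: pre-rotate each operand into the
// other, take a single 64-bit product, then fold high bits down with
// a shift-xor.
static uint64_t Mix64(uint64_t v0, uint64_t v1) {
  uint64_t p = (v0 ^ Rotl64(v1, 40)) * (v1 ^ Rotl64(v0, 39));
  return p ^ (p >> 11);
}

int main() {
  const uint64_t a = 0x0123456789abcdef, b = 0xfedcba9876543210;
  std::printf("Mix128: %016llx\n",
              static_cast<unsigned long long>(Mix128(a, b)));
  std::printf("Mix64:  %016llx\n",
              static_cast<unsigned long long>(Mix64(a, b)));
}

The motivation matches the comment in the diff: on AArch64 the full 128-bit
product lowers to a MUL/UMULH instruction pair, so the variant taken on that
path replaces it with a single 64-bit multiplication plus cheap rotates and a
shift-xor. The rotation counts 40 and 39 and the final shift of 11 are taken
directly from the diff above.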