path: root/absl/base
author     Abseil Team <absl-team@google.com>  2018-08-23 10:41:14 -0700
committer  Derek Mauro <dmauro@google.com>     2018-08-23 13:48:48 -0400
commit     5e7d459eeca7bc53deab0ee9634601386b53d7c0 (patch)
tree       d4f394901eaf492da175e6ae92c195235a0fc9ab /absl/base
parent     bed5bd6e185c7e0311f3a1f2dab4c96083dac636 (diff)
Export of internal Abseil changes.
--
2dcf3ec79f961d2962cdad33ac4bfbcb14126dad by Abseil Team <absl-team@google.com>:

Consolidate implementations of CountLeadingZeros and add new ones for
CountTrailingZeros. Internal APIs only.

PiperOrigin-RevId: 209961247

--
4f3ac87c6928dab26faff962bb18aade9383f184 by Jorg Brown <jorg@google.com>:

Fix absl::CUnescape not to write to const_cast<char*>(str->data()), which
does the wrong thing if the string type is copy-on-write.

PiperOrigin-RevId: 209957656

--
c5103067be19dc88a4c32e5306154e5e3bba4673 by Tom Manshreck <shreck@google.com>:

Update comments in time library header files.

PiperOrigin-RevId: 209829588

--
fe36f9b6150243d2ac88e2a892d84c565c18cd2f by Abseil Team <absl-team@google.com>:

Clarifying the sample use of absl::LoadTimeZone function in the documentation.

PiperOrigin-RevId: 209782218

GitOrigin-RevId: 2dcf3ec79f961d2962cdad33ac4bfbcb14126dad
Change-Id: I3e2f87a8c543599b81eada58409a9bddc0b51ab8
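For context, here is a minimal caller-side sketch (not part of this commit) of how the internal bit-counting helpers added below might be used; BitWidth() and LowestSetBitIndex() are hypothetical names for illustration, not Abseil APIs:

    // Illustrative sketch only; assumes the absl/base/internal/bits.h header
    // introduced by this commit. BitWidth() and LowestSetBitIndex() are
    // hypothetical helpers, not part of Abseil.
    #include <cstdint>

    #include "absl/base/internal/bits.h"

    // Number of bits needed to represent n (0 for n == 0).
    inline int BitWidth(uint64_t n) {
      return n == 0 ? 0 : 64 - absl::base_internal::CountLeadingZeros64(n);
    }

    // Index of the lowest set bit; n must be non-zero.
    inline int LowestSetBitIndex(uint64_t n) {
      return absl::base_internal::CountTrailingZerosNonZero64(n);
    }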
Diffstat (limited to 'absl/base')
-rw-r--r--  absl/base/BUILD.bazel             20
-rw-r--r--  absl/base/CMakeLists.txt           1
-rw-r--r--  absl/base/internal/bits.h        193
-rw-r--r--  absl/base/internal/bits_test.cc   97
4 files changed, 311 insertions, 0 deletions
diff --git a/absl/base/BUILD.bazel b/absl/base/BUILD.bazel
index 7063f96a..83d48f6b 100644
--- a/absl/base/BUILD.bazel
+++ b/absl/base/BUILD.bazel
@@ -427,3 +427,23 @@ cc_test(
"@com_github_google_benchmark//:benchmark_main",
],
)
+
+cc_library(
+ name = "bits",
+ hdrs = ["internal/bits.h"],
+ visibility = [
+ "//absl:__subpackages__",
+ ],
+ deps = [":core_headers"],
+)
+
+cc_test(
+ name = "bits_test",
+ size = "small",
+ srcs = ["internal/bits_test.cc"],
+ copts = ABSL_TEST_COPTS,
+ deps = [
+ ":bits",
+ "@com_google_googletest//:gtest_main",
+ ],
+)
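Usage note (an assumption, not part of this page): with the cc_test target above in place, the new test could be run from an Abseil checkout with, for example:

    bazel test //absl/base:bits_test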
diff --git a/absl/base/CMakeLists.txt b/absl/base/CMakeLists.txt
index 01d2af08..04a6eb31 100644
--- a/absl/base/CMakeLists.txt
+++ b/absl/base/CMakeLists.txt
@@ -31,6 +31,7 @@ list(APPEND BASE_PUBLIC_HEADERS
list(APPEND BASE_INTERNAL_HEADERS
"internal/atomic_hook.h"
+ "internal/bits.h"
"internal/cycleclock.h"
"internal/direct_mmap.h"
"internal/endian.h"
diff --git a/absl/base/internal/bits.h b/absl/base/internal/bits.h
new file mode 100644
index 00000000..bc7faaee
--- /dev/null
+++ b/absl/base/internal/bits.h
@@ -0,0 +1,193 @@
+// Copyright 2018 The Abseil Authors.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#ifndef ABSL_BASE_INTERNAL_BITS_H_
+#define ABSL_BASE_INTERNAL_BITS_H_
+
+// This file contains bitwise ops which are implementation details of various
+// absl libraries.
+
+#include <cstdint>
+
+// Clang on Windows has __builtin_clzll; otherwise we need to use the
+// Windows intrinsic functions.
+#if defined(_MSC_VER)
+#include <intrin.h>
+#if defined(_M_X64)
+#pragma intrinsic(_BitScanReverse64)
+#pragma intrinsic(_BitScanForward64)
+#endif
+#pragma intrinsic(_BitScanReverse)
+#pragma intrinsic(_BitScanForward)
+#endif
+
+#include "absl/base/attributes.h"
+
+#if defined(_MSC_VER)
+// We can achieve something similar to attribute((always_inline)) with MSVC by
+// using the __forceinline keyword; however, this is not perfect. MSVC is
+// much less aggressive about inlining, even with the __forceinline keyword.
+#define ABSL_BASE_INTERNAL_FORCEINLINE __forceinline
+#else
+// On other compilers, use the always_inline attribute in addition to inline.
+#define ABSL_BASE_INTERNAL_FORCEINLINE inline ABSL_ATTRIBUTE_ALWAYS_INLINE
+#endif
+
+
+namespace absl {
+namespace base_internal {
+
+ABSL_BASE_INTERNAL_FORCEINLINE int CountLeadingZeros64Slow(uint64_t n) {
+ int zeroes = 60;
+ if (n >> 32) zeroes -= 32, n >>= 32;
+ if (n >> 16) zeroes -= 16, n >>= 16;
+ if (n >> 8) zeroes -= 8, n >>= 8;
+ if (n >> 4) zeroes -= 4, n >>= 4;
+ return "\4\3\2\2\1\1\1\1\0\0\0\0\0\0\0"[n] + zeroes;
+}
+
+ABSL_BASE_INTERNAL_FORCEINLINE int CountLeadingZeros64(uint64_t n) {
+#if defined(_MSC_VER) && defined(_M_X64)
+ // MSVC does not have __builtin_clzll. Use _BitScanReverse64.
+ unsigned long result = 0; // NOLINT(runtime/int)
+ if (_BitScanReverse64(&result, n)) {
+ return 63 - result;
+ }
+ return 64;
+#elif defined(_MSC_VER)
+ // MSVC does not have __builtin_clzll. Compose two calls to _BitScanReverse.
+ unsigned long result = 0; // NOLINT(runtime/int)
+ if ((n >> 32) && _BitScanReverse(&result, n >> 32)) {
+ return 31 - result;
+ }
+ if (_BitScanReverse(&result, n)) {
+ return 63 - result;
+ }
+ return 64;
+#elif defined(__GNUC__)
+ // Use __builtin_clzll, which uses the following instructions:
+ // x86: bsr
+ // ARM64: clz
+ // PPC: cntlzd
+ static_assert(sizeof(unsigned long long) == sizeof(n), // NOLINT(runtime/int)
+ "__builtin_clzll does not take 64-bit arg");
+
+ // Handle 0 as a special case because __builtin_clzll(0) is undefined.
+ if (n == 0) {
+ return 64;
+ }
+ return __builtin_clzll(n);
+#else
+ return CountLeadingZeros64Slow(n);
+#endif
+}
+
+ABSL_BASE_INTERNAL_FORCEINLINE int CountLeadingZeros32Slow(uint64_t n) {
+ int zeroes = 28;
+ if (n >> 16) zeroes -= 16, n >>= 16;
+ if (n >> 8) zeroes -= 8, n >>= 8;
+ if (n >> 4) zeroes -= 4, n >>= 4;
+ return "\4\3\2\2\1\1\1\1\0\0\0\0\0\0\0"[n] + zeroes;
+}
+
+ABSL_BASE_INTERNAL_FORCEINLINE int CountLeadingZeros32(uint32_t n) {
+#if defined(_MSC_VER)
+ unsigned long result = 0; // NOLINT(runtime/int)
+ if (_BitScanReverse(&result, n)) {
+ return 31 - result;
+ }
+ return 32;
+#elif defined(__GNUC__)
+ // Use __builtin_clz, which uses the following instructions:
+ // x86: bsr
+ // ARM64: clz
+ // PPC: cntlzw
+ static_assert(sizeof(int) == sizeof(n),
+ "__builtin_clz does not take 32-bit arg");
+
+ // Handle 0 as a special case because __builtin_clz(0) is undefined.
+ if (n == 0) {
+ return 32;
+ }
+ return __builtin_clz(n);
+#else
+ return CountLeadingZeros32Slow(n);
+#endif
+}
+
+ABSL_BASE_INTERNAL_FORCEINLINE int CountTrailingZerosNonZero64Slow(uint64_t n) {
+ int c = 63;
+ n &= ~n + 1;  // isolate the lowest set bit
+ if (n & 0x00000000FFFFFFFF) c -= 32;
+ if (n & 0x0000FFFF0000FFFF) c -= 16;
+ if (n & 0x00FF00FF00FF00FF) c -= 8;
+ if (n & 0x0F0F0F0F0F0F0F0F) c -= 4;
+ if (n & 0x3333333333333333) c -= 2;
+ if (n & 0x5555555555555555) c -= 1;
+ return c;
+}
+
+ABSL_BASE_INTERNAL_FORCEINLINE int CountTrailingZerosNonZero64(uint64_t n) {
+#if defined(_MSC_VER) && defined(_M_X64)
+ unsigned long result = 0; // NOLINT(runtime/int)
+ _BitScanForward64(&result, n);
+ return result;
+#elif defined(_MSC_VER)
+ unsigned long result = 0; // NOLINT(runtime/int)
+ if (static_cast<uint32_t>(n) == 0) {
+ _BitScanForward(&result, n >> 32);
+ return result + 32;
+ }
+ _BitScanForward(&result, n);
+ return result;
+#elif defined(__GNUC__)
+ static_assert(sizeof(unsigned long long) == sizeof(n), // NOLINT(runtime/int)
+ "__builtin_ctzll does not take 64-bit arg");
+ return __builtin_ctzll(n);
+#else
+ return CountTrailingZerosNonZero64Slow(n);
+#endif
+}
+
+ABSL_BASE_INTERNAL_FORCEINLINE int CountTrailingZerosNonZero32Slow(uint32_t n) {
+ int c = 31;
+ n &= ~n + 1;  // isolate the lowest set bit
+ if (n & 0x0000FFFF) c -= 16;
+ if (n & 0x00FF00FF) c -= 8;
+ if (n & 0x0F0F0F0F) c -= 4;
+ if (n & 0x33333333) c -= 2;
+ if (n & 0x55555555) c -= 1;
+ return c;
+}
+
+ABSL_BASE_INTERNAL_FORCEINLINE int CountTrailingZerosNonZero32(uint32_t n) {
+#if defined(_MSC_VER)
+ unsigned long result = 0; // NOLINT(runtime/int)
+ _BitScanForward(&result, n);
+ return result;
+#elif defined(__GNUC__)
+ static_assert(sizeof(int) == sizeof(n),
+ "__builtin_ctz does not take 32-bit arg");
+ return __builtin_ctz(n);
+#else
+ return CountTrailingZerosNonZero32Slow(n);
+#endif
+}
+
+#undef ABSL_BASE_INTERNAL_FORCEINLINE
+
+} // namespace base_internal
+} // namespace absl
+
+#endif // ABSL_BASE_INTERNAL_BITS_H_
diff --git a/absl/base/internal/bits_test.cc b/absl/base/internal/bits_test.cc
new file mode 100644
index 00000000..e5d991d6
--- /dev/null
+++ b/absl/base/internal/bits_test.cc
@@ -0,0 +1,97 @@
+// Copyright 2018 The Abseil Authors.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#include "absl/base/internal/bits.h"
+
+#include "gtest/gtest.h"
+
+namespace {
+
+int CLZ64(uint64_t n) {
+ int fast = absl::base_internal::CountLeadingZeros64(n);
+ int slow = absl::base_internal::CountLeadingZeros64Slow(n);
+ EXPECT_EQ(fast, slow) << n;
+ return fast;
+}
+
+TEST(BitsTest, CountLeadingZeros64) {
+ EXPECT_EQ(64, CLZ64(uint64_t{}));
+ EXPECT_EQ(0, CLZ64(~uint64_t{}));
+
+ for (int index = 0; index < 64; index++) {
+ uint64_t x = static_cast<uint64_t>(1) << index;
+ const auto cnt = 63 - index;
+ ASSERT_EQ(cnt, CLZ64(x)) << index;
+ ASSERT_EQ(cnt, CLZ64(x + x - 1)) << index;
+ }
+}
+
+int CLZ32(uint32_t n) {
+ int fast = absl::base_internal::CountLeadingZeros32(n);
+ int slow = absl::base_internal::CountLeadingZeros32Slow(n);
+ EXPECT_EQ(fast, slow) << n;
+ return fast;
+}
+
+TEST(BitsTest, CountLeadingZeros32) {
+ EXPECT_EQ(32, CLZ32(uint32_t{}));
+ EXPECT_EQ(0, CLZ32(~uint32_t{}));
+
+ for (int index = 0; index < 32; index++) {
+ uint32_t x = static_cast<uint32_t>(1) << index;
+ const auto cnt = 31 - index;
+ ASSERT_EQ(cnt, CLZ32(x)) << index;
+ ASSERT_EQ(cnt, CLZ32(x + x - 1)) << index;
+ ASSERT_EQ(CLZ64(x), CLZ32(x) + 32);
+ }
+}
+
+int CTZ64(uint64_t n) {
+ int fast = absl::base_internal::CountTrailingZerosNonZero64(n);
+ int slow = absl::base_internal::CountTrailingZerosNonZero64Slow(n);
+ EXPECT_EQ(fast, slow) << n;
+ return fast;
+}
+
+TEST(BitsTest, CountTrailingZerosNonZero64) {
+ EXPECT_EQ(0, CTZ64(~uint64_t{}));
+
+ for (int index = 0; index < 64; index++) {
+ uint64_t x = static_cast<uint64_t>(1) << index;
+ const auto cnt = index;
+ ASSERT_EQ(cnt, CTZ64(x)) << index;
+ ASSERT_EQ(cnt, CTZ64(~(x - 1))) << index;
+ }
+}
+
+int CTZ32(uint32_t n) {
+ int fast = absl::base_internal::CountTrailingZerosNonZero32(n);
+ int slow = absl::base_internal::CountTrailingZerosNonZero32Slow(n);
+ EXPECT_EQ(fast, slow) << n;
+ return fast;
+}
+
+TEST(BitsTest, CountTrailingZerosNonZero32) {
+ EXPECT_EQ(0, CTZ32(~uint32_t{}));
+
+ for (int index = 0; index < 32; index++) {
+ uint32_t x = static_cast<uint32_t>(1) << index;
+ const auto cnt = index;
+ ASSERT_EQ(cnt, CTZ32(x)) << index;
+ ASSERT_EQ(cnt, CTZ32(~(x - 1))) << index;
+ }
+}
+
+
+} // namespace