From 55621d1af55be7bf9a2c8e5dcef4368d9a6d4e51 Mon Sep 17 00:00:00 2001
From: Dmitri Gribenko
Date: Mon, 11 Dec 2023 09:07:07 -0800
Subject: Add nullability annotations

PiperOrigin-RevId: 589842893
Change-Id: I9657761d1f71c665582406f278c6605f6d382f6d
---
 absl/base/BUILD.bazel                 |  2 ++
 absl/base/CMakeLists.txt              |  2 ++
 absl/base/call_once.h                 | 24 +++++++++++++++---------
 absl/base/internal/endian.h           | 25 +++++++++++++------------
 absl/base/internal/unaligned_access.h | 19 +++++++++++++------
 5 files changed, 45 insertions(+), 27 deletions(-)

(limited to 'absl/base')

diff --git a/absl/base/BUILD.bazel b/absl/base/BUILD.bazel
index 60d05a83..06bd4950 100644
--- a/absl/base/BUILD.bazel
+++ b/absl/base/BUILD.bazel
@@ -287,6 +287,7 @@ cc_library(
         ":cycleclock_internal",
         ":dynamic_annotations",
         ":log_severity",
+        ":nullability",
         ":raw_logging_internal",
         ":spinlock_wait",
         "//absl/meta:type_traits",
@@ -549,6 +550,7 @@ cc_library(
         ":base",
         ":config",
         ":core_headers",
+        ":nullability",
     ],
 )
 
diff --git a/absl/base/CMakeLists.txt b/absl/base/CMakeLists.txt
index 9ca5cf8b..4cfc2285 100644
--- a/absl/base/CMakeLists.txt
+++ b/absl/base/CMakeLists.txt
@@ -247,6 +247,7 @@ absl_cc_library(
     absl::core_headers
     absl::dynamic_annotations
     absl::log_severity
+    absl::nullability
     absl::raw_logging_internal
     absl::spinlock_wait
     absl::type_traits
@@ -475,6 +476,7 @@ absl_cc_library(
     absl::base
     absl::config
     absl::core_headers
+    absl::nullability
   PUBLIC
 )
 
diff --git a/absl/base/call_once.h b/absl/base/call_once.h
index 08436bac..7b0e69cc 100644
--- a/absl/base/call_once.h
+++ b/absl/base/call_once.h
@@ -37,6 +37,7 @@
 #include "absl/base/internal/scheduling_mode.h"
 #include "absl/base/internal/spinlock_wait.h"
 #include "absl/base/macros.h"
+#include "absl/base/nullability.h"
 #include "absl/base/optimization.h"
 #include "absl/base/port.h"
 
@@ -46,7 +47,8 @@ ABSL_NAMESPACE_BEGIN
 class once_flag;
 
 namespace base_internal {
-std::atomic<uint32_t>* ControlWord(absl::once_flag* flag);
+absl::Nonnull<std::atomic<uint32_t>*> ControlWord(
+    absl::Nonnull<absl::once_flag*> flag);
 }  // namespace base_internal
 
 // call_once()
@@ -89,7 +91,8 @@ class once_flag {
   once_flag& operator=(const once_flag&) = delete;
 
  private:
-  friend std::atomic<uint32_t>* base_internal::ControlWord(once_flag* flag);
+  friend absl::Nonnull<std::atomic<uint32_t>*> base_internal::ControlWord(
+      absl::Nonnull<once_flag*> flag);
   std::atomic<uint32_t> control_;
 };
 
@@ -103,7 +106,8 @@ namespace base_internal {
 // Like call_once, but uses KERNEL_ONLY scheduling. Intended to be used to
 // initialize entities used by the scheduler implementation.
 template <typename Callable, typename... Args>
-void LowLevelCallOnce(absl::once_flag* flag, Callable&& fn, Args&&... args);
+void LowLevelCallOnce(absl::Nonnull<absl::once_flag*> flag, Callable&& fn,
+                      Args&&... args);
 
 // Disables scheduling while on stack when scheduling mode is non-cooperative.
 // No effect for cooperative scheduling modes.
@@ -143,10 +147,10 @@ enum {
 };
 
 template <typename Callable, typename... Args>
-ABSL_ATTRIBUTE_NOINLINE
-void CallOnceImpl(std::atomic<uint32_t>* control,
-                  base_internal::SchedulingMode scheduling_mode, Callable&& fn,
-                  Args&&... args) {
+ABSL_ATTRIBUTE_NOINLINE void CallOnceImpl(
+    absl::Nonnull<std::atomic<uint32_t>*> control,
+    base_internal::SchedulingMode scheduling_mode, Callable&& fn,
+    Args&&... args) {
 #ifndef NDEBUG
   {
     uint32_t old_control = control->load(std::memory_order_relaxed);
@@ -185,12 +189,14 @@ void CallOnceImpl(std::atomic<uint32_t>* control,
   }  // else *control is already kOnceDone
 }
 
-inline std::atomic<uint32_t>* ControlWord(once_flag* flag) {
+inline absl::Nonnull<std::atomic<uint32_t>*> ControlWord(
+    absl::Nonnull<once_flag*> flag) {
   return &flag->control_;
 }
 
 template <typename Callable, typename... Args>
-void LowLevelCallOnce(absl::once_flag* flag, Callable&& fn, Args&&... args) {
+void LowLevelCallOnce(absl::Nonnull<absl::once_flag*> flag, Callable&& fn,
+                      Args&&... args) {
   std::atomic<uint32_t>* once = base_internal::ControlWord(flag);
   uint32_t s = once->load(std::memory_order_acquire);
   if (ABSL_PREDICT_FALSE(s != base_internal::kOnceDone)) {
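For context, the public call_once API is unchanged by these annotations; they only document that the internal control-word plumbing never accepts or returns null. A minimal usage sketch follows; the flag and function names are illustrative and not part of this change:

#include "absl/base/call_once.h"

// Illustrative one-time initialization. absl::call_once() takes the flag by
// reference, so the non-null contract on base_internal::ControlWord() is
// satisfied automatically at this level.
absl::once_flag init_once;

void InitSubsystem() { /* hypothetical one-time setup */ }

void EnsureInitialized() {
  absl::call_once(init_once, InitSubsystem);
}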
diff --git a/absl/base/internal/endian.h b/absl/base/internal/endian.h
index 50747d75..943f3d97 100644
--- a/absl/base/internal/endian.h
+++ b/absl/base/internal/endian.h
@@ -22,6 +22,7 @@
 #include "absl/base/casts.h"
 #include "absl/base/config.h"
 #include "absl/base/internal/unaligned_access.h"
+#include "absl/base/nullability.h"
 #include "absl/base/port.h"
 
 namespace absl {
@@ -160,27 +161,27 @@ inline int64_t ToHost(int64_t x) {
 }
 
 // Functions to do unaligned loads and stores in little-endian order.
-inline uint16_t Load16(const void *p) {
+inline uint16_t Load16(absl::Nonnull<const void *> p) {
   return ToHost16(ABSL_INTERNAL_UNALIGNED_LOAD16(p));
 }
 
-inline void Store16(void *p, uint16_t v) {
+inline void Store16(absl::Nonnull<void *> p, uint16_t v) {
   ABSL_INTERNAL_UNALIGNED_STORE16(p, FromHost16(v));
 }
 
-inline uint32_t Load32(const void *p) {
+inline uint32_t Load32(absl::Nonnull<const void *> p) {
   return ToHost32(ABSL_INTERNAL_UNALIGNED_LOAD32(p));
 }
 
-inline void Store32(void *p, uint32_t v) {
+inline void Store32(absl::Nonnull<void *> p, uint32_t v) {
   ABSL_INTERNAL_UNALIGNED_STORE32(p, FromHost32(v));
 }
 
-inline uint64_t Load64(const void *p) {
+inline uint64_t Load64(absl::Nonnull<const void *> p) {
   return ToHost64(ABSL_INTERNAL_UNALIGNED_LOAD64(p));
 }
 
-inline void Store64(void *p, uint64_t v) {
+inline void Store64(absl::Nonnull<void *> p, uint64_t v) {
   ABSL_INTERNAL_UNALIGNED_STORE64(p, FromHost64(v));
 }
 
@@ -250,27 +251,27 @@ inline int64_t ToHost(int64_t x) {
 }
 
 // Functions to do unaligned loads and stores in big-endian order.
-inline uint16_t Load16(const void *p) {
+inline uint16_t Load16(absl::Nonnull<const void *> p) {
   return ToHost16(ABSL_INTERNAL_UNALIGNED_LOAD16(p));
 }
 
-inline void Store16(void *p, uint16_t v) {
+inline void Store16(absl::Nonnull<void *> p, uint16_t v) {
   ABSL_INTERNAL_UNALIGNED_STORE16(p, FromHost16(v));
 }
 
-inline uint32_t Load32(const void *p) {
+inline uint32_t Load32(absl::Nonnull<const void *> p) {
   return ToHost32(ABSL_INTERNAL_UNALIGNED_LOAD32(p));
 }
 
-inline void Store32(void *p, uint32_t v) {
+inline void Store32(absl::Nonnull<void *> p, uint32_t v) {
   ABSL_INTERNAL_UNALIGNED_STORE32(p, FromHost32(v));
 }
 
-inline uint64_t Load64(const void *p) {
+inline uint64_t Load64(absl::Nonnull<const void *> p) {
   return ToHost64(ABSL_INTERNAL_UNALIGNED_LOAD64(p));
 }
 
-inline void Store64(void *p, uint64_t v) {
+inline void Store64(absl::Nonnull<void *> p, uint64_t v) {
   ABSL_INTERNAL_UNALIGNED_STORE64(p, FromHost64(v));
 }
 
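For context, these are internal helpers (absl/base/internal/endian.h) and only their pointer parameters gained annotations; behavior is identical. A sketch of the round trip they provide, assuming a caller inside Abseil and a hypothetical buffer name:

#include <cstdint>

#include "absl/base/internal/endian.h"

// Illustrative round trip: store a value in little-endian byte order at an
// arbitrary (possibly unaligned) address, then load it back. Passing a null
// pointer now violates the documented absl::Nonnull contract.
uint32_t RoundTrip32(uint32_t v) {
  char buf[sizeof(uint32_t)];
  absl::little_endian::Store32(buf, v);
  return absl::little_endian::Load32(buf);  // equals v on either host endianness
}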
diff --git a/absl/base/internal/unaligned_access.h b/absl/base/internal/unaligned_access.h
index 093dd9b4..4fea4574 100644
--- a/absl/base/internal/unaligned_access.h
+++ b/absl/base/internal/unaligned_access.h
@@ -23,6 +23,7 @@
 
 #include "absl/base/attributes.h"
 #include "absl/base/config.h"
+#include "absl/base/nullability.h"
 
 // unaligned APIs
 
@@ -35,29 +36,35 @@ namespace absl {
 ABSL_NAMESPACE_BEGIN
 namespace base_internal {
 
-inline uint16_t UnalignedLoad16(const void *p) {
+inline uint16_t UnalignedLoad16(absl::Nonnull<const void *> p) {
   uint16_t t;
   memcpy(&t, p, sizeof t);
   return t;
 }
 
-inline uint32_t UnalignedLoad32(const void *p) {
+inline uint32_t UnalignedLoad32(absl::Nonnull<const void *> p) {
   uint32_t t;
   memcpy(&t, p, sizeof t);
   return t;
 }
 
-inline uint64_t UnalignedLoad64(const void *p) {
+inline uint64_t UnalignedLoad64(absl::Nonnull<const void *> p) {
   uint64_t t;
   memcpy(&t, p, sizeof t);
   return t;
 }
 
-inline void UnalignedStore16(void *p, uint16_t v) { memcpy(p, &v, sizeof v); }
+inline void UnalignedStore16(absl::Nonnull<void *> p, uint16_t v) {
+  memcpy(p, &v, sizeof v);
+}
 
-inline void UnalignedStore32(void *p, uint32_t v) { memcpy(p, &v, sizeof v); }
+inline void UnalignedStore32(absl::Nonnull<void *> p, uint32_t v) {
+  memcpy(p, &v, sizeof v);
+}
 
-inline void UnalignedStore64(void *p, uint64_t v) { memcpy(p, &v, sizeof v); }
+inline void UnalignedStore64(absl::Nonnull<void *> p, uint64_t v) {
+  memcpy(p, &v, sizeof v);
+}
 
 }  // namespace base_internal
 ABSL_NAMESPACE_END
--
cgit v1.2.3
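For context, absl::Nonnull<T> from absl/base/nullability.h is an alias for the pointer type T that records intent for readers and static analysis; it adds no runtime checks, so the memcpy-based unaligned accessors above behave exactly as before. A small sketch of the annotation style, using a hypothetical function not found in this patch:

#include "absl/base/nullability.h"

// The alias does not change the function's type or ABI; it only documents
// that callers must not pass nullptr.
int Deref(absl::Nonnull<const int*> p) { return *p; }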