author     Abseil Team <absl-team@google.com>              2022-07-28 07:45:06 -0700
committer  Copybara-Service <copybara-worker@google.com>   2022-07-28 07:46:07 -0700
commit     7f51ef5ed2740dab2bbf53c4dd5931b6e8ec6a5b (patch)
tree       72096ff69ed2b4dac6db4e1bcc5d85d3ecb0ef8d /absl/container
parent     c7e60ccfcd708a73008ed2df040162c66697bc18 (diff)
Fix "unsafe narrowing" warnings in absl, 1/n.
Addresses failures with the following, in some files:
-Wshorten-64-to-32
-Wimplicit-int-conversion
-Wsign-compare
-Wsign-conversion
-Wtautological-unsigned-zero-compare
(This specific CL focuses on .h and win32 .inc files.)
Bug: chromium:1292951
PiperOrigin-RevId: 463835431
Change-Id: If8e5f7f651d5cd96035e23e4623bdb08a7fedabe
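
[Editor's note] The hunks below all apply the same pattern: a 64-bit value (std::ptrdiff_t, size_t) or an enum is converted to a narrower type only through an explicit static_cast, so the narrowing is visible at the call site and the listed warnings no longer fire. The following is a minimal sketch of that pattern; it is not taken from the Abseil sources, and the file name and helper are hypothetical.

// narrowing_demo.cc -- illustrative only, not part of this CL.
// Compile with, e.g.:
//   clang++ -std=c++17 -Wshorten-64-to-32 -Wimplicit-int-conversion \
//           -Wsign-conversion narrowing_demo.cc
#include <cstdint>
#include <iterator>
#include <vector>

// std::distance returns std::ptrdiff_t (64-bit on most targets); returning it
// as a 32-bit index implicitly would trigger -Wshorten-64-to-32.
std::uint32_t IndexOf(const std::vector<int>& v,
                      std::vector<int>::const_iterator it) {
  // Before: return std::distance(v.begin(), it);  // implicit 64 -> 32 narrowing
  return static_cast<std::uint32_t>(std::distance(v.begin(), it));  // explicit, no warning
}

int main() {
  std::vector<int> v = {1, 2, 3, 4};
  return static_cast<int>(IndexOf(v, v.begin() + 2));  // returns 2
}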
Diffstat (limited to 'absl/container')
-rw-r--r--  absl/container/internal/hashtablez_sampler.cc   9
-rw-r--r--  absl/container/internal/hashtablez_sampler.h    6
-rw-r--r--  absl/container/internal/inlined_vector.h        6
-rw-r--r--  absl/container/internal/raw_hash_set.h          11
4 files changed, 16 insertions, 16 deletions
diff --git a/absl/container/internal/hashtablez_sampler.cc b/absl/container/internal/hashtablez_sampler.cc
index efc1be58..5b8cf341 100644
--- a/absl/container/internal/hashtablez_sampler.cc
+++ b/absl/container/internal/hashtablez_sampler.cc
@@ -215,21 +215,20 @@ void SetHashtablezSampleParameterInternal(int32_t rate) {
   }
 }
 
-int32_t GetHashtablezMaxSamples() {
+size_t GetHashtablezMaxSamples() {
   return GlobalHashtablezSampler().GetMaxSamples();
 }
 
-void SetHashtablezMaxSamples(int32_t max) {
+void SetHashtablezMaxSamples(size_t max) {
   SetHashtablezMaxSamplesInternal(max);
   TriggerHashtablezConfigListener();
 }
 
-void SetHashtablezMaxSamplesInternal(int32_t max) {
+void SetHashtablezMaxSamplesInternal(size_t max) {
   if (max > 0) {
     GlobalHashtablezSampler().SetMaxSamples(max);
   } else {
-    ABSL_RAW_LOG(ERROR, "Invalid hashtablez max samples: %lld",
-                 static_cast<long long>(max));  // NOLINT(runtime/int)
+    ABSL_RAW_LOG(ERROR, "Invalid hashtablez max samples: 0");
   }
 }
 
diff --git a/absl/container/internal/hashtablez_sampler.h b/absl/container/internal/hashtablez_sampler.h
index d4016d8a..a89518bb 100644
--- a/absl/container/internal/hashtablez_sampler.h
+++ b/absl/container/internal/hashtablez_sampler.h
@@ -281,9 +281,9 @@ void SetHashtablezSampleParameter(int32_t rate);
 void SetHashtablezSampleParameterInternal(int32_t rate);
 
 // Sets a soft max for the number of samples that will be kept.
-int32_t GetHashtablezMaxSamples();
-void SetHashtablezMaxSamples(int32_t max);
-void SetHashtablezMaxSamplesInternal(int32_t max);
+size_t GetHashtablezMaxSamples();
+void SetHashtablezMaxSamples(size_t max);
+void SetHashtablezMaxSamplesInternal(size_t max);
 
 // Configuration override.
 // This allows process-wide sampling without depending on order of
diff --git a/absl/container/internal/inlined_vector.h b/absl/container/internal/inlined_vector.h
index 54c92a01..741821fa 100644
--- a/absl/container/internal/inlined_vector.h
+++ b/absl/container/internal/inlined_vector.h
@@ -784,9 +784,9 @@ auto Storage<T, N, A>::Erase(ConstIterator<A> from, ConstIterator<A> to)
     -> Iterator<A> {
   StorageView<A> storage_view = MakeStorageView();
 
-  SizeType<A> erase_size = std::distance(from, to);
-  SizeType<A> erase_index =
-      std::distance(ConstIterator<A>(storage_view.data), from);
+  auto erase_size = static_cast<SizeType<A>>(std::distance(from, to));
+  auto erase_index = static_cast<SizeType<A>>(
+      std::distance(ConstIterator<A>(storage_view.data), from));
   SizeType<A> erase_end_index = erase_index + erase_size;
 
   IteratorValueAdapter<A, MoveIterator<A>> move_values(
diff --git a/absl/container/internal/raw_hash_set.h b/absl/container/internal/raw_hash_set.h
index 8f00f097..b8118cd2 100644
--- a/absl/container/internal/raw_hash_set.h
+++ b/absl/container/internal/raw_hash_set.h
@@ -545,7 +545,7 @@ struct GroupSse2Impl {
 
   // Returns a bitmask representing the positions of slots that match hash.
   BitMask<uint32_t, kWidth> Match(h2_t hash) const {
-    auto match = _mm_set1_epi8(hash);
+    auto match = _mm_set1_epi8(static_cast<char>(hash));
     return BitMask<uint32_t, kWidth>(
         static_cast<uint32_t>(_mm_movemask_epi8(_mm_cmpeq_epi8(match, ctrl))));
   }
@@ -557,7 +557,7 @@ struct GroupSse2Impl {
     return NonIterableBitMask<uint32_t, kWidth>(
         static_cast<uint32_t>(_mm_movemask_epi8(_mm_sign_epi8(ctrl, ctrl))));
 #else
-    auto match = _mm_set1_epi8(static_cast<h2_t>(ctrl_t::kEmpty));
+    auto match = _mm_set1_epi8(static_cast<char>(ctrl_t::kEmpty));
     return NonIterableBitMask<uint32_t, kWidth>(
         static_cast<uint32_t>(_mm_movemask_epi8(_mm_cmpeq_epi8(match, ctrl))));
 #endif
@@ -565,14 +565,14 @@ struct GroupSse2Impl {
 
   // Returns a bitmask representing the positions of empty or deleted slots.
   NonIterableBitMask<uint32_t, kWidth> MaskEmptyOrDeleted() const {
-    auto special = _mm_set1_epi8(static_cast<uint8_t>(ctrl_t::kSentinel));
+    auto special = _mm_set1_epi8(static_cast<char>(ctrl_t::kSentinel));
     return NonIterableBitMask<uint32_t, kWidth>(static_cast<uint32_t>(
         _mm_movemask_epi8(_mm_cmpgt_epi8_fixed(special, ctrl))));
   }
 
   // Returns the number of trailing empty or deleted elements in the group.
   uint32_t CountLeadingEmptyOrDeleted() const {
-    auto special = _mm_set1_epi8(static_cast<uint8_t>(ctrl_t::kSentinel));
+    auto special = _mm_set1_epi8(static_cast<char>(ctrl_t::kSentinel));
     return TrailingZeros(static_cast<uint32_t>(
         _mm_movemask_epi8(_mm_cmpgt_epi8_fixed(special, ctrl)) + 1));
   }
@@ -635,7 +635,8 @@ struct GroupAArch64Impl {
     // Clang and GCC optimize countr_zero to rbit+clz without any check for 0,
     // so we should be fine.
     constexpr uint64_t bits = 0x0101010101010101ULL;
-    return countr_zero((mask | ~(mask >> 7)) & bits) >> 3;
+    return static_cast<uint32_t>(countr_zero((mask | ~(mask >> 7)) & bits) >>
+                                 3);
   }
 
   void ConvertSpecialToEmptyAndFullToDeleted(ctrl_t* dst) const {