Diffstat (limited to 'absl/base/internal/low_level_alloc.cc')
-rw-r--r--  absl/base/internal/low_level_alloc.cc  16
1 file changed, 8 insertions(+), 8 deletions(-)
diff --git a/absl/base/internal/low_level_alloc.cc b/absl/base/internal/low_level_alloc.cc
index 64d7aa8..f7314ab 100644
--- a/absl/base/internal/low_level_alloc.cc
+++ b/absl/base/internal/low_level_alloc.cc
@@ -211,7 +211,7 @@ struct LowLevelAlloc::Arena {
// Result of sysconf(_SC_PAGESIZE)
const size_t pagesize;
// Lowest power of two >= max(16, sizeof(AllocList))
- const size_t roundup;
+ const size_t round_up;
// Smallest allocation block size
const size_t min_size;
// PRNG state
@@ -336,11 +336,11 @@ size_t GetPageSize() {

size_t RoundedUpBlockSize() {
// Round up block sizes to a power of two close to the header size.
- size_t roundup = 16;
- while (roundup < sizeof(AllocList::Header)) {
- roundup += roundup;
+ size_t round_up = 16;
+ while (round_up < sizeof(AllocList::Header)) {
+ round_up += round_up;
}
- return roundup;
+ return round_up;
}

} // namespace
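The loop renamed above computes the lowest power of two that is at least max(16, sizeof(AllocList::Header)), matching the comment on the round_up field in the first hunk. A minimal standalone sketch of that doubling logic; the 24-byte header size passed in main is a made-up example value, not the real sizeof(AllocList::Header):

// Sketch only: lowest power of two >= max(16, header_size).
// header_size stands in for sizeof(AllocList::Header) in the real code.
#include <cstddef>
#include <cstdio>

size_t RoundedUpBlockSizeSketch(size_t header_size) {
  size_t round_up = 16;                  // never drop below 16 bytes
  while (round_up < header_size) {
    round_up += round_up;                // double until the header fits
  }
  return round_up;
}

int main() {
  std::printf("%zu\n", RoundedUpBlockSizeSketch(24));  // prints 32
}

With a 24-byte header this yields 32; the arena then sizes blocks in multiples of that value, as the last hunk shows.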
@@ -350,8 +350,8 @@ LowLevelAlloc::Arena::Arena(uint32_t flags_value)
allocation_count(0),
flags(flags_value),
pagesize(GetPageSize()),
- roundup(RoundedUpBlockSize()),
- min_size(2 * roundup),
+ round_up(RoundedUpBlockSize()),
+ min_size(2 * round_up),
random(0) {
freelist.header.size = 0;
freelist.header.magic =
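Note that min_size is initialized as 2 * round_up in the member initializer list above. That is well-defined because round_up is declared before min_size in the Arena struct (see the first hunk): members are initialized in declaration order, not in the order they appear in the initializer list. A small illustration of that rule, using hypothetical names unrelated to the real Arena:

// Sketch of the declaration-order rule the Arena constructor relies on.
#include <cstdio>

struct ArenaSketch {
  const unsigned round_up;  // declared first, so initialized first
  const unsigned min_size;  // its initializer may safely read round_up
  explicit ArenaSketch(unsigned r) : round_up(r), min_size(2 * round_up) {}
};

int main() {
  ArenaSketch a(32);
  std::printf("%u %u\n", a.round_up, a.min_size);  // prints "32 64"
}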
@@ -528,7 +528,7 @@ static void *DoAllocWithArena(size_t request, LowLevelAlloc::Arena *arena) {
ArenaLock section(arena);
// round up with header
size_t req_rnd = RoundUp(CheckedAdd(request, sizeof (s->header)),
- arena->roundup);
+ arena->round_up);
for (;;) { // loop until we find a suitable region
// find the minimum levels that a block of this size must have
int i = LLA_SkiplistLevels(req_rnd, arena->min_size, nullptr) - 1;
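The final hunk rounds the request, plus space for the block header, up to a multiple of arena->round_up. Because round_up is always a power of two, this kind of rounding can be done with the usual bit-mask trick; the helper below is a hypothetical stand-in used only for illustration, not the file's actual RoundUp/CheckedAdd implementation:

// Sketch: round (request + header bytes) up to a multiple of a
// power-of-two block size. RoundUpSketch is illustrative, not absl's RoundUp.
#include <cstddef>
#include <cstdio>

size_t RoundUpSketch(size_t n, size_t align) {
  // align is assumed to be a power of two, as round_up always is.
  return (n + align - 1) & ~(align - 1);
}

int main() {
  const size_t header = 16;    // hypothetical header size
  const size_t round_up = 32;  // e.g. the value from RoundedUpBlockSize()
  std::printf("%zu\n", RoundUpSketch(100 + header, round_up));  // prints 128
}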