author     Mark D. Roth <roth@google.com>            2018-01-23 08:09:56 -0800
committer  GitHub <noreply@github.com>               2018-01-23 08:09:56 -0800
commit     ac0808b107d73613191b66617a547a201871a845 (patch)
tree       26df11a9e0508ee96504971bdee7a4e0102d73be /src/core/lib
parent     20e7074e4101b4fdbae1764caa952301b38957c4 (diff)
parent     62569dd9785e59a6b87a5002d09aeec1c39f541b (diff)
Merge pull request #14093 from markdroth/arena_alignment
Fix arena to return aligned memory.
Diffstat (limited to 'src/core/lib')
-rw-r--r--  src/core/lib/gpr/alloc.cc |  4
-rw-r--r--  src/core/lib/gpr/arena.cc | 31
2 files changed, 27 insertions, 8 deletions
diff --git a/src/core/lib/gpr/alloc.cc b/src/core/lib/gpr/alloc.cc
index 518bdb99f7..000b7dcb25 100644
--- a/src/core/lib/gpr/alloc.cc
+++ b/src/core/lib/gpr/alloc.cc
@@ -90,8 +90,8 @@ void* gpr_realloc(void* p, size_t size) {
   return p;
 }
 
-void* gpr_malloc_aligned(size_t size, size_t alignment_log) {
-  size_t alignment = ((size_t)1) << alignment_log;
+void* gpr_malloc_aligned(size_t size, size_t alignment) {
+  GPR_ASSERT(((alignment - 1) & alignment) == 0);  // Must be power of 2.
   size_t extra = alignment - 1 + sizeof(void*);
   void* p = gpr_malloc(size + extra);
   void** ret = (void**)(((uintptr_t)p + extra) & ~(alignment - 1));
diff --git a/src/core/lib/gpr/arena.cc b/src/core/lib/gpr/arena.cc
index 177c176732..687592a140 100644
--- a/src/core/lib/gpr/arena.cc
+++ b/src/core/lib/gpr/arena.cc
@@ -17,11 +17,19 @@
  */
 
 #include "src/core/lib/gpr/arena.h"
+
+#include <string.h>
+
 #include <grpc/support/alloc.h>
 #include <grpc/support/atm.h>
 #include <grpc/support/log.h>
 #include <grpc/support/useful.h>
 
+// TODO(roth): We currently assume that all callers need alignment of 16
+// bytes, which may be wrong in some cases. As part of converting the
+// arena API to C++, we should consider replacing gpr_arena_alloc() with a
+// template that takes the type of the value being allocated, which
+// would allow us to use the alignment actually needed by the caller.
 #define ROUND_UP_TO_ALIGNMENT_SIZE(x) \
   (((x) + GPR_MAX_ALIGNMENT - 1u) & ~(GPR_MAX_ALIGNMENT - 1u))
 
@@ -36,9 +44,16 @@ struct gpr_arena {
   zone initial_zone;
 };
 
+static void* zalloc_aligned(size_t size) {
+  void* ptr = gpr_malloc_aligned(size, GPR_MAX_ALIGNMENT);
+  memset(ptr, 0, size);
+  return ptr;
+}
+
 gpr_arena* gpr_arena_create(size_t initial_size) {
   initial_size = ROUND_UP_TO_ALIGNMENT_SIZE(initial_size);
-  gpr_arena* a = (gpr_arena*)gpr_zalloc(sizeof(gpr_arena) + initial_size);
+  gpr_arena* a = (gpr_arena*)zalloc_aligned(
+      ROUND_UP_TO_ALIGNMENT_SIZE(sizeof(gpr_arena)) + initial_size);
   a->initial_zone.size_end = initial_size;
   return a;
 }
@@ -46,10 +61,10 @@ gpr_arena* gpr_arena_create(size_t initial_size) {
 size_t gpr_arena_destroy(gpr_arena* arena) {
   gpr_atm size = gpr_atm_no_barrier_load(&arena->size_so_far);
   zone* z = (zone*)gpr_atm_no_barrier_load(&arena->initial_zone.next_atm);
-  gpr_free(arena);
+  gpr_free_aligned(arena);
   while (z) {
     zone* next_z = (zone*)gpr_atm_no_barrier_load(&z->next_atm);
-    gpr_free(z);
+    gpr_free_aligned(z);
     z = next_z;
   }
   return (size_t)size;
@@ -64,11 +79,12 @@ void* gpr_arena_alloc(gpr_arena* arena, size_t size) {
     zone* next_z = (zone*)gpr_atm_acq_load(&z->next_atm);
     if (next_z == nullptr) {
       size_t next_z_size = (size_t)gpr_atm_no_barrier_load(&arena->size_so_far);
-      next_z = (zone*)gpr_zalloc(sizeof(zone) + next_z_size);
+      next_z = (zone*)zalloc_aligned(ROUND_UP_TO_ALIGNMENT_SIZE(sizeof(zone)) +
+                                     next_z_size);
       next_z->size_begin = z->size_end;
       next_z->size_end = z->size_end + next_z_size;
       if (!gpr_atm_rel_cas(&z->next_atm, (gpr_atm)NULL, (gpr_atm)next_z)) {
-        gpr_free(next_z);
+        gpr_free_aligned(next_z);
         next_z = (zone*)gpr_atm_acq_load(&z->next_atm);
       }
     }
@@ -79,5 +95,8 @@ void* gpr_arena_alloc(gpr_arena* arena, size_t size) {
   }
   GPR_ASSERT(start >= z->size_begin);
   GPR_ASSERT(start + size <= z->size_end);
-  return ((char*)(z + 1)) + start - z->size_begin;
+  char* ptr = (z == &arena->initial_zone)
+                  ? (char*)arena + ROUND_UP_TO_ALIGNMENT_SIZE(sizeof(gpr_arena))
+                  : (char*)z + ROUND_UP_TO_ALIGNMENT_SIZE(sizeof(zone));
+  return ptr + start - z->size_begin;
 }
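Note on the alloc.cc hunk: gpr_malloc_aligned() now takes the alignment itself (asserted to be a power of two) instead of its log2, and the arena frees its blocks with gpr_free_aligned(). The hunk ends before the lines that stash the original pointer and return the aligned one, so the following is only a minimal, self-contained sketch of the same over-allocate-and-stash scheme using plain malloc/free; the free_aligned() here assumes the conventional "original pointer stored just before the aligned address" layout and is not quoted from gRPC's gpr_free_aligned().

#include <cassert>
#include <cstddef>
#include <cstdint>
#include <cstdlib>

// Returns a pointer aligned to `alignment` (a power of two), carved out of a
// slightly larger malloc() block. The original block pointer is stashed in the
// void* slot just before the returned address so free_aligned() can find it.
void* malloc_aligned(size_t size, size_t alignment) {
  assert(((alignment - 1) & alignment) == 0);  // must be a power of 2
  size_t extra = alignment - 1 + sizeof(void*);
  void* p = std::malloc(size + extra);
  void** ret = (void**)(((uintptr_t)p + extra) & ~(alignment - 1));
  ret[-1] = p;  // remember what malloc() actually returned
  return ret;
}

void free_aligned(void* ptr) {
  std::free(((void**)ptr)[-1]);  // release the stashed original block
}

int main() {
  void* p = malloc_aligned(100, 16);
  assert(((uintptr_t)p & 15u) == 0);  // 16-byte aligned, like GPR_MAX_ALIGNMENT
  free_aligned(p);
  return 0;
}

Because `extra` includes both `alignment - 1` and `sizeof(void*)`, rounding p + extra down to the alignment always leaves a void* slot before the returned pointer and at least `size` usable bytes after it. On the arena side the same constant does the rest of the work: each block start is 16-byte aligned by gpr_malloc_aligned(), and rounding sizeof(gpr_arena) and sizeof(zone) up with ROUND_UP_TO_ALIGNMENT_SIZE keeps the payload that follows each header on a 16-byte boundary, which is what the new return-path computation in gpr_arena_alloc() relies on.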