author     Mark D. Roth <roth@google.com>  2018-01-22 12:51:03 -0800
committer  Mark D. Roth <roth@google.com>  2018-01-22 12:51:03 -0800
commit     62569dd9785e59a6b87a5002d09aeec1c39f541b (patch)
tree       b7e0fd0f075b891af60f1c6736a04ed12f77bf16
parent     a9fc084ec1be4ab8f2071227d9affb77f713e0db (diff)
Fix arena to return aligned memory.
-rw-r--r--  include/grpc/support/alloc.h                    5
-rw-r--r--  src/core/lib/gpr/alloc.cc                       4
-rw-r--r--  src/core/lib/gpr/arena.cc                      31
-rw-r--r--  src/ruby/ext/grpc/rb_grpc_imports.generated.h   2
-rw-r--r--  test/core/gpr/alloc_test.cc                    14
-rw-r--r--  test/core/gpr/arena_test.cc                     2
6 files changed, 47 insertions, 11 deletions
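
What the fix guarantees, in caller terms: pointers handed out by gpr_arena_alloc() are now aligned to GPR_MAX_ALIGNMENT (16 bytes, per the alignment assertions added to the tests below). A minimal illustrative sketch that mirrors those assertions; it is not code from this commit:

    #include <stdint.h>
    #include <grpc/support/log.h>
    #include "src/core/lib/gpr/arena.h"

    int main(void) {
      gpr_arena* a = gpr_arena_create(64);
      // Odd sizes on purpose: the arena still returns 16-byte-aligned blocks.
      void* p = gpr_arena_alloc(a, 7);
      void* q = gpr_arena_alloc(a, 3);
      GPR_ASSERT(((intptr_t)p & 0xf) == 0);
      GPR_ASSERT(((intptr_t)q & 0xf) == 0);
      gpr_arena_destroy(a);
      return 0;
    }
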
diff --git a/include/grpc/support/alloc.h b/include/grpc/support/alloc.h
index c559198237..577d4f0069 100644
--- a/include/grpc/support/alloc.h
+++ b/include/grpc/support/alloc.h
@@ -46,8 +46,9 @@ GPRAPI void* gpr_zalloc(size_t size);
GPRAPI void gpr_free(void* ptr);
/** realloc, never returns NULL */
GPRAPI void* gpr_realloc(void* p, size_t size);
-/** aligned malloc, never returns NULL, will align to 1 << alignment_log */
-GPRAPI void* gpr_malloc_aligned(size_t size, size_t alignment_log);
+/** aligned malloc, never returns NULL, will align to alignment, which
+ * must be a power of 2. */
+GPRAPI void* gpr_malloc_aligned(size_t size, size_t alignment);
/** free memory allocated by gpr_malloc_aligned */
GPRAPI void gpr_free_aligned(void* ptr);
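
A note on the signature change above: callers that previously passed the base-2 log of the alignment now pass the alignment itself in bytes, and it must be a power of two (enforced by the assert added in alloc.cc below). An illustrative before/after, not taken from the commit:

    #include <grpc/support/alloc.h>

    void* alloc_sixteen_aligned(size_t size) {
      // Old API: gpr_malloc_aligned(size, 4) meant 1 << 4 == 16-byte alignment.
      // New API: pass the alignment in bytes; it must be a power of two.
      void* p = gpr_malloc_aligned(size, 16);
      return p;  // release later with gpr_free_aligned(p)
    }
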
diff --git a/src/core/lib/gpr/alloc.cc b/src/core/lib/gpr/alloc.cc
index 518bdb99f7..000b7dcb25 100644
--- a/src/core/lib/gpr/alloc.cc
+++ b/src/core/lib/gpr/alloc.cc
@@ -90,8 +90,8 @@ void* gpr_realloc(void* p, size_t size) {
return p;
}
-void* gpr_malloc_aligned(size_t size, size_t alignment_log) {
- size_t alignment = ((size_t)1) << alignment_log;
+void* gpr_malloc_aligned(size_t size, size_t alignment) {
+ GPR_ASSERT(((alignment - 1) & alignment) == 0); // Must be power of 2.
size_t extra = alignment - 1 + sizeof(void*);
void* p = gpr_malloc(size + extra);
void** ret = (void**)(((uintptr_t)p + extra) & ~(alignment - 1));
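
How the aligned allocator works, sketched from the hunk above: it over-allocates by alignment - 1 + sizeof(void*), masks (p + extra) down to an alignment boundary so at least a pointer's worth of space remains before the returned block, and (in the lines of the function not shown in this hunk) presumably stashes the pointer malloc actually returned in that space so gpr_free_aligned() can recover it. A standalone illustration of the same technique, using plain malloc/free and hypothetical demo_* names:

    #include <stdint.h>
    #include <stdlib.h>

    static void* demo_malloc_aligned(size_t size, size_t alignment) {
      // Over-allocate so that an aligned block of `size` bytes, plus one slot
      // for the original pointer, always fits inside the allocation.
      size_t extra = alignment - 1 + sizeof(void*);
      void* p = malloc(size + extra);
      if (p == NULL) return NULL;
      void** ret = (void**)(((uintptr_t)p + extra) & ~(alignment - 1));
      ret[-1] = p;  // remember what malloc() actually returned
      return ret;
    }

    static void demo_free_aligned(void* ptr) { free(((void**)ptr)[-1]); }
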
diff --git a/src/core/lib/gpr/arena.cc b/src/core/lib/gpr/arena.cc
index 177c176732..687592a140 100644
--- a/src/core/lib/gpr/arena.cc
+++ b/src/core/lib/gpr/arena.cc
@@ -17,11 +17,19 @@
*/
#include "src/core/lib/gpr/arena.h"
+
+#include <string.h>
+
#include <grpc/support/alloc.h>
#include <grpc/support/atm.h>
#include <grpc/support/log.h>
#include <grpc/support/useful.h>
+// TODO(roth): We currently assume that all callers need alignment of 16
+// bytes, which may be wrong in some cases. As part of converting the
+// arena API to C++, we should consider replacing gpr_arena_alloc() with a
+// template that takes the type of the value being allocated, which
+// would allow us to use the alignment actually needed by the caller.
#define ROUND_UP_TO_ALIGNMENT_SIZE(x) \
(((x) + GPR_MAX_ALIGNMENT - 1u) & ~(GPR_MAX_ALIGNMENT - 1u))
@@ -36,9 +44,16 @@ struct gpr_arena {
zone initial_zone;
};
+static void* zalloc_aligned(size_t size) {
+ void* ptr = gpr_malloc_aligned(size, GPR_MAX_ALIGNMENT);
+ memset(ptr, 0, size);
+ return ptr;
+}
+
gpr_arena* gpr_arena_create(size_t initial_size) {
initial_size = ROUND_UP_TO_ALIGNMENT_SIZE(initial_size);
- gpr_arena* a = (gpr_arena*)gpr_zalloc(sizeof(gpr_arena) + initial_size);
+ gpr_arena* a = (gpr_arena*)zalloc_aligned(
+ ROUND_UP_TO_ALIGNMENT_SIZE(sizeof(gpr_arena)) + initial_size);
a->initial_zone.size_end = initial_size;
return a;
}
@@ -46,10 +61,10 @@ gpr_arena* gpr_arena_create(size_t initial_size) {
size_t gpr_arena_destroy(gpr_arena* arena) {
gpr_atm size = gpr_atm_no_barrier_load(&arena->size_so_far);
zone* z = (zone*)gpr_atm_no_barrier_load(&arena->initial_zone.next_atm);
- gpr_free(arena);
+ gpr_free_aligned(arena);
while (z) {
zone* next_z = (zone*)gpr_atm_no_barrier_load(&z->next_atm);
- gpr_free(z);
+ gpr_free_aligned(z);
z = next_z;
}
return (size_t)size;
@@ -64,11 +79,12 @@ void* gpr_arena_alloc(gpr_arena* arena, size_t size) {
zone* next_z = (zone*)gpr_atm_acq_load(&z->next_atm);
if (next_z == nullptr) {
size_t next_z_size = (size_t)gpr_atm_no_barrier_load(&arena->size_so_far);
- next_z = (zone*)gpr_zalloc(sizeof(zone) + next_z_size);
+ next_z = (zone*)zalloc_aligned(ROUND_UP_TO_ALIGNMENT_SIZE(sizeof(zone)) +
+ next_z_size);
next_z->size_begin = z->size_end;
next_z->size_end = z->size_end + next_z_size;
if (!gpr_atm_rel_cas(&z->next_atm, (gpr_atm)NULL, (gpr_atm)next_z)) {
- gpr_free(next_z);
+ gpr_free_aligned(next_z);
next_z = (zone*)gpr_atm_acq_load(&z->next_atm);
}
}
@@ -79,5 +95,8 @@ void* gpr_arena_alloc(gpr_arena* arena, size_t size) {
}
GPR_ASSERT(start >= z->size_begin);
GPR_ASSERT(start + size <= z->size_end);
- return ((char*)(z + 1)) + start - z->size_begin;
+ char* ptr = (z == &arena->initial_zone)
+ ? (char*)arena + ROUND_UP_TO_ALIGNMENT_SIZE(sizeof(gpr_arena))
+ : (char*)z + ROUND_UP_TO_ALIGNMENT_SIZE(sizeof(zone));
+ return ptr + start - z->size_begin;
}
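
Why gpr_arena_create() and gpr_arena_alloc() round the header sizes: padding sizeof(gpr_arena) and sizeof(zone) up to a multiple of GPR_MAX_ALIGNMENT means the data region after each header starts on an aligned boundary, and since allocation sizes are rounded with the same macro earlier in gpr_arena_alloc() (outside this hunk), every returned pointer stays aligned. A quick, self-contained sanity check of the macro's behavior, assuming GPR_MAX_ALIGNMENT is 16 (its value in gRPC's port_platform.h):

    #include <assert.h>

    #define GPR_MAX_ALIGNMENT 16u /* assumed value for this sketch */
    #define ROUND_UP_TO_ALIGNMENT_SIZE(x) \
      (((x) + GPR_MAX_ALIGNMENT - 1u) & ~(GPR_MAX_ALIGNMENT - 1u))

    int main(void) {
      // Any header size gets padded to the next multiple of 16, so the bytes
      // immediately following the header begin on a 16-byte boundary.
      assert(ROUND_UP_TO_ALIGNMENT_SIZE(1) == 16);
      assert(ROUND_UP_TO_ALIGNMENT_SIZE(16) == 16);
      assert(ROUND_UP_TO_ALIGNMENT_SIZE(17) == 32);
      return 0;
    }
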
diff --git a/src/ruby/ext/grpc/rb_grpc_imports.generated.h b/src/ruby/ext/grpc/rb_grpc_imports.generated.h
index 6377008a3b..1f2bd3e321 100644
--- a/src/ruby/ext/grpc/rb_grpc_imports.generated.h
+++ b/src/ruby/ext/grpc/rb_grpc_imports.generated.h
@@ -579,7 +579,7 @@ extern gpr_free_type gpr_free_import;
typedef void*(*gpr_realloc_type)(void* p, size_t size);
extern gpr_realloc_type gpr_realloc_import;
#define gpr_realloc gpr_realloc_import
-typedef void*(*gpr_malloc_aligned_type)(size_t size, size_t alignment_log);
+typedef void*(*gpr_malloc_aligned_type)(size_t size, size_t alignment);
extern gpr_malloc_aligned_type gpr_malloc_aligned_import;
#define gpr_malloc_aligned gpr_malloc_aligned_import
typedef void(*gpr_free_aligned_type)(void* ptr);
diff --git a/test/core/gpr/alloc_test.cc b/test/core/gpr/alloc_test.cc
index 6074c6e6e7..bf4471c36f 100644
--- a/test/core/gpr/alloc_test.cc
+++ b/test/core/gpr/alloc_test.cc
@@ -16,8 +16,11 @@
*
*/
+#include <string.h>
+
#include <grpc/support/alloc.h>
#include <grpc/support/log.h>
+
#include "test/core/util/test_config.h"
static void* fake_malloc(size_t size) { return (void*)size; }
@@ -48,8 +51,19 @@ static void test_custom_allocs() {
gpr_free(i);
}
+static void test_malloc_aligned() {
+ for (size_t size = 1; size <= 256; ++size) {
+ void* ptr = gpr_malloc_aligned(size, 16);
+ GPR_ASSERT(ptr != nullptr);
+ GPR_ASSERT(((intptr_t)ptr & 0xf) == 0);
+ memset(ptr, 0, size);
+ gpr_free_aligned(ptr);
+ }
+}
+
int main(int argc, char** argv) {
grpc_test_init(argc, argv);
test_custom_allocs();
+ test_malloc_aligned();
return 0;
}
diff --git a/test/core/gpr/arena_test.cc b/test/core/gpr/arena_test.cc
index 59ea04c0ed..62a3f8bf50 100644
--- a/test/core/gpr/arena_test.cc
+++ b/test/core/gpr/arena_test.cc
@@ -53,6 +53,8 @@ static void test(const char* name, size_t init_size, const size_t* allocs,
void** ps = static_cast<void**>(gpr_zalloc(sizeof(*ps) * nallocs));
for (size_t i = 0; i < nallocs; i++) {
ps[i] = gpr_arena_alloc(a, allocs[i]);
+ // ensure the returned address is aligned
+ GPR_ASSERT(((intptr_t)ps[i] & 0xf) == 0);
// ensure no duplicate results
for (size_t j = 0; j < i; j++) {
GPR_ASSERT(ps[i] != ps[j]);