/*
*
* Copyright 2016 gRPC authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
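
/* Test-only helper: wraps the gpr allocation functions with counting versions
   so tests can snapshot how many bytes and allocations are currently live
   (relative) and how many have ever been made (absolute). */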

#include <stdint.h>
#include <string.h>

#include <grpc/support/alloc.h>
#include <grpc/support/sync.h>

#include "test/core/util/memory_counters.h"

static struct grpc_memory_counters g_memory_counters;
static gpr_allocation_functions g_old_allocs;

static void* guard_malloc(size_t size);
static void* guard_realloc(void* vptr, size_t size);
static void guard_free(void* vptr);

#ifdef GPR_LOW_LEVEL_COUNTERS
/* hide these from the microbenchmark atomic stats */
#define NO_BARRIER_FETCH_ADD(x, sz) \
  __atomic_fetch_add((x), (sz), __ATOMIC_RELAXED)
#define NO_BARRIER_LOAD(x) __atomic_load_n((x), __ATOMIC_RELAXED)
#else
#define NO_BARRIER_FETCH_ADD(x, sz) gpr_atm_no_barrier_fetch_add(x, sz)
#define NO_BARRIER_LOAD(x) gpr_atm_no_barrier_load(x)
#endif
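
/* Each guarded allocation is padded with a leading size_t that records the
   requested size, so guard_realloc and guard_free can later subtract that
   size from the relative counters. */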
static void* guard_malloc(size_t size) {
  size_t* ptr;
  if (!size) return nullptr;
  NO_BARRIER_FETCH_ADD(&g_memory_counters.total_size_absolute, (gpr_atm)size);
  NO_BARRIER_FETCH_ADD(&g_memory_counters.total_size_relative, (gpr_atm)size);
  NO_BARRIER_FETCH_ADD(&g_memory_counters.total_allocs_absolute, (gpr_atm)1);
  NO_BARRIER_FETCH_ADD(&g_memory_counters.total_allocs_relative, (gpr_atm)1);
  ptr = static_cast<size_t*>(g_old_allocs.malloc_fn(size + sizeof(size)));
  *ptr++ = size;
  return ptr;
}
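
/* Reallocation: read the old size from the slot in front of the user pointer,
   count it out of the relative total, then count the new size in. */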
static void* guard_realloc(void* vptr, size_t size) {
  size_t* ptr = static_cast<size_t*>(vptr);
  if (vptr == nullptr) {
    return guard_malloc(size);
  }
  if (size == 0) {
    guard_free(vptr);
    return nullptr;
  }
  --ptr;
  NO_BARRIER_FETCH_ADD(&g_memory_counters.total_size_absolute, (gpr_atm)size);
  NO_BARRIER_FETCH_ADD(&g_memory_counters.total_size_relative, -(gpr_atm)*ptr);
  NO_BARRIER_FETCH_ADD(&g_memory_counters.total_size_relative, (gpr_atm)size);
  NO_BARRIER_FETCH_ADD(&g_memory_counters.total_allocs_absolute, (gpr_atm)1);
  ptr = static_cast<size_t*>(
      g_old_allocs.realloc_fn(ptr, size + sizeof(size)));
  *ptr++ = size;
  return ptr;
}
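
/* Free: step back to the size slot, decrement the relative counters, and hand
   the original allocation back to the wrapped allocator. */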
static void guard_free(void* vptr) {
  size_t* ptr = static_cast<size_t*>(vptr);
  if (!vptr) return;
  --ptr;
  NO_BARRIER_FETCH_ADD(&g_memory_counters.total_size_relative, -(gpr_atm)*ptr);
  NO_BARRIER_FETCH_ADD(&g_memory_counters.total_allocs_relative, -(gpr_atm)1);
  g_old_allocs.free_fn(ptr);
}
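
/* The zalloc slot is left as nullptr; the expectation (an assumption about the
   gpr allocator contract, not stated in this file) is that gpr_zalloc then
   falls back to malloc_fn plus memset. */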
struct gpr_allocation_functions g_guard_allocs = {guard_malloc, nullptr,
                                                  guard_realloc, guard_free};

void grpc_memory_counters_init() {
  memset(&g_memory_counters, 0, sizeof(g_memory_counters));
  g_old_allocs = gpr_get_allocation_functions();
  gpr_set_allocation_functions(g_guard_allocs);
}

void grpc_memory_counters_destroy() {
  gpr_set_allocation_functions(g_old_allocs);
}

struct grpc_memory_counters grpc_memory_counters_snapshot() {
  struct grpc_memory_counters counters;
  counters.total_size_relative =
      NO_BARRIER_LOAD(&g_memory_counters.total_size_relative);
  counters.total_size_absolute =
      NO_BARRIER_LOAD(&g_memory_counters.total_size_absolute);
  counters.total_allocs_relative =
      NO_BARRIER_LOAD(&g_memory_counters.total_allocs_relative);
  counters.total_allocs_absolute =
      NO_BARRIER_LOAD(&g_memory_counters.total_allocs_absolute);
  return counters;
}
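
/* Minimal usage sketch (an assumption about the typical test flow, not taken
   from this file): install the counters, snapshot before and after the code
   under test, and compare the relative totals to detect leaked bytes.

     grpc_memory_counters_init();
     struct grpc_memory_counters before = grpc_memory_counters_snapshot();
     // ... exercise the code under test ...
     struct grpc_memory_counters after = grpc_memory_counters_snapshot();
     GPR_ASSERT(after.total_size_relative == before.total_size_relative);
     grpc_memory_counters_destroy();
*/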