Fix: lttng-ust-malloc ip context
liblttng-ust-libc-wrapper/lttng-ust-malloc.c (lttng-ust.git)
/*
 * Copyright (C) 2009 Pierre-Marc Fournier
 * Copyright (C) 2011 Mathieu Desnoyers <mathieu.desnoyers@efficios.com>
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with this library; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */

#define _GNU_SOURCE
#include <lttng/ust-dlfcn.h>
#include <sys/types.h>
#include <stdio.h>
#include <string.h>	/* memcpy(), used by the realloc paths below */
#include <assert.h>
#include <pthread.h>	/* pthread_mutex_t, used by the spinlock shims below */
#include <urcu/system.h>
#include <urcu/uatomic.h>
#include <urcu/compiler.h>
#include <urcu/tls-compat.h>
#include <urcu/arch.h>
#include <lttng/align.h>

#define TRACEPOINT_DEFINE
#define TRACEPOINT_CREATE_PROBES
#define TP_IP_PARAM ip
#include "ust_libc.h"

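/*
 * Defining TP_IP_PARAM before including the probe header tells the
 * tracepoint code generation which event argument carries the
 * instruction pointer to record for each event; here it is the "ip"
 * argument that each wrapper below fills with
 * __builtin_return_address(0).  (This describes the TP_IP_PARAM
 * convention as used across lttng-ust wrappers; the header itself is
 * not shown in this file.)
 */
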
#define STATIC_CALLOC_LEN 4096
static char static_calloc_buf[STATIC_CALLOC_LEN];
static unsigned long static_calloc_buf_offset;

struct alloc_functions {
	void *(*calloc)(size_t nmemb, size_t size);
	void *(*malloc)(size_t size);
	void (*free)(void *ptr);
	void *(*realloc)(void *ptr, size_t size);
	void *(*memalign)(size_t alignment, size_t size);
	int (*posix_memalign)(void **memptr, size_t alignment, size_t size);
};

static
struct alloc_functions cur_alloc;

/*
 * Make sure our own use of the TLS compat layer will not cause infinite
 * recursion by calling calloc.
 */

static
void *static_calloc(size_t nmemb, size_t size);

/*
 * pthread mutex replacement for URCU tls compat layer.
 */
static int ust_malloc_lock;

static __attribute__((unused))
void ust_malloc_spin_lock(pthread_mutex_t *lock)
{
	/*
	 * The memory barrier within cmpxchg takes care of ordering
	 * memory accesses with respect to the start of the critical
	 * section.
	 */
	while (uatomic_cmpxchg(&ust_malloc_lock, 0, 1) != 0)
		caa_cpu_relax();
}

static __attribute__((unused))
void ust_malloc_spin_unlock(pthread_mutex_t *lock)
{
	/*
	 * Ensure memory accesses within the critical section do not
	 * leak outside.
	 */
	cmm_smp_mb();
	uatomic_set(&ust_malloc_lock, 0);
}

#define calloc static_calloc
#define pthread_mutex_lock ust_malloc_spin_lock
#define pthread_mutex_unlock ust_malloc_spin_unlock
static DEFINE_URCU_TLS(int, malloc_nesting);
/*
 * Undefine the macro names themselves (not the replacement functions),
 * so calloc and pthread_mutex_lock/unlock regain their normal meaning
 * for the rest of this file.
 */
#undef pthread_mutex_unlock
#undef pthread_mutex_lock
#undef calloc

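/*
 * Why the #define/#undef sandwich above: on configurations where
 * liburcu's TLS compat layer falls back to pthread keys instead of
 * __thread, the DEFINE_URCU_TLS() expansion may call calloc() and
 * pthread_mutex_lock()/unlock() internally.  Redirecting those names
 * for just that one expansion keeps the TLS bootstrap from recursing
 * into the wrapped allocator and removes a libpthread dependency.
 * (This is a reading of the compat layer's contract, not something
 * this file spells out.)
 */
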
/*
 * Static allocator to use when initially executing dlsym(). It keeps a
 * size_t value of each object size prior to the object.
 */
static
void *static_calloc_aligned(size_t nmemb, size_t size, size_t alignment)
{
	size_t prev_offset, new_offset, res_offset, aligned_offset;

	if (nmemb * size == 0) {
		return NULL;
	}

	/*
	 * Protect static_calloc_buf_offset from concurrent updates
	 * using a cmpxchg loop rather than a mutex to remove a
	 * dependency on pthread. This will minimize the risk of bad
	 * interaction between mutex and malloc instrumentation.
	 */
	res_offset = CMM_LOAD_SHARED(static_calloc_buf_offset);
	do {
		prev_offset = res_offset;
		aligned_offset = ALIGN(prev_offset + sizeof(size_t), alignment);
		new_offset = aligned_offset + nmemb * size;
		if (new_offset > sizeof(static_calloc_buf)) {
			abort();
		}
	} while ((res_offset = uatomic_cmpxchg(&static_calloc_buf_offset,
			prev_offset, new_offset)) != prev_offset);
	*(size_t *) &static_calloc_buf[aligned_offset - sizeof(size_t)] = size;
	return &static_calloc_buf[aligned_offset];
}

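/*
 * Layout of the bootstrap arena, as implied by static_calloc_aligned()
 * (offsets illustrative):
 *
 *   static_calloc_buf: ...[pad][size_t size][object][pad][size_t size][object]...
 *                                           ^ returned pointer
 *
 * The size_t header stored just before each returned pointer is what
 * static_realloc() reads back through ((size_t *) ptr - 1).  Note that
 * nmemb * size is not checked for multiplication overflow; in practice
 * this path only serves a handful of small allocations made while
 * dlsym() runs, and anything larger than 4 KiB aborts in the loop.
 */
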
static
void *static_calloc(size_t nmemb, size_t size)
{
	void *retval;

	retval = static_calloc_aligned(nmemb, size, 1);
	return retval;
}

static
void *static_malloc(size_t size)
{
	void *retval;

	retval = static_calloc_aligned(1, size, 1);
	return retval;
}

static
void static_free(void *ptr)
{
	/* no-op. */
}

static
void *static_realloc(void *ptr, size_t size)
{
	size_t *old_size = NULL;
	void *retval;

	if (size == 0) {
		retval = NULL;
		goto end;
	}

	if (ptr) {
		old_size = (size_t *) ptr - 1;
		if (size <= *old_size) {
			/* We can re-use the old entry. */
			*old_size = size;
			retval = ptr;
			goto end;
		}
	}
	/* We need to expand. Don't free previous memory location. */
	retval = static_calloc_aligned(1, size, 1);
	assert(retval);
	if (ptr)
		memcpy(retval, ptr, *old_size);
end:
	return retval;
}

static
void *static_memalign(size_t alignment, size_t size)
{
	void *retval;

	retval = static_calloc_aligned(1, size, alignment);
	return retval;
}

static
int static_posix_memalign(void **memptr, size_t alignment, size_t size)
{
	void *ptr;

	/* Check for power of 2, larger than void *. */
	if (alignment & (alignment - 1)
			|| alignment < sizeof(void *)
			|| alignment == 0) {
		goto end;
	}
	ptr = static_calloc_aligned(1, size, alignment);
	*memptr = ptr;
end:
	return 0;
}

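/*
 * Note: unlike posix_memalign(3), this bootstrap variant reports
 * success (0) even when the alignment check fails, and in that case
 * leaves *memptr untouched.  That is tolerable for the narrow dlsym()
 * window it serves, but it is a divergence from the POSIX contract
 * worth keeping in mind.
 */
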
static
void setup_static_allocator(void)
{
	assert(cur_alloc.calloc == NULL);
	cur_alloc.calloc = static_calloc;
	assert(cur_alloc.malloc == NULL);
	cur_alloc.malloc = static_malloc;
	assert(cur_alloc.free == NULL);
	cur_alloc.free = static_free;
	assert(cur_alloc.realloc == NULL);
	cur_alloc.realloc = static_realloc;
	assert(cur_alloc.memalign == NULL);
	cur_alloc.memalign = static_memalign;
	assert(cur_alloc.posix_memalign == NULL);
	cur_alloc.posix_memalign = static_posix_memalign;
}

static
void lookup_all_symbols(void)
{
	struct alloc_functions af;

	/*
	 * Temporarily redirect allocation functions to
	 * static_calloc_aligned, and free function to static_free
	 * (no-op), until the dlsym lookup has completed.
	 */
	setup_static_allocator();

	/* Perform the actual lookups */
	af.calloc = dlsym(RTLD_NEXT, "calloc");
	af.malloc = dlsym(RTLD_NEXT, "malloc");
	af.free = dlsym(RTLD_NEXT, "free");
	af.realloc = dlsym(RTLD_NEXT, "realloc");
	af.memalign = dlsym(RTLD_NEXT, "memalign");
	af.posix_memalign = dlsym(RTLD_NEXT, "posix_memalign");

	/* Populate the new allocator functions */
	memcpy(&cur_alloc, &af, sizeof(cur_alloc));
}

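/*
 * Why the static allocator must be installed before the dlsym() calls:
 * with glibc, dlsym() itself can allocate (its error-reporting path
 * uses calloc()), and since this file interposes calloc(), that
 * allocation re-enters the wrapper before the real symbol is known.
 * Routing the re-entrant call to the bump allocator above breaks the
 * chicken-and-egg cycle.  RTLD_NEXT then resolves each symbol from the
 * object that follows this wrapper in link order, normally libc.
 * (The glibc detail is background knowledge, not stated in this file.)
 */
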
void *malloc(size_t size)
{
	void *retval;

	URCU_TLS(malloc_nesting)++;
	if (cur_alloc.malloc == NULL) {
		lookup_all_symbols();
		if (cur_alloc.malloc == NULL) {
			fprintf(stderr, "mallocwrap: unable to find malloc\n");
			abort();
		}
	}
	retval = cur_alloc.malloc(size);
	if (URCU_TLS(malloc_nesting) == 1) {
		tracepoint(ust_libc, malloc, size, retval,
			__builtin_return_address(0));
	}
	URCU_TLS(malloc_nesting)--;
	return retval;
}

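/*
 * Two idioms above recur in every wrapper below.  The per-thread
 * malloc_nesting counter ensures only the outermost call is traced:
 * the tracepoint machinery may itself allocate, and such nested calls
 * land right back in this file.  And __builtin_return_address(0)
 * passes the caller's address as the "ip" argument named by
 * TP_IP_PARAM, so events point at the application call site rather
 * than at the wrapper itself, which is what this commit's "ip context"
 * fix is about.
 */
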
void free(void *ptr)
{
	URCU_TLS(malloc_nesting)++;
	/*
	 * Check whether the memory was allocated with
	 * static_calloc_aligned, in which case there is nothing to free.
	 */
	if (caa_unlikely((char *)ptr >= static_calloc_buf &&
			(char *)ptr < static_calloc_buf + STATIC_CALLOC_LEN)) {
		goto end;
	}

	if (URCU_TLS(malloc_nesting) == 1) {
		tracepoint(ust_libc, free, ptr,
			__builtin_return_address(0));
	}

	if (cur_alloc.free == NULL) {
		lookup_all_symbols();
		if (cur_alloc.free == NULL) {
			fprintf(stderr, "mallocwrap: unable to find free\n");
			abort();
		}
	}
	cur_alloc.free(ptr);
end:
	URCU_TLS(malloc_nesting)--;
}

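/*
 * Pointers from the bootstrap arena are deliberately leaked: the 4 KiB
 * buffer is static storage that lives for the whole process, so the
 * range check above only has to keep such pointers away from the real
 * free(), which would likely abort on a pointer it never handed out.
 */
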
void *calloc(size_t nmemb, size_t size)
{
	void *retval;

	URCU_TLS(malloc_nesting)++;
	if (cur_alloc.calloc == NULL) {
		lookup_all_symbols();
		if (cur_alloc.calloc == NULL) {
			fprintf(stderr, "callocwrap: unable to find calloc\n");
			abort();
		}
	}
	retval = cur_alloc.calloc(nmemb, size);
	if (URCU_TLS(malloc_nesting) == 1) {
		tracepoint(ust_libc, calloc, nmemb, size, retval,
			__builtin_return_address(0));
	}
	URCU_TLS(malloc_nesting)--;
	return retval;
}

void *realloc(void *ptr, size_t size)
{
	void *retval;

	URCU_TLS(malloc_nesting)++;
	/*
	 * Check whether the memory was allocated with
	 * static_calloc_aligned, in which case there is nothing
	 * to free, and we need to copy the old data.
	 */
	if (caa_unlikely((char *)ptr >= static_calloc_buf &&
			(char *)ptr < static_calloc_buf + STATIC_CALLOC_LEN)) {
		size_t *old_size;

		old_size = (size_t *) ptr - 1;
		if (cur_alloc.calloc == NULL) {
			lookup_all_symbols();
			if (cur_alloc.calloc == NULL) {
				fprintf(stderr, "reallocwrap: unable to find calloc\n");
				abort();
			}
		}
		retval = cur_alloc.calloc(1, size);
		if (retval) {
			/*
			 * Copy at most the new size: a shrinking
			 * realloc must not write past the fresh
			 * allocation.
			 */
			memcpy(retval, ptr, size < *old_size ? size : *old_size);
		}
		/*
		 * Mimic that a NULL pointer has been received, so
		 * memory allocation analysis based on the trace doesn't
		 * get confused by the address from the static
		 * allocator.
		 */
		ptr = NULL;
		goto end;
	}

	if (cur_alloc.realloc == NULL) {
		lookup_all_symbols();
		if (cur_alloc.realloc == NULL) {
			fprintf(stderr, "reallocwrap: unable to find realloc\n");
			abort();
		}
	}
	retval = cur_alloc.realloc(ptr, size);
end:
	if (URCU_TLS(malloc_nesting) == 1) {
		tracepoint(ust_libc, realloc, ptr, size, retval,
			__builtin_return_address(0));
	}
	URCU_TLS(malloc_nesting)--;
	return retval;
}

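/*
 * realloc() is the one entry point where a bootstrap-arena pointer can
 * flow into the steady-state allocator, which is why it migrates the
 * payload into memory obtained from the real calloc() and then traces
 * ptr as NULL, as if this were a fresh allocation.
 */
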
void *memalign(size_t alignment, size_t size)
{
	void *retval;

	URCU_TLS(malloc_nesting)++;
	if (cur_alloc.memalign == NULL) {
		lookup_all_symbols();
		if (cur_alloc.memalign == NULL) {
			fprintf(stderr, "memalignwrap: unable to find memalign\n");
			abort();
		}
	}
	retval = cur_alloc.memalign(alignment, size);
	if (URCU_TLS(malloc_nesting) == 1) {
		tracepoint(ust_libc, memalign, alignment, size, retval,
			__builtin_return_address(0));
	}
	URCU_TLS(malloc_nesting)--;
	return retval;
}

int posix_memalign(void **memptr, size_t alignment, size_t size)
{
	int retval;

	URCU_TLS(malloc_nesting)++;
	if (cur_alloc.posix_memalign == NULL) {
		lookup_all_symbols();
		if (cur_alloc.posix_memalign == NULL) {
			fprintf(stderr, "posix_memalignwrap: unable to find posix_memalign\n");
			abort();
		}
	}
	retval = cur_alloc.posix_memalign(memptr, alignment, size);
	if (URCU_TLS(malloc_nesting) == 1) {
		tracepoint(ust_libc, posix_memalign, *memptr, alignment, size,
			retval, __builtin_return_address(0));
	}
	URCU_TLS(malloc_nesting)--;
	return retval;
}

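/*
 * Caveat: the tracepoint above records *memptr even when retval is
 * nonzero, in which case posix_memalign(3) leaves *memptr unspecified,
 * so the recorded pointer is only meaningful when retval == 0.
 */
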
__attribute__((constructor))
void lttng_ust_malloc_wrapper_init(void)
{
	/* Initialization already done */
	if (cur_alloc.calloc) {
		return;
	}
	/*
	 * Ensure the allocator is in place before the process becomes
	 * multithreaded.
	 */
	lookup_all_symbols();
}
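
/*
 * Usage sketch (the session name and binary are illustrative; the
 * preload idiom itself is how this wrapper is meant to be loaded):
 *
 *   lttng create malloc-trace
 *   lttng enable-event -u 'ust_libc:*'
 *   lttng start
 *   LD_PRELOAD=liblttng-ust-libc-wrapper.so ./my_app
 *   lttng stop
 *   lttng view
 */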