Fix: pass private data to context callbacks
[lttng-ust.git] / liblttng-ust-libc-wrapper / lttng-ust-malloc.c
/*
 * SPDX-License-Identifier: LGPL-2.1-or-later
 *
 * Copyright (C) 2009 Pierre-Marc Fournier
 * Copyright (C) 2011 Mathieu Desnoyers <mathieu.desnoyers@efficios.com>
 */

/*
 * Do _not_ define _LGPL_SOURCE because we don't want to create a
 * circular dependency loop between this malloc wrapper, liburcu and
 * libc.
 */
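
/*
 * Usage note: this wrapper library is meant to be preloaded in front
 * of libc, e.g.:
 *
 *   LD_PRELOAD=liblttng-ust-libc-wrapper.so ./my-app
 *
 * so that the malloc/free/calloc/realloc/memalign/posix_memalign
 * definitions below interpose on the libc symbols and emit
 * lttng_ust_libc:* events around each call.
 */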
#include <ust-dlfcn.h>
#include <sys/types.h>
#include <stdio.h>
#include <string.h>	/* memcpy() */
#include <assert.h>
#include <malloc.h>
#include <pthread.h>	/* pthread_mutex_t in the spinlock signatures */

#include <urcu/system.h>
#include <urcu/uatomic.h>
#include <urcu/compiler.h>
#include <urcu/tls-compat.h>
#include <urcu/arch.h>

#include <lttng/ust-libc-wrapper.h>

#include <ust-helper.h>
#include "ust-compat.h"

#define TRACEPOINT_DEFINE
#define TRACEPOINT_CREATE_PROBES
#define TP_IP_PARAM ip
#include "ust_libc.h"

#define STATIC_CALLOC_LEN 4096
static char static_calloc_buf[STATIC_CALLOC_LEN];
static unsigned long static_calloc_buf_offset;

struct alloc_functions {
	void *(*calloc)(size_t nmemb, size_t size);
	void *(*malloc)(size_t size);
	void (*free)(void *ptr);
	void *(*realloc)(void *ptr, size_t size);
	void *(*memalign)(size_t alignment, size_t size);
	int (*posix_memalign)(void **memptr, size_t alignment, size_t size);
};

static
struct alloc_functions cur_alloc;

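/*
 * cur_alloc starts out zero-initialized. The first wrapper invocation
 * installs the static_* fallbacks, performs the dlsym() lookups, then
 * copies the real libc entry points in. The init function at the end
 * of this file is expected to run before the process becomes
 * multithreaded, so the plain memcpy() into cur_alloc in
 * lookup_all_symbols() needs no locking.
 */
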
/*
 * Make sure our own use of the TLS compat layer will not cause
 * infinite recursion by calling calloc.
 */

static
void *static_calloc(size_t nmemb, size_t size);

/*
 * pthread mutex replacement for the URCU TLS compat layer.
 */
static int ust_malloc_lock;

static
void ust_malloc_spin_lock(pthread_mutex_t *lock)
	__attribute__((unused));
static
void ust_malloc_spin_lock(pthread_mutex_t *lock __attribute__((unused)))
{
	/*
	 * The memory barrier within cmpxchg takes care of ordering
	 * memory accesses with respect to the start of the critical
	 * section.
	 */
	while (uatomic_cmpxchg(&ust_malloc_lock, 0, 1) != 0)
		caa_cpu_relax();
}

static
void ust_malloc_spin_unlock(pthread_mutex_t *lock)
	__attribute__((unused));
static
void ust_malloc_spin_unlock(pthread_mutex_t *lock __attribute__((unused)))
{
	/*
	 * Ensure memory accesses within the critical section do not
	 * leak outside.
	 */
	cmm_smp_mb();
	uatomic_set(&ust_malloc_lock, 0);
}

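/*
 * DEFINE_URCU_TLS may itself allocate memory (through calloc) and take
 * a pthread mutex on some configurations. For this single declaration,
 * redirect those to the recursion-safe replacements defined above.
 */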
#define calloc static_calloc
#define pthread_mutex_lock ust_malloc_spin_lock
#define pthread_mutex_unlock ust_malloc_spin_unlock
static DEFINE_URCU_TLS(int, malloc_nesting);
#undef pthread_mutex_unlock
#undef pthread_mutex_lock
#undef calloc

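/*
 * The malloc_nesting TLS counter declared above keeps reentrant
 * wrapper calls (e.g. a tracepoint probe allocating memory internally)
 * from emitting nested events: only the outermost call, where the
 * counter equals 1, is traced.
 */
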
/*
 * Static allocator to use when initially executing dlsym(). It keeps a
 * size_t value of each object size prior to the object.
 */
static
void *static_calloc_aligned(size_t nmemb, size_t size, size_t alignment)
{
	size_t prev_offset, new_offset, res_offset, aligned_offset;

	if (nmemb * size == 0) {
		return NULL;
	}

	/*
	 * Protect static_calloc_buf_offset from concurrent updates
	 * using a cmpxchg loop rather than a mutex to remove a
	 * dependency on pthread. This will minimize the risk of bad
	 * interaction between mutex and malloc instrumentation.
	 */
	res_offset = CMM_LOAD_SHARED(static_calloc_buf_offset);
	do {
		prev_offset = res_offset;
		aligned_offset = LTTNG_UST_ALIGN(prev_offset + sizeof(size_t), alignment);
		new_offset = aligned_offset + nmemb * size;
		if (new_offset > sizeof(static_calloc_buf)) {
			abort();
		}
	} while ((res_offset = uatomic_cmpxchg(&static_calloc_buf_offset,
			prev_offset, new_offset)) != prev_offset);
	*(size_t *) &static_calloc_buf[aligned_offset - sizeof(size_t)] = size;
	return &static_calloc_buf[aligned_offset];
}
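
/*
 * Resulting layout within static_calloc_buf (alignment = 1 shown):
 *
 *   ... | size_t size | object bytes ... | size_t size | object ... |
 *                       ^ returned pointer
 *
 * static_realloc() reads the size_t word just before the pointer to
 * know how many bytes to copy; freed space is never reclaimed.
 */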

static
void *static_calloc(size_t nmemb, size_t size)
{
	void *retval;

	retval = static_calloc_aligned(nmemb, size, 1);
	return retval;
}

static
void *static_malloc(size_t size)
{
	void *retval;

	retval = static_calloc_aligned(1, size, 1);
	return retval;
}

static
void static_free(void *ptr __attribute__((unused)))
{
	/* no-op. */
}

static
void *static_realloc(void *ptr, size_t size)
{
	size_t *old_size = NULL;
	void *retval;

	if (size == 0) {
		retval = NULL;
		goto end;
	}

	if (ptr) {
		old_size = (size_t *) ptr - 1;
		if (size <= *old_size) {
			/* We can re-use the old entry. */
			*old_size = size;
			retval = ptr;
			goto end;
		}
	}
	/* We need to expand. Don't free previous memory location. */
	retval = static_calloc_aligned(1, size, 1);
	assert(retval);
	if (ptr)
		memcpy(retval, ptr, *old_size);
end:
	return retval;
}

static
void *static_memalign(size_t alignment, size_t size)
{
	void *retval;

	retval = static_calloc_aligned(1, size, alignment);
	return retval;
}

static
int static_posix_memalign(void **memptr, size_t alignment, size_t size)
{
	void *ptr;

	/* Check that alignment is a power of 2, at least sizeof(void *). */
	if (alignment & (alignment - 1)
			|| alignment < sizeof(void *)
			|| alignment == 0) {
		goto end;
	}
	ptr = static_calloc_aligned(1, size, alignment);
	*memptr = ptr;
end:
	/* Best-effort bootstrap allocator: always report success. */
	return 0;
}

static
void setup_static_allocator(void)
{
	assert(cur_alloc.calloc == NULL);
	cur_alloc.calloc = static_calloc;
	assert(cur_alloc.malloc == NULL);
	cur_alloc.malloc = static_malloc;
	assert(cur_alloc.free == NULL);
	cur_alloc.free = static_free;
	assert(cur_alloc.realloc == NULL);
	cur_alloc.realloc = static_realloc;
	assert(cur_alloc.memalign == NULL);
	cur_alloc.memalign = static_memalign;
	assert(cur_alloc.posix_memalign == NULL);
	cur_alloc.posix_memalign = static_posix_memalign;
}

static
void lookup_all_symbols(void)
{
	struct alloc_functions af;

	/*
	 * Temporarily redirect allocation functions to
	 * static_calloc_aligned, and free function to static_free
	 * (no-op), until the dlsym lookup has completed.
	 */
	setup_static_allocator();

	/* Perform the actual lookups */
	af.calloc = dlsym(RTLD_NEXT, "calloc");
	af.malloc = dlsym(RTLD_NEXT, "malloc");
	af.free = dlsym(RTLD_NEXT, "free");
	af.realloc = dlsym(RTLD_NEXT, "realloc");
	af.memalign = dlsym(RTLD_NEXT, "memalign");
	af.posix_memalign = dlsym(RTLD_NEXT, "posix_memalign");

	/* Populate the new allocator functions */
	memcpy(&cur_alloc, &af, sizeof(cur_alloc));
}
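
/*
 * Note that dlsym() itself may allocate (glibc's implementation can
 * call calloc() internally), which is why the static allocator must be
 * installed before the lookups above, and why free() below must
 * tolerate pointers that fall inside static_calloc_buf.
 */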

void *malloc(size_t size)
{
	void *retval;

	URCU_TLS(malloc_nesting)++;
	if (cur_alloc.malloc == NULL) {
		lookup_all_symbols();
		if (cur_alloc.malloc == NULL) {
			fprintf(stderr, "mallocwrap: unable to find malloc\n");
			abort();
		}
	}
	retval = cur_alloc.malloc(size);
	if (URCU_TLS(malloc_nesting) == 1) {
		tracepoint(lttng_ust_libc, malloc,
			size, retval, LTTNG_UST_CALLER_IP());
	}
	URCU_TLS(malloc_nesting)--;
	return retval;
}

void free(void *ptr)
{
	URCU_TLS(malloc_nesting)++;
	/*
	 * Check whether the memory was allocated with
	 * static_calloc_aligned, in which case there is nothing to free.
	 */
	if (caa_unlikely((char *)ptr >= static_calloc_buf &&
			(char *)ptr < static_calloc_buf + STATIC_CALLOC_LEN)) {
		goto end;
	}

	if (URCU_TLS(malloc_nesting) == 1) {
		tracepoint(lttng_ust_libc, free,
			ptr, LTTNG_UST_CALLER_IP());
	}

	if (cur_alloc.free == NULL) {
		lookup_all_symbols();
		if (cur_alloc.free == NULL) {
			fprintf(stderr, "mallocwrap: unable to find free\n");
			abort();
		}
	}
	cur_alloc.free(ptr);
end:
	URCU_TLS(malloc_nesting)--;
}

void *calloc(size_t nmemb, size_t size)
{
	void *retval;

	URCU_TLS(malloc_nesting)++;
	if (cur_alloc.calloc == NULL) {
		lookup_all_symbols();
		if (cur_alloc.calloc == NULL) {
			fprintf(stderr, "callocwrap: unable to find calloc\n");
			abort();
		}
	}
	retval = cur_alloc.calloc(nmemb, size);
	if (URCU_TLS(malloc_nesting) == 1) {
		tracepoint(lttng_ust_libc, calloc,
			nmemb, size, retval, LTTNG_UST_CALLER_IP());
	}
	URCU_TLS(malloc_nesting)--;
	return retval;
}

void *realloc(void *ptr, size_t size)
{
	void *retval;

	URCU_TLS(malloc_nesting)++;
	/*
	 * Check whether the memory was allocated with
	 * static_calloc_aligned, in which case there is nothing
	 * to free, and we need to copy the old data.
	 */
	if (caa_unlikely((char *)ptr >= static_calloc_buf &&
			(char *)ptr < static_calloc_buf + STATIC_CALLOC_LEN)) {
		size_t *old_size;

		old_size = (size_t *) ptr - 1;
		if (cur_alloc.calloc == NULL) {
			lookup_all_symbols();
			if (cur_alloc.calloc == NULL) {
				fprintf(stderr, "reallocwrap: unable to find calloc\n");
				abort();
			}
		}
		retval = cur_alloc.calloc(1, size);
		if (retval) {
			/* Copy at most the new size: the block may shrink. */
			memcpy(retval, ptr, size < *old_size ? size : *old_size);
		}
		/*
		 * Mimic that a NULL pointer has been received, so
		 * memory allocation analysis based on the trace doesn't
		 * get confused by the address from the static
		 * allocator.
		 */
		ptr = NULL;
		goto end;
	}

	if (cur_alloc.realloc == NULL) {
		lookup_all_symbols();
		if (cur_alloc.realloc == NULL) {
			fprintf(stderr, "reallocwrap: unable to find realloc\n");
			abort();
		}
	}
	retval = cur_alloc.realloc(ptr, size);
end:
	if (URCU_TLS(malloc_nesting) == 1) {
		tracepoint(lttng_ust_libc, realloc,
			ptr, size, retval, LTTNG_UST_CALLER_IP());
	}
	URCU_TLS(malloc_nesting)--;
	return retval;
}

void *memalign(size_t alignment, size_t size)
{
	void *retval;

	URCU_TLS(malloc_nesting)++;
	if (cur_alloc.memalign == NULL) {
		lookup_all_symbols();
		if (cur_alloc.memalign == NULL) {
			fprintf(stderr, "memalignwrap: unable to find memalign\n");
			abort();
		}
	}
	retval = cur_alloc.memalign(alignment, size);
	if (URCU_TLS(malloc_nesting) == 1) {
		tracepoint(lttng_ust_libc, memalign,
			alignment, size, retval,
			LTTNG_UST_CALLER_IP());
	}
	URCU_TLS(malloc_nesting)--;
	return retval;
}

int posix_memalign(void **memptr, size_t alignment, size_t size)
{
	int retval;

	URCU_TLS(malloc_nesting)++;
	if (cur_alloc.posix_memalign == NULL) {
		lookup_all_symbols();
		if (cur_alloc.posix_memalign == NULL) {
			fprintf(stderr, "posix_memalignwrap: unable to find posix_memalign\n");
			abort();
		}
	}
	retval = cur_alloc.posix_memalign(memptr, alignment, size);
	if (URCU_TLS(malloc_nesting) == 1) {
		tracepoint(lttng_ust_libc, posix_memalign,
			*memptr, alignment, size,
			retval, LTTNG_UST_CALLER_IP());
	}
	URCU_TLS(malloc_nesting)--;
	return retval;
}

static
void lttng_ust_fixup_malloc_nesting_tls(void)
{
	asm volatile ("" : : "m" (URCU_TLS(malloc_nesting)));
}
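
/*
 * The asm memory constraint above forces the thread-local storage for
 * malloc_nesting to be allocated as soon as this function runs, at
 * init time, rather than lazily inside a later malloc/free call where
 * a TLS allocation could itself recurse into this wrapper.
 */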

void lttng_ust_libc_wrapper_malloc_init(void)
{
	/* Initialization already done */
	if (cur_alloc.calloc) {
		return;
	}
	lttng_ust_fixup_malloc_nesting_tls();
	/*
	 * Ensure the allocator is in place before the process becomes
	 * multithreaded.
	 */
	lookup_all_symbols();
}