/******************************************************************************/
#ifdef JEMALLOC_H_TYPES

#ifdef JEMALLOC_VALGRIND
#include <valgrind/valgrind.h>

/*
 * The size that is reported to Valgrind must be consistent through a chain of
 * malloc..realloc..realloc calls.  Request size isn't recorded anywhere in
 * jemalloc, so it is critical that all callers of these macros provide usize
 * rather than request size.  As a result, buffer overflow detection is
 * technically weakened for the standard API, though it is generally accepted
 * practice to consider any extra bytes reported by malloc_usable_size() as
 * usable space.
 */
#define JEMALLOC_VALGRIND_MAKE_MEM_NOACCESS(ptr, usize) do { \
        if (in_valgrind) \
                valgrind_make_mem_noaccess(ptr, usize); \
} while (0)
#define JEMALLOC_VALGRIND_MAKE_MEM_UNDEFINED(ptr, usize) do { \
        if (in_valgrind) \
                valgrind_make_mem_undefined(ptr, usize); \
} while (0)
#define JEMALLOC_VALGRIND_MAKE_MEM_DEFINED(ptr, usize) do { \
        if (in_valgrind) \
                valgrind_make_mem_defined(ptr, usize); \
} while (0)
/*
 * The VALGRIND_MALLOCLIKE_BLOCK() and VALGRIND_RESIZEINPLACE_BLOCK() macro
 * calls must be embedded in macros rather than in functions so that when
 * Valgrind reports errors, there are no extra stack frames in the backtraces.
 */
#define JEMALLOC_VALGRIND_MALLOC(cond, ptr, usize, zero) do { \
        if (in_valgrind && cond) \
                VALGRIND_MALLOCLIKE_BLOCK(ptr, usize, p2rz(ptr), zero); \
} while (0)
#define JEMALLOC_VALGRIND_REALLOC(maybe_moved, ptr, usize, \
    ptr_maybe_null, old_ptr, old_usize, old_rzsize, old_ptr_maybe_null, \
    zero) do { \
        if (in_valgrind) { \
                size_t rzsize = p2rz(ptr); \
                \
                if (!maybe_moved || ptr == old_ptr) { \
                        VALGRIND_RESIZEINPLACE_BLOCK(ptr, old_usize, \
                            usize, rzsize); \
                        if (zero && old_usize < usize) { \
                                valgrind_make_mem_defined( \
                                    (void *)((uintptr_t)ptr + \
                                    old_usize), usize - old_usize); \
                        } \
                } else { \
                        if (!old_ptr_maybe_null || old_ptr != NULL) { \
                                valgrind_freelike_block(old_ptr, \
                                    old_rzsize); \
                        } \
                        if (!ptr_maybe_null || ptr != NULL) { \
                                size_t copy_size = (old_usize < usize) \
                                    ? old_usize : usize; \
                                size_t tail_size = usize - copy_size; \
                                VALGRIND_MALLOCLIKE_BLOCK(ptr, usize, \
                                    rzsize, false); \
                                if (copy_size > 0) { \
                                        valgrind_make_mem_defined(ptr, \
                                            copy_size); \
                                } \
                                if (zero && tail_size > 0) { \
                                        valgrind_make_mem_defined( \
                                            (void *)((uintptr_t)ptr + \
                                            copy_size), tail_size); \
                                } \
                        } \
                } \
        } \
} while (0)
#define JEMALLOC_VALGRIND_FREE(ptr, rzsize) do { \
        if (in_valgrind) \
                valgrind_freelike_block(ptr, rzsize); \
} while (0)
#else
#define RUNNING_ON_VALGRIND ((unsigned)0)
#define JEMALLOC_VALGRIND_MAKE_MEM_NOACCESS(ptr, usize) do {} while (0)
#define JEMALLOC_VALGRIND_MAKE_MEM_UNDEFINED(ptr, usize) do {} while (0)
#define JEMALLOC_VALGRIND_MAKE_MEM_DEFINED(ptr, usize) do {} while (0)
#define JEMALLOC_VALGRIND_MALLOC(cond, ptr, usize, zero) do {} while (0)
#define JEMALLOC_VALGRIND_REALLOC(maybe_moved, ptr, usize, \
    ptr_maybe_null, old_ptr, old_usize, old_rzsize, old_ptr_maybe_null, \
    zero) do {} while (0)
#define JEMALLOC_VALGRIND_FREE(ptr, rzsize) do {} while (0)
#endif

#endif /* JEMALLOC_H_TYPES */
/******************************************************************************/
#ifdef JEMALLOC_H_STRUCTS

#endif /* JEMALLOC_H_STRUCTS */
/******************************************************************************/
#ifdef JEMALLOC_H_EXTERNS

#ifdef JEMALLOC_VALGRIND
void	valgrind_make_mem_noaccess(void *ptr, size_t usize);
void	valgrind_make_mem_undefined(void *ptr, size_t usize);
void	valgrind_make_mem_defined(void *ptr, size_t usize);
void	valgrind_freelike_block(void *ptr, size_t usize);
#endif

#endif /* JEMALLOC_H_EXTERNS */
/******************************************************************************/
#ifdef JEMALLOC_H_INLINES

#endif /* JEMALLOC_H_INLINES */
/******************************************************************************/
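/*
 * Illustrative usage sketch (an assumption for illustration, not part of the
 * original header): how an allocation path might drive the macros above.  The
 * helpers my_alloc_impl() and my_free_impl() are hypothetical; s2u() and
 * p2rz() are jemalloc-style size/redzone helpers.  The key point is that the
 * usable size (usize), never the request size, is what gets reported to
 * Valgrind.
 *
 *	void *
 *	example_malloc(size_t size, bool zero)
 *	{
 *		size_t usize = s2u(size);          // usable size, not request size
 *		void *ret = my_alloc_impl(usize);  // hypothetical backing allocation
 *
 *		// Report the block to Valgrind only if allocation succeeded.
 *		JEMALLOC_VALGRIND_MALLOC(ret != NULL, ret, usize, zero);
 *		return (ret);
 *	}
 *
 *	void
 *	example_free(void *ptr)
 *	{
 *		size_t rzsize = p2rz(ptr);         // redzone size, captured before freeing
 *
 *		my_free_impl(ptr);                 // hypothetical backing deallocation
 *		JEMALLOC_VALGRIND_FREE(ptr, rzsize);
 *	}
 */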