malloc: Use global flag instead of function pointer dispatch for mtag
A flag check can be faster than function pointers because of how branch prediction and speculation work, and it can also remove a layer of indirection when there is a mismatch between the malloc internal tag_* api and __libc_mtag_* target hooks. Memory tagging wrapper functions are moved to malloc.c from arena.c and the logic now checks mmap_enabled. The definition of tag_new_usable is moved after chunk related definitions. This refactoring also allows using mtag_enabled checks instead of USE_MTAG ifdefs when memory tagging support only changes code logic when memory tagging is enabled at runtime. Note: an "if (false)" code block is optimized away even at -O0 by gcc. Reviewed-by: DJ Delorie <dj@redhat.com>
This commit is contained in:
parent
0c719cf42c
commit
42bac88a21
|
@ -287,34 +287,6 @@ extern struct dl_open_hook *_dl_open_hook;
|
|||
libc_hidden_proto (_dl_open_hook);
|
||||
#endif
|
||||
|
||||
#ifdef USE_MTAG
|
||||
|
||||
/* Generate a new (random) tag value for PTR and tag the memory it
|
||||
points to up to the end of the usable size for the chunk containing
|
||||
it. Return the newly tagged pointer. */
|
||||
static void *
|
||||
__mtag_tag_new_usable (void *ptr)
|
||||
{
|
||||
if (ptr)
|
||||
{
|
||||
mchunkptr cp = mem2chunk(ptr);
|
||||
ptr = __libc_mtag_tag_region (__libc_mtag_new_tag (ptr),
|
||||
CHUNK_AVAILABLE_SIZE (cp) - CHUNK_HDR_SZ);
|
||||
}
|
||||
return ptr;
|
||||
}
|
||||
|
||||
/* Generate a new (random) tag value for PTR, set the tags for the
|
||||
memory to the new tag and initialize the memory contents to VAL.
|
||||
In practice this function will only be called with VAL=0, but we
|
||||
keep this parameter to maintain the same prototype as memset. */
|
||||
static void *
|
||||
__mtag_tag_new_memset (void *ptr, int val, size_t size)
|
||||
{
|
||||
return __libc_mtag_memset_with_tag (__libc_mtag_new_tag (ptr), val, size);
|
||||
}
|
||||
#endif
|
||||
|
||||
static void
|
||||
ptmalloc_init (void)
|
||||
{
|
||||
|
@ -332,11 +304,8 @@ ptmalloc_init (void)
|
|||
if (__MTAG_SBRK_UNTAGGED)
|
||||
__morecore = __failing_morecore;
|
||||
|
||||
mtag_enabled = true;
|
||||
mtag_mmap_flags = __MTAG_MMAP_FLAGS;
|
||||
tag_new_memset = __mtag_tag_new_memset;
|
||||
tag_region = __libc_mtag_tag_region;
|
||||
tag_new_usable = __mtag_tag_new_usable;
|
||||
tag_at = __libc_mtag_address_get_tag;
|
||||
mtag_granule_mask = ~(size_t)(__MTAG_GRANULE_SIZE - 1);
|
||||
}
|
||||
#endif
|
||||
|
|
|
@ -441,36 +441,42 @@ void *(*__morecore)(ptrdiff_t) = __default_morecore;
|
|||
*/
|
||||
|
||||
#ifdef USE_MTAG
|
||||
|
||||
/* Default implementations when memory tagging is supported, but disabled. */
|
||||
static void *
|
||||
__default_tag_region (void *ptr, size_t size)
|
||||
{
|
||||
return ptr;
|
||||
}
|
||||
|
||||
static void *
|
||||
__default_tag_nop (void *ptr)
|
||||
{
|
||||
return ptr;
|
||||
}
|
||||
|
||||
static bool mtag_enabled = false;
|
||||
static int mtag_mmap_flags = 0;
|
||||
static size_t mtag_granule_mask = ~(size_t)0;
|
||||
|
||||
static void *(*tag_new_memset)(void *, int, size_t) = memset;
|
||||
static void *(*tag_region)(void *, size_t) = __default_tag_region;
|
||||
static void *(*tag_new_usable)(void *) = __default_tag_nop;
|
||||
static void *(*tag_at)(void *) = __default_tag_nop;
|
||||
|
||||
#else
|
||||
# define mtag_enabled false
|
||||
# define mtag_mmap_flags 0
|
||||
# define tag_new_memset(ptr, val, size) memset (ptr, val, size)
|
||||
# define tag_region(ptr, size) (ptr)
|
||||
# define tag_new_usable(ptr) (ptr)
|
||||
# define tag_at(ptr) (ptr)
|
||||
#endif
|
||||
|
||||
static __always_inline void *
|
||||
tag_region (void *ptr, size_t size)
|
||||
{
|
||||
if (__glibc_unlikely (mtag_enabled))
|
||||
return __libc_mtag_tag_region (ptr, size);
|
||||
return ptr;
|
||||
}
|
||||
|
||||
static __always_inline void *
|
||||
tag_new_memset (void *ptr, int val, size_t size)
|
||||
{
|
||||
if (__glibc_unlikely (mtag_enabled))
|
||||
return __libc_mtag_memset_with_tag (__libc_mtag_new_tag (ptr), val, size);
|
||||
return memset (ptr, val, size);
|
||||
}
|
||||
|
||||
/* Defined later. */
|
||||
static void *
|
||||
tag_new_usable (void *ptr);
|
||||
|
||||
static __always_inline void *
|
||||
tag_at (void *ptr)
|
||||
{
|
||||
if (__glibc_unlikely (mtag_enabled))
|
||||
return __libc_mtag_address_get_tag (ptr);
|
||||
return ptr;
|
||||
}
|
||||
|
||||
#include <string.h>
|
||||
|
||||
/*
|
||||
|
@ -1460,6 +1466,18 @@ checked_request2size (size_t req, size_t *sz) __nonnull (1)
|
|||
#pragma GCC poison mchunk_size
|
||||
#pragma GCC poison mchunk_prev_size
|
||||
|
||||
static __always_inline void *
|
||||
tag_new_usable (void *ptr)
|
||||
{
|
||||
if (__glibc_unlikely (mtag_enabled) && ptr)
|
||||
{
|
||||
mchunkptr cp = mem2chunk(ptr);
|
||||
ptr = __libc_mtag_tag_region (__libc_mtag_new_tag (ptr),
|
||||
CHUNK_AVAILABLE_SIZE (cp) - CHUNK_HDR_SZ);
|
||||
}
|
||||
return ptr;
|
||||
}
|
||||
|
||||
/*
|
||||
-------------------- Internal data structures --------------------
|
||||
|
||||
|
|
Loading…
Reference in a new issue