author     Jason Evans <jasone@canonware.com>    2013-01-22 16:45:43 (GMT)
committer  Jason Evans <jasone@canonware.com>    2013-01-22 16:45:43 (GMT)
commit     88393cb0eb9a046000d20809809d4adac11957ab (patch)
tree       ca6efbe3a61ebe33bc2129fb3efcad762217d4d5 /src/jemalloc.c
parent     38067483c542adfe092644d1ecc103c6bc74add0 (diff)
Add and use JEMALLOC_ALWAYS_INLINE.
Add JEMALLOC_ALWAYS_INLINE and use it to guarantee that the entire fast
paths of the primary allocation/deallocation functions are inlined.
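
The macro's definition is not part of this diff (which is limited to src/jemalloc.c); a minimal sketch of what it plausibly looks like, assuming a GCC-compatible compiler and jemalloc's existing JEMALLOC_ATTR(a) wrapper around __attribute__((a)):

```c
/*
 * Sketch only -- the real definition lives in jemalloc's internal headers,
 * not in this diff.  JEMALLOC_ATTR(a) is assumed to expand to
 * __attribute__((a)) on GCC-compatible compilers and to nothing otherwise.
 */
#ifdef JEMALLOC_DEBUG
   /* Debug builds: leave inlining to the compiler so code stays debuggable. */
#  define JEMALLOC_ALWAYS_INLINE	static inline
#else
   /* Optimized builds: force the fast-path helpers to be inlined. */
#  define JEMALLOC_ALWAYS_INLINE	static inline JEMALLOC_ATTR(always_inline)
#endif
```

Within src/jemalloc.c itself, the patch below applies JEMALLOC_ATTR(always_inline) directly to static functions such as malloc_init() and iallocm().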
Diffstat (limited to 'src/jemalloc.c')
-rw-r--r--   src/jemalloc.c | 6
1 file changed, 3 insertions(+), 3 deletions(-)
diff --git a/src/jemalloc.c b/src/jemalloc.c
index ec88700..58e18df 100644
--- a/src/jemalloc.c
+++ b/src/jemalloc.c
@@ -279,7 +279,7 @@ arenas_cleanup(void *arg)
 	malloc_mutex_unlock(&arenas_lock);
 }
 
-static inline bool
+static JEMALLOC_ATTR(always_inline) bool
 malloc_init(void)
 {
 
@@ -892,7 +892,7 @@ JEMALLOC_ATTR(nonnull(1))
  * Avoid any uncertainty as to how many backtrace frames to ignore in
  * PROF_ALLOC_PREP().
  */
-JEMALLOC_ATTR(noinline)
+JEMALLOC_NOINLINE
 #endif
 static int
 imemalign(void **memptr, size_t alignment, size_t size,
@@ -1378,7 +1378,7 @@ je_mallctlbymib(const size_t *mib, size_t miblen, void *oldp, size_t *oldlenp,
  */
 
 #ifdef JEMALLOC_EXPERIMENTAL
-JEMALLOC_INLINE void *
+static JEMALLOC_ATTR(always_inline) void *
 iallocm(size_t usize, size_t alignment, bool zero, bool try_tcache,
     arena_t *arena)
 {
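
For readers less familiar with the GCC attributes involved, a standalone illustration (hypothetical helper names, not jemalloc code) of the two behaviors the patch relies on: always_inline forces the fast path to be inlined even when the compiler would otherwise decline, while noinline keeps a function in its own stack frame so profiling code such as PROF_ALLOC_PREP() can skip a fixed number of backtrace frames.

```c
/* Standalone demo of always_inline vs. noinline; compile with: cc -c demo.c */
#include <stddef.h>

/*
 * always_inline: calls are inlined even at -O0; if the compiler cannot
 * inline the function, compilation fails rather than silently emitting a
 * call into the fast path.
 */
static inline __attribute__((always_inline)) size_t
round_up_16(size_t n)
{
	/* Round n up to the next multiple of 16. */
	return (n + 15) & ~(size_t)15;
}

/*
 * noinline: the function keeps its own stack frame, so callers that walk
 * the backtrace can rely on a fixed number of frames to ignore.
 */
static __attribute__((noinline)) size_t
padded_size(size_t n)
{
	return round_up_16(n) + 16;
}

size_t
demo(size_t n)
{
	return padded_size(n);
}
```

The attribute matters because plain `inline` is only a hint; on less-optimized builds or large call sites the compiler may ignore it, reintroducing a function call into the allocation fast path that this commit is meant to eliminate.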