| author | Jason Evans <je@fb.com> | 2012-02-13 22:30:52 (GMT) |
|---|---|---|
| committer | Jason Evans <je@fb.com> | 2012-02-13 23:03:59 (GMT) |
| commit | ef8897b4b938111fcc9b54725067f1dbb33a4c20 (patch) | |
| tree | 46e59a294ec4b435432de0f2bd9412f5caaec9b7 /include | |
| parent | 0fee70d718b9846cfab04225dc86a4b4216b963f (diff) | |
Make 8-byte tiny size class non-optional.
When tiny size class support was first added, it was intended to support
truly tiny size classes (even 2 bytes). However, this wasn't very
useful in practice, so the minimum tiny size class has been limited to
sizeof(void *) for a long time now. This is too small to be
standards-compliant, but other commonly used malloc implementations do not
even bother using a 16-byte quantum on systems with vector units (SSE2+,
AltiVec, etc.). As such, it is safe in practice to support an 8-byte
tiny size class on 64-bit systems that support 16-byte types.
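
For concreteness, here is a minimal, self-contained sketch (not jemalloc source) of the size-class arithmetic involved, assuming a 16-byte quantum (LG_QUANTUM == 4), which is typical for 64-bit systems with SSE2/AltiVec. With LG_TINY_MIN fixed at 3, the smallest size class is 8 bytes and a single tiny bin sits below the quantum:

```c
#include <stdio.h>

#define LG_TINY_MIN 3                         /* now fixed: 2^3 = 8-byte minimum */
#define TINY_MIN    (1U << LG_TINY_MIN)
#define LG_QUANTUM  4                         /* assumption: 16-byte quantum */
#define QUANTUM     ((size_t)1 << LG_QUANTUM)

int main(void)
{
	unsigned ntbins = (unsigned)(LG_QUANTUM - LG_TINY_MIN); /* (2^n)-spaced tiny bins */
	size_t tspace_max = QUANTUM >> 1;                        /* largest tiny size */

	printf("TINY_MIN=%u, ntbins=%u, tspace_max=%zu\n",
	    TINY_MIN, ntbins, tspace_max);
	/* Prints: TINY_MIN=8, ntbins=1, tspace_max=8 */
	return 0;
}
```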
Diffstat (limited to 'include')
| -rw-r--r-- | include/jemalloc/internal/arena.h | 19 |
| -rw-r--r-- | include/jemalloc/internal/jemalloc_internal.h.in | 7 |
| -rw-r--r-- | include/jemalloc/jemalloc_defs.h.in | 6 |
3 files changed, 7 insertions, 25 deletions
```diff
diff --git a/include/jemalloc/internal/arena.h b/include/jemalloc/internal/arena.h
index b8de12b..cacb03f 100644
--- a/include/jemalloc/internal/arena.h
+++ b/include/jemalloc/internal/arena.h
@@ -17,7 +17,7 @@
 	(((s) + SUBPAGE_MASK) & ~SUBPAGE_MASK)
 
 /* Smallest size class to support. */
-#define	LG_TINY_MIN		LG_SIZEOF_PTR
+#define	LG_TINY_MIN		3
 #define	TINY_MIN		(1U << LG_TINY_MIN)
 
 /*
@@ -418,18 +418,13 @@ extern uint8_t const	*small_size2bin;
 extern arena_bin_info_t	*arena_bin_info;
 
 /* Various bin-related settings. */
-#ifdef JEMALLOC_TINY		/* Number of (2^n)-spaced tiny bins. */
-#  define		ntbins	((unsigned)(LG_QUANTUM - LG_TINY_MIN))
-#else
-#  define		ntbins	0
-#endif
+		/* Number of (2^n)-spaced tiny bins. */
+#define	ntbins		((unsigned)(LG_QUANTUM - LG_TINY_MIN))
 extern unsigned		nqbins; /* Number of quantum-spaced bins. */
 extern unsigned		ncbins; /* Number of cacheline-spaced bins. */
 extern unsigned		nsbins; /* Number of subpage-spaced bins. */
 extern unsigned		nbins;
-#ifdef JEMALLOC_TINY
-#  define		tspace_max	((size_t)(QUANTUM >> 1))
-#endif
+#define	tspace_max	((size_t)(QUANTUM >> 1))
 #define	qspace_min	QUANTUM
 extern size_t		qspace_max;
 extern size_t		cspace_min;
@@ -633,18 +628,18 @@ arena_prof_ctx_set(const void *ptr, prof_ctx_t *ctx)
 JEMALLOC_INLINE void *
 arena_malloc(size_t size, bool zero)
 {
-	tcache_t *tcache = tcache_get();
+	tcache_t *tcache;
 
 	assert(size != 0);
 	assert(QUANTUM_CEILING(size) <= arena_maxclass);
 
 	if (size <= small_maxclass) {
-		if (tcache != NULL)
+		if ((tcache = tcache_get()) != NULL)
 			return (tcache_alloc_small(tcache, size, zero));
 		else
 			return (arena_malloc_small(choose_arena(), size, zero));
 	} else {
-		if (tcache != NULL && size <= tcache_maxclass)
+		if (size <= tcache_maxclass && (tcache = tcache_get()) != NULL)
 			return (tcache_alloc_large(tcache, size, zero));
 		else
 			return (arena_malloc_large(choose_arena(), size, zero));
diff --git a/include/jemalloc/internal/jemalloc_internal.h.in b/include/jemalloc/internal/jemalloc_internal.h.in
index 4441537..971336e 100644
--- a/include/jemalloc/internal/jemalloc_internal.h.in
+++ b/include/jemalloc/internal/jemalloc_internal.h.in
@@ -118,13 +118,6 @@ static const bool config_tcache =
     false
 #endif
     ;
-static const bool config_tiny =
-#ifdef JEMALLOC_TINY
-    true
-#else
-    false
-#endif
-    ;
 static const bool config_tls =
 #ifdef JEMALLOC_TLS
     true
diff --git a/include/jemalloc/jemalloc_defs.h.in b/include/jemalloc/jemalloc_defs.h.in
index f78028b..66da6f3 100644
--- a/include/jemalloc/jemalloc_defs.h.in
+++ b/include/jemalloc/jemalloc_defs.h.in
@@ -80,12 +80,6 @@
 #undef JEMALLOC_PROF_GCC
 
 /*
- * JEMALLOC_TINY enables support for tiny objects, which are smaller than one
- * quantum.
- */
-#undef JEMALLOC_TINY
-
-/*
  * JEMALLOC_TCACHE enables a thread-specific caching layer for small objects.
  * This makes it possible to allocate/deallocate objects without any locking
  * when the cache is in the steady state.
```
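
The arena_malloc() hunk above also defers the tcache_get() lookup until the size checks show the thread cache can actually serve the request. The following standalone sketch (hypothetical names, not jemalloc's API; the boundary values are assumptions for illustration) shows the same pattern of looking up the cache only on the paths that can use it:

```c
#include <stdio.h>
#include <stdlib.h>

/* Hypothetical stand-ins for jemalloc's thread cache; names are made up. */
typedef struct { int unused; } tcache_t;

static tcache_t *cache_lookup(void) { static tcache_t tc; return &tc; }
static void *cache_alloc(tcache_t *tc, size_t size) { (void)tc; return malloc(size); }
static void *slow_alloc(size_t size) { return malloc(size); }

/* Assumed small/large and largest-cacheable boundaries, for illustration only. */
static const size_t small_max = 3584;
static const size_t cache_max = 32768;

static void *alloc(size_t size)
{
	tcache_t *tcache;

	if (size <= small_max) {
		/* Small request: the cache can always serve it, so look it up now. */
		if ((tcache = cache_lookup()) != NULL)
			return cache_alloc(tcache, size);
		return slow_alloc(size);
	}
	/* Large request: check the size first so oversized requests skip the lookup. */
	if (size <= cache_max && (tcache = cache_lookup()) != NULL)
		return cache_alloc(tcache, size);
	return slow_alloc(size);
}

int main(void)
{
	void *small = alloc(8);
	void *large = alloc(1 << 20);   /* bigger than cache_max: never touches the cache */

	printf("small=%p large=%p\n", small, large);
	free(small);
	free(large);
	return 0;
}
```

Ordering the size check before the lookup means requests too large for the cache never pay for the thread-local lookup at all.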