author     Jason Evans <jasone@canonware.com>  2016-04-06 17:32:06 (GMT)
committer  Jason Evans <jasone@canonware.com>  2016-04-06 17:32:06 (GMT)
commit     4a8abbb400afe695f145a487380c04a946500bc6
tree       847dd16b4b150cd2fb20b9146086758690517f5f
parent     a3c4193280b2fbd267b68f3bce091a53b5ea0b97
Fix bitmap_sfu() regression.
Fix bitmap_sfu() to shift by LG_BITMAP_GROUP_NBITS rather than hard-coded 6 when using linear (non-USE_TREE) bitmap search. In practice this affects only 64-bit systems for which sizeof(long) is not 8 (i.e. Windows), since USE_TREE is defined for 32-bit systems.

This regression was caused by b8823ab02607d6f03febd32ac504bb6188c54047 (Use linear scan for small bitmaps).

This resolves #368.
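The shift matters because a bitmap "group" is one bitmap_t (an unsigned long), so converting the group index i into an absolute bit index must scale by the actual number of bits per group. Below is a minimal, self-contained C sketch of that arithmetic; only the names bitmap_t and LG_BITMAP_GROUP_NBITS come from jemalloc's internal header, the macro here is a simplified stand-in for jemalloc's definition (which derives it from the size of bitmap_t), and the values of i and bit are hypothetical:

#include <limits.h>
#include <stdio.h>

/* In jemalloc, one bitmap "group" is a single bitmap_t word. */
typedef unsigned long bitmap_t;

/*
 * Simplified stand-in for jemalloc's LG_BITMAP_GROUP_NBITS: log2 of the
 * number of bits per group.  6 on LP64 (64-bit long); 5 on LLP64 systems
 * such as 64-bit Windows, where sizeof(long) == 4.
 */
#if ULONG_MAX == 0xffffffffffffffffUL
#  define LG_BITMAP_GROUP_NBITS 6
#else
#  define LG_BITMAP_GROUP_NBITS 5
#endif

int
main(void)
{
	size_t i = 3;	/* hypothetical group index found by the linear scan */
	size_t bit = 1;	/* hypothetical ffs-style position within the group */

	/* Correct: scale the group index by the actual bits per group. */
	size_t fixed = (bit - 1) + (i << LG_BITMAP_GROUP_NBITS);
	/* Regression: hard-coded 6 assumes 64 bits per group. */
	size_t buggy = (bit - 1) + (i << 6);

	/* On LLP64 these diverge: 3 << 5 == 96 vs. 3 << 6 == 192. */
	printf("fixed=%zu buggy=%zu\n", fixed, buggy);
	return (0);
}

On LP64 the two expressions agree (both shift by 6), which is why the regression surfaced only on Windows.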
-rw-r--r--  include/jemalloc/internal/bitmap.h  2
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/include/jemalloc/internal/bitmap.h b/include/jemalloc/internal/bitmap.h
index 2594e3a..0e0d247 100644
--- a/include/jemalloc/internal/bitmap.h
+++ b/include/jemalloc/internal/bitmap.h
@@ -223,7 +223,7 @@ bitmap_sfu(bitmap_t *bitmap, const bitmap_info_t *binfo)
 		i++;
 		g = bitmap[i];
 	}
-	bit = (bit - 1) + (i << 6);
+	bit = (bit - 1) + (i << LG_BITMAP_GROUP_NBITS);
 #endif
 	bitmap_set(bitmap, binfo, bit);
 	return (bit);