author	Jason Evans <jasone@canonware.com>	2017-02-08 18:30:44 (GMT)
committer	Jason Evans <jasone@canonware.com>	2017-02-09 02:50:03 (GMT)
commit	de8a68e85304848189643fb48100c18aa9d60e32 (patch)
tree	501e17ec14e3f73068711b8e19c470cc56c50b80 /include/jemalloc
parent	5f118307543b128e1ad6298ec2ab1acd71140095 (diff)
Enhance spin_adaptive() to yield after several iterations.
This avoids worst-case behavior if, e.g., another thread is preempted while owning the resource the spinning thread is waiting for.
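With the new threshold, each call that sees spin->iteration below 5 busy-waits for 2^iteration CPU_SPINWAIT iterations (1, 2, 4, 8, 16) and then increments the counter, so a waiter executes at most 1 + 2 + 4 + 8 + 16 = 31 spin iterations in total before every subsequent call falls through to SwitchToThread() on Windows or sched_yield() elsewhere, giving the CPU back to a possibly preempted owner.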
Diffstat (limited to 'include/jemalloc')
-rw-r--r--	include/jemalloc/internal/spin_inlines.h	17
1 file changed, 11 insertions(+), 6 deletions(-)
diff --git a/include/jemalloc/internal/spin_inlines.h b/include/jemalloc/internal/spin_inlines.h
index 03beead..1657326 100644
--- a/include/jemalloc/internal/spin_inlines.h
+++ b/include/jemalloc/internal/spin_inlines.h
@@ -8,14 +8,19 @@ void spin_adaptive(spin_t *spin);
 #if (defined(JEMALLOC_ENABLE_INLINE) || defined(JEMALLOC_SPIN_C_))
 JEMALLOC_INLINE void
 spin_adaptive(spin_t *spin) {
-	volatile uint64_t i;
+	volatile uint32_t i;
 
-	for (i = 0; i < (KQU(1) << spin->iteration); i++) {
-		CPU_SPINWAIT;
-	}
-
-	if (spin->iteration < 63) {
+	if (spin->iteration < 5) {
+		for (i = 0; i < (1U << spin->iteration); i++) {
+			CPU_SPINWAIT;
+		}
 		spin->iteration++;
+	} else {
+#ifdef _WIN32
+		SwitchToThread();
+#else
+		sched_yield();
+#endif
 	}
 }
 #endif
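For context, a minimal usage sketch follows. It is not part of this commit; it assumes jemalloc's internal spin headers are included (providing spin_t, SPIN_INITIALIZER, and spin_adaptive()), and the flag argument and wait_until_set() helper are hypothetical names introduced only for illustration.

/*
 * Hypothetical caller: spin on an atomic flag, letting spin_adaptive()
 * back off exponentially and then yield once its spin budget is exhausted.
 */
#include <stdatomic.h>
#include <stdbool.h>

static void
wait_until_set(atomic_bool *flag) {
	spin_t spin = SPIN_INITIALIZER;	/* assumed zero-initializer; iteration starts at 0 */

	while (!atomic_load_explicit(flag, memory_order_acquire)) {
		spin_adaptive(&spin);	/* spins 1, 2, 4, 8, 16 times, then yields */
	}
}

Because the spin_t lives with the waiter and is passed by pointer, the backoff state in this sketch is per wait rather than shared between threads.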