-rw-r--r--  Include/object.h      27
-rw-r--r--  Include/pystate.h      3
-rw-r--r--  Lib/test/test_gc.py   69
-rw-r--r--  Misc/NEWS              5
-rw-r--r--  Objects/object.c      37
-rw-r--r--  Objects/typeobject.c   5
-rw-r--r--  Python/pystate.c       3
7 files changed, 140 insertions, 9 deletions
diff --git a/Include/object.h b/Include/object.h
index edcd3fc..1ba33eb 100644
--- a/Include/object.h
+++ b/Include/object.h
@@ -971,24 +971,33 @@ chain of N deallocations is broken into N / PyTrash_UNWIND_LEVEL pieces,
with the call stack never exceeding a depth of PyTrash_UNWIND_LEVEL.
*/
+/* This is the old private API, invoked by the macros before 2.7.4.
+ Kept for binary compatibility of extensions. */
PyAPI_FUNC(void) _PyTrash_deposit_object(PyObject*);
PyAPI_FUNC(void) _PyTrash_destroy_chain(void);
PyAPI_DATA(int) _PyTrash_delete_nesting;
PyAPI_DATA(PyObject *) _PyTrash_delete_later;
+/* The new thread-safe private API, invoked by the macros below. */
+PyAPI_FUNC(void) _PyTrash_thread_deposit_object(PyObject*);
+PyAPI_FUNC(void) _PyTrash_thread_destroy_chain(void);
+
#define PyTrash_UNWIND_LEVEL 50
#define Py_TRASHCAN_SAFE_BEGIN(op) \
- if (_PyTrash_delete_nesting < PyTrash_UNWIND_LEVEL) { \
- ++_PyTrash_delete_nesting;
- /* The body of the deallocator is here. */
+ do { \
+ PyThreadState *_tstate = PyThreadState_GET(); \
+ if (_tstate->trash_delete_nesting < PyTrash_UNWIND_LEVEL) { \
+ ++_tstate->trash_delete_nesting;
+ /* The body of the deallocator is here. */
#define Py_TRASHCAN_SAFE_END(op) \
- --_PyTrash_delete_nesting; \
- if (_PyTrash_delete_later && _PyTrash_delete_nesting <= 0) \
- _PyTrash_destroy_chain(); \
- } \
- else \
- _PyTrash_deposit_object((PyObject*)op);
+ --_tstate->trash_delete_nesting; \
+ if (_tstate->trash_delete_later && _tstate->trash_delete_nesting <= 0) \
+ _PyTrash_thread_destroy_chain(); \
+ } \
+ else \
+ _PyTrash_thread_deposit_object((PyObject*)op); \
+ } while (0);
#ifdef __cplusplus
}
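[Editor's note, not part of the patch: a container type's tp_dealloc typically brackets its body with these macros. The sketch below is a minimal illustration of that usage pattern; the MyNode type and its child field are hypothetical, but the call sequence mirrors how CPython's own container deallocators (e.g. list_dealloc, subtype_dealloc) use the trashcan, and the same source compiles against both the old and the rewritten macros.]

    #include "Python.h"

    /* A hypothetical GC container type holding one reference. */
    typedef struct {
        PyObject_HEAD
        PyObject *child;        /* may itself be a MyNode, hence deep recursion */
    } MyNode;

    static void
    MyNode_dealloc(MyNode *self)
    {
        /* Untrack first: _PyTrash_thread_deposit_object() asserts the
           object is no longer GC-tracked. */
        PyObject_GC_UnTrack(self);
        Py_TRASHCAN_SAFE_BEGIN(self);
        /* Releasing `child` may recurse into MyNode_dealloc again.  Once
           the per-thread nesting reaches PyTrash_UNWIND_LEVEL, BEGIN skips
           this body and defers `self` to the thread's trash_delete_later
           chain; the chain later re-invokes tp_dealloc with the nesting
           reset, so the body still runs exactly once. */
        Py_CLEAR(self->child);
        Py_TYPE(self)->tp_free((PyObject *)self);
        Py_TRASHCAN_SAFE_END(self);
    }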
diff --git a/Include/pystate.h b/Include/pystate.h
index 8d74940..f2cfc30 100644
--- a/Include/pystate.h
+++ b/Include/pystate.h
@@ -95,6 +95,9 @@ typedef struct _ts {
PyObject *async_exc; /* Asynchronous exception to raise */
long thread_id; /* Thread id where this tstate was created */
+ int trash_delete_nesting;
+ PyObject *trash_delete_later;
+
/* XXX signal handlers should also be here */
} PyThreadState;
diff --git a/Lib/test/test_gc.py b/Lib/test/test_gc.py
index 5a35dec..fd874c3 100644
--- a/Lib/test/test_gc.py
+++ b/Lib/test/test_gc.py
@@ -1,9 +1,15 @@
import unittest
from test.test_support import verbose, run_unittest
import sys
+import time
import gc
import weakref
+try:
+ import threading
+except ImportError:
+ threading = None
+
### Support code
###############################################################################
@@ -299,6 +305,69 @@ class GCTests(unittest.TestCase):
v = {1: v, 2: Ouch()}
gc.disable()
+ @unittest.skipUnless(threading, "test meaningless on builds without threads")
+ def test_trashcan_threads(self):
+ # Issue #13992: trashcan mechanism should be thread-safe
+ NESTING = 60
+ N_THREADS = 2
+
+ def sleeper_gen():
+ """A generator that releases the GIL when closed or dealloc'ed."""
+ try:
+ yield
+ finally:
+ time.sleep(0.000001)
+
+ class C(list):
+ # Appending to a list is atomic, which avoids the use of a lock.
+ inits = []
+ dels = []
+ def __init__(self, alist):
+ self[:] = alist
+ C.inits.append(None)
+ def __del__(self):
+ # This __del__ is called by subtype_dealloc().
+ C.dels.append(None)
+ # `g` will release the GIL when garbage-collected. This
+ # helps assert subtype_dealloc's behaviour when threads
+ # switch in the middle of it.
+ g = sleeper_gen()
+ next(g)
+ # Now that __del__ is finished, subtype_dealloc will proceed
+ # to call list_dealloc, which also uses the trashcan mechanism.
+
+ def make_nested():
+ """Create a sufficiently nested container object so that the
+ trashcan mechanism is invoked when deallocating it."""
+ x = C([])
+ for i in range(NESTING):
+ x = [C([x])]
+ del x
+
+ def run_thread():
+ """Exercise make_nested() in a loop."""
+ while not exit:
+ make_nested()
+
+ old_checkinterval = sys.getcheckinterval()
+ sys.setcheckinterval(3)
+ try:
+ exit = False
+ threads = []
+ for i in range(N_THREADS):
+ t = threading.Thread(target=run_thread)
+ threads.append(t)
+ for t in threads:
+ t.start()
+ time.sleep(1.0)
+ exit = True
+ for t in threads:
+ t.join()
+ finally:
+ sys.setcheckinterval(old_checkinterval)
+ gc.collect()
+ self.assertEqual(len(C.inits), len(C.dels))
+
def test_boom(self):
class Boom:
def __getattr__(self, someattribute):
diff --git a/Misc/NEWS b/Misc/NEWS
index 68e0d6b..b080c1e 100644
--- a/Misc/NEWS
+++ b/Misc/NEWS
@@ -9,6 +9,11 @@ What's New in Python 2.7.4
Core and Builtins
-----------------
+- Issue #13992: The trashcan mechanism is now thread-safe. This eliminates
+ sporadic crashes in multi-thread programs when several long deallocator
+ chains ran concurrently and involved subclasses of built-in container
+ types.
+
- Issue #15801: Make sure mappings passed to '%' formatting are actually
subscriptable.
diff --git a/Objects/object.c b/Objects/object.c
index f1902a7..9fea13a 100644
--- a/Objects/object.c
+++ b/Objects/object.c
@@ -2428,6 +2428,18 @@ _PyTrash_deposit_object(PyObject *op)
_PyTrash_delete_later = op;
}
+/* The equivalent API, using per-thread state recursion info */
+void
+_PyTrash_thread_deposit_object(PyObject *op)
+{
+ PyThreadState *tstate = PyThreadState_GET();
+ assert(PyObject_IS_GC(op));
+ assert(_Py_AS_GC(op)->gc.gc_refs == _PyGC_REFS_UNTRACKED);
+ assert(op->ob_refcnt == 0);
+ _Py_AS_GC(op)->gc.gc_prev = (PyGC_Head *) tstate->trash_delete_later;
+ tstate->trash_delete_later = op;
+}
+
/* Deallocate all the objects in the _PyTrash_delete_later list. Called when
* the call-stack unwinds again.
*/
@@ -2454,6 +2466,31 @@ _PyTrash_destroy_chain(void)
}
}
+/* The equivalent API, using per-thread state recursion info */
+void
+_PyTrash_thread_destroy_chain(void)
+{
+ PyThreadState *tstate = PyThreadState_GET();
+ while (tstate->trash_delete_later) {
+ PyObject *op = tstate->trash_delete_later;
+ destructor dealloc = Py_TYPE(op)->tp_dealloc;
+
+ tstate->trash_delete_later =
+ (PyObject*) _Py_AS_GC(op)->gc.gc_prev;
+
+ /* Call the deallocator directly. This used to try to
+ * fool Py_DECREF into calling it indirectly, but
+ * Py_DECREF was already called on this object, and in
+ * assorted non-release builds calling Py_DECREF again ends
+ * up distorting allocation statistics.
+ */
+ assert(op->ob_refcnt == 0);
+ ++tstate->trash_delete_nesting;
+ (*dealloc)(op);
+ --tstate->trash_delete_nesting;
+ }
+}
+
#ifdef __cplusplus
}
#endif
diff --git a/Objects/typeobject.c b/Objects/typeobject.c
index 77cba70..69cb02d 100644
--- a/Objects/typeobject.c
+++ b/Objects/typeobject.c
@@ -896,6 +896,7 @@ subtype_dealloc(PyObject *self)
{
PyTypeObject *type, *base;
destructor basedealloc;
+ PyThreadState *tstate = PyThreadState_GET();
/* Extract the type; we expect it to be a heap type */
type = Py_TYPE(self);
@@ -945,8 +946,10 @@ subtype_dealloc(PyObject *self)
/* See explanation at end of function for full disclosure */
PyObject_GC_UnTrack(self);
++_PyTrash_delete_nesting;
+ ++ tstate->trash_delete_nesting;
Py_TRASHCAN_SAFE_BEGIN(self);
--_PyTrash_delete_nesting;
+ -- tstate->trash_delete_nesting;
/* DO NOT restore GC tracking at this point. weakref callbacks
* (if any, and whether directly here or indirectly in something we
* call) may trigger GC, and if self is tracked at that point, it
@@ -1025,8 +1028,10 @@ subtype_dealloc(PyObject *self)
endlabel:
++_PyTrash_delete_nesting;
+ ++ tstate->trash_delete_nesting;
Py_TRASHCAN_SAFE_END(self);
--_PyTrash_delete_nesting;
+ -- tstate->trash_delete_nesting;
/* Explanation of the weirdness around the trashcan macros:
diff --git a/Python/pystate.c b/Python/pystate.c
index d7d127b..56eed88 100644
--- a/Python/pystate.c
+++ b/Python/pystate.c
@@ -192,6 +192,9 @@ new_threadstate(PyInterpreterState *interp, int init)
tstate->c_profileobj = NULL;
tstate->c_traceobj = NULL;
+ tstate->trash_delete_nesting = 0;
+ tstate->trash_delete_later = NULL;
+
if (init)
_PyThreadState_Init(tstate);