-rw-r--r--  Include/objimpl.h  | 12 ++++++++++--
-rw-r--r--  Modules/gcmodule.c | 22 ++++++++++++++++------
2 files changed, 26 insertions(+), 8 deletions(-)
diff --git a/Include/objimpl.h b/Include/objimpl.h
index e037322..28f3661 100644
--- a/Include/objimpl.h
+++ b/Include/objimpl.h
@@ -262,12 +262,18 @@ extern PyGC_Head *_PyGC_generation0;
#define _Py_AS_GC(o) ((PyGC_Head *)(o)-1)
+#define _PyGC_REFS_UNTRACKED (-2)
+#define _PyGC_REFS_REACHABLE (-3)
+#define _PyGC_REFS_TENTATIVELY_UNREACHABLE (-4)
+
/* Tell the GC to track this object. NB: While the object is tracked by the
* collector, it must be safe to call the ob_traverse method. */
#define _PyObject_GC_TRACK(o) do { \
PyGC_Head *g = _Py_AS_GC(o); \
- if (g->gc.gc_next != NULL) \
- Py_FatalError("GC object already in linked list"); \
+ if (g->gc.gc_refs != _PyGC_REFS_UNTRACKED) \
+ Py_FatalError("GC object already tracked"); \
+ assert(g->gc.gc_refs == _PyGC_REFS_UNTRACKED); \
+ g->gc.gc_refs = _PyGC_REFS_REACHABLE; \
g->gc.gc_next = _PyGC_generation0; \
g->gc.gc_prev = _PyGC_generation0->gc.gc_prev; \
g->gc.gc_prev->gc.gc_next = g; \
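
A minimal sketch of how a container type uses the tracking macro after this change. MyObject, MyObject_Type, and the item field are hypothetical and not part of the patch; the point is that PyObject_GC_New leaves the header untracked, and _PyObject_GC_TRACK then flips gc_refs from _PyGC_REFS_UNTRACKED to _PyGC_REFS_REACHABLE while linking the object into generation 0:

    /* Assumes Python.h; MyObject_Type is a hypothetical type object. */
    typedef struct {
        PyObject_HEAD
        PyObject *item;         /* owned reference, may be NULL */
    } MyObject;

    static PyObject *
    myobject_new(void)
    {
        MyObject *op = PyObject_GC_New(MyObject, &MyObject_Type);
        if (op == NULL)
            return NULL;
        op->item = NULL;
        _PyObject_GC_TRACK(op); /* gc_refs: UNTRACKED -> REACHABLE */
        return (PyObject *)op;
    }
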
@@ -277,6 +283,8 @@ extern PyGC_Head *_PyGC_generation0;
/* Tell the GC to stop tracking this object. */
#define _PyObject_GC_UNTRACK(o) do { \
PyGC_Head *g = _Py_AS_GC(o); \
+ assert(g->gc.gc_refs != _PyGC_REFS_UNTRACKED); \
+ g->gc.gc_refs = _PyGC_REFS_UNTRACKED; \
g->gc.gc_prev->gc.gc_next = g->gc.gc_next; \
g->gc.gc_next->gc.gc_prev = g->gc.gc_prev; \
g->gc.gc_next = NULL; \
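
The matching teardown for the same hypothetical type. _PyObject_GC_UNTRACK must run before the fields are cleared, and with this patch it also resets gc_refs to _PyGC_REFS_UNTRACKED, so a double track or double untrack now trips the new Py_FatalError/assert checks:

    static void
    myobject_dealloc(MyObject *op)
    {
        _PyObject_GC_UNTRACK(op); /* gc_refs: REACHABLE -> UNTRACKED */
        Py_XDECREF(op->item);     /* safe: the collector no longer traverses op */
        PyObject_GC_Del(op);
    }
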
diff --git a/Modules/gcmodule.c b/Modules/gcmodule.c
index cb56253..7148e4c 100644
--- a/Modules/gcmodule.c
+++ b/Modules/gcmodule.c
@@ -82,8 +82,9 @@ static int debug;
*/
/* Special gc_refs values. */
-#define GC_REACHABLE -123
-#define GC_TENTATIVELY_UNREACHABLE -42
+#define GC_UNTRACKED _PyGC_REFS_UNTRACKED
+#define GC_REACHABLE _PyGC_REFS_REACHABLE
+#define GC_TENTATIVELY_UNREACHABLE _PyGC_REFS_TENTATIVELY_UNREACHABLE
#define IS_REACHABLE(o) ((AS_GC(o))->gc.gc_refs == GC_REACHABLE)
#define IS_TENTATIVELY_UNREACHABLE(o) ( \
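
For orientation, a hypothetical debug helper (not in the patch) spelling out the state space gc_refs can now occupy; the three negative sentinels never collide with a real reference count, so a single integer field encodes both the tracking state and the scratch count used during a collection:

    static void
    check_gc_refs(PyObject *op)
    {
        int refs = AS_GC(op)->gc.gc_refs;
        assert(refs == GC_UNTRACKED                  /* not linked into any generation */
               || refs == GC_REACHABLE               /* tracked; the between-collections state */
               || refs == GC_TENTATIVELY_UNREACHABLE /* moved to the unreachable list */
               || refs >= 0);                        /* scratch count during update/subtract_refs */
    }
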
@@ -179,8 +180,10 @@ static void
update_refs(PyGC_Head *containers)
{
PyGC_Head *gc = containers->gc.gc_next;
- for (; gc != containers; gc = gc->gc.gc_next)
+ for (; gc != containers; gc = gc->gc.gc_next) {
+ assert(gc->gc.gc_refs == GC_REACHABLE);
gc->gc.gc_refs = FROM_GC(gc)->ob_refcnt;
+ }
}
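
A worked example of what the new assert in update_refs protects, using two hypothetical objects A and B that reference each other and nothing else:

    /* before the collection:  A.gc_refs == B.gc_refs == GC_REACHABLE
     * after update_refs:      A.gc_refs == A.ob_refcnt == 1, likewise for B
     * after subtract_refs:    both drop to 0 (only internal references existed)
     * after move_unreachable: both are tagged GC_TENTATIVELY_UNREACHABLE
     * The assert documents that every tracked object enters a collection
     * still carrying GC_REACHABLE, as set by _PyObject_GC_TRACK. */
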
/* A traversal callback for subtract_refs. */
@@ -222,7 +225,7 @@ subtract_refs(PyGC_Head *containers)
static int
visit_reachable(PyObject *op, PyGC_Head *reachable)
{
- if (PyObject_IS_GC(op) && IS_TRACKED(op)) {
+ if (PyObject_IS_GC(op)) {
PyGC_Head *gc = AS_GC(op);
const int gc_refs = gc->gc.gc_refs;
@@ -250,8 +253,14 @@ visit_reachable(PyObject *op, PyGC_Head *reachable)
* list, and move_unreachable will eventually get to it.
* If gc_refs == GC_REACHABLE, it's either in some other
* generation so we don't care about it, or move_unreachable
- * already dealt with it.
+ * already dealt with it.
+ * If gc_refs == GC_UNTRACKED, it must be ignored.
*/
+ else {
+ assert(gc_refs > 0
+ || gc_refs == GC_REACHABLE
+ || gc_refs == GC_UNTRACKED);
+ }
}
return 0;
}
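
visit_reachable runs as the callback that move_unreachable hands to each object's tp_traverse, and tp_traverse reports every contained object whether or not it is tracked; that is why the old IS_TRACKED guard can be dropped once untracked objects reliably carry the GC_UNTRACKED sentinel. A minimal traverse function for the hypothetical type sketched above:

    static int
    myobject_traverse(MyObject *op, visitproc visit, void *arg)
    {
        /* The collector passes visit_reachable (or visit_move) as `visit`. */
        if (op->item != NULL)
            return visit(op->item, arg);
        return 0;
    }
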
@@ -352,7 +361,7 @@ static int
visit_move(PyObject *op, PyGC_Head *tolist)
{
if (PyObject_IS_GC(op)) {
- if (IS_TRACKED(op) && IS_TENTATIVELY_UNREACHABLE(op)) {
+ if (IS_TENTATIVELY_UNREACHABLE(op)) {
PyGC_Head *gc = AS_GC(op);
gc_list_remove(gc);
gc_list_append(gc, tolist);
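
The IS_TRACKED guard dropped here is redundant for the same reason, sketched with the sentinel values this patch defines:

    /* For an untracked object, AS_GC(op)->gc.gc_refs == GC_UNTRACKED (-2),
     * which can never equal GC_TENTATIVELY_UNREACHABLE (-4), so the single
     * IS_TENTATIVELY_UNREACHABLE(op) test already excludes it. */
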
@@ -966,6 +975,7 @@ _PyObject_GC_Malloc(size_t basicsize)
if (g == NULL)
return PyErr_NoMemory();
g->gc.gc_next = NULL;
+ g->gc.gc_refs = GC_UNTRACKED;
generations[0].count++; /* number of allocated GC objects */
if (generations[0].count > generations[0].threshold &&
enabled &&
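
Taken together: _PyObject_GC_Malloc now creates every collector-aware object with gc_refs == GC_UNTRACKED, _PyObject_GC_TRACK moves it to GC_REACHABLE and into generation 0, and _PyObject_GC_UNTRACK returns it to GC_UNTRACKED before the memory is released. A hypothetical end-to-end check of that invariant, reusing the sketch functions above:

    static void
    lifecycle_demo(void)
    {
        PyObject *op = myobject_new(); /* allocated UNTRACKED, then tracked */
        if (op == NULL)
            return;
        assert(_Py_AS_GC(op)->gc.gc_refs == _PyGC_REFS_REACHABLE);
        Py_DECREF(op);                 /* dealloc untracks before freeing */
    }
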