author     Sam Gross <colesbury@gmail.com>    2024-08-15 16:09:11 (GMT)
committer  GitHub <noreply@github.com>        2024-08-15 16:09:11 (GMT)
commit     e001027188001f4bdf6ea16f70726ca0fabe85c4 (patch)
tree       49ef595c47159c755e5e847f1aac863da92cc330 /Python
parent     1dad23edbc9db3a13268c1000c8dd428edba29f8 (diff)
gh-117139: Garbage collector support for deferred refcounting (#122956)
The free-threaded GC now visits interpreter stacks to keep objects that use deferred reference counting alive.

Interpreter frames are zero initialized in the free-threaded GC so that the GC doesn't see garbage data. This is a temporary measure until stack spilling around escaping calls is implemented.

Co-authored-by: Ken Jin <kenjin@python.org>
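To illustrate the idea, here is a minimal standalone sketch, separate from the commit itself, of why the collector has to walk thread stacks: a deferred reference does not contribute to the object's refcount, so the GC credits one gc_ref per deferred reference it finds in a frame slot. The names ToyObject, ToyStackRef, TAG_DEFERRED and visit_stackref are invented for illustration; the real code is gc_visit_stackref() and gc_visit_thread_stacks() in the diff below, operating on _PyStackRef values.

/* Toy model: a stack slot is a tagged pointer; the low bit (assumed here)
 * marks a deferred, i.e. uncounted, reference. */
#include <stdint.h>
#include <stdio.h>

#define TAG_DEFERRED 1u

typedef struct {
    long refcount;   /* counted references only */
    long gc_refs;    /* scratch counter used during a collection */
} ToyObject;

typedef struct { uintptr_t bits; } ToyStackRef;

static ToyObject *as_object(ToyStackRef ref) {
    return (ToyObject *)(ref.bits & ~(uintptr_t)TAG_DEFERRED);
}

/* Analogue of gc_visit_stackref(): only deferred references need extra
 * credit, because counted references already show up in the refcount. */
static void visit_stackref(ToyStackRef ref) {
    if (ref.bits != 0 && (ref.bits & TAG_DEFERRED)) {
        as_object(ref)->gc_refs += 1;
    }
}

int main(void) {
    ToyObject func = {.refcount = 1, .gc_refs = 0};

    /* One frame slot holds a deferred (uncounted) reference to 'func'. */
    ToyStackRef stack[2] = {
        {.bits = (uintptr_t)&func | TAG_DEFERRED},
        {.bits = 0},   /* empty slot */
    };

    for (int i = 0; i < 2; i++) {
        visit_stackref(stack[i]);
    }
    /* Without the stack walk, gc_refs would stay 0 and 'func' could look
     * unreachable even though a live frame still uses it. */
    printf("gc_refs for func: %ld\n", func.gc_refs);   /* prints 1 */
    return 0;
}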
Diffstat (limited to 'Python')
-rw-r--r--    Python/frame.c               10
-rw-r--r--    Python/gc.c                  11
-rw-r--r--    Python/gc_free_threading.c   90
3 files changed, 93 insertions, 18 deletions
diff --git a/Python/frame.c b/Python/frame.c
index 25fa282..3192968 100644
--- a/Python/frame.c
+++ b/Python/frame.c
@@ -15,15 +15,7 @@ _PyFrame_Traverse(_PyInterpreterFrame *frame, visitproc visit, void *arg)
Py_VISIT(frame->f_locals);
Py_VISIT(frame->f_funcobj);
Py_VISIT(_PyFrame_GetCode(frame));
- /* locals */
- _PyStackRef *locals = _PyFrame_GetLocalsArray(frame);
- _PyStackRef *sp = frame->stackpointer;
- /* locals and stack */
- while (sp > locals) {
- sp--;
- Py_VISIT(PyStackRef_AsPyObjectBorrow(*sp));
- }
- return 0;
+ return _PyGC_VisitFrameStack(frame, visit, arg);
}
PyFrameObject *
diff --git a/Python/gc.c b/Python/gc.c
index 38a0da9..923a792 100644
--- a/Python/gc.c
+++ b/Python/gc.c
@@ -534,6 +534,17 @@ visit_decref(PyObject *op, void *parent)
return 0;
}
+int
+_PyGC_VisitFrameStack(_PyInterpreterFrame *frame, visitproc visit, void *arg)
+{
+ _PyStackRef *ref = _PyFrame_GetLocalsArray(frame);
+ /* locals and stack */
+ for (; ref < frame->stackpointer; ref++) {
+ Py_VISIT(PyStackRef_AsPyObjectBorrow(*ref));
+ }
+ return 0;
+}
+
/* Subtract internal references from gc_refs. After this, gc_refs is >= 0
* for all objects in containers, and is GC_REACHABLE for all tracked gc
* objects not in containers. The ones with gc_refs > 0 are directly
diff --git a/Python/gc_free_threading.c b/Python/gc_free_threading.c
index 543bee2..b954565 100644
--- a/Python/gc_free_threading.c
+++ b/Python/gc_free_threading.c
@@ -164,15 +164,31 @@ gc_decref(PyObject *op)
static void
disable_deferred_refcounting(PyObject *op)
{
- if (_PyObject_HasDeferredRefcount(op)) {
- op->ob_gc_bits &= ~_PyGC_BITS_DEFERRED;
- op->ob_ref_shared -= _Py_REF_SHARED(_Py_REF_DEFERRED, 0);
-
- if (PyType_Check(op)) {
- // Disable thread-local refcounting for heap types
- PyTypeObject *type = (PyTypeObject *)op;
- if (PyType_HasFeature(type, Py_TPFLAGS_HEAPTYPE)) {
- _PyType_ReleaseId((PyHeapTypeObject *)op);
+ if (!_PyObject_HasDeferredRefcount(op)) {
+ return;
+ }
+
+ op->ob_gc_bits &= ~_PyGC_BITS_DEFERRED;
+ op->ob_ref_shared -= _Py_REF_SHARED(_Py_REF_DEFERRED, 0);
+
+ if (PyType_Check(op)) {
+ // Disable thread-local refcounting for heap types
+ PyTypeObject *type = (PyTypeObject *)op;
+ if (PyType_HasFeature(type, Py_TPFLAGS_HEAPTYPE)) {
+ _PyType_ReleaseId((PyHeapTypeObject *)op);
+ }
+ }
+ else if (PyGen_CheckExact(op) || PyCoro_CheckExact(op) || PyAsyncGen_CheckExact(op)) {
+ // Ensure any non-refcounted pointers in locals are converted to
+ // strong references. This ensures that the generator/coroutine is not
+ // freed before its locals.
+ PyGenObject *gen = (PyGenObject *)op;
+ struct _PyInterpreterFrame *frame = &gen->gi_iframe;
+ assert(frame->stackpointer != NULL);
+ for (_PyStackRef *ref = frame->localsplus; ref < frame->stackpointer; ref++) {
+ if (!PyStackRef_IsNull(*ref) && PyStackRef_IsDeferred(*ref)) {
+ // Convert a deferred reference to a strong reference.
+ *ref = PyStackRef_FromPyObjectSteal(PyStackRef_AsPyObjectSteal(*ref));
}
}
}
@@ -313,6 +329,41 @@ gc_visit_heaps(PyInterpreterState *interp, mi_block_visit_fun *visitor,
return err;
}
+static inline void
+gc_visit_stackref(_PyStackRef stackref)
+{
+ // Note: we MUST check that it is deferred before checking the rest.
+ // Otherwise we might read into invalid memory due to non-deferred references
+ // being dead already.
+ if (PyStackRef_IsDeferred(stackref) && !PyStackRef_IsNull(stackref)) {
+ PyObject *obj = PyStackRef_AsPyObjectBorrow(stackref);
+ if (_PyObject_GC_IS_TRACKED(obj)) {
+ gc_add_refs(obj, 1);
+ }
+ }
+}
+
+// Add 1 to the gc_refs for every deferred reference on each thread's stack.
+static void
+gc_visit_thread_stacks(PyInterpreterState *interp)
+{
+ HEAD_LOCK(&_PyRuntime);
+ for (PyThreadState *p = interp->threads.head; p != NULL; p = p->next) {
+ _PyInterpreterFrame *f = p->current_frame;
+ while (f != NULL) {
+ if (f->f_executable != NULL && PyCode_Check(f->f_executable)) {
+ PyCodeObject *co = (PyCodeObject *)f->f_executable;
+ int max_stack = co->co_nlocalsplus + co->co_stacksize;
+ for (int i = 0; i < max_stack; i++) {
+ gc_visit_stackref(f->localsplus[i]);
+ }
+ }
+ f = f->previous;
+ }
+ }
+ HEAD_UNLOCK(&_PyRuntime);
+}
+
static void
merge_queued_objects(_PyThreadStateImpl *tstate, struct collection_state *state)
{
@@ -617,6 +668,9 @@ deduce_unreachable_heap(PyInterpreterState *interp,
gc_visit_heaps(interp, &validate_gc_objects, &state->base);
#endif
+ // Visit the thread stacks to account for any deferred references.
+ gc_visit_thread_stacks(interp);
+
// Transitively mark reachable objects by clearing the
// _PyGC_BITS_UNREACHABLE flag.
if (gc_visit_heaps(interp, &mark_heap_visitor, &state->base) < 0) {
@@ -897,6 +951,24 @@ visit_decref_unreachable(PyObject *op, void *data)
return 0;
}
+int
+_PyGC_VisitFrameStack(_PyInterpreterFrame *frame, visitproc visit, void *arg)
+{
+ _PyStackRef *ref = _PyFrame_GetLocalsArray(frame);
+ /* locals and stack */
+ for (; ref < frame->stackpointer; ref++) {
+ // This is a bit tricky! We want to ignore deferred references when
+ // computing the incoming references, but otherwise treat them like
+ // regular references.
+ if (PyStackRef_IsDeferred(*ref) &&
+ (visit == visit_decref || visit == visit_decref_unreachable)) {
+ continue;
+ }
+ Py_VISIT(PyStackRef_AsPyObjectBorrow(*ref));
+ }
+ return 0;
+}
+
// Handle objects that may have resurrected after a call to 'finalize_garbage'.
static int
handle_resurrected_objects(struct collection_state *state)
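
For the generator path in disable_deferred_refcounting() above, the conversion *ref = PyStackRef_FromPyObjectSteal(PyStackRef_AsPyObjectSteal(*ref)) turns a deferred slot into an ordinary strong reference. A standalone sketch of that net effect, again with invented names (ToyObject, ToyStackRef, TAG_DEFERRED, promote_to_strong) and an assumed low-bit tag, not the actual CPython representation:

#include <stdint.h>
#include <stdio.h>

#define TAG_DEFERRED 1u

typedef struct { long refcount; } ToyObject;
typedef struct { uintptr_t bits; } ToyStackRef;

/* Rewrite a deferred slot in place as a counted (strong) reference:
 * bump the refcount and clear the tag bit. */
static void promote_to_strong(ToyStackRef *ref) {
    if (ref->bits != 0 && (ref->bits & TAG_DEFERRED)) {
        ToyObject *obj = (ToyObject *)(ref->bits & ~(uintptr_t)TAG_DEFERRED);
        obj->refcount += 1;            /* now counted ...            */
        ref->bits = (uintptr_t)obj;    /* ... and no longer deferred */
    }
}

int main(void) {
    ToyObject local = {.refcount = 1};
    ToyStackRef slot = {.bits = (uintptr_t)&local | TAG_DEFERRED};

    promote_to_strong(&slot);
    printf("refcount: %ld, deferred bit: %d\n",
           local.refcount, (int)(slot.bits & TAG_DEFERRED));   /* 2, 0 */
    return 0;
}

Once every frame slot is counted this way, the generator's locals can no longer be freed before the generator itself, which is the invariant the commit relies on when deferred refcounting is disabled for an object.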