author    mpage <mpage@meta.com>        2024-04-08 14:58:38 (GMT)
committer GitHub <noreply@github.com>   2024-04-08 14:58:38 (GMT)
commit    df7317904849a41d51db39d92c5d431a18e22637 (patch)
tree      e7dddcb5006cb6f50b9f47477217043157a42e01 /Include/internal
parent    e16062dd3428a5846344e0a8c6ee2f352d34ce1b (diff)
gh-111926: Make weakrefs thread-safe in free-threaded builds (#117168)
Most mutable data is protected by a striped lock that is keyed on the referenced object's address. The weakref's hash is protected using the weakref's per-object lock.

Note that this only affects free-threaded builds. Apart from some minor refactoring, the added code is all either gated by `ifdef`s or is a no-op (e.g. `Py_BEGIN_CRITICAL_SECTION`).
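The striping scheme maps each referent's address to one of a fixed pool of
locks, so unrelated objects rarely contend while all weakrefs to the same
object always agree on which lock to take. A minimal standalone sketch of
the idea, using pthread mutexes instead of CPython's PyMutex; the names and
stripe count here are illustrative, not the actual implementation:

/* Sketch of address-keyed lock striping (illustrative only). */
#include <stdint.h>
#include <pthread.h>

#define NUM_STRIPES 127   /* prime: mixes all address bits into the index */

static pthread_mutex_t stripes[NUM_STRIPES];

static void
stripes_init(void)
{
    for (int i = 0; i < NUM_STRIPES; i++) {
        pthread_mutex_init(&stripes[i], NULL);
    }
}

/* Every thread operating on the same object hashes to the same mutex, so
 * that object's weakref list is serialized without one global lock. */
static pthread_mutex_t *
stripe_for(const void *obj)
{
    return &stripes[(uintptr_t)obj % NUM_STRIPES];
}

The trade-off named above falls out directly: more stripes mean fewer
collisions between unrelated objects, at the cost of a larger lock array.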
Diffstat (limited to 'Include/internal')
-rw-r--r--  Include/internal/pycore_interp.h                  7
-rw-r--r--  Include/internal/pycore_object.h                 40
-rw-r--r--  Include/internal/pycore_pyatomic_ft_wrappers.h    5
-rw-r--r--  Include/internal/pycore_weakref.h                73
4 files changed, 105 insertions, 20 deletions
diff --git a/Include/internal/pycore_interp.h b/Include/internal/pycore_interp.h
index b5cea86..1bb123b 100644
--- a/Include/internal/pycore_interp.h
+++ b/Include/internal/pycore_interp.h
@@ -59,6 +59,12 @@ struct _stoptheworld_state {
PyThreadState *requester; // Thread that requested the pause (may be NULL).
};
+#ifdef Py_GIL_DISABLED
+// This should be prime but otherwise the choice is arbitrary. A larger value
+// increases concurrency at the expense of memory.
+# define NUM_WEAKREF_LIST_LOCKS 127
+#endif
+
/* cross-interpreter data registry */
/* Tracks some rare events per-interpreter, used by the optimizer to turn on/off
@@ -203,6 +209,7 @@ struct _is {
#if defined(Py_GIL_DISABLED)
struct _mimalloc_interp_state mimalloc;
struct _brc_state brc; // biased reference counting state
+ PyMutex weakref_locks[NUM_WEAKREF_LIST_LOCKS];
#endif
// Per-interpreter state for the obmalloc allocator. For the main
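A prime stripe count matters because object addresses are aligned: taking
the address modulo a power of two would keep only the low bits, several of
which are always zero, while a prime modulus mixes in all of the bits. For
illustration, a hypothetical inline helper equivalent to the
WEAKREF_LIST_LOCK macro added in pycore_weakref.h below (not part of the
patch itself):

static inline PyMutex *
weakref_list_lock(PyInterpreterState *interp, PyObject *obj)
{
    return &interp->weakref_locks[(uintptr_t)obj % NUM_WEAKREF_LIST_LOCKS];
}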
diff --git a/Include/internal/pycore_object.h b/Include/internal/pycore_object.h
index 4fc5e9b..1e1b664 100644
--- a/Include/internal/pycore_object.h
+++ b/Include/internal/pycore_object.h
@@ -426,7 +426,7 @@ _Py_TryIncRefShared(PyObject *op)
/* Tries to incref the object op and ensures that *src still points to it. */
static inline int
-_Py_TryIncref(PyObject **src, PyObject *op)
+_Py_TryIncrefCompare(PyObject **src, PyObject *op)
{
if (_Py_TryIncrefFast(op)) {
return 1;
@@ -452,7 +452,7 @@ _Py_XGetRef(PyObject **ptr)
if (value == NULL) {
return value;
}
- if (_Py_TryIncref(ptr, value)) {
+ if (_Py_TryIncrefCompare(ptr, value)) {
return value;
}
}
@@ -467,7 +467,7 @@ _Py_TryXGetRef(PyObject **ptr)
if (value == NULL) {
return value;
}
- if (_Py_TryIncref(ptr, value)) {
+ if (_Py_TryIncrefCompare(ptr, value)) {
return value;
}
return NULL;
@@ -506,8 +506,42 @@ _Py_XNewRefWithLock(PyObject *obj)
return _Py_NewRefWithLock(obj);
}
+static inline void
+_PyObject_SetMaybeWeakref(PyObject *op)
+{
+ if (_Py_IsImmortal(op)) {
+ return;
+ }
+ for (;;) {
+ Py_ssize_t shared = _Py_atomic_load_ssize_relaxed(&op->ob_ref_shared);
+ if ((shared & _Py_REF_SHARED_FLAG_MASK) != 0) {
+ // Nothing to do if it's in WEAKREFS, QUEUED, or MERGED states.
+ return;
+ }
+ if (_Py_atomic_compare_exchange_ssize(
+ &op->ob_ref_shared, &shared, shared | _Py_REF_MAYBE_WEAKREF)) {
+ return;
+ }
+ }
+}
+
#endif
+/* Tries to incref op and returns 1 if successful or 0 otherwise. */
+static inline int
+_Py_TryIncref(PyObject *op)
+{
+#ifdef Py_GIL_DISABLED
+ return _Py_TryIncrefFast(op) || _Py_TryIncRefShared(op);
+#else
+ if (Py_REFCNT(op) > 0) {
+ Py_INCREF(op);
+ return 1;
+ }
+ return 0;
+#endif
+}
+
#ifdef Py_REF_DEBUG
extern void _PyInterpreterState_FinalizeRefTotal(PyInterpreterState *);
extern void _Py_FinalizeRefTotal(_PyRuntimeState *);
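The new _PyObject_SetMaybeWeakref records a flag in the low bits of
ob_ref_shared through a compare-and-swap loop, so that destruction of the
object can coordinate with threads concurrently dereferencing a weakref to
it. A standalone sketch of the same set-flag-if-unset pattern in C11
atomics; the field layout and flag values here are illustrative, not
CPython's:

#include <stdatomic.h>
#include <stdint.h>

#define STATE_MASK     0x3   /* low bits hold the shared refcount state */
#define MAYBE_WEAKREF  0x1   /* illustrative flag value */

/* Set MAYBE_WEAKREF only while the state bits are still zero; any state
 * already recorded makes the flag redundant, mirroring the WEAKREFS,
 * QUEUED, and MERGED cases in the code above. */
static void
set_maybe_weakref(_Atomic intptr_t *shared)
{
    for (;;) {
        intptr_t old = atomic_load_explicit(shared, memory_order_relaxed);
        if ((old & STATE_MASK) != 0) {
            return;           /* some state already recorded */
        }
        if (atomic_compare_exchange_weak(shared, &old, old | MAYBE_WEAKREF)) {
            return;           /* flag is now visible to other threads */
        }
        /* CAS lost a race; reload and retry */
    }
}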
diff --git a/Include/internal/pycore_pyatomic_ft_wrappers.h b/Include/internal/pycore_pyatomic_ft_wrappers.h
index e441600..2514f51 100644
--- a/Include/internal/pycore_pyatomic_ft_wrappers.h
+++ b/Include/internal/pycore_pyatomic_ft_wrappers.h
@@ -20,9 +20,12 @@ extern "C" {
#endif
#ifdef Py_GIL_DISABLED
+#define FT_ATOMIC_LOAD_PTR(value) _Py_atomic_load_ptr(&value)
#define FT_ATOMIC_LOAD_SSIZE(value) _Py_atomic_load_ssize(&value)
#define FT_ATOMIC_LOAD_SSIZE_RELAXED(value) \
_Py_atomic_load_ssize_relaxed(&value)
+#define FT_ATOMIC_STORE_PTR(value, new_value) \
+ _Py_atomic_store_ptr(&value, new_value)
#define FT_ATOMIC_STORE_PTR_RELAXED(value, new_value) \
_Py_atomic_store_ptr_relaxed(&value, new_value)
#define FT_ATOMIC_STORE_PTR_RELEASE(value, new_value) \
@@ -30,8 +33,10 @@ extern "C" {
#define FT_ATOMIC_STORE_SSIZE_RELAXED(value, new_value) \
_Py_atomic_store_ssize_relaxed(&value, new_value)
#else
+#define FT_ATOMIC_LOAD_PTR(value) value
#define FT_ATOMIC_LOAD_SSIZE(value) value
#define FT_ATOMIC_LOAD_SSIZE_RELAXED(value) value
+#define FT_ATOMIC_STORE_PTR(value, new_value) value = new_value
#define FT_ATOMIC_STORE_PTR_RELAXED(value, new_value) value = new_value
#define FT_ATOMIC_STORE_PTR_RELEASE(value, new_value) value = new_value
#define FT_ATOMIC_STORE_SSIZE_RELAXED(value, new_value) value = new_value
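These wrappers let one source line be an atomic operation in free-threaded
builds and a plain load or store in default builds, where the GIL already
provides the necessary ordering. Using the macro added above on the weakref
field touched later in this patch, the two expansions are:

PyObject *obj = FT_ATOMIC_LOAD_PTR(ref->wr_object);
/* Py_GIL_DISABLED:  obj = _Py_atomic_load_ptr(&ref->wr_object); */
/* default build:    obj = ref->wr_object;                       */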
diff --git a/Include/internal/pycore_weakref.h b/Include/internal/pycore_weakref.h
index dea267b..e057a27 100644
--- a/Include/internal/pycore_weakref.h
+++ b/Include/internal/pycore_weakref.h
@@ -9,7 +9,35 @@ extern "C" {
#endif
#include "pycore_critical_section.h" // Py_BEGIN_CRITICAL_SECTION()
+#include "pycore_lock.h"
#include "pycore_object.h" // _Py_REF_IS_MERGED()
+#include "pycore_pyatomic_ft_wrappers.h"
+
+#ifdef Py_GIL_DISABLED
+
+#define WEAKREF_LIST_LOCK(obj) \
+ _PyInterpreterState_GET() \
+ ->weakref_locks[((uintptr_t)obj) % NUM_WEAKREF_LIST_LOCKS]
+
+// Lock using the referenced object
+#define LOCK_WEAKREFS(obj) \
+ PyMutex_LockFlags(&WEAKREF_LIST_LOCK(obj), _Py_LOCK_DONT_DETACH)
+#define UNLOCK_WEAKREFS(obj) PyMutex_Unlock(&WEAKREF_LIST_LOCK(obj))
+
+// Lock using a weakref
+#define LOCK_WEAKREFS_FOR_WR(wr) \
+ PyMutex_LockFlags(wr->weakrefs_lock, _Py_LOCK_DONT_DETACH)
+#define UNLOCK_WEAKREFS_FOR_WR(wr) PyMutex_Unlock(wr->weakrefs_lock)
+
+#else
+
+#define LOCK_WEAKREFS(obj)
+#define UNLOCK_WEAKREFS(obj)
+
+#define LOCK_WEAKREFS_FOR_WR(wr)
+#define UNLOCK_WEAKREFS_FOR_WR(wr)
+
+#endif
static inline int _is_dead(PyObject *obj)
{
@@ -30,53 +58,64 @@ static inline int _is_dead(PyObject *obj)
static inline PyObject* _PyWeakref_GET_REF(PyObject *ref_obj)
{
assert(PyWeakref_Check(ref_obj));
- PyObject *ret = NULL;
- Py_BEGIN_CRITICAL_SECTION(ref_obj);
PyWeakReference *ref = _Py_CAST(PyWeakReference*, ref_obj);
- PyObject *obj = ref->wr_object;
+ PyObject *obj = FT_ATOMIC_LOAD_PTR(ref->wr_object);
if (obj == Py_None) {
// clear_weakref() was called
- goto end;
+ return NULL;
}
- if (_is_dead(obj)) {
- goto end;
+ LOCK_WEAKREFS(obj);
+#ifdef Py_GIL_DISABLED
+ if (ref->wr_object == Py_None) {
+ // clear_weakref() was called
+ UNLOCK_WEAKREFS(obj);
+ return NULL;
}
-#if !defined(Py_GIL_DISABLED)
- assert(Py_REFCNT(obj) > 0);
#endif
- ret = Py_NewRef(obj);
-end:
- Py_END_CRITICAL_SECTION();
- return ret;
+ if (_Py_TryIncref(obj)) {
+ UNLOCK_WEAKREFS(obj);
+ return obj;
+ }
+ UNLOCK_WEAKREFS(obj);
+ return NULL;
}
static inline int _PyWeakref_IS_DEAD(PyObject *ref_obj)
{
assert(PyWeakref_Check(ref_obj));
int ret = 0;
- Py_BEGIN_CRITICAL_SECTION(ref_obj);
PyWeakReference *ref = _Py_CAST(PyWeakReference*, ref_obj);
- PyObject *obj = ref->wr_object;
+ PyObject *obj = FT_ATOMIC_LOAD_PTR(ref->wr_object);
if (obj == Py_None) {
// clear_weakref() was called
ret = 1;
}
else {
+ LOCK_WEAKREFS(obj);
// See _PyWeakref_GET_REF() for the rationale of this test
+#ifdef Py_GIL_DISABLED
+ ret = (ref->wr_object == Py_None) || _is_dead(obj);
+#else
ret = _is_dead(obj);
+#endif
+ UNLOCK_WEAKREFS(obj);
}
- Py_END_CRITICAL_SECTION();
return ret;
}
-extern Py_ssize_t _PyWeakref_GetWeakrefCount(PyWeakReference *head);
+extern Py_ssize_t _PyWeakref_GetWeakrefCount(PyObject *obj);
+
+// Clear all the weak references to obj but leave their callbacks uncalled and
+// intact.
+extern void _PyWeakref_ClearWeakRefsExceptCallbacks(PyObject *obj);
extern void _PyWeakref_ClearRef(PyWeakReference *self);
+PyAPI_FUNC(int) _PyWeakref_IsDead(PyObject *weakref);
+
#ifdef __cplusplus
}
#endif
#endif /* !Py_INTERNAL_WEAKREF_H */
-
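The declaration of _PyWeakref_ClearWeakRefsExceptCallbacks above suggests
the deallocation-side counterpart of this locking scheme: take the
referent's stripe lock once, then clear every weakref on the object's list
while leaving the callbacks intact. A hedged sketch of that shape; the real
implementation lives in Objects/weakrefobject.c and differs in detail, and
list_head here stands in for the object's weakref list slot:

static void
clear_weakrefs_sketch(PyObject *obj, PyWeakReference **list_head)
{
    LOCK_WEAKREFS(obj);
    PyWeakReference *wr = *list_head;
    while (wr != NULL) {
        PyWeakReference *next = wr->wr_next;   /* save before unlinking */
        _PyWeakref_ClearRef(wr);   /* points wr_object at None, keeps
                                      wr_callback for later invocation */
        wr = next;
    }
    UNLOCK_WEAKREFS(obj);
}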