author    Mark Shannon <mark@hotpy.org>  2024-12-11 17:37:38 (GMT)
committer GitHub <noreply@github.com>    2024-12-11 17:37:38 (GMT)
commit    bc262de06b10a2d119c28bac75060bf00301697a (patch)
tree      ee2cd2b91e8c9d91aa3b2287b7c59db0097a10ec /Include
parent    dd9da738ad1d420fabafaded3fe63912b2b17cfb (diff)
GH-125174: Mark objects as statically allocated. (#127797)
* Set a bit in the unused part of the refcount on 64 bit machines and the free-threaded build.
* Use the top of the refcount range on 32 bit machines.
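How this works, in a minimal standalone sketch (all names here, such as toy_obj64, STATIC_FLAG and the is_static_immortal* helpers, are hypothetical and for illustration only; the real definitions are in the Include/object.h and Include/refcount.h hunks below):

#include <stdint.h>
#include <stdio.h>

/* 64-bit and free-threaded builds: the refcount word has unused bits, so a
 * dedicated flag bit (kept clear of the low bits reserved for stable-ABI
 * refcount overflow) marks statically allocated objects. */
#define STATIC_FLAG (1u << 7)

typedef struct {
    uint32_t refcnt;   /* low half of the 64-bit refcount word */
    uint32_t flags;    /* high half, unused by the refcount itself */
} toy_obj64;

static int is_static_immortal64(const toy_obj64 *op) {
    return (op->flags & STATIC_FLAG) != 0;
}

/* 32-bit builds: no spare bits, so statically allocated immortals sit at the
 * top of the refcount range instead.  Values mirror the new constants in
 * refcount.h: ordinary immortals start at 5<<28, static immortals at 7<<28,
 * and anything at or above 6<<28 is treated as statically allocated. */
#define IMMORTAL_INITIAL        ((int32_t)(5L << 28))
#define STATIC_IMMORTAL_MINIMUM ((int32_t)(6L << 28))
#define STATIC_IMMORTAL_INITIAL ((int32_t)(7L << 28))

typedef struct {
    int32_t refcnt;
} toy_obj32;

static int is_static_immortal32(const toy_obj32 *op) {
    return op->refcnt >= STATIC_IMMORTAL_MINIMUM;
}

int main(void) {
    toy_obj64 heap64   = { 3u << 30, 0 };            /* immortal, heap-allocated */
    toy_obj64 static64 = { 3u << 30, STATIC_FLAG };  /* immortal, statically allocated */
    toy_obj32 heap32   = { IMMORTAL_INITIAL };
    toy_obj32 static32 = { STATIC_IMMORTAL_INITIAL };
    printf("64-bit: heap=%d static=%d\n",
           is_static_immortal64(&heap64), is_static_immortal64(&static64));
    printf("32-bit: heap=%d static=%d\n",
           is_static_immortal32(&heap32), is_static_immortal32(&static32));
    return 0;
}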
Diffstat (limited to 'Include')
-rw-r--r--  Include/internal/pycore_object.h  16
-rw-r--r--  Include/object.h                  20
-rw-r--r--  Include/refcount.h                36
3 files changed, 61 insertions, 11 deletions
diff --git a/Include/internal/pycore_object.h b/Include/internal/pycore_object.h
index 6b0b464..22de3c9 100644
--- a/Include/internal/pycore_object.h
+++ b/Include/internal/pycore_object.h
@@ -73,14 +73,24 @@ PyAPI_FUNC(int) _PyObject_IsFreed(PyObject *);
#define _PyObject_HEAD_INIT(type) \
{ \
.ob_ref_local = _Py_IMMORTAL_REFCNT_LOCAL, \
+ .ob_flags = _Py_STATICALLY_ALLOCATED_FLAG, \
.ob_type = (type) \
}
#else
+#if SIZEOF_VOID_P > 4
#define _PyObject_HEAD_INIT(type) \
{ \
- .ob_refcnt = _Py_IMMORTAL_INITIAL_REFCNT, \
+ .ob_refcnt = _Py_IMMORTAL_INITIAL_REFCNT, \
+ .ob_flags = _Py_STATICALLY_ALLOCATED_FLAG, \
.ob_type = (type) \
}
+#else
+#define _PyObject_HEAD_INIT(type) \
+ { \
+ .ob_refcnt = _Py_STATIC_IMMORTAL_INITIAL_REFCNT, \
+ .ob_type = (type) \
+ }
+#endif
#endif
#define _PyVarObject_HEAD_INIT(type, size) \
{ \
@@ -127,7 +137,11 @@ static inline void _Py_RefcntAdd(PyObject* op, Py_ssize_t n)
_Py_AddRefTotal(_PyThreadState_GET(), n);
#endif
#if !defined(Py_GIL_DISABLED)
+#if SIZEOF_VOID_P > 4
+ op->ob_refcnt += (PY_UINT32_T)n;
+#else
op->ob_refcnt += n;
+#endif
#else
if (_Py_IsOwnedByCurrentThread(op)) {
uint32_t local = op->ob_ref_local;
diff --git a/Include/object.h b/Include/object.h
index 3876d84..da7b366 100644
--- a/Include/object.h
+++ b/Include/object.h
@@ -71,7 +71,7 @@ whose size is determined when the object is allocated.
#define PyObject_HEAD_INIT(type) \
{ \
0, \
- 0, \
+ _Py_STATICALLY_ALLOCATED_FLAG, \
{ 0 }, \
0, \
_Py_IMMORTAL_REFCNT_LOCAL, \
@@ -81,7 +81,7 @@ whose size is determined when the object is allocated.
#else
#define PyObject_HEAD_INIT(type) \
{ \
- { _Py_IMMORTAL_INITIAL_REFCNT }, \
+ { _Py_STATIC_IMMORTAL_INITIAL_REFCNT }, \
(type) \
},
#endif
@@ -120,9 +120,19 @@ struct _object {
__pragma(warning(disable: 4201))
#endif
union {
- Py_ssize_t ob_refcnt;
#if SIZEOF_VOID_P > 4
- PY_UINT32_T ob_refcnt_split[2];
+ PY_INT64_T ob_refcnt_full; /* This field is needed for efficient initialization with Clang on ARM */
+ struct {
+# if PY_BIG_ENDIAN
+ PY_UINT32_T ob_flags;
+ PY_UINT32_T ob_refcnt;
+# else
+ PY_UINT32_T ob_refcnt;
+ PY_UINT32_T ob_flags;
+# endif
+ };
+#else
+ Py_ssize_t ob_refcnt;
#endif
};
#ifdef _MSC_VER
@@ -142,7 +152,7 @@ struct _object {
// trashcan mechanism as a linked list pointer and by the GC to store the
// computed "gc_refs" refcount.
uintptr_t ob_tid;
- uint16_t _padding;
+ uint16_t ob_flags;
PyMutex ob_mutex; // per-object lock
uint8_t ob_gc_bits; // gc-related state
uint32_t ob_ref_local; // local reference count
diff --git a/Include/refcount.h b/Include/refcount.h
index 141cbd3..6908c42 100644
--- a/Include/refcount.h
+++ b/Include/refcount.h
@@ -19,6 +19,9 @@ immortal. The latter should be the only instances that require
cleanup during runtime finalization.
*/
+/* Leave the low bits for refcount overflow for old stable ABI code */
+#define _Py_STATICALLY_ALLOCATED_FLAG (1 << 7)
+
#if SIZEOF_VOID_P > 4
/*
In 64+ bit systems, any object whose 32 bit reference count is >= 2**31
@@ -39,7 +42,8 @@ beyond the refcount limit. Immortality checks for reference count decreases will
be done by checking the bit sign flag in the lower 32 bits.
*/
-#define _Py_IMMORTAL_INITIAL_REFCNT ((Py_ssize_t)(3UL << 30))
+#define _Py_IMMORTAL_INITIAL_REFCNT (3UL << 30)
+#define _Py_STATIC_IMMORTAL_INITIAL_REFCNT ((Py_ssize_t)(_Py_IMMORTAL_INITIAL_REFCNT | (((Py_ssize_t)_Py_STATICALLY_ALLOCATED_FLAG) << 32)))
#else
/*
@@ -54,8 +58,10 @@ immortality, but the execution would still be correct.
Reference count increases and decreases will first go through an immortality
check by comparing the reference count field to the minimum immortality refcount.
*/
-#define _Py_IMMORTAL_INITIAL_REFCNT ((Py_ssize_t)(3L << 29))
+#define _Py_IMMORTAL_INITIAL_REFCNT ((Py_ssize_t)(5L << 28))
#define _Py_IMMORTAL_MINIMUM_REFCNT ((Py_ssize_t)(1L << 30))
+#define _Py_STATIC_IMMORTAL_INITIAL_REFCNT ((Py_ssize_t)(7L << 28))
+#define _Py_STATIC_IMMORTAL_MINIMUM_REFCNT ((Py_ssize_t)(6L << 28))
#endif
// Py_GIL_DISABLED builds indicate immortal objects using `ob_ref_local`, which is
@@ -123,10 +129,21 @@ static inline Py_ALWAYS_INLINE int _Py_IsImmortal(PyObject *op)
#define _Py_IsImmortal(op) _Py_IsImmortal(_PyObject_CAST(op))
+static inline Py_ALWAYS_INLINE int _Py_IsStaticImmortal(PyObject *op)
+{
+#if defined(Py_GIL_DISABLED) || SIZEOF_VOID_P > 4
+ return (op->ob_flags & _Py_STATICALLY_ALLOCATED_FLAG) != 0;
+#else
+ return op->ob_refcnt >= _Py_STATIC_IMMORTAL_MINIMUM_REFCNT;
+#endif
+}
+#define _Py_IsStaticImmortal(op) _Py_IsStaticImmortal(_PyObject_CAST(op))
+
// Py_SET_REFCNT() implementation for stable ABI
PyAPI_FUNC(void) _Py_SetRefcnt(PyObject *ob, Py_ssize_t refcnt);
static inline void Py_SET_REFCNT(PyObject *ob, Py_ssize_t refcnt) {
+ assert(refcnt >= 0);
#if defined(Py_LIMITED_API) && Py_LIMITED_API+0 >= 0x030d0000
// Stable ABI implements Py_SET_REFCNT() as a function call
// on limited C API version 3.13 and newer.
@@ -139,9 +156,12 @@ static inline void Py_SET_REFCNT(PyObject *ob, Py_ssize_t refcnt) {
if (_Py_IsImmortal(ob)) {
return;
}
-
#ifndef Py_GIL_DISABLED
+#if SIZEOF_VOID_P > 4
+ ob->ob_refcnt = (PY_UINT32_T)refcnt;
+#else
ob->ob_refcnt = refcnt;
+#endif
#else
if (_Py_IsOwnedByCurrentThread(ob)) {
if ((size_t)refcnt > (size_t)UINT32_MAX) {
@@ -252,13 +272,13 @@ static inline Py_ALWAYS_INLINE void Py_INCREF(PyObject *op)
_Py_atomic_add_ssize(&op->ob_ref_shared, (1 << _Py_REF_SHARED_SHIFT));
}
#elif SIZEOF_VOID_P > 4
- PY_UINT32_T cur_refcnt = op->ob_refcnt_split[PY_BIG_ENDIAN];
+ PY_UINT32_T cur_refcnt = op->ob_refcnt;
if (((int32_t)cur_refcnt) < 0) {
// the object is immortal
_Py_INCREF_IMMORTAL_STAT_INC();
return;
}
- op->ob_refcnt_split[PY_BIG_ENDIAN] = cur_refcnt + 1;
+ op->ob_refcnt = cur_refcnt + 1;
#else
if (_Py_IsImmortal(op)) {
_Py_INCREF_IMMORTAL_STAT_INC();
@@ -354,7 +374,13 @@ static inline void Py_DECREF(PyObject *op)
#elif defined(Py_REF_DEBUG)
static inline void Py_DECREF(const char *filename, int lineno, PyObject *op)
{
+#if SIZEOF_VOID_P > 4
+ /* If an object has been freed, it will have a negative full refcnt
+ * If it has not been freed, it will have a very large refcnt */
+ if (op->ob_refcnt_full <= 0 || op->ob_refcnt > (UINT32_MAX - (1<<20))) {
+#else
if (op->ob_refcnt <= 0) {
+#endif
_Py_NegativeRefcount(filename, lineno, op);
}
if (_Py_IsImmortal(op)) {
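
For the Py_REF_DEBUG hunk above, a hedged sketch of why the check reads two views of the refcount word on 64-bit builds (the union below is a simplified, little-endian stand-in for struct _object, not CPython's actual layout; looks_corrupt and refcnt_word are hypothetical names):

#include <stdint.h>
#include <stdio.h>

typedef union {
    int64_t refcnt_full;       /* whole word: freed memory tends to read as negative */
    struct {
        uint32_t refcnt;       /* low half: the real 32-bit reference count */
        uint32_t flags;        /* high half: the static-allocation flag lives here */
    };
} refcnt_word;

static int looks_corrupt(const refcnt_word *w) {
    /* Freed object: the full 64-bit value is <= 0.  Underflowed object: the
     * low 32 bits wrapped past zero and now sit just below UINT32_MAX,
     * mirroring the UINT32_MAX - (1<<20) guard in the diff. */
    return w->refcnt_full <= 0 || w->refcnt > (UINT32_MAX - (1 << 20));
}

int main(void) {
    refcnt_word live    = { .refcnt_full = 42 };          /* ordinary live object */
    refcnt_word wrapped = { .refcnt_full = UINT32_MAX };  /* decref past zero wrapped the low half */
    refcnt_word freed   = { .refcnt_full = -1 };          /* poisoned/freed memory pattern */
    printf("live=%d wrapped=%d freed=%d\n",
           looks_corrupt(&live), looks_corrupt(&wrapped), looks_corrupt(&freed));
    return 0;
}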