path: root/Include/internal/pycore_atomic_funcs.h
author    Victor Stinner <vstinner@python.org>  2020-12-23 02:41:08 (GMT)
committer GitHub <noreply@github.com>           2020-12-23 02:41:08 (GMT)
commit    52a327c1cbb86c7f2f5c460645889b23615261bf (patch)
tree      df67ef901c5300349c073e8031c4d410ba016ca7 /Include/internal/pycore_atomic_funcs.h
parent    46b5c6be29f6470a20dd0dbd34e794debcee7c04 (diff)
bpo-39465: Add pycore_atomic_funcs.h header (GH-20766)
Add the pycore_atomic_funcs.h internal header file: it is similar to pycore_atomic.h, but does not require declaring variables as atomic. Add the _Py_atomic_size_get() and _Py_atomic_size_set() functions.
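As a rough usage sketch (not part of the patch; the counter variable and helper names are hypothetical, and the header is only usable from a Py_BUILD_CORE compilation unit), the new functions let a plain Py_ssize_t be read and written from multiple threads without an atomic type qualifier:

#include "Python.h"
#include "pycore_atomic_funcs.h"  // _Py_atomic_size_get(), _Py_atomic_size_set()

/* A plain Py_ssize_t: no atomic qualifier needed on the declaration. */
static Py_ssize_t counter = 0;

static void
set_counter(Py_ssize_t value)
{
    /* Sequentially-consistent atomic store. */
    _Py_atomic_size_set(&counter, value);
}

static Py_ssize_t
get_counter(void)
{
    /* Sequentially-consistent atomic load. */
    return _Py_atomic_size_get(&counter);
}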
Diffstat (limited to 'Include/internal/pycore_atomic_funcs.h')
-rw-r--r--  Include/internal/pycore_atomic_funcs.h | 94
1 file changed, 94 insertions, 0 deletions
diff --git a/Include/internal/pycore_atomic_funcs.h b/Include/internal/pycore_atomic_funcs.h
new file mode 100644
index 0000000..a708789
--- /dev/null
+++ b/Include/internal/pycore_atomic_funcs.h
@@ -0,0 +1,94 @@
+/* Atomic functions: similar to pycore_atomic.h, but variables don't
+   need to be declared as atomic.
+
+ Py_ssize_t type:
+
+ * value = _Py_atomic_size_get(&var)
+ * _Py_atomic_size_set(&var, value)
+
+   Use sequentially-consistent ordering (__ATOMIC_SEQ_CST memory order):
+   it enforces a total ordering with all other sequentially-consistent
+   atomic operations.
+*/
+#ifndef Py_ATOMIC_FUNC_H
+#define Py_ATOMIC_FUNC_H
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+#ifndef Py_BUILD_CORE
+# error "this header requires Py_BUILD_CORE define"
+#endif
+
+#if defined(_MSC_VER)
+# include <intrin.h> // _InterlockedExchange()
+#endif
+
+
+// Use builtin atomic operations in GCC >= 4.7 and clang
+#ifdef HAVE_BUILTIN_ATOMIC
+
+static inline Py_ssize_t _Py_atomic_size_get(Py_ssize_t *var)
+{
+ return __atomic_load_n(var, __ATOMIC_SEQ_CST);
+}
+
+static inline void _Py_atomic_size_set(Py_ssize_t *var, Py_ssize_t value)
+{
+ __atomic_store_n(var, value, __ATOMIC_SEQ_CST);
+}
+
+#elif defined(_MSC_VER)
+
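+// An atomic load is emulated with a compare-exchange that swaps the value
+// with itself: the intrinsic acts as a full memory barrier, and the loop
+// retries until it observes a stable value.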
+static inline Py_ssize_t _Py_atomic_size_get(Py_ssize_t *var)
+{
+#if SIZEOF_VOID_P == 8
+ Py_BUILD_ASSERT(sizeof(__int64) == sizeof(*var));
+ volatile __int64 *volatile_var = (volatile __int64 *)var;
+ __int64 old;
+ do {
+ old = *volatile_var;
+ } while(_InterlockedCompareExchange64(volatile_var, old, old) != old);
+#else
+ Py_BUILD_ASSERT(sizeof(long) == sizeof(*var));
+ volatile long *volatile_var = (volatile long *)var;
+ long old;
+ do {
+ old = *volatile_var;
+ } while(_InterlockedCompareExchange(volatile_var, old, old) != old);
+#endif
+ return old;
+}
+
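+// _InterlockedExchange() is used (and its return value discarded) rather
+// than a plain store because the intrinsic implies a full memory barrier.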
+static inline void _Py_atomic_size_set(Py_ssize_t *var, Py_ssize_t value)
+{
+#if SIZEOF_VOID_P == 8
+ Py_BUILD_ASSERT(sizeof(__int64) == sizeof(*var));
+ volatile __int64 *volatile_var = (volatile __int64 *)var;
+ _InterlockedExchange64(volatile_var, value);
+#else
+ Py_BUILD_ASSERT(sizeof(long) == sizeof(*var));
+ volatile long *volatile_var = (volatile long *)var;
+ _InterlockedExchange(volatile_var, value);
+#endif
+}
+
+#else
+// Fallback implementation using volatile
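+// Note: volatile only prevents the compiler from caching or reordering the
+// access; on its own it guarantees neither atomicity nor cross-thread
+// memory ordering.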
+
+static inline Py_ssize_t _Py_atomic_size_get(Py_ssize_t *var)
+{
+ volatile Py_ssize_t *volatile_var = (volatile Py_ssize_t *)var;
+ return *volatile_var;
+}
+
+static inline void _Py_atomic_size_set(Py_ssize_t *var, Py_ssize_t value)
+{
+ volatile Py_ssize_t *volatile_var = (volatile Py_ssize_t *)var;
+ *volatile_var = value;
+}
+#endif
+
+#ifdef __cplusplus
+}
+#endif
+#endif /* Py_ATOMIC_FUNC_H */
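For comparison, here is a minimal sketch of the same seq_cst load/store expressed with standard C11 <stdatomic.h> (hypothetical, not part of the patch). C11 requires the object itself to be declared _Atomic, which is exactly the requirement this header avoids; that is one reason the compiler-specific paths above are used instead:

#include <stdatomic.h>
#include <stddef.h>

/* ptrdiff_t stands in for Py_ssize_t so this sketch is standalone. */
static inline ptrdiff_t
c11_size_get(const _Atomic ptrdiff_t *var)
{
    return atomic_load_explicit(var, memory_order_seq_cst);
}

static inline void
c11_size_set(_Atomic ptrdiff_t *var, ptrdiff_t value)
{
    atomic_store_explicit(var, value, memory_order_seq_cst);
}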