#ifndef Py_INTERNAL_CEVAL_H
#define Py_INTERNAL_CEVAL_H
#ifdef __cplusplus
extern "C" {
#endif
#ifndef Py_BUILD_CORE
# error "this header requires Py_BUILD_CORE define"
#endif
/* Forward declarations */
struct pyruntimestate;
struct _ceval_runtime_state;
#include "pycore_interp.h" /* PyInterpreterState.eval_frame */
extern void _Py_FinishPendingCalls(PyThreadState *tstate);
extern void _PyEval_InitRuntimeState(struct _ceval_runtime_state *);
extern int _PyEval_InitState(struct _ceval_state *ceval);
extern void _PyEval_FiniState(struct _ceval_state *ceval);
PyAPI_FUNC(void) _PyEval_SignalReceived(PyInterpreterState *interp);
PyAPI_FUNC(int) _PyEval_AddPendingCall(
    PyInterpreterState *interp,
    int (*func)(void *),
    void *arg);
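/*
 * _PyEval_AddPendingCall() schedules `func(arg)` to be run later by the
 * eval loop of the given interpreter, and _Py_FinishPendingCalls() flushes
 * whatever is still queued at shutdown.  A minimal usage sketch
 * (illustrative only, not part of this header; `report_wakeup` is a
 * hypothetical callback):
 *
 *     static int
 *     report_wakeup(void *arg)
 *     {
 *         // Runs with the GIL held, between bytecode instructions.
 *         // Return 0 on success; return -1 with an exception set on error.
 *         return 0;
 *     }
 *
 *     // Elsewhere, e.g. after a signal has been received:
 *     _PyEval_AddPendingCall(interp, report_wakeup, NULL);
 */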
PyAPI_FUNC(void) _PyEval_SignalAsyncExc(PyInterpreterState *interp);
#ifdef HAVE_FORK
extern PyStatus _PyEval_ReInitThreads(PyThreadState *tstate);
#endif
PyAPI_FUNC(void) _PyEval_SetCoroutineOriginTrackingDepth(
    PyThreadState *tstate,
    int new_depth);
void _PyEval_Fini(void);
extern PyObject *_PyEval_BuiltinsFromGlobals(PyObject *globals);
static inline PyObject*
_PyEval_EvalFrame(PyThreadState *tstate, PyFrameObject *f, int throwflag)
{
    return tstate->interp->eval_frame(tstate, f, throwflag);
}
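/*
 * _PyEval_EvalFrame() dispatches through the per-interpreter eval_frame
 * pointer (the PEP 523 frame-evaluation hook), which normally points at
 * _PyEval_EvalFrameDefault.  A minimal sketch of overriding it, assuming
 * the PEP 523 API (_PyInterpreterState_SetEvalFrameFunc) from the CPython
 * headers; `counting_eval` and `frames_seen` are hypothetical:
 *
 *     static Py_ssize_t frames_seen = 0;
 *
 *     static PyObject *
 *     counting_eval(PyThreadState *tstate, PyFrameObject *f, int throwflag)
 *     {
 *         frames_seen++;   // observe every frame, then defer to the default
 *         return _PyEval_EvalFrameDefault(tstate, f, throwflag);
 *     }
 *
 *     // Install for one interpreter:
 *     _PyInterpreterState_SetEvalFrameFunc(interp, counting_eval);
 */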
extern PyObject *
_PyEval_Vector(PyThreadState *tstate,
               PyFrameConstructor *desc, PyObject *locals,
               PyObject* const* args, size_t argcount,
               PyObject *kwnames);
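/*
 * _PyEval_Vector() takes its arguments in the vectorcall layout: `args`
 * points at `argcount` positional arguments, optionally followed by one
 * value for each keyword name in the `kwnames` tuple (kwnames may be NULL
 * when there are no keyword arguments).  An illustrative call, assuming
 * `desc` is a PyFrameConstructor already set up for the target code object
 * and `x`, `y` are PyObject pointers:
 *
 *     PyObject *stack[] = {x, y};
 *     PyObject *res = _PyEval_Vector(tstate, desc, NULL, stack, 2, NULL);
 */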
#ifdef EXPERIMENTAL_ISOLATED_SUBINTERPRETERS
extern int _PyEval_ThreadsInitialized(PyInterpreterState *interp);
#else
extern int _PyEval_ThreadsInitialized(struct pyruntimestate *runtime);
#endif
extern PyStatus _PyEval_InitGIL(PyThreadState *tstate);
extern void _PyEval_FiniGIL(PyInterpreterState *interp);
extern void _PyEval_ReleaseLock(PyThreadState *tstate);
/* --- _Py_EnterRecursiveCall() ----------------------------------------- */
#ifdef USE_STACKCHECK
/* With USE_STACKCHECK macro defined, trigger stack checks in
   _Py_CheckRecursiveCall() on every 64th call to Py_EnterRecursiveCall. */
static inline int _Py_MakeRecCheck(PyThreadState *tstate) {
    return (++tstate->recursion_depth > tstate->interp->ceval.recursion_limit
            || ++tstate->stackcheck_counter > 64);
}
#else
static inline int _Py_MakeRecCheck(PyThreadState *tstate) {
    return (++tstate->recursion_depth > tstate->interp->ceval.recursion_limit);
}
#endif
PyAPI_FUNC(int) _Py_CheckRecursiveCall(
    PyThreadState *tstate,
    const char *where);
static inline int _Py_EnterRecursiveCall(PyThreadState *tstate,
                                         const char *where) {
    return (_Py_MakeRecCheck(tstate) && _Py_CheckRecursiveCall(tstate, where));
}
static inline int _Py_EnterRecursiveCall_inline(const char *where) {
    PyThreadState *tstate = PyThreadState_GET();
    return _Py_EnterRecursiveCall(tstate, where);
}
#define Py_EnterRecursiveCall(where) _Py_EnterRecursiveCall_inline(where)
static inline void _Py_LeaveRecursiveCall(PyThreadState *tstate) {
    tstate->recursion_depth--;
}
static inline void _Py_LeaveRecursiveCall_inline(void) {
    PyThreadState *tstate = PyThreadState_GET();
    _Py_LeaveRecursiveCall(tstate);
}
#define Py_LeaveRecursiveCall() _Py_LeaveRecursiveCall_inline()
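/*
 * Typical call pattern (illustrative only): C code that may recurse through
 * the C stack brackets the recursive step with Py_EnterRecursiveCall() /
 * Py_LeaveRecursiveCall().  Py_EnterRecursiveCall() returns non-zero, with
 * RecursionError set, once the recursion limit is exceeded.
 * `do_recursive_step` is a hypothetical helper:
 *
 *     if (Py_EnterRecursiveCall(" while printing an object")) {
 *         return NULL;
 *     }
 *     PyObject *res = do_recursive_step(obj);
 *     Py_LeaveRecursiveCall();
 *     return res;
 */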
#ifdef __cplusplus
}
#endif
#endif /* !Py_INTERNAL_CEVAL_H */