author     Mark Shannon <mark@hotpy.org>    2022-01-20 11:46:39 (GMT)
committer  GitHub <noreply@github.com>      2022-01-20 11:46:39 (GMT)
commit     b04dfbbe4bd7071d46c8688c2263726ea31d33cd (patch)
tree       17989daaffa384df343b53289845fba667e20acc /Python/ceval.c
parent     d05a66339b5e07d72d96e4c30a34cc3821bb61a2 (diff)
download   cpython-b04dfbbe4bd7071d46c8688c2263726ea31d33cd.zip
           cpython-b04dfbbe4bd7071d46c8688c2263726ea31d33cd.tar.gz
           cpython-b04dfbbe4bd7071d46c8688c2263726ea31d33cd.tar.bz2
bpo-46409: Make generators in bytecode (GH-30633)
* Add RETURN_GENERATOR and JUMP_NO_INTERRUPT opcodes.
* Trim the frame and generator objects by one word each.
* Minor refactor of frame.c.
* Update test.test_sys to account for smaller frames.
* Treat generator functions as normal functions when evaluating and specializing.
Diffstat (limited to 'Python/ceval.c')
-rw-r--r--  Python/ceval.c  195
1 file changed, 88 insertions(+), 107 deletions(-)
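
Before the diff itself, a quick way to see the user-visible effect of the new opcode (a sketch, assuming a CPython build that includes this change, i.e. 3.11 or later): disassembling any generator function shows RETURN_GENERATOR emitted at the start of its code object, where older interpreters have no such instruction.

    import dis

    def countdown(n):
        while n:
            yield n
            n -= 1

    # On builds that include this change, the disassembly begins with
    # RETURN_GENERATOR: the prologue runs like an ordinary call, then this
    # opcode creates and returns the generator object.  On 3.10 and earlier
    # the generator was created in C (make_coro) and no such opcode exists.
    dis.dis(countdown)
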
diff --git a/Python/ceval.c b/Python/ceval.c
index 70a7750..9aaddd9 100644
--- a/Python/ceval.c
+++ b/Python/ceval.c
@@ -1345,7 +1345,7 @@ eval_frame_handle_pending(PyThreadState *tstate)
#define CHECK_EVAL_BREAKER() \
if (_Py_atomic_load_relaxed(eval_breaker)) { \
- goto check_eval_breaker; \
+ goto handle_eval_breaker; \
}
@@ -1620,12 +1620,6 @@ trace_function_exit(PyThreadState *tstate, InterpreterFrame *frame, PyObject *re
return 0;
}
-static PyObject *
-make_coro(PyThreadState *tstate, PyFunctionObject *func,
- PyObject *locals,
- PyObject* const* args, size_t argcount,
- PyObject *kwnames);
-
static int
skip_backwards_over_extended_args(PyCodeObject *code, int offset)
{
@@ -1760,49 +1754,21 @@ resume_frame:
assert(!_PyErr_Occurred(tstate));
#endif
-check_eval_breaker:
- {
- assert(STACK_LEVEL() >= 0); /* else underflow */
- assert(STACK_LEVEL() <= frame->f_code->co_stacksize); /* else overflow */
- assert(!_PyErr_Occurred(tstate));
-
- /* Do periodic things. Doing this every time through
- the loop would add too much overhead, so we do it
- only every Nth instruction. We also do it if
- ``pending.calls_to_do'' is set, i.e. when an asynchronous
- event needs attention (e.g. a signal handler or
- async I/O handler); see Py_AddPendingCall() and
- Py_MakePendingCalls() above. */
-
- if (_Py_atomic_load_relaxed(eval_breaker)) {
- opcode = _Py_OPCODE(*next_instr);
- if (opcode != BEFORE_ASYNC_WITH &&
- opcode != SEND &&
- _Py_OPCODE(next_instr[-1]) != SEND) {
- /* Few cases where we skip running signal handlers and other
- pending calls:
- - If we're about to enter the 'with:'. It will prevent
- emitting a resource warning in the common idiom
- 'with open(path) as file:'.
- - If we're about to enter the 'async with:'.
- - If we're about to enter the 'try:' of a try/finally (not
- *very* useful, but might help in some cases and it's
- traditional)
- - If we're resuming a chain of nested 'yield from' or
- 'await' calls, then each frame is parked with YIELD_FROM
- as its next opcode. If the user hit control-C we want to
- wait until we've reached the innermost frame before
- running the signal handler and raising KeyboardInterrupt
- (see bpo-30039).
- */
- if (eval_frame_handle_pending(tstate) != 0) {
- goto error;
- }
- }
- }
+ DISPATCH();
+handle_eval_breaker:
+
+ /* Do periodic things, like check for signals and async I/O.
+ * We need to do this reasonably frequently, but not too frequently.
+ * All loops should include a check of the eval breaker.
+ * We also check on return from any builtin function.
+ */
+ if (eval_frame_handle_pending(tstate) != 0) {
+ goto error;
+ }
DISPATCH();
+ {
/* Start instructions */
#if USE_COMPUTED_GOTOS
{
@@ -1834,6 +1800,9 @@ check_eval_breaker:
next_instr = first_instr + nexti;
}
frame->f_state = FRAME_EXECUTING;
+ if (_Py_atomic_load_relaxed(eval_breaker) && oparg < 2) {
+ goto handle_eval_breaker;
+ }
DISPATCH();
}
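
The periodic work that handle_eval_breaker now centralises is what lets Python-level signal handlers run even inside a tight bytecode loop: loops and RESUME check the eval breaker and jump here. A minimal Python-level illustration, not part of the patch, assuming a Unix platform for signal.alarm:

    import signal

    def handler(signum, frame):
        # Runs in the main thread the next time the eval breaker is checked.
        raise KeyboardInterrupt

    signal.signal(signal.SIGALRM, handler)
    signal.alarm(1)          # ask the OS to send SIGALRM in about a second

    try:
        while True:          # no I/O, no calls: only the periodic eval-breaker
            pass             # check gives the handler a chance to run
    except KeyboardInterrupt:
        print("interrupted via the eval breaker")
    finally:
        signal.alarm(0)      # cancel any pending alarm
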
@@ -4152,6 +4121,17 @@ check_eval_breaker:
DISPATCH();
}
+ TARGET(JUMP_NO_INTERRUPT) {
+ /* This bytecode is used in the `yield from` or `await` loop.
+ * If there is an interrupt, we want it handled in the innermost
+ * generator or coroutine, so we deliberately do not check it here.
+ * (see bpo-30039).
+ */
+ frame->f_state = FRAME_EXECUTING;
+ JUMPTO(oparg);
+ DISPATCH();
+ }
+
TARGET(JUMP_ABSOLUTE_QUICK) {
assert(oparg < INSTR_OFFSET());
JUMPTO(oparg);
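
JUMP_NO_INTERRUPT exists so that an interrupt caught by the eval breaker while resuming a `yield from`/`await` chain is deferred until the innermost frame is running (bpo-30039). The resulting rule, that an exception delivered into a suspended delegation chain surfaces in the innermost generator first, can be illustrated from Python with gen.throw(), which injects an exception at the suspension point (an illustrative sketch, not part of the patch):

    def inner():
        try:
            yield "inner running"
        except KeyboardInterrupt:
            print("handled in the innermost generator")
            yield "recovered"

    def outer():
        # The resume loop of this delegation is where JUMP_NO_INTERRUPT is
        # emitted (per the comment above); interrupts are not taken here.
        yield from inner()

    g = outer()
    print(next(g))                     # suspended inside inner()
    print(g.throw(KeyboardInterrupt))  # lands in inner(), which handles it
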
@@ -4627,28 +4607,25 @@ check_eval_breaker:
// Check if the call can be inlined or not
if (Py_TYPE(function) == &PyFunction_Type && tstate->interp->eval_frame == NULL) {
int code_flags = ((PyCodeObject*)PyFunction_GET_CODE(function))->co_flags;
- int is_generator = code_flags & (CO_GENERATOR | CO_COROUTINE | CO_ASYNC_GENERATOR);
- if (!is_generator) {
- PyObject *locals = code_flags & CO_OPTIMIZED ? NULL : PyFunction_GET_GLOBALS(function);
- STACK_SHRINK(oparg);
- InterpreterFrame *new_frame = _PyEvalFramePushAndInit(
- tstate, (PyFunctionObject *)function, locals,
- stack_pointer, nargs, kwnames
- );
- STACK_SHRINK(postcall_shrink);
- RESET_STACK_ADJUST_FOR_CALLS;
- // The frame has stolen all the arguments from the stack,
- // so there is no need to clean them up.
- Py_XDECREF(kwnames);
- Py_DECREF(function);
- if (new_frame == NULL) {
- goto error;
- }
- _PyFrame_SetStackPointer(frame, stack_pointer);
- new_frame->previous = frame;
- cframe.current_frame = frame = new_frame;
- goto start_frame;
+ PyObject *locals = code_flags & CO_OPTIMIZED ? NULL : PyFunction_GET_GLOBALS(function);
+ STACK_SHRINK(oparg);
+ InterpreterFrame *new_frame = _PyEvalFramePushAndInit(
+ tstate, (PyFunctionObject *)function, locals,
+ stack_pointer, nargs, kwnames
+ );
+ STACK_SHRINK(postcall_shrink);
+ RESET_STACK_ADJUST_FOR_CALLS;
+ // The frame has stolen all the arguments from the stack,
+ // so there is no need to clean them up.
+ Py_XDECREF(kwnames);
+ Py_DECREF(function);
+ if (new_frame == NULL) {
+ goto error;
}
+ _PyFrame_SetStackPointer(frame, stack_pointer);
+ new_frame->previous = frame;
+ cframe.current_frame = frame = new_frame;
+ goto start_frame;
}
/* Callable is not a normal Python function */
PyObject *res;
@@ -5076,6 +5053,40 @@ check_eval_breaker:
DISPATCH();
}
+ TARGET(RETURN_GENERATOR) {
+ PyGenObject *gen = (PyGenObject *)_Py_MakeCoro(frame->f_func);
+ if (gen == NULL) {
+ goto error;
+ }
+ assert(EMPTY());
+ _PyFrame_SetStackPointer(frame, stack_pointer);
+ InterpreterFrame *gen_frame = (InterpreterFrame *)gen->gi_iframe;
+ _PyFrame_Copy(frame, gen_frame);
+ assert(frame->frame_obj == NULL);
+ gen->gi_frame_valid = 1;
+ gen_frame->is_generator = true;
+ gen_frame->f_state = FRAME_CREATED;
+ _Py_LeaveRecursiveCall(tstate);
+ if (!frame->is_entry) {
+ InterpreterFrame *prev = frame->previous;
+ _PyThreadState_PopFrame(tstate, frame);
+ frame = cframe.current_frame = prev;
+ _PyFrame_StackPush(frame, (PyObject *)gen);
+ goto resume_frame;
+ }
+ /* Make sure that frame is in a valid state */
+ frame->stacktop = 0;
+ frame->f_locals = NULL;
+ Py_INCREF(frame->f_func);
+ Py_INCREF(frame->f_code);
+ /* Restore previous cframe and return. */
+ tstate->cframe = cframe.previous;
+ tstate->cframe->use_tracing = cframe.use_tracing;
+ assert(tstate->cframe->current_frame == frame->previous);
+ assert(!_PyErr_Occurred(tstate));
+ return (PyObject *)gen;
+ }
+
TARGET(BUILD_SLICE) {
PyObject *start, *stop, *step, *slice;
if (oparg == 3)
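
The RETURN_GENERATOR handler above is what a call to a generator function now executes: the ordinary prologue runs (argument binding, frame push), then the opcode creates the generator, copies the frame into it, and returns before any of the body runs. The Python-level semantics are unchanged; a small sketch:

    def gen(x):
        print("body started")      # not reached until the first next()
        yield x * 2

    g = gen(21)       # prologue runs, RETURN_GENERATOR hands back the generator
    print(type(g))    # <class 'generator'>
    print(next(g))    # prints "body started", then 42
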
@@ -5222,11 +5233,14 @@ check_eval_breaker:
frame->f_lasti = INSTR_OFFSET();
TRACING_NEXTOPARG();
if (opcode == RESUME) {
+ if (oparg < 2) {
+ CHECK_EVAL_BREAKER();
+ }
/* Call tracing */
TRACE_FUNCTION_ENTRY();
DTRACE_FUNCTION_ENTRY();
}
- else {
+ else if (frame->f_state > FRAME_CREATED) {
/* line-by-line tracing support */
if (PyDTrace_LINE_ENABLED()) {
maybe_dtrace_line(frame, &tstate->trace_info, instr_prev);
@@ -5962,33 +5976,6 @@ fail_post_args:
}
/* Consumes all the references to the args */
-static PyObject *
-make_coro(PyThreadState *tstate, PyFunctionObject *func,
- PyObject *locals,
- PyObject* const* args, size_t argcount,
- PyObject *kwnames)
-{
- assert (((PyCodeObject *)func->func_code)->co_flags & (CO_GENERATOR | CO_COROUTINE | CO_ASYNC_GENERATOR));
- PyObject *gen = _Py_MakeCoro(func);
- if (gen == NULL) {
- return NULL;
- }
- InterpreterFrame *frame = (InterpreterFrame *)((PyGenObject *)gen)->gi_iframe;
- PyCodeObject *code = (PyCodeObject *)func->func_code;
- _PyFrame_InitializeSpecials(frame, func, locals, code->co_nlocalsplus);
- for (int i = 0; i < code->co_nlocalsplus; i++) {
- frame->localsplus[i] = NULL;
- }
- ((PyGenObject *)gen)->gi_frame_valid = 1;
- if (initialize_locals(tstate, func, frame->localsplus, args, argcount, kwnames)) {
- Py_DECREF(gen);
- return NULL;
- }
- frame->generator = gen;
- return gen;
-}
-
-/* Consumes all the references to the args */
static InterpreterFrame *
_PyEvalFramePushAndInit(PyThreadState *tstate, PyFunctionObject *func,
PyObject *locals, PyObject* const* args,
@@ -6041,10 +6028,7 @@ _PyEval_Vector(PyThreadState *tstate, PyFunctionObject *func,
PyObject* const* args, size_t argcount,
PyObject *kwnames)
{
- PyCodeObject *code = (PyCodeObject *)func->func_code;
- /* _PyEvalFramePushAndInit and make_coro consume
- * all the references to their arguments
- */
+ /* _PyEvalFramePushAndInit consumes all the references to its arguments */
for (size_t i = 0; i < argcount; i++) {
Py_INCREF(args[i]);
}
@@ -6054,19 +6038,16 @@ _PyEval_Vector(PyThreadState *tstate, PyFunctionObject *func,
Py_INCREF(args[i+argcount]);
}
}
- int is_coro = code->co_flags &
- (CO_GENERATOR | CO_COROUTINE | CO_ASYNC_GENERATOR);
- if (is_coro) {
- return make_coro(tstate, func, locals, args, argcount, kwnames);
- }
InterpreterFrame *frame = _PyEvalFramePushAndInit(
tstate, func, locals, args, argcount, kwnames);
if (frame == NULL) {
return NULL;
}
PyObject *retval = _PyEval_EvalFrame(tstate, frame, 0);
- assert(frame->stacktop >= 0);
- assert(_PyFrame_GetStackPointer(frame) == _PyFrame_Stackbase(frame));
+ assert(
+ _PyFrame_GetStackPointer(frame) == _PyFrame_Stackbase(frame) ||
+ _PyFrame_GetStackPointer(frame) == frame->localsplus
+ );
_PyEvalFrameClearAndPop(tstate, frame);
return retval;
}
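
With make_coro gone, _PyEval_Vector pushes a frame for generator and coroutine functions exactly as it does for plain functions; the generator object is only created later, by RETURN_GENERATOR. The user-visible behaviour is unchanged (argument binding already happened at call time before this patch as well), as this sketch shows:

    def gen(a, b):
        yield a + b

    try:
        gen(1)                    # arguments are bound at call time,
                                  # before any generator object exists
    except TypeError as exc:
        print("TypeError at call time:", exc)

    g = gen(1, 2)                 # binding succeeded; body not yet executed
    print(next(g))                # 3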