Diffstat (limited to 'Python/generated_cases.c.h')
 Python/generated_cases.c.h | 100
 1 file changed, 71 insertions(+), 29 deletions(-)
diff --git a/Python/generated_cases.c.h b/Python/generated_cases.c.h
index 632cbc7..eff246f 100644
--- a/Python/generated_cases.c.h
+++ b/Python/generated_cases.c.h
@@ -25,7 +25,7 @@
lhs = stack_pointer[-2];
uint16_t counter = read_u16(&this_instr[1].cache);
(void)counter;
- #if ENABLE_SPECIALIZATION
+ #if ENABLE_SPECIALIZATION_FT
if (ADAPTIVE_COUNTER_TRIGGERS(counter)) {
next_instr = this_instr;
_PyFrame_SetStackPointer(frame, stack_pointer);
@@ -35,7 +35,7 @@
}
OPCODE_DEFERRED_INC(BINARY_OP);
ADVANCE_ADAPTIVE_COUNTER(this_instr[1].counter);
- #endif /* ENABLE_SPECIALIZATION */
+ #endif /* ENABLE_SPECIALIZATION_FT */
assert(NB_ADD <= oparg);
assert(oparg <= NB_INPLACE_XOR);
}
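
The hunk above only swaps the guard macro from ENABLE_SPECIALIZATION to ENABLE_SPECIALIZATION_FT. A minimal sketch of how the two guards could relate on free-threaded builds, assuming the usual Py_GIL_DISABLED split (illustrative definitions, not copied from CPython's headers):

    /* Hypothetical sketch: the classic specializer is off when the GIL is
     * disabled, but a thread-safe subset stays enabled via the _FT guard. */
    #ifdef Py_GIL_DISABLED
    #  define ENABLE_SPECIALIZATION    0
    #  define ENABLE_SPECIALIZATION_FT 1
    #else
    #  define ENABLE_SPECIALIZATION    1
    #  define ENABLE_SPECIALIZATION_FT ENABLE_SPECIALIZATION
    #endif

Under that assumption, the adaptive-counter logic above stays compiled in on free-threaded builds, while blocks still guarded by ENABLE_SPECIALIZATION alone drop out there.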
@@ -435,8 +435,8 @@
container = stack_pointer[-2];
uint16_t counter = read_u16(&this_instr[1].cache);
(void)counter;
- assert(frame->stackpointer == NULL);
#if ENABLE_SPECIALIZATION
+ assert(frame->stackpointer == NULL);
if (ADAPTIVE_COUNTER_TRIGGERS(counter)) {
next_instr = this_instr;
_PyFrame_SetStackPointer(frame, stack_pointer);
@@ -1066,8 +1066,8 @@
_PyFrame_SetStackPointer(frame, stack_pointer);
_PyInterpreterFrame *shim = _PyFrame_PushTrampolineUnchecked(
tstate, (PyCodeObject *)&_Py_InitCleanup, 1, frame);
+ assert(_PyFrame_GetBytecode(shim)[0].op.code == EXIT_INIT_CHECK);
stack_pointer = _PyFrame_GetStackPointer(frame);
- assert(_PyCode_CODE(_PyFrame_GetCode(shim))[0].op.code == EXIT_INIT_CHECK);
/* Push self onto stack of shim */
shim->localsplus[0] = PyStackRef_DUP(self[0]);
_PyFrame_SetStackPointer(frame, stack_pointer);
@@ -4711,7 +4711,9 @@
int original_opcode = 0;
if (tstate->tracing) {
PyCodeObject *code = _PyFrame_GetCode(frame);
- original_opcode = code->_co_monitoring->lines[(int)(this_instr - _PyCode_CODE(code))].original_opcode;
+ _PyFrame_SetStackPointer(frame, stack_pointer);
+ original_opcode = code->_co_monitoring->lines[(int)(this_instr - _PyFrame_GetBytecode(frame))].original_opcode;
+ stack_pointer = _PyFrame_GetStackPointer(frame);
next_instr = this_instr;
} else {
_PyFrame_SetStackPointer(frame, stack_pointer);
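
Several hunks here replace _PyCode_CODE(_PyFrame_GetCode(frame)) with _PyFrame_GetBytecode(frame). A rough sketch of what such a helper might do once each thread can run its own bytecode copy; the function name and the co_tlbc field layout below are assumptions for illustration, not the real inline definition:

    /* Hypothetical sketch: return the bytecode this frame is executing.
     * On free-threaded builds, frame->tlbc_index selects a per-thread copy;
     * otherwise it is simply the code object's bytecode. */
    static inline _Py_CODEUNIT *
    sketch_frame_get_bytecode(_PyInterpreterFrame *frame)
    {
    #ifdef Py_GIL_DISABLED
        PyCodeObject *co = _PyFrame_GetCode(frame);
        _PyCodeArray *tlbc = co->co_tlbc;   /* assumed field: per-thread copies */
        return (_Py_CODEUNIT *)tlbc->entries[frame->tlbc_index];
    #else
        return _PyCode_CODE(_PyFrame_GetCode(frame));
    #endif
    }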
@@ -4759,9 +4761,7 @@
assert(PyStackRef_BoolCheck(cond));
int flag = PyStackRef_Is(cond, PyStackRef_False);
int offset = flag * oparg;
- #if ENABLE_SPECIALIZATION
- this_instr[1].cache = (this_instr[1].cache << 1) | flag;
- #endif
+ RECORD_BRANCH_TAKEN(this_instr[1].cache, flag);
INSTRUMENTED_JUMP(this_instr, next_instr + offset, PY_MONITORING_EVENT_BRANCH);
DISPATCH();
}
@@ -4782,9 +4782,7 @@
PyStackRef_CLOSE(value_stackref);
offset = 0;
}
- #if ENABLE_SPECIALIZATION
- this_instr[1].cache = (this_instr[1].cache << 1) | flag;
- #endif
+ RECORD_BRANCH_TAKEN(this_instr[1].cache, flag);
INSTRUMENTED_JUMP(this_instr, next_instr + offset, PY_MONITORING_EVENT_BRANCH);
DISPATCH();
}
@@ -4822,9 +4820,7 @@
assert(PyStackRef_BoolCheck(cond));
int flag = PyStackRef_Is(cond, PyStackRef_True);
int offset = flag * oparg;
- #if ENABLE_SPECIALIZATION
- this_instr[1].cache = (this_instr[1].cache << 1) | flag;
- #endif
+ RECORD_BRANCH_TAKEN(this_instr[1].cache, flag);
INSTRUMENTED_JUMP(this_instr, next_instr + offset, PY_MONITORING_EVENT_BRANCH);
DISPATCH();
}
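
The three instrumented-branch hunks above fold the open-coded "cache = (cache << 1) | flag" update into RECORD_BRANCH_TAKEN. A plausible expansion, assuming the macro performs a relaxed atomic read-modify-write on free-threaded builds (sketch only; the UINT16 FT_ATOMIC_* wrapper names are assumptions here):

    /* Sketch: shift the 16-bit branch-history cache and record whether the
     * branch was taken.  A no-op when specialization is disabled. */
    #if ENABLE_SPECIALIZATION_FT
    #define RECORD_BRANCH_TAKEN(bitset, flag) \
        FT_ATOMIC_STORE_UINT16_RELAXED(       \
            bitset, (FT_ATOMIC_LOAD_UINT16_RELAXED(bitset) << 1) | (flag))
    #else
    #define RECORD_BRANCH_TAKEN(bitset, flag) ((void)0)
    #endif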
@@ -4834,6 +4830,28 @@
(void)this_instr;
next_instr += 1;
INSTRUCTION_STATS(INSTRUMENTED_RESUME);
+ // _LOAD_BYTECODE
+ {
+ #ifdef Py_GIL_DISABLED
+ if (frame->tlbc_index !=
+ ((_PyThreadStateImpl *)tstate)->tlbc_index) {
+ _PyFrame_SetStackPointer(frame, stack_pointer);
+ _Py_CODEUNIT *bytecode =
+ _PyEval_GetExecutableCode(tstate, _PyFrame_GetCode(frame));
+ stack_pointer = _PyFrame_GetStackPointer(frame);
+ if (bytecode == NULL) goto error;
+ _PyFrame_SetStackPointer(frame, stack_pointer);
+ int off = this_instr - _PyFrame_GetBytecode(frame);
+ stack_pointer = _PyFrame_GetStackPointer(frame);
+ frame->tlbc_index = ((_PyThreadStateImpl *)tstate)->tlbc_index;
+ frame->instr_ptr = bytecode + off;
+ // Make sure this_instr gets reset correctly for any uops that
+ // follow
+ next_instr = frame->instr_ptr;
+ DISPATCH();
+ }
+ #endif
+ }
// _MAYBE_INSTRUMENT
{
if (tstate->tracing == 0) {
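
Stripped of the stack-pointer save/restore bookkeeping, the new _LOAD_BYTECODE block added above boils down to the following re-sync of the frame with the current thread's bytecode copy (condensed from the hunk itself):

    #ifdef Py_GIL_DISABLED
    /* If this thread's thread-local bytecode index no longer matches the
     * frame, fetch the thread's copy, re-point instr_ptr at the same offset,
     * and re-dispatch so this_instr is recomputed against the new copy. */
    if (frame->tlbc_index != ((_PyThreadStateImpl *)tstate)->tlbc_index) {
        _Py_CODEUNIT *bytecode =
            _PyEval_GetExecutableCode(tstate, _PyFrame_GetCode(frame));
        if (bytecode == NULL) {
            goto error;
        }
        int off = this_instr - _PyFrame_GetBytecode(frame);
        frame->tlbc_index = ((_PyThreadStateImpl *)tstate)->tlbc_index;
        frame->instr_ptr = bytecode + off;
        next_instr = frame->instr_ptr;
        DISPATCH();
    }
    #endif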
@@ -6646,9 +6664,7 @@
cond = stack_pointer[-1];
assert(PyStackRef_BoolCheck(cond));
int flag = PyStackRef_Is(cond, PyStackRef_False);
- #if ENABLE_SPECIALIZATION
- this_instr[1].cache = (this_instr[1].cache << 1) | flag;
- #endif
+ RECORD_BRANCH_TAKEN(this_instr[1].cache, flag);
JUMPBY(oparg * flag);
stack_pointer += -1;
assert(WITHIN_STACK_BOUNDS());
@@ -6680,9 +6696,7 @@
cond = b;
assert(PyStackRef_BoolCheck(cond));
int flag = PyStackRef_Is(cond, PyStackRef_True);
- #if ENABLE_SPECIALIZATION
- this_instr[1].cache = (this_instr[1].cache << 1) | flag;
- #endif
+ RECORD_BRANCH_TAKEN(this_instr[1].cache, flag);
JUMPBY(oparg * flag);
}
stack_pointer += -1;
@@ -6715,9 +6729,7 @@
cond = b;
assert(PyStackRef_BoolCheck(cond));
int flag = PyStackRef_Is(cond, PyStackRef_False);
- #if ENABLE_SPECIALIZATION
- this_instr[1].cache = (this_instr[1].cache << 1) | flag;
- #endif
+ RECORD_BRANCH_TAKEN(this_instr[1].cache, flag);
JUMPBY(oparg * flag);
}
stack_pointer += -1;
@@ -6735,9 +6747,7 @@
cond = stack_pointer[-1];
assert(PyStackRef_BoolCheck(cond));
int flag = PyStackRef_Is(cond, PyStackRef_True);
- #if ENABLE_SPECIALIZATION
- this_instr[1].cache = (this_instr[1].cache << 1) | flag;
- #endif
+ RECORD_BRANCH_TAKEN(this_instr[1].cache, flag);
JUMPBY(oparg * flag);
stack_pointer += -1;
assert(WITHIN_STACK_BOUNDS());
@@ -6832,7 +6842,11 @@
if (oparg) {
PyObject *lasti = PyStackRef_AsPyObjectBorrow(values[0]);
if (PyLong_Check(lasti)) {
- frame->instr_ptr = _PyCode_CODE(_PyFrame_GetCode(frame)) + PyLong_AsLong(lasti);
+ stack_pointer += -1;
+ assert(WITHIN_STACK_BOUNDS());
+ _PyFrame_SetStackPointer(frame, stack_pointer);
+ frame->instr_ptr = _PyFrame_GetBytecode(frame) + PyLong_AsLong(lasti);
+ stack_pointer = _PyFrame_GetStackPointer(frame);
assert(!_PyErr_Occurred(tstate));
}
else {
@@ -6844,6 +6858,8 @@
Py_DECREF(exc);
goto error;
}
+ stack_pointer += 1;
+ assert(WITHIN_STACK_BOUNDS());
}
assert(exc && PyExceptionInstance_Check(exc));
stack_pointer += -1;
@@ -6871,6 +6887,28 @@
PREDICTED(RESUME);
_Py_CODEUNIT* const this_instr = next_instr - 1;
(void)this_instr;
+ // _LOAD_BYTECODE
+ {
+ #ifdef Py_GIL_DISABLED
+ if (frame->tlbc_index !=
+ ((_PyThreadStateImpl *)tstate)->tlbc_index) {
+ _PyFrame_SetStackPointer(frame, stack_pointer);
+ _Py_CODEUNIT *bytecode =
+ _PyEval_GetExecutableCode(tstate, _PyFrame_GetCode(frame));
+ stack_pointer = _PyFrame_GetStackPointer(frame);
+ if (bytecode == NULL) goto error;
+ _PyFrame_SetStackPointer(frame, stack_pointer);
+ int off = this_instr - _PyFrame_GetBytecode(frame);
+ stack_pointer = _PyFrame_GetStackPointer(frame);
+ frame->tlbc_index = ((_PyThreadStateImpl *)tstate)->tlbc_index;
+ frame->instr_ptr = bytecode + off;
+ // Make sure this_instr gets reset correctly for any uops that
+ // follow
+ next_instr = frame->instr_ptr;
+ DISPATCH();
+ }
+ #endif
+ }
// _MAYBE_INSTRUMENT
{
if (tstate->tracing == 0) {
@@ -6890,11 +6928,11 @@
}
// _QUICKEN_RESUME
{
- #if ENABLE_SPECIALIZATION
+ #if ENABLE_SPECIALIZATION_FT
if (tstate->tracing == 0 && this_instr->op.code == RESUME) {
FT_ATOMIC_STORE_UINT8_RELAXED(this_instr->op.code, RESUME_CHECK);
}
- #endif /* ENABLE_SPECIALIZATION */
+ #endif /* ENABLE_SPECIALIZATION_FT */
}
// _CHECK_PERIODIC_IF_NOT_YIELD_FROM
{
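
The _QUICKEN_RESUME hunk above also moves to the _FT guard while keeping the opcode rewrite behind FT_ATOMIC_STORE_UINT8_RELAXED. A hypothetical sketch of why that wrapper exists; the definitions below are assumptions rather than CPython's actual ones:

    /* Sketch: on free-threaded builds other threads may read this opcode
     * byte concurrently, so quickening uses a relaxed atomic store; with
     * the GIL it degrades to a plain assignment. */
    #ifdef Py_GIL_DISABLED
    #  define FT_ATOMIC_STORE_UINT8_RELAXED(obj, value) \
           _Py_atomic_store_uint8_relaxed(&(obj), (value))
    #else
    #  define FT_ATOMIC_STORE_UINT8_RELAXED(obj, value) (obj) = (value)
    #endif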
@@ -6925,6 +6963,10 @@
uintptr_t version = FT_ATOMIC_LOAD_UINTPTR_ACQUIRE(_PyFrame_GetCode(frame)->_co_instrumentation_version);
assert((version & _PY_EVAL_EVENTS_MASK) == 0);
DEOPT_IF(eval_breaker != version, RESUME);
+ #ifdef Py_GIL_DISABLED
+ DEOPT_IF(frame->tlbc_index !=
+ ((_PyThreadStateImpl *)tstate)->tlbc_index, RESUME);
+ #endif
DISPATCH();
}