summary | refs | log | tree | commit | diff | stats
path: root/Python/generated_cases.c.h
diff options
context:
space:
mode:
author    Mark Shannon <mark@hotpy.org> 2023-11-01 10:53:27 (GMT)
committer GitHub <noreply@github.com> 2023-11-01 10:53:27 (GMT)
commit b14e882428ceda1e5852a1c22772e7f88927bded (patch)
tree   424ee1d0b844ab753208a480dd6386cc4ced9578 /Python/generated_cases.c.h
parent eaf67e37a2da28c1241362e3b4ff1202945c83c5 (diff)
download cpython-b14e882428ceda1e5852a1c22772e7f88927bded.zip
cpython-b14e882428ceda1e5852a1c22772e7f88927bded.tar.gz
cpython-b14e882428ceda1e5852a1c22772e7f88927bded.tar.bz2
GH-111485: Use micro-ops to split specialization code from base action (GH-111561)
Diffstat (limited to 'Python/generated_cases.c.h')
-rw-r--r-- Python/generated_cases.c.h | 792
1 file changed, 445 insertions, 347 deletions
diff --git a/Python/generated_cases.c.h b/Python/generated_cases.c.h
index 8b6ebe4..1c05e2d 100644
--- a/Python/generated_cases.c.h
+++ b/Python/generated_cases.c.h
@@ -332,20 +332,28 @@
static_assert(INLINE_CACHE_ENTRIES_TO_BOOL == 3, "incorrect cache size");
PyObject *value;
PyObject *res;
+ // _SPECIALIZE_TO_BOOL
value = stack_pointer[-1];
- #if ENABLE_SPECIALIZATION
- if (ADAPTIVE_COUNTER_IS_ZERO(this_instr[1].cache)) {
- next_instr = this_instr;
- _Py_Specialize_ToBool(value, next_instr);
- DISPATCH_SAME_OPARG();
+ {
+ uint16_t counter = read_u16(&this_instr[1].cache);
+ TIER_ONE_ONLY
+ #if ENABLE_SPECIALIZATION
+ if (ADAPTIVE_COUNTER_IS_ZERO(counter)) {
+ next_instr = this_instr;
+ _Py_Specialize_ToBool(value, next_instr);
+ DISPATCH_SAME_OPARG();
+ }
+ STAT_INC(TO_BOOL, deferred);
+ DECREMENT_ADAPTIVE_COUNTER(this_instr[1].cache);
+ #endif /* ENABLE_SPECIALIZATION */
+ }
+ // _TO_BOOL
+ {
+ int err = PyObject_IsTrue(value);
+ Py_DECREF(value);
+ if (err < 0) goto pop_1_error;
+ res = err ? Py_True : Py_False;
}
- STAT_INC(TO_BOOL, deferred);
- DECREMENT_ADAPTIVE_COUNTER(this_instr[1].cache);
- #endif /* ENABLE_SPECIALIZATION */
- int err = PyObject_IsTrue(value);
- Py_DECREF(value);
- if (err < 0) goto pop_1_error;
- res = err ? Py_True : Py_False;
stack_pointer[-1] = res;
DISPATCH();
}
@@ -708,21 +716,29 @@
PyObject *sub;
PyObject *container;
PyObject *res;
+ // _SPECIALIZE_BINARY_SUBSCR
sub = stack_pointer[-1];
container = stack_pointer[-2];
- #if ENABLE_SPECIALIZATION
- if (ADAPTIVE_COUNTER_IS_ZERO(this_instr[1].cache)) {
- next_instr = this_instr;
- _Py_Specialize_BinarySubscr(container, sub, next_instr);
- DISPATCH_SAME_OPARG();
+ {
+ uint16_t counter = read_u16(&this_instr[1].cache);
+ TIER_ONE_ONLY
+ #if ENABLE_SPECIALIZATION
+ if (ADAPTIVE_COUNTER_IS_ZERO(counter)) {
+ next_instr = this_instr;
+ _Py_Specialize_BinarySubscr(container, sub, next_instr);
+ DISPATCH_SAME_OPARG();
+ }
+ STAT_INC(BINARY_SUBSCR, deferred);
+ DECREMENT_ADAPTIVE_COUNTER(this_instr[1].cache);
+ #endif /* ENABLE_SPECIALIZATION */
+ }
+ // _BINARY_SUBSCR
+ {
+ res = PyObject_GetItem(container, sub);
+ Py_DECREF(container);
+ Py_DECREF(sub);
+ if (res == NULL) goto pop_2_error;
}
- STAT_INC(BINARY_SUBSCR, deferred);
- DECREMENT_ADAPTIVE_COUNTER(this_instr[1].cache);
- #endif /* ENABLE_SPECIALIZATION */
- res = PyObject_GetItem(container, sub);
- Py_DECREF(container);
- Py_DECREF(sub);
- if (res == NULL) goto pop_2_error;
STACK_SHRINK(1);
stack_pointer[-1] = res;
DISPATCH();
@@ -962,24 +978,32 @@
PyObject *sub;
PyObject *container;
PyObject *v;
+ // _SPECIALIZE_STORE_SUBSCR
sub = stack_pointer[-1];
container = stack_pointer[-2];
+ {
+ uint16_t counter = read_u16(&this_instr[1].cache);
+ TIER_ONE_ONLY
+ #if ENABLE_SPECIALIZATION
+ if (ADAPTIVE_COUNTER_IS_ZERO(counter)) {
+ next_instr = this_instr;
+ _Py_Specialize_StoreSubscr(container, sub, next_instr);
+ DISPATCH_SAME_OPARG();
+ }
+ STAT_INC(STORE_SUBSCR, deferred);
+ DECREMENT_ADAPTIVE_COUNTER(this_instr[1].cache);
+ #endif /* ENABLE_SPECIALIZATION */
+ }
+ // _STORE_SUBSCR
v = stack_pointer[-3];
- #if ENABLE_SPECIALIZATION
- if (ADAPTIVE_COUNTER_IS_ZERO(this_instr[1].cache)) {
- next_instr = this_instr;
- _Py_Specialize_StoreSubscr(container, sub, next_instr);
- DISPATCH_SAME_OPARG();
+ {
+ /* container[sub] = v */
+ int err = PyObject_SetItem(container, sub, v);
+ Py_DECREF(v);
+ Py_DECREF(container);
+ Py_DECREF(sub);
+ if (err) goto pop_3_error;
}
- STAT_INC(STORE_SUBSCR, deferred);
- DECREMENT_ADAPTIVE_COUNTER(this_instr[1].cache);
- #endif /* ENABLE_SPECIALIZATION */
- /* container[sub] = v */
- int err = PyObject_SetItem(container, sub, v);
- Py_DECREF(v);
- Py_DECREF(container);
- Py_DECREF(sub);
- if (err) goto pop_3_error;
STACK_SHRINK(3);
DISPATCH();
}
@@ -1378,56 +1402,64 @@
PREDICTED(SEND);
_Py_CODEUNIT *this_instr = next_instr - 2;
static_assert(INLINE_CACHE_ENTRIES_SEND == 1, "incorrect cache size");
- PyObject *v;
PyObject *receiver;
+ PyObject *v;
PyObject *retval;
- v = stack_pointer[-1];
+ // _SPECIALIZE_SEND
receiver = stack_pointer[-2];
- #if ENABLE_SPECIALIZATION
- if (ADAPTIVE_COUNTER_IS_ZERO(this_instr[1].cache)) {
- next_instr = this_instr;
- _Py_Specialize_Send(receiver, next_instr);
- DISPATCH_SAME_OPARG();
- }
- STAT_INC(SEND, deferred);
- DECREMENT_ADAPTIVE_COUNTER(this_instr[1].cache);
- #endif /* ENABLE_SPECIALIZATION */
- assert(frame != &entry_frame);
- if ((tstate->interp->eval_frame == NULL) &&
- (Py_TYPE(receiver) == &PyGen_Type || Py_TYPE(receiver) == &PyCoro_Type) &&
- ((PyGenObject *)receiver)->gi_frame_state < FRAME_EXECUTING)
{
- PyGenObject *gen = (PyGenObject *)receiver;
- _PyInterpreterFrame *gen_frame = (_PyInterpreterFrame *)gen->gi_iframe;
- STACK_SHRINK(1);
- _PyFrame_StackPush(gen_frame, v);
- gen->gi_frame_state = FRAME_EXECUTING;
- gen->gi_exc_state.previous_item = tstate->exc_info;
- tstate->exc_info = &gen->gi_exc_state;
- assert(1 + INLINE_CACHE_ENTRIES_SEND == next_instr - this_instr);
- frame->return_offset = (uint16_t)(1 + INLINE_CACHE_ENTRIES_SEND + oparg);
- DISPATCH_INLINED(gen_frame);
- }
- if (Py_IsNone(v) && PyIter_Check(receiver)) {
- retval = Py_TYPE(receiver)->tp_iternext(receiver);
- }
- else {
- retval = PyObject_CallMethodOneArg(receiver, &_Py_ID(send), v);
+ uint16_t counter = read_u16(&this_instr[1].cache);
+ TIER_ONE_ONLY
+ #if ENABLE_SPECIALIZATION
+ if (ADAPTIVE_COUNTER_IS_ZERO(counter)) {
+ next_instr = this_instr;
+ _Py_Specialize_Send(receiver, next_instr);
+ DISPATCH_SAME_OPARG();
+ }
+ STAT_INC(SEND, deferred);
+ DECREMENT_ADAPTIVE_COUNTER(this_instr[1].cache);
+ #endif /* ENABLE_SPECIALIZATION */
}
- if (retval == NULL) {
- if (_PyErr_ExceptionMatches(tstate, PyExc_StopIteration)
- ) {
- monitor_raise(tstate, frame, this_instr);
+ // _SEND
+ v = stack_pointer[-1];
+ {
+ assert(frame != &entry_frame);
+ if ((tstate->interp->eval_frame == NULL) &&
+ (Py_TYPE(receiver) == &PyGen_Type || Py_TYPE(receiver) == &PyCoro_Type) &&
+ ((PyGenObject *)receiver)->gi_frame_state < FRAME_EXECUTING)
+ {
+ PyGenObject *gen = (PyGenObject *)receiver;
+ _PyInterpreterFrame *gen_frame = (_PyInterpreterFrame *)gen->gi_iframe;
+ STACK_SHRINK(1);
+ _PyFrame_StackPush(gen_frame, v);
+ gen->gi_frame_state = FRAME_EXECUTING;
+ gen->gi_exc_state.previous_item = tstate->exc_info;
+ tstate->exc_info = &gen->gi_exc_state;
+ assert(1 + INLINE_CACHE_ENTRIES_SEND == next_instr - this_instr);
+ frame->return_offset = (uint16_t)(1 + INLINE_CACHE_ENTRIES_SEND + oparg);
+ DISPATCH_INLINED(gen_frame);
}
- if (_PyGen_FetchStopIterationValue(&retval) == 0) {
- assert(retval != NULL);
- JUMPBY(oparg);
+ if (Py_IsNone(v) && PyIter_Check(receiver)) {
+ retval = Py_TYPE(receiver)->tp_iternext(receiver);
}
else {
- goto error;
+ retval = PyObject_CallMethodOneArg(receiver, &_Py_ID(send), v);
}
+ if (retval == NULL) {
+ if (_PyErr_ExceptionMatches(tstate, PyExc_StopIteration)
+ ) {
+ monitor_raise(tstate, frame, this_instr);
+ }
+ if (_PyGen_FetchStopIterationValue(&retval) == 0) {
+ assert(retval != NULL);
+ JUMPBY(oparg);
+ }
+ else {
+ goto error;
+ }
+ }
+ Py_DECREF(v);
}
- Py_DECREF(v);
stack_pointer[-1] = retval;
DISPATCH();
}
@@ -1694,20 +1726,27 @@
_Py_CODEUNIT *this_instr = next_instr - 2;
static_assert(INLINE_CACHE_ENTRIES_UNPACK_SEQUENCE == 1, "incorrect cache size");
PyObject *seq;
+ // _SPECIALIZE_UNPACK_SEQUENCE
seq = stack_pointer[-1];
- #if ENABLE_SPECIALIZATION
- if (ADAPTIVE_COUNTER_IS_ZERO(this_instr[1].cache)) {
- next_instr = this_instr;
- _Py_Specialize_UnpackSequence(seq, next_instr, oparg);
- DISPATCH_SAME_OPARG();
+ {
+ uint16_t counter = read_u16(&this_instr[1].cache);
+ #if ENABLE_SPECIALIZATION
+ if (ADAPTIVE_COUNTER_IS_ZERO(counter)) {
+ next_instr = this_instr;
+ _Py_Specialize_UnpackSequence(seq, next_instr, oparg);
+ DISPATCH_SAME_OPARG();
+ }
+ STAT_INC(UNPACK_SEQUENCE, deferred);
+ DECREMENT_ADAPTIVE_COUNTER(this_instr[1].cache);
+ #endif /* ENABLE_SPECIALIZATION */
+ }
+ // _UNPACK_SEQUENCE
+ {
+ PyObject **top = stack_pointer + oparg - 1;
+ int res = _PyEval_UnpackIterable(tstate, seq, oparg, -1, top);
+ Py_DECREF(seq);
+ if (res == 0) goto pop_1_error;
}
- STAT_INC(UNPACK_SEQUENCE, deferred);
- DECREMENT_ADAPTIVE_COUNTER(this_instr[1].cache);
- #endif /* ENABLE_SPECIALIZATION */
- PyObject **top = stack_pointer + oparg - 1;
- int res = _PyEval_UnpackIterable(tstate, seq, oparg, -1, top);
- Py_DECREF(seq);
- if (res == 0) goto pop_1_error;
STACK_SHRINK(1);
STACK_GROW(oparg);
DISPATCH();
@@ -1799,23 +1838,31 @@
static_assert(INLINE_CACHE_ENTRIES_STORE_ATTR == 4, "incorrect cache size");
PyObject *owner;
PyObject *v;
+ // _SPECIALIZE_STORE_ATTR
owner = stack_pointer[-1];
+ {
+ uint16_t counter = read_u16(&this_instr[1].cache);
+ TIER_ONE_ONLY
+ #if ENABLE_SPECIALIZATION
+ if (ADAPTIVE_COUNTER_IS_ZERO(counter)) {
+ PyObject *name = GETITEM(FRAME_CO_NAMES, oparg);
+ next_instr = this_instr;
+ _Py_Specialize_StoreAttr(owner, next_instr, name);
+ DISPATCH_SAME_OPARG();
+ }
+ STAT_INC(STORE_ATTR, deferred);
+ DECREMENT_ADAPTIVE_COUNTER(this_instr[1].cache);
+ #endif /* ENABLE_SPECIALIZATION */
+ }
+ // _STORE_ATTR
v = stack_pointer[-2];
- #if ENABLE_SPECIALIZATION
- if (ADAPTIVE_COUNTER_IS_ZERO(this_instr[1].cache)) {
+ {
PyObject *name = GETITEM(FRAME_CO_NAMES, oparg);
- next_instr = this_instr;
- _Py_Specialize_StoreAttr(owner, next_instr, name);
- DISPATCH_SAME_OPARG();
+ int err = PyObject_SetAttr(owner, name, v);
+ Py_DECREF(v);
+ Py_DECREF(owner);
+ if (err) goto pop_2_error;
}
- STAT_INC(STORE_ATTR, deferred);
- DECREMENT_ADAPTIVE_COUNTER(this_instr[1].cache);
- #endif /* ENABLE_SPECIALIZATION */
- PyObject *name = GETITEM(FRAME_CO_NAMES, oparg);
- int err = PyObject_SetAttr(owner, name, v);
- Py_DECREF(v);
- Py_DECREF(owner);
- if (err) goto pop_2_error;
STACK_SHRINK(2);
DISPATCH();
}
@@ -1968,51 +2015,58 @@
static_assert(INLINE_CACHE_ENTRIES_LOAD_GLOBAL == 4, "incorrect cache size");
PyObject *res;
PyObject *null = NULL;
- #if ENABLE_SPECIALIZATION
- if (ADAPTIVE_COUNTER_IS_ZERO(this_instr[1].cache)) {
- PyObject *name = GETITEM(FRAME_CO_NAMES, oparg>>1);
- next_instr = this_instr;
- _Py_Specialize_LoadGlobal(GLOBALS(), BUILTINS(), next_instr, name);
- DISPATCH_SAME_OPARG();
- }
- STAT_INC(LOAD_GLOBAL, deferred);
- DECREMENT_ADAPTIVE_COUNTER(this_instr[1].cache);
- #endif /* ENABLE_SPECIALIZATION */
- PyObject *name = GETITEM(FRAME_CO_NAMES, oparg>>1);
- if (PyDict_CheckExact(GLOBALS())
- && PyDict_CheckExact(BUILTINS()))
+ // _SPECIALIZE_LOAD_GLOBAL
{
- res = _PyDict_LoadGlobal((PyDictObject *)GLOBALS(),
- (PyDictObject *)BUILTINS(),
- name);
- if (res == NULL) {
- if (!_PyErr_Occurred(tstate)) {
- /* _PyDict_LoadGlobal() returns NULL without raising
- * an exception if the key doesn't exist */
- _PyEval_FormatExcCheckArg(tstate, PyExc_NameError,
- NAME_ERROR_MSG, name);
- }
- if (true) goto error;
+ uint16_t counter = read_u16(&this_instr[1].cache);
+ TIER_ONE_ONLY
+ #if ENABLE_SPECIALIZATION
+ if (ADAPTIVE_COUNTER_IS_ZERO(counter)) {
+ PyObject *name = GETITEM(FRAME_CO_NAMES, oparg>>1);
+ next_instr = this_instr;
+ _Py_Specialize_LoadGlobal(GLOBALS(), BUILTINS(), next_instr, name);
+ DISPATCH_SAME_OPARG();
}
- Py_INCREF(res);
+ STAT_INC(LOAD_GLOBAL, deferred);
+ DECREMENT_ADAPTIVE_COUNTER(this_instr[1].cache);
+ #endif /* ENABLE_SPECIALIZATION */
}
- else {
- /* Slow-path if globals or builtins is not a dict */
-
- /* namespace 1: globals */
- if (PyMapping_GetOptionalItem(GLOBALS(), name, &res) < 0) goto error;
- if (res == NULL) {
- /* namespace 2: builtins */
- if (PyMapping_GetOptionalItem(BUILTINS(), name, &res) < 0) goto error;
+ // _LOAD_GLOBAL
+ {
+ PyObject *name = GETITEM(FRAME_CO_NAMES, oparg>>1);
+ if (PyDict_CheckExact(GLOBALS())
+ && PyDict_CheckExact(BUILTINS()))
+ {
+ res = _PyDict_LoadGlobal((PyDictObject *)GLOBALS(),
+ (PyDictObject *)BUILTINS(),
+ name);
if (res == NULL) {
- _PyEval_FormatExcCheckArg(
- tstate, PyExc_NameError,
- NAME_ERROR_MSG, name);
+ if (!_PyErr_Occurred(tstate)) {
+ /* _PyDict_LoadGlobal() returns NULL without raising
+ * an exception if the key doesn't exist */
+ _PyEval_FormatExcCheckArg(tstate, PyExc_NameError,
+ NAME_ERROR_MSG, name);
+ }
if (true) goto error;
}
+ Py_INCREF(res);
+ }
+ else {
+ /* Slow-path if globals or builtins is not a dict */
+ /* namespace 1: globals */
+ if (PyMapping_GetOptionalItem(GLOBALS(), name, &res) < 0) goto error;
+ if (res == NULL) {
+ /* namespace 2: builtins */
+ if (PyMapping_GetOptionalItem(BUILTINS(), name, &res) < 0) goto error;
+ if (res == NULL) {
+ _PyEval_FormatExcCheckArg(
+ tstate, PyExc_NameError,
+ NAME_ERROR_MSG, name);
+ if (true) goto error;
+ }
+ }
}
+ null = NULL;
}
- null = NULL;
STACK_GROW(1);
STACK_GROW(((oparg & 1) ? 1 : 0));
stack_pointer[-1 - (oparg & 1 ? 1 : 0)] = res;
@@ -2498,63 +2552,69 @@
PREDICTED(LOAD_SUPER_ATTR);
_Py_CODEUNIT *this_instr = next_instr - 2;
static_assert(INLINE_CACHE_ENTRIES_LOAD_SUPER_ATTR == 1, "incorrect cache size");
- PyObject *self;
PyObject *class;
PyObject *global_super;
+ PyObject *self;
PyObject *attr;
PyObject *null = NULL;
- self = stack_pointer[-1];
+ // _SPECIALIZE_LOAD_SUPER_ATTR
class = stack_pointer[-2];
global_super = stack_pointer[-3];
- TIER_ONE_ONLY
- PyObject *name = GETITEM(FRAME_CO_NAMES, oparg >> 2);
- #if ENABLE_SPECIALIZATION
- int load_method = oparg & 1;
- if (ADAPTIVE_COUNTER_IS_ZERO(this_instr[1].cache)) {
- next_instr = this_instr;
- _Py_Specialize_LoadSuperAttr(global_super, class, next_instr, load_method);
- DISPATCH_SAME_OPARG();
- }
- STAT_INC(LOAD_SUPER_ATTR, deferred);
- DECREMENT_ADAPTIVE_COUNTER(this_instr[1].cache);
- #endif /* ENABLE_SPECIALIZATION */
-
- if (opcode == INSTRUMENTED_LOAD_SUPER_ATTR) {
- PyObject *arg = oparg & 2 ? class : &_PyInstrumentation_MISSING;
- int err = _Py_call_instrumentation_2args(
- tstate, PY_MONITORING_EVENT_CALL,
- frame, this_instr, global_super, arg);
- if (err) goto pop_3_error;
- }
-
- // we make no attempt to optimize here; specializations should
- // handle any case whose performance we care about
- PyObject *stack[] = {class, self};
- PyObject *super = PyObject_Vectorcall(global_super, stack, oparg & 2, NULL);
- if (opcode == INSTRUMENTED_LOAD_SUPER_ATTR) {
- PyObject *arg = oparg & 2 ? class : &_PyInstrumentation_MISSING;
- if (super == NULL) {
- _Py_call_instrumentation_exc2(
- tstate, PY_MONITORING_EVENT_C_RAISE,
- frame, this_instr, global_super, arg);
+ {
+ uint16_t counter = read_u16(&this_instr[1].cache);
+ TIER_ONE_ONLY
+ #if ENABLE_SPECIALIZATION
+ int load_method = oparg & 1;
+ if (ADAPTIVE_COUNTER_IS_ZERO(counter)) {
+ next_instr = this_instr;
+ _Py_Specialize_LoadSuperAttr(global_super, class, next_instr, load_method);
+ DISPATCH_SAME_OPARG();
}
- else {
+ STAT_INC(LOAD_SUPER_ATTR, deferred);
+ DECREMENT_ADAPTIVE_COUNTER(this_instr[1].cache);
+ #endif /* ENABLE_SPECIALIZATION */
+ }
+ // _LOAD_SUPER_ATTR
+ self = stack_pointer[-1];
+ {
+ TIER_ONE_ONLY
+ if (opcode == INSTRUMENTED_LOAD_SUPER_ATTR) {
+ PyObject *arg = oparg & 2 ? class : &_PyInstrumentation_MISSING;
int err = _Py_call_instrumentation_2args(
- tstate, PY_MONITORING_EVENT_C_RETURN,
- frame, this_instr, global_super, arg);
- if (err < 0) {
- Py_CLEAR(super);
+ tstate, PY_MONITORING_EVENT_CALL,
+ frame, this_instr, global_super, arg);
+ if (err) goto pop_3_error;
+ }
+ // we make no attempt to optimize here; specializations should
+ // handle any case whose performance we care about
+ PyObject *stack[] = {class, self};
+ PyObject *super = PyObject_Vectorcall(global_super, stack, oparg & 2, NULL);
+ if (opcode == INSTRUMENTED_LOAD_SUPER_ATTR) {
+ PyObject *arg = oparg & 2 ? class : &_PyInstrumentation_MISSING;
+ if (super == NULL) {
+ _Py_call_instrumentation_exc2(
+ tstate, PY_MONITORING_EVENT_C_RAISE,
+ frame, this_instr, global_super, arg);
+ }
+ else {
+ int err = _Py_call_instrumentation_2args(
+ tstate, PY_MONITORING_EVENT_C_RETURN,
+ frame, this_instr, global_super, arg);
+ if (err < 0) {
+ Py_CLEAR(super);
+ }
}
}
+ Py_DECREF(global_super);
+ Py_DECREF(class);
+ Py_DECREF(self);
+ if (super == NULL) goto pop_3_error;
+ PyObject *name = GETITEM(FRAME_CO_NAMES, oparg >> 2);
+ attr = PyObject_GetAttr(super, name);
+ Py_DECREF(super);
+ if (attr == NULL) goto pop_3_error;
+ null = NULL;
}
- Py_DECREF(global_super);
- Py_DECREF(class);
- Py_DECREF(self);
- if (super == NULL) goto pop_3_error;
- attr = PyObject_GetAttr(super, name);
- Py_DECREF(super);
- if (attr == NULL) goto pop_3_error;
- null = NULL;
STACK_SHRINK(2);
STACK_GROW(((oparg & 1) ? 1 : 0));
stack_pointer[-1 - (oparg & 1 ? 1 : 0)] = attr;
@@ -2637,49 +2697,55 @@
PyObject *owner;
PyObject *attr;
PyObject *self_or_null = NULL;
+ // _SPECIALIZE_LOAD_ATTR
owner = stack_pointer[-1];
- #if ENABLE_SPECIALIZATION
- if (ADAPTIVE_COUNTER_IS_ZERO(this_instr[1].cache)) {
- PyObject *name = GETITEM(FRAME_CO_NAMES, oparg>>1);
- next_instr = this_instr;
- _Py_Specialize_LoadAttr(owner, next_instr, name);
- DISPATCH_SAME_OPARG();
+ {
+ uint16_t counter = read_u16(&this_instr[1].cache);
+ TIER_ONE_ONLY
+ #if ENABLE_SPECIALIZATION
+ if (ADAPTIVE_COUNTER_IS_ZERO(counter)) {
+ PyObject *name = GETITEM(FRAME_CO_NAMES, oparg>>1);
+ next_instr = this_instr;
+ _Py_Specialize_LoadAttr(owner, next_instr, name);
+ DISPATCH_SAME_OPARG();
+ }
+ STAT_INC(LOAD_ATTR, deferred);
+ DECREMENT_ADAPTIVE_COUNTER(this_instr[1].cache);
+ #endif /* ENABLE_SPECIALIZATION */
}
- STAT_INC(LOAD_ATTR, deferred);
- DECREMENT_ADAPTIVE_COUNTER(this_instr[1].cache);
- #endif /* ENABLE_SPECIALIZATION */
- PyObject *name = GETITEM(FRAME_CO_NAMES, oparg >> 1);
- if (oparg & 1) {
- /* Designed to work in tandem with CALL, pushes two values. */
- attr = NULL;
- if (_PyObject_GetMethod(owner, name, &attr)) {
- /* We can bypass temporary bound method object.
- meth is unbound method and obj is self.
-
- meth | self | arg1 | ... | argN
- */
- assert(attr != NULL); // No errors on this branch
- self_or_null = owner; // Transfer ownership
+ // _LOAD_ATTR
+ {
+ PyObject *name = GETITEM(FRAME_CO_NAMES, oparg >> 1);
+ if (oparg & 1) {
+ /* Designed to work in tandem with CALL, pushes two values. */
+ attr = NULL;
+ if (_PyObject_GetMethod(owner, name, &attr)) {
+ /* We can bypass temporary bound method object.
+ meth is unbound method and obj is self.
+ meth | self | arg1 | ... | argN
+ */
+ assert(attr != NULL); // No errors on this branch
+ self_or_null = owner; // Transfer ownership
+ }
+ else {
+ /* meth is not an unbound method (but a regular attr, or
+ something was returned by a descriptor protocol). Set
+ the second element of the stack to NULL, to signal
+ CALL that it's not a method call.
+ NULL | meth | arg1 | ... | argN
+ */
+ Py_DECREF(owner);
+ if (attr == NULL) goto pop_1_error;
+ self_or_null = NULL;
+ }
}
else {
- /* meth is not an unbound method (but a regular attr, or
- something was returned by a descriptor protocol). Set
- the second element of the stack to NULL, to signal
- CALL that it's not a method call.
-
- NULL | meth | arg1 | ... | argN
- */
+ /* Classic, pushes one value. */
+ attr = PyObject_GetAttr(owner, name);
Py_DECREF(owner);
if (attr == NULL) goto pop_1_error;
- self_or_null = NULL;
}
}
- else {
- /* Classic, pushes one value. */
- attr = PyObject_GetAttr(owner, name);
- Py_DECREF(owner);
- if (attr == NULL) goto pop_1_error;
- }
STACK_GROW(((oparg & 1) ? 1 : 0));
stack_pointer[-1 - (oparg & 1 ? 1 : 0)] = attr;
if (oparg & 1) { stack_pointer[-(oparg & 1 ? 1 : 0)] = self_or_null; }
@@ -3076,27 +3142,35 @@
PyObject *right;
PyObject *left;
PyObject *res;
+ // _SPECIALIZE_COMPARE_OP
right = stack_pointer[-1];
left = stack_pointer[-2];
- #if ENABLE_SPECIALIZATION
- if (ADAPTIVE_COUNTER_IS_ZERO(this_instr[1].cache)) {
- next_instr = this_instr;
- _Py_Specialize_CompareOp(left, right, next_instr, oparg);
- DISPATCH_SAME_OPARG();
+ {
+ uint16_t counter = read_u16(&this_instr[1].cache);
+ TIER_ONE_ONLY
+ #if ENABLE_SPECIALIZATION
+ if (ADAPTIVE_COUNTER_IS_ZERO(counter)) {
+ next_instr = this_instr;
+ _Py_Specialize_CompareOp(left, right, next_instr, oparg);
+ DISPATCH_SAME_OPARG();
+ }
+ STAT_INC(COMPARE_OP, deferred);
+ DECREMENT_ADAPTIVE_COUNTER(this_instr[1].cache);
+ #endif /* ENABLE_SPECIALIZATION */
}
- STAT_INC(COMPARE_OP, deferred);
- DECREMENT_ADAPTIVE_COUNTER(this_instr[1].cache);
- #endif /* ENABLE_SPECIALIZATION */
- assert((oparg >> 5) <= Py_GE);
- res = PyObject_RichCompare(left, right, oparg >> 5);
- Py_DECREF(left);
- Py_DECREF(right);
- if (res == NULL) goto pop_2_error;
- if (oparg & 16) {
- int res_bool = PyObject_IsTrue(res);
- Py_DECREF(res);
- if (res_bool < 0) goto pop_2_error;
- res = res_bool ? Py_True : Py_False;
+ // _COMPARE_OP
+ {
+ assert((oparg >> 5) <= Py_GE);
+ res = PyObject_RichCompare(left, right, oparg >> 5);
+ Py_DECREF(left);
+ Py_DECREF(right);
+ if (res == NULL) goto pop_2_error;
+ if (oparg & 16) {
+ int res_bool = PyObject_IsTrue(res);
+ Py_DECREF(res);
+ if (res_bool < 0) goto pop_2_error;
+ res = res_bool ? Py_True : Py_False;
+ }
}
STACK_SHRINK(1);
stack_pointer[-1] = res;
@@ -3630,36 +3704,44 @@
static_assert(INLINE_CACHE_ENTRIES_FOR_ITER == 1, "incorrect cache size");
PyObject *iter;
PyObject *next;
+ // _SPECIALIZE_FOR_ITER
iter = stack_pointer[-1];
- #if ENABLE_SPECIALIZATION
- if (ADAPTIVE_COUNTER_IS_ZERO(this_instr[1].cache)) {
- next_instr = this_instr;
- _Py_Specialize_ForIter(iter, next_instr, oparg);
- DISPATCH_SAME_OPARG();
+ {
+ uint16_t counter = read_u16(&this_instr[1].cache);
+ TIER_ONE_ONLY
+ #if ENABLE_SPECIALIZATION
+ if (ADAPTIVE_COUNTER_IS_ZERO(counter)) {
+ next_instr = this_instr;
+ _Py_Specialize_ForIter(iter, next_instr, oparg);
+ DISPATCH_SAME_OPARG();
+ }
+ STAT_INC(FOR_ITER, deferred);
+ DECREMENT_ADAPTIVE_COUNTER(this_instr[1].cache);
+ #endif /* ENABLE_SPECIALIZATION */
}
- STAT_INC(FOR_ITER, deferred);
- DECREMENT_ADAPTIVE_COUNTER(this_instr[1].cache);
- #endif /* ENABLE_SPECIALIZATION */
- /* before: [iter]; after: [iter, iter()] *or* [] (and jump over END_FOR.) */
- next = (*Py_TYPE(iter)->tp_iternext)(iter);
- if (next == NULL) {
- if (_PyErr_Occurred(tstate)) {
- if (!_PyErr_ExceptionMatches(tstate, PyExc_StopIteration)) {
- goto error;
+ // _FOR_ITER
+ {
+ /* before: [iter]; after: [iter, iter()] *or* [] (and jump over END_FOR.) */
+ next = (*Py_TYPE(iter)->tp_iternext)(iter);
+ if (next == NULL) {
+ if (_PyErr_Occurred(tstate)) {
+ if (!_PyErr_ExceptionMatches(tstate, PyExc_StopIteration)) {
+ goto error;
+ }
+ monitor_raise(tstate, frame, this_instr);
+ _PyErr_Clear(tstate);
}
- monitor_raise(tstate, frame, this_instr);
- _PyErr_Clear(tstate);
+ /* iterator ended normally */
+ assert(next_instr[oparg].op.code == END_FOR ||
+ next_instr[oparg].op.code == INSTRUMENTED_END_FOR);
+ Py_DECREF(iter);
+ STACK_SHRINK(1);
+ /* Jump forward oparg, then skip following END_FOR instruction */
+ JUMPBY(oparg + 1);
+ DISPATCH();
}
- /* iterator ended normally */
- assert(next_instr[oparg].op.code == END_FOR ||
- next_instr[oparg].op.code == INSTRUMENTED_END_FOR);
- Py_DECREF(iter);
- STACK_SHRINK(1);
- /* Jump forward oparg, then skip following END_FOR instruction */
- JUMPBY(oparg + 1);
- DISPATCH();
+ // Common case: no jump, leave it to the code generator
}
- // Common case: no jump, leave it to the code generator
STACK_GROW(1);
stack_pointer[-1] = next;
DISPATCH();
@@ -4211,84 +4293,92 @@
PyObject *self_or_null;
PyObject *callable;
PyObject *res;
+ // _SPECIALIZE_CALL
args = stack_pointer - oparg;
self_or_null = stack_pointer[-1 - oparg];
callable = stack_pointer[-2 - oparg];
- // oparg counts all of the args, but *not* self:
- int total_args = oparg;
- if (self_or_null != NULL) {
- args--;
- total_args++;
- }
- #if ENABLE_SPECIALIZATION
- if (ADAPTIVE_COUNTER_IS_ZERO(this_instr[1].cache)) {
- next_instr = this_instr;
- _Py_Specialize_Call(callable, next_instr, total_args);
- DISPATCH_SAME_OPARG();
- }
- STAT_INC(CALL, deferred);
- DECREMENT_ADAPTIVE_COUNTER(this_instr[1].cache);
- #endif /* ENABLE_SPECIALIZATION */
- if (self_or_null == NULL && Py_TYPE(callable) == &PyMethod_Type) {
- args--;
- total_args++;
- PyObject *self = ((PyMethodObject *)callable)->im_self;
- args[0] = Py_NewRef(self);
- PyObject *method = ((PyMethodObject *)callable)->im_func;
- args[-1] = Py_NewRef(method);
- Py_DECREF(callable);
- callable = method;
- }
- // Check if the call can be inlined or not
- if (Py_TYPE(callable) == &PyFunction_Type &&
- tstate->interp->eval_frame == NULL &&
- ((PyFunctionObject *)callable)->vectorcall == _PyFunction_Vectorcall)
{
- int code_flags = ((PyCodeObject*)PyFunction_GET_CODE(callable))->co_flags;
- PyObject *locals = code_flags & CO_OPTIMIZED ? NULL : Py_NewRef(PyFunction_GET_GLOBALS(callable));
- _PyInterpreterFrame *new_frame = _PyEvalFramePushAndInit(
- tstate, (PyFunctionObject *)callable, locals,
- args, total_args, NULL
- );
- // Manipulate stack directly since we leave using DISPATCH_INLINED().
- STACK_SHRINK(oparg + 2);
- // The frame has stolen all the arguments from the stack,
- // so there is no need to clean them up.
- if (new_frame == NULL) {
- goto error;
+ uint16_t counter = read_u16(&this_instr[1].cache);
+ TIER_ONE_ONLY
+ #if ENABLE_SPECIALIZATION
+ if (ADAPTIVE_COUNTER_IS_ZERO(counter)) {
+ next_instr = this_instr;
+ _Py_Specialize_Call(callable, next_instr, oparg + (self_or_null != NULL));
+ DISPATCH_SAME_OPARG();
}
- assert(1 + INLINE_CACHE_ENTRIES_CALL == next_instr - this_instr);
- frame->return_offset = 1 + INLINE_CACHE_ENTRIES_CALL;
- DISPATCH_INLINED(new_frame);
+ STAT_INC(CALL, deferred);
+ DECREMENT_ADAPTIVE_COUNTER(this_instr[1].cache);
+ #endif /* ENABLE_SPECIALIZATION */
}
- /* Callable is not a normal Python function */
- res = PyObject_Vectorcall(
- callable, args,
- total_args | PY_VECTORCALL_ARGUMENTS_OFFSET,
- NULL);
- if (opcode == INSTRUMENTED_CALL) {
- PyObject *arg = total_args == 0 ?
- &_PyInstrumentation_MISSING : args[0];
- if (res == NULL) {
- _Py_call_instrumentation_exc2(
- tstate, PY_MONITORING_EVENT_C_RAISE,
- frame, this_instr, callable, arg);
+ // _CALL
+ {
+ // oparg counts all of the args, but *not* self:
+ int total_args = oparg;
+ if (self_or_null != NULL) {
+ args--;
+ total_args++;
}
- else {
- int err = _Py_call_instrumentation_2args(
- tstate, PY_MONITORING_EVENT_C_RETURN,
- frame, this_instr, callable, arg);
- if (err < 0) {
- Py_CLEAR(res);
+ else if (Py_TYPE(callable) == &PyMethod_Type) {
+ args--;
+ total_args++;
+ PyObject *self = ((PyMethodObject *)callable)->im_self;
+ args[0] = Py_NewRef(self);
+ PyObject *method = ((PyMethodObject *)callable)->im_func;
+ args[-1] = Py_NewRef(method);
+ Py_DECREF(callable);
+ callable = method;
+ }
+ // Check if the call can be inlined or not
+ if (Py_TYPE(callable) == &PyFunction_Type &&
+ tstate->interp->eval_frame == NULL &&
+ ((PyFunctionObject *)callable)->vectorcall == _PyFunction_Vectorcall)
+ {
+ int code_flags = ((PyCodeObject*)PyFunction_GET_CODE(callable))->co_flags;
+ PyObject *locals = code_flags & CO_OPTIMIZED ? NULL : Py_NewRef(PyFunction_GET_GLOBALS(callable));
+ _PyInterpreterFrame *new_frame = _PyEvalFramePushAndInit(
+ tstate, (PyFunctionObject *)callable, locals,
+ args, total_args, NULL
+ );
+ // Manipulate stack directly since we leave using DISPATCH_INLINED().
+ STACK_SHRINK(oparg + 2);
+ // The frame has stolen all the arguments from the stack,
+ // so there is no need to clean them up.
+ if (new_frame == NULL) {
+ goto error;
}
+ assert(1 + INLINE_CACHE_ENTRIES_CALL == next_instr - this_instr);
+ frame->return_offset = 1 + INLINE_CACHE_ENTRIES_CALL;
+ DISPATCH_INLINED(new_frame);
}
+ /* Callable is not a normal Python function */
+ res = PyObject_Vectorcall(
+ callable, args,
+ total_args | PY_VECTORCALL_ARGUMENTS_OFFSET,
+ NULL);
+ if (opcode == INSTRUMENTED_CALL) {
+ PyObject *arg = total_args == 0 ?
+ &_PyInstrumentation_MISSING : args[0];
+ if (res == NULL) {
+ _Py_call_instrumentation_exc2(
+ tstate, PY_MONITORING_EVENT_C_RAISE,
+ frame, this_instr, callable, arg);
+ }
+ else {
+ int err = _Py_call_instrumentation_2args(
+ tstate, PY_MONITORING_EVENT_C_RETURN,
+ frame, this_instr, callable, arg);
+ if (err < 0) {
+ Py_CLEAR(res);
+ }
+ }
+ }
+ assert((res != NULL) ^ (_PyErr_Occurred(tstate) != NULL));
+ Py_DECREF(callable);
+ for (int i = 0; i < total_args; i++) {
+ Py_DECREF(args[i]);
+ }
+ if (res == NULL) { STACK_SHRINK(oparg); goto pop_2_error; }
}
- assert((res != NULL) ^ (_PyErr_Occurred(tstate) != NULL));
- Py_DECREF(callable);
- for (int i = 0; i < total_args; i++) {
- Py_DECREF(args[i]);
- }
- if (res == NULL) { STACK_SHRINK(oparg); goto pop_2_error; }
STACK_SHRINK(oparg);
STACK_SHRINK(1);
stack_pointer[-1] = res;
@@ -5505,24 +5595,32 @@
PyObject *rhs;
PyObject *lhs;
PyObject *res;
+ // _SPECIALIZE_BINARY_OP
rhs = stack_pointer[-1];
lhs = stack_pointer[-2];
- #if ENABLE_SPECIALIZATION
- if (ADAPTIVE_COUNTER_IS_ZERO(this_instr[1].cache)) {
- next_instr = this_instr;
- _Py_Specialize_BinaryOp(lhs, rhs, next_instr, oparg, LOCALS_ARRAY);
- DISPATCH_SAME_OPARG();
+ {
+ uint16_t counter = read_u16(&this_instr[1].cache);
+ TIER_ONE_ONLY
+ #if ENABLE_SPECIALIZATION
+ if (ADAPTIVE_COUNTER_IS_ZERO(counter)) {
+ next_instr = this_instr;
+ _Py_Specialize_BinaryOp(lhs, rhs, next_instr, oparg, LOCALS_ARRAY);
+ DISPATCH_SAME_OPARG();
+ }
+ STAT_INC(BINARY_OP, deferred);
+ DECREMENT_ADAPTIVE_COUNTER(this_instr[1].cache);
+ #endif /* ENABLE_SPECIALIZATION */
+ assert(NB_ADD <= oparg);
+ assert(oparg <= NB_INPLACE_XOR);
+ }
+ // _BINARY_OP
+ {
+ assert(_PyEval_BinaryOps[oparg]);
+ res = _PyEval_BinaryOps[oparg](lhs, rhs);
+ Py_DECREF(lhs);
+ Py_DECREF(rhs);
+ if (res == NULL) goto pop_2_error;
}
- STAT_INC(BINARY_OP, deferred);
- DECREMENT_ADAPTIVE_COUNTER(this_instr[1].cache);
- #endif /* ENABLE_SPECIALIZATION */
- assert(NB_ADD <= oparg);
- assert(oparg <= NB_INPLACE_XOR);
- assert(_PyEval_BinaryOps[oparg]);
- res = _PyEval_BinaryOps[oparg](lhs, rhs);
- Py_DECREF(lhs);
- Py_DECREF(rhs);
- if (res == NULL) goto pop_2_error;
STACK_SHRINK(1);
stack_pointer[-1] = res;
DISPATCH();