Diffstat (limited to 'Python/specialize.c')
-rw-r--r--  Python/specialize.c  172
1 file changed, 86 insertions, 86 deletions
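The diff below mechanically replaces the old accessor macros with direct field access on _Py_CODEUNIT: _Py_OPCODE(*instr) becomes instr->op.code, _Py_OPARG(*instr) becomes instr->op.arg, and _py_set_opcode(instr, OP) becomes instr->op.code = OP. As context, here is a minimal sketch of the layout those accesses assume. It only mirrors what the diff itself implies (a 16-bit code unit viewed either as a raw inline-cache entry or as an opcode/oparg byte pair), so treat the exact types, the type name, and the header location as assumptions rather than the authoritative definition.

    /* Illustrative sketch only -- not copied from CPython's headers.  The real
     * definition of _Py_CODEUNIT lives in the CPython source tree. */
    #include <stdint.h>

    typedef union {
        uint16_t cache;      /* inline-cache entries are written as whole 16-bit values */
        struct {
            uint8_t code;    /* opcode byte: read/written as instr->op.code in the diff */
            uint8_t arg;     /* oparg byte:  read as instr->op.arg in the diff */
        } op;
    } codeunit_sketch;       /* hypothetical name; the real type is _Py_CODEUNIT */

    /* With this shape, the macro-based accessors reduce to plain field access:
     *   _Py_OPCODE(*instr)         ->  instr->op.code
     *   _Py_OPARG(*instr)          ->  instr->op.arg
     *   _py_set_opcode(instr, OP)  ->  instr->op.code = OP
     */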
diff --git a/Python/specialize.c b/Python/specialize.c
index 4ede312..c9555f8 100644
--- a/Python/specialize.c
+++ b/Python/specialize.c
@@ -282,7 +282,7 @@ _PyCode_Quicken(PyCodeObject *code)
     _Py_CODEUNIT *instructions = _PyCode_CODE(code);
     for (int i = 0; i < Py_SIZE(code); i++) {
         int previous_opcode = opcode;
-        opcode = _PyOpcode_Deopt[_Py_OPCODE(instructions[i])];
+        opcode = _PyOpcode_Deopt[instructions[i].op.code];
         int caches = _PyOpcode_Caches[opcode];
         if (caches) {
             instructions[i + 1].cache = adaptive_counter_warmup();
@@ -291,31 +291,31 @@ _PyCode_Quicken(PyCodeObject *code)
         }
         switch (previous_opcode << 8 | opcode) {
             case LOAD_CONST << 8 | LOAD_FAST:
-                instructions[i - 1].opcode = LOAD_CONST__LOAD_FAST;
+                instructions[i - 1].op.code = LOAD_CONST__LOAD_FAST;
                 break;
             case LOAD_FAST << 8 | LOAD_CONST:
-                instructions[i - 1].opcode = LOAD_FAST__LOAD_CONST;
+                instructions[i - 1].op.code = LOAD_FAST__LOAD_CONST;
                 break;
             case LOAD_FAST << 8 | LOAD_FAST:
-                instructions[i - 1].opcode = LOAD_FAST__LOAD_FAST;
+                instructions[i - 1].op.code = LOAD_FAST__LOAD_FAST;
                 break;
             case STORE_FAST << 8 | LOAD_FAST:
-                instructions[i - 1].opcode = STORE_FAST__LOAD_FAST;
+                instructions[i - 1].op.code = STORE_FAST__LOAD_FAST;
                 break;
             case STORE_FAST << 8 | STORE_FAST:
-                instructions[i - 1].opcode = STORE_FAST__STORE_FAST;
+                instructions[i - 1].op.code = STORE_FAST__STORE_FAST;
                 break;
             case COMPARE_OP << 8 | POP_JUMP_IF_TRUE:
            case COMPARE_OP << 8 | POP_JUMP_IF_FALSE:
             {
-                int oparg = instructions[i - 1 - INLINE_CACHE_ENTRIES_COMPARE_OP].oparg;
+                int oparg = instructions[i - 1 - INLINE_CACHE_ENTRIES_COMPARE_OP].op.arg;
                 assert((oparg >> 4) <= Py_GE);
                 int mask = compare_masks[oparg >> 4];
                 if (opcode == POP_JUMP_IF_FALSE) {
                     mask = mask ^ 0xf;
                 }
-                instructions[i - 1 - INLINE_CACHE_ENTRIES_COMPARE_OP].opcode = COMPARE_AND_BRANCH;
-                instructions[i - 1 - INLINE_CACHE_ENTRIES_COMPARE_OP].oparg = (oparg & 0xf0) | mask;
+                instructions[i - 1 - INLINE_CACHE_ENTRIES_COMPARE_OP].op.code = COMPARE_AND_BRANCH;
+                instructions[i - 1 - INLINE_CACHE_ENTRIES_COMPARE_OP].op.arg = (oparg & 0xf0) | mask;
                 break;
             }
         }
@@ -519,7 +519,7 @@ specialize_module_load_attr(
     }
     write_u32(cache->version, keys_version);
     cache->index = (uint16_t)index;
-    _py_set_opcode(instr, LOAD_ATTR_MODULE);
+    instr->op.code = LOAD_ATTR_MODULE;
     return 0;
 }
 
@@ -674,7 +674,7 @@ specialize_dict_access(
         }
         write_u32(cache->version, type->tp_version_tag);
         cache->index = (uint16_t)index;
-        _py_set_opcode(instr, values_op);
+        instr->op.code = values_op;
     }
     else {
         PyDictObject *dict = (PyDictObject *)_PyDictOrValues_GetDict(dorv);
@@ -694,7 +694,7 @@ specialize_dict_access(
         }
         cache->index = (uint16_t)index;
         write_u32(cache->version, type->tp_version_tag);
-        _py_set_opcode(instr, hint_op);
+        instr->op.code = hint_op;
     }
     return 1;
 }
@@ -739,7 +739,7 @@ _Py_Specialize_LoadAttr(PyObject *owner, _Py_CODEUNIT *instr, PyObject *name)
             goto fail;
         case METHOD:
         {
-            int oparg = _Py_OPARG(*instr);
+            int oparg = instr->op.arg;
             if (oparg & 1) {
                 if (specialize_attr_loadmethod(owner, instr, name, descr, kind)) {
                     goto success;
@@ -775,7 +775,7 @@ _Py_Specialize_LoadAttr(PyObject *owner, _Py_CODEUNIT *instr, PyObject *name)
             write_u32(lm_cache->type_version, type->tp_version_tag);
             /* borrowed */
             write_obj(lm_cache->descr, fget);
-            _py_set_opcode(instr, LOAD_ATTR_PROPERTY);
+            instr->op.code = LOAD_ATTR_PROPERTY;
             goto success;
         }
         case OBJECT_SLOT:
@@ -799,7 +799,7 @@ _Py_Specialize_LoadAttr(PyObject *owner, _Py_CODEUNIT *instr, PyObject *name)
             assert(offset > 0);
             cache->index = (uint16_t)offset;
             write_u32(cache->version, type->tp_version_tag);
-            _py_set_opcode(instr, LOAD_ATTR_SLOT);
+            instr->op.code = LOAD_ATTR_SLOT;
             goto success;
         }
         case DUNDER_CLASS:
@@ -808,7 +808,7 @@ _Py_Specialize_LoadAttr(PyObject *owner, _Py_CODEUNIT *instr, PyObject *name)
             assert(offset == (uint16_t)offset);
             cache->index = (uint16_t)offset;
             write_u32(cache->version, type->tp_version_tag);
-            _py_set_opcode(instr, LOAD_ATTR_SLOT);
+            instr->op.code = LOAD_ATTR_SLOT;
             goto success;
         }
         case OTHER_SLOT:
@@ -836,7 +836,7 @@ _Py_Specialize_LoadAttr(PyObject *owner, _Py_CODEUNIT *instr, PyObject *name)
             /* borrowed */
             write_obj(lm_cache->descr, descr);
             write_u32(lm_cache->type_version, type->tp_version_tag);
-            _py_set_opcode(instr, LOAD_ATTR_GETATTRIBUTE_OVERRIDDEN);
+            instr->op.code = LOAD_ATTR_GETATTRIBUTE_OVERRIDDEN;
             goto success;
         }
         case BUILTIN_CLASSMETHOD:
@@ -867,7 +867,7 @@ _Py_Specialize_LoadAttr(PyObject *owner, _Py_CODEUNIT *instr, PyObject *name)
 fail:
     STAT_INC(LOAD_ATTR, failure);
     assert(!PyErr_Occurred());
-    _py_set_opcode(instr, LOAD_ATTR);
+    instr->op.code = LOAD_ATTR;
     cache->counter = adaptive_counter_backoff(cache->counter);
     return;
 success:
@@ -927,7 +927,7 @@ _Py_Specialize_StoreAttr(PyObject *owner, _Py_CODEUNIT *instr, PyObject *name)
             assert(offset > 0);
             cache->index = (uint16_t)offset;
             write_u32(cache->version, type->tp_version_tag);
-            _py_set_opcode(instr, STORE_ATTR_SLOT);
+            instr->op.code = STORE_ATTR_SLOT;
             goto success;
         }
         case DUNDER_CLASS:
@@ -963,7 +963,7 @@ _Py_Specialize_StoreAttr(PyObject *owner, _Py_CODEUNIT *instr, PyObject *name)
 fail:
     STAT_INC(STORE_ATTR, failure);
     assert(!PyErr_Occurred());
-    _py_set_opcode(instr, STORE_ATTR);
+    instr->op.code = STORE_ATTR;
     cache->counter = adaptive_counter_backoff(cache->counter);
     return;
 success:
@@ -1027,7 +1027,7 @@ specialize_class_load_attr(PyObject *owner, _Py_CODEUNIT *instr,
         case NON_DESCRIPTOR:
             write_u32(cache->type_version, ((PyTypeObject *)owner)->tp_version_tag);
             write_obj(cache->descr, descr);
-            _py_set_opcode(instr, LOAD_ATTR_CLASS);
+            instr->op.code = LOAD_ATTR_CLASS;
             return 0;
 #ifdef Py_STATS
         case ABSENT:
@@ -1069,7 +1069,7 @@ PyObject *descr, DescriptorClassification kind)
             return 0;
         }
         write_u32(cache->keys_version, keys_version);
-        _py_set_opcode(instr, LOAD_ATTR_METHOD_WITH_VALUES);
+        instr->op.code = LOAD_ATTR_METHOD_WITH_VALUES;
     }
     else {
         Py_ssize_t dictoffset = owner_cls->tp_dictoffset;
@@ -1078,7 +1078,7 @@ PyObject *descr, DescriptorClassification kind)
             return 0;
         }
         if (dictoffset == 0) {
-            _py_set_opcode(instr, LOAD_ATTR_METHOD_NO_DICT);
+            instr->op.code = LOAD_ATTR_METHOD_NO_DICT;
         }
         else {
             PyObject *dict = *(PyObject **) ((char *)owner + dictoffset);
@@ -1088,7 +1088,7 @@ PyObject *descr, DescriptorClassification kind)
             }
             assert(owner_cls->tp_dictoffset > 0);
             assert(owner_cls->tp_dictoffset <= INT16_MAX);
-            _py_set_opcode(instr, LOAD_ATTR_METHOD_LAZY_DICT);
+            instr->op.code = LOAD_ATTR_METHOD_LAZY_DICT;
         }
     }
     /* `descr` is borrowed. This is safe for methods (even inherited ones from
@@ -1146,7 +1146,7 @@ _Py_Specialize_LoadGlobal(
         }
         cache->index = (uint16_t)index;
         write_u32(cache->module_keys_version, keys_version);
-        _py_set_opcode(instr, LOAD_GLOBAL_MODULE);
+        instr->op.code = LOAD_GLOBAL_MODULE;
         goto success;
     }
     if (!PyDict_CheckExact(builtins)) {
@@ -1184,12 +1184,12 @@ _Py_Specialize_LoadGlobal(
     cache->index = (uint16_t)index;
     write_u32(cache->module_keys_version, globals_version);
     cache->builtin_keys_version = (uint16_t)builtins_version;
-    _py_set_opcode(instr, LOAD_GLOBAL_BUILTIN);
+    instr->op.code = LOAD_GLOBAL_BUILTIN;
     goto success;
 fail:
     STAT_INC(LOAD_GLOBAL, failure);
     assert(!PyErr_Occurred());
-    _py_set_opcode(instr, LOAD_GLOBAL);
+    instr->op.code = LOAD_GLOBAL;
     cache->counter = adaptive_counter_backoff(cache->counter);
     return;
 success:
@@ -1295,7 +1295,7 @@ _Py_Specialize_BinarySubscr(
     if (container_type == &PyList_Type) {
         if (PyLong_CheckExact(sub)) {
             if (Py_SIZE(sub) == 0 || Py_SIZE(sub) == 1) {
-                _py_set_opcode(instr, BINARY_SUBSCR_LIST_INT);
+                instr->op.code = BINARY_SUBSCR_LIST_INT;
                 goto success;
             }
             SPECIALIZATION_FAIL(BINARY_SUBSCR, SPEC_FAIL_OUT_OF_RANGE);
@@ -1308,7 +1308,7 @@ _Py_Specialize_BinarySubscr(
     if (container_type == &PyTuple_Type) {
         if (PyLong_CheckExact(sub)) {
             if (Py_SIZE(sub) == 0 || Py_SIZE(sub) == 1) {
-                _py_set_opcode(instr, BINARY_SUBSCR_TUPLE_INT);
+                instr->op.code = BINARY_SUBSCR_TUPLE_INT;
                 goto success;
             }
             SPECIALIZATION_FAIL(BINARY_SUBSCR, SPEC_FAIL_OUT_OF_RANGE);
@@ -1319,7 +1319,7 @@ _Py_Specialize_BinarySubscr(
         goto fail;
     }
     if (container_type == &PyDict_Type) {
-        _py_set_opcode(instr, BINARY_SUBSCR_DICT);
+        instr->op.code = BINARY_SUBSCR_DICT;
         goto success;
     }
     PyTypeObject *cls = Py_TYPE(container);
@@ -1350,7 +1350,7 @@ _Py_Specialize_BinarySubscr(
         }
         cache->func_version = version;
         ((PyHeapTypeObject *)container_type)->_spec_cache.getitem = descriptor;
-        _py_set_opcode(instr, BINARY_SUBSCR_GETITEM);
+        instr->op.code = BINARY_SUBSCR_GETITEM;
         goto success;
     }
     SPECIALIZATION_FAIL(BINARY_SUBSCR,
@@ -1358,7 +1358,7 @@ _Py_Specialize_BinarySubscr(
 fail:
     STAT_INC(BINARY_SUBSCR, failure);
     assert(!PyErr_Occurred());
-    _py_set_opcode(instr, BINARY_SUBSCR);
+    instr->op.code = BINARY_SUBSCR;
     cache->counter = adaptive_counter_backoff(cache->counter);
     return;
 success:
@@ -1378,7 +1378,7 @@ _Py_Specialize_StoreSubscr(PyObject *container, PyObject *sub, _Py_CODEUNIT *ins
         if ((Py_SIZE(sub) == 0 || Py_SIZE(sub) == 1)
             && ((PyLongObject *)sub)->long_value.ob_digit[0] < (size_t)PyList_GET_SIZE(container))
         {
-            _py_set_opcode(instr, STORE_SUBSCR_LIST_INT);
+            instr->op.code = STORE_SUBSCR_LIST_INT;
             goto success;
         }
         else {
@@ -1396,8 +1396,8 @@ _Py_Specialize_StoreSubscr(PyObject *container, PyObject *sub, _Py_CODEUNIT *ins
         }
     }
     if (container_type == &PyDict_Type) {
-        _py_set_opcode(instr, STORE_SUBSCR_DICT);
-         goto success;
+        instr->op.code = STORE_SUBSCR_DICT;
+        goto success;
     }
 #ifdef Py_STATS
     PyMappingMethods *as_mapping = container_type->tp_as_mapping;
@@ -1463,7 +1463,7 @@ _Py_Specialize_StoreSubscr(PyObject *container, PyObject *sub, _Py_CODEUNIT *ins
 fail:
     STAT_INC(STORE_SUBSCR, failure);
     assert(!PyErr_Occurred());
-    _py_set_opcode(instr, STORE_SUBSCR);
+    instr->op.code = STORE_SUBSCR;
     cache->counter = adaptive_counter_backoff(cache->counter);
     return;
 success:
@@ -1482,23 +1482,23 @@ specialize_class_call(PyObject *callable, _Py_CODEUNIT *instr, int nargs,
         return -1;
     }
     if (tp->tp_flags & Py_TPFLAGS_IMMUTABLETYPE) {
-        int oparg = _Py_OPARG(*instr);
+        int oparg = instr->op.arg;
         if (nargs == 1 && kwnames == NULL && oparg == 1) {
             if (tp == &PyUnicode_Type) {
-                _py_set_opcode(instr, CALL_NO_KW_STR_1);
+                instr->op.code = CALL_NO_KW_STR_1;
                 return 0;
             }
             else if (tp == &PyType_Type) {
-                _py_set_opcode(instr, CALL_NO_KW_TYPE_1);
+                instr->op.code = CALL_NO_KW_TYPE_1;
                 return 0;
             }
             else if (tp == &PyTuple_Type) {
-                _py_set_opcode(instr, CALL_NO_KW_TUPLE_1);
+                instr->op.code = CALL_NO_KW_TUPLE_1;
                 return 0;
             }
         }
         if (tp->tp_vectorcall != NULL) {
-            _py_set_opcode(instr, CALL_BUILTIN_CLASS);
+            instr->op.code = CALL_BUILTIN_CLASS;
             return 0;
         }
         SPECIALIZATION_FAIL(CALL, tp == &PyUnicode_Type ?
@@ -1573,7 +1573,7 @@ specialize_method_descriptor(PyMethodDescrObject *descr, _Py_CODEUNIT *instr,
                 SPECIALIZATION_FAIL(CALL, SPEC_FAIL_WRONG_NUMBER_ARGUMENTS);
                 return -1;
             }
-            _py_set_opcode(instr, CALL_NO_KW_METHOD_DESCRIPTOR_NOARGS);
+            instr->op.code = CALL_NO_KW_METHOD_DESCRIPTOR_NOARGS;
             return 0;
         }
         case METH_O: {
@@ -1584,21 +1584,21 @@ specialize_method_descriptor(PyMethodDescrObject *descr, _Py_CODEUNIT *instr,
             PyInterpreterState *interp = _PyInterpreterState_GET();
             PyObject *list_append = interp->callable_cache.list_append;
             _Py_CODEUNIT next = instr[INLINE_CACHE_ENTRIES_CALL + 1];
-            bool pop = (_Py_OPCODE(next) == POP_TOP);
-            int oparg = _Py_OPARG(*instr);
+            bool pop = (next.op.code == POP_TOP);
+            int oparg = instr->op.arg;
             if ((PyObject *)descr == list_append && oparg == 1 && pop) {
-                _py_set_opcode(instr, CALL_NO_KW_LIST_APPEND);
+                instr->op.code = CALL_NO_KW_LIST_APPEND;
                 return 0;
             }
-            _py_set_opcode(instr, CALL_NO_KW_METHOD_DESCRIPTOR_O);
+            instr->op.code = CALL_NO_KW_METHOD_DESCRIPTOR_O;
             return 0;
         }
         case METH_FASTCALL: {
-            _py_set_opcode(instr, CALL_NO_KW_METHOD_DESCRIPTOR_FAST);
+            instr->op.code = CALL_NO_KW_METHOD_DESCRIPTOR_FAST;
             return 0;
         }
         case METH_FASTCALL | METH_KEYWORDS: {
-            _py_set_opcode(instr, CALL_METHOD_DESCRIPTOR_FAST_WITH_KEYWORDS);
+            instr->op.code = CALL_METHOD_DESCRIPTOR_FAST_WITH_KEYWORDS;
             return 0;
         }
     }
@@ -1649,14 +1649,14 @@ specialize_py_call(PyFunctionObject *func, _Py_CODEUNIT *instr, int nargs,
     write_u32(cache->func_version, version);
     cache->min_args = min_args;
     if (argcount == nargs) {
-        _py_set_opcode(instr, bound_method ? CALL_BOUND_METHOD_EXACT_ARGS : CALL_PY_EXACT_ARGS);
+        instr->op.code = bound_method ? CALL_BOUND_METHOD_EXACT_ARGS : CALL_PY_EXACT_ARGS;
     }
     else if (bound_method) {
         SPECIALIZATION_FAIL(CALL, SPEC_FAIL_CALL_BOUND_METHOD);
         return -1;
     }
     else {
-        _py_set_opcode(instr, CALL_PY_WITH_DEFAULTS);
+        instr->op.code = CALL_PY_WITH_DEFAULTS;
     }
     return 0;
 }
@@ -1683,10 +1683,10 @@ specialize_c_call(PyObject *callable, _Py_CODEUNIT *instr, int nargs,
             /* len(o) */
             PyInterpreterState *interp = _PyInterpreterState_GET();
             if (callable == interp->callable_cache.len) {
-                _py_set_opcode(instr, CALL_NO_KW_LEN);
+                instr->op.code = CALL_NO_KW_LEN;
                 return 0;
             }
-            _py_set_opcode(instr, CALL_NO_KW_BUILTIN_O);
+            instr->op.code = CALL_NO_KW_BUILTIN_O;
             return 0;
         }
         case METH_FASTCALL: {
@@ -1698,15 +1698,15 @@ specialize_c_call(PyObject *callable, _Py_CODEUNIT *instr, int nargs,
                 /* isinstance(o1, o2) */
                 PyInterpreterState *interp = _PyInterpreterState_GET();
                 if (callable == interp->callable_cache.isinstance) {
-                    _py_set_opcode(instr, CALL_NO_KW_ISINSTANCE);
+                    instr->op.code = CALL_NO_KW_ISINSTANCE;
                     return 0;
                 }
             }
-            _py_set_opcode(instr, CALL_NO_KW_BUILTIN_FAST);
+            instr->op.code = CALL_NO_KW_BUILTIN_FAST;
             return 0;
         }
         case METH_FASTCALL | METH_KEYWORDS: {
-            _py_set_opcode(instr, CALL_BUILTIN_FAST_WITH_KEYWORDS);
+            instr->op.code = CALL_BUILTIN_FAST_WITH_KEYWORDS;
             return 0;
         }
         default:
@@ -1785,7 +1785,7 @@ _Py_Specialize_Call(PyObject *callable, _Py_CODEUNIT *instr, int nargs,
     if (fail) {
         STAT_INC(CALL, failure);
         assert(!PyErr_Occurred());
-        _py_set_opcode(instr, CALL);
+        instr->op.code = CALL;
         cache->counter = adaptive_counter_backoff(cache->counter);
     }
     else {
@@ -1880,21 +1880,21 @@ _Py_Specialize_BinaryOp(PyObject *lhs, PyObject *rhs, _Py_CODEUNIT *instr,
             }
             if (PyUnicode_CheckExact(lhs)) {
                 _Py_CODEUNIT next = instr[INLINE_CACHE_ENTRIES_BINARY_OP + 1];
-                bool to_store = (_Py_OPCODE(next) == STORE_FAST ||
-                                 _Py_OPCODE(next) == STORE_FAST__LOAD_FAST);
-                if (to_store && locals[_Py_OPARG(next)] == lhs) {
-                    _py_set_opcode(instr, BINARY_OP_INPLACE_ADD_UNICODE);
+                bool to_store = (next.op.code == STORE_FAST ||
+                                 next.op.code == STORE_FAST__LOAD_FAST);
+                if (to_store && locals[next.op.arg] == lhs) {
+                    instr->op.code = BINARY_OP_INPLACE_ADD_UNICODE;
                     goto success;
                 }
-                _py_set_opcode(instr, BINARY_OP_ADD_UNICODE);
+                instr->op.code = BINARY_OP_ADD_UNICODE;
                 goto success;
             }
             if (PyLong_CheckExact(lhs)) {
-                _py_set_opcode(instr, BINARY_OP_ADD_INT);
+                instr->op.code = BINARY_OP_ADD_INT;
                 goto success;
             }
             if (PyFloat_CheckExact(lhs)) {
-                _py_set_opcode(instr, BINARY_OP_ADD_FLOAT);
+                instr->op.code = BINARY_OP_ADD_FLOAT;
                 goto success;
             }
             break;
@@ -1904,11 +1904,11 @@ _Py_Specialize_BinaryOp(PyObject *lhs, PyObject *rhs, _Py_CODEUNIT *instr,
                 break;
             }
             if (PyLong_CheckExact(lhs)) {
-                _py_set_opcode(instr, BINARY_OP_MULTIPLY_INT);
+                instr->op.code = BINARY_OP_MULTIPLY_INT;
                 goto success;
             }
             if (PyFloat_CheckExact(lhs)) {
-                _py_set_opcode(instr, BINARY_OP_MULTIPLY_FLOAT);
+                instr->op.code = BINARY_OP_MULTIPLY_FLOAT;
                 goto success;
             }
             break;
@@ -1918,18 +1918,18 @@ _Py_Specialize_BinaryOp(PyObject *lhs, PyObject *rhs, _Py_CODEUNIT *instr,
                 break;
             }
             if (PyLong_CheckExact(lhs)) {
-                _py_set_opcode(instr, BINARY_OP_SUBTRACT_INT);
+                instr->op.code = BINARY_OP_SUBTRACT_INT;
                 goto success;
             }
             if (PyFloat_CheckExact(lhs)) {
-                _py_set_opcode(instr, BINARY_OP_SUBTRACT_FLOAT);
+                instr->op.code = BINARY_OP_SUBTRACT_FLOAT;
                 goto success;
            }
             break;
     }
     SPECIALIZATION_FAIL(BINARY_OP, binary_op_fail_kind(oparg, lhs, rhs));
     STAT_INC(BINARY_OP, failure);
-    _py_set_opcode(instr, BINARY_OP);
+    instr->op.code = BINARY_OP;
     cache->counter = adaptive_counter_backoff(cache->counter);
     return;
 success:
@@ -1981,7 +1981,7 @@ _Py_Specialize_CompareAndBranch(PyObject *lhs, PyObject *rhs, _Py_CODEUNIT *inst
     assert(_PyOpcode_Caches[COMPARE_AND_BRANCH] == INLINE_CACHE_ENTRIES_COMPARE_OP);
     _PyCompareOpCache *cache = (_PyCompareOpCache *)(instr + 1);
 #ifndef NDEBUG
-    int next_opcode = _Py_OPCODE(instr[INLINE_CACHE_ENTRIES_COMPARE_OP + 1]);
+    int next_opcode = instr[INLINE_CACHE_ENTRIES_COMPARE_OP + 1].op.code;
     assert(next_opcode == POP_JUMP_IF_FALSE || next_opcode == POP_JUMP_IF_TRUE);
 #endif
     if (Py_TYPE(lhs) != Py_TYPE(rhs)) {
@@ -1989,12 +1989,12 @@ _Py_Specialize_CompareAndBranch(PyObject *lhs, PyObject *rhs, _Py_CODEUNIT *inst
         goto failure;
     }
     if (PyFloat_CheckExact(lhs)) {
-        _py_set_opcode(instr, COMPARE_AND_BRANCH_FLOAT);
+        instr->op.code = COMPARE_AND_BRANCH_FLOAT;
         goto success;
     }
     if (PyLong_CheckExact(lhs)) {
         if (Py_ABS(Py_SIZE(lhs)) <= 1 && Py_ABS(Py_SIZE(rhs)) <= 1) {
-            _py_set_opcode(instr, COMPARE_AND_BRANCH_INT);
+            instr->op.code = COMPARE_AND_BRANCH_INT;
             goto success;
         }
         else {
@@ -2009,14 +2009,14 @@ _Py_Specialize_CompareAndBranch(PyObject *lhs, PyObject *rhs, _Py_CODEUNIT *inst
             goto failure;
         }
         else {
-            _py_set_opcode(instr, COMPARE_AND_BRANCH_STR);
+            instr->op.code = COMPARE_AND_BRANCH_STR;
             goto success;
         }
     }
     SPECIALIZATION_FAIL(COMPARE_AND_BRANCH, compare_op_fail_kind(lhs, rhs));
 failure:
     STAT_INC(COMPARE_AND_BRANCH, failure);
-    _py_set_opcode(instr, COMPARE_AND_BRANCH);
+    instr->op.code = COMPARE_AND_BRANCH;
     cache->counter = adaptive_counter_backoff(cache->counter);
     return;
 success:
@@ -2051,10 +2051,10 @@ _Py_Specialize_UnpackSequence(PyObject *seq, _Py_CODEUNIT *instr, int oparg)
             goto failure;
         }
         if (PyTuple_GET_SIZE(seq) == 2) {
-            _py_set_opcode(instr, UNPACK_SEQUENCE_TWO_TUPLE);
+            instr->op.code = UNPACK_SEQUENCE_TWO_TUPLE;
             goto success;
         }
-        _py_set_opcode(instr, UNPACK_SEQUENCE_TUPLE);
+        instr->op.code = UNPACK_SEQUENCE_TUPLE;
         goto success;
     }
     if (PyList_CheckExact(seq)) {
@@ -2062,13 +2062,13 @@ _Py_Specialize_UnpackSequence(PyObject *seq, _Py_CODEUNIT *instr, int oparg)
             SPECIALIZATION_FAIL(UNPACK_SEQUENCE, SPEC_FAIL_EXPECTED_ERROR);
             goto failure;
         }
-        _py_set_opcode(instr, UNPACK_SEQUENCE_LIST);
+        instr->op.code = UNPACK_SEQUENCE_LIST;
         goto success;
     }
     SPECIALIZATION_FAIL(UNPACK_SEQUENCE, unpack_sequence_fail_kind(seq));
 failure:
     STAT_INC(UNPACK_SEQUENCE, failure);
-    _py_set_opcode(instr, UNPACK_SEQUENCE);
+    instr->op.code = UNPACK_SEQUENCE;
     cache->counter = adaptive_counter_backoff(cache->counter);
     return;
 success:
@@ -2156,28 +2156,28 @@ _Py_Specialize_ForIter(PyObject *iter, _Py_CODEUNIT *instr, int oparg)
     _PyForIterCache *cache = (_PyForIterCache *)(instr + 1);
     PyTypeObject *tp = Py_TYPE(iter);
     _Py_CODEUNIT next = instr[1+INLINE_CACHE_ENTRIES_FOR_ITER];
-    int next_op = _PyOpcode_Deopt[_Py_OPCODE(next)];
+    int next_op = _PyOpcode_Deopt[next.op.code];
     if (tp == &PyListIter_Type) {
-        _py_set_opcode(instr, FOR_ITER_LIST);
+        instr->op.code = FOR_ITER_LIST;
         goto success;
     }
     else if (tp == &PyTupleIter_Type) {
-        _py_set_opcode(instr, FOR_ITER_TUPLE);
+        instr->op.code = FOR_ITER_TUPLE;
         goto success;
     }
     else if (tp == &PyRangeIter_Type && next_op == STORE_FAST) {
-        _py_set_opcode(instr, FOR_ITER_RANGE);
+        instr->op.code = FOR_ITER_RANGE;
         goto success;
     }
     else if (tp == &PyGen_Type && oparg <= SHRT_MAX) {
-        assert(_Py_OPCODE(instr[oparg + INLINE_CACHE_ENTRIES_FOR_ITER + 1]) == END_FOR);
-        _py_set_opcode(instr, FOR_ITER_GEN);
+        assert(instr[oparg + INLINE_CACHE_ENTRIES_FOR_ITER + 1].op.code == END_FOR);
+        instr->op.code = FOR_ITER_GEN;
         goto success;
     }
     SPECIALIZATION_FAIL(FOR_ITER,
                         _PySpecialization_ClassifyIterator(iter));
     STAT_INC(FOR_ITER, failure);
-    _py_set_opcode(instr, FOR_ITER);
+    instr->op.code = FOR_ITER;
     cache->counter = adaptive_counter_backoff(cache->counter);
     return;
 success:
@@ -2193,13 +2193,13 @@ _Py_Specialize_Send(PyObject *receiver, _Py_CODEUNIT *instr)
     _PySendCache *cache = (_PySendCache *)(instr + 1);
     PyTypeObject *tp = Py_TYPE(receiver);
     if (tp == &PyGen_Type || tp == &PyCoro_Type) {
-        _py_set_opcode(instr, SEND_GEN);
+        instr->op.code = SEND_GEN;
         goto success;
     }
     SPECIALIZATION_FAIL(SEND,
                         _PySpecialization_ClassifyIterator(receiver));
     STAT_INC(SEND, failure);
-    _py_set_opcode(instr, SEND);
+    instr->op.code = SEND;
     cache->counter = adaptive_counter_backoff(cache->counter);
     return;
 success:
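The one rewrite above that goes beyond a mechanical accessor swap is the COMPARE_OP + POP_JUMP_IF_TRUE/POP_JUMP_IF_FALSE fusion in _PyCode_Quicken: the comparison kind stays in the high nibble of the oparg, the low nibble is replaced by a branch mask looked up from compare_masks, and the mask is inverted with ^ 0xf when the following jump fires on a false result. The self-contained toy below walks through just that nibble arithmetic; the mask table and opcode values are invented placeholders for illustration, not CPython's real compare_masks or opcode numbers.

    /* Toy model of the oparg rewrite performed for COMPARE_AND_BRANCH in
     * _PyCode_Quicken above.  Only the nibble arithmetic mirrors the diff;
     * the constants are made up. */
    #include <stdint.h>
    #include <stdio.h>

    enum { TOY_POP_JUMP_IF_TRUE = 1, TOY_POP_JUMP_IF_FALSE = 2 };  /* placeholder opcodes */

    /* Hypothetical mask per comparison kind: which outcomes should take the branch. */
    static uint8_t toy_compare_mask(unsigned cmp_kind)
    {
        static const uint8_t masks[6] = {0x1, 0x3, 0x2, 0xd, 0x4, 0x6};  /* invented values */
        return masks[cmp_kind % 6];
    }

    /* Keep the comparison kind in the high nibble, put the branch mask in the
     * low nibble, inverting the mask when the following jump is taken on false. */
    static uint8_t toy_fuse_oparg(uint8_t oparg, int next_opcode)
    {
        uint8_t mask = toy_compare_mask(oparg >> 4);
        if (next_opcode == TOY_POP_JUMP_IF_FALSE) {
            mask ^= 0xf;
        }
        return (uint8_t)((oparg & 0xf0) | mask);
    }

    int main(void)
    {
        uint8_t oparg = 0x20;  /* comparison kind 2 encoded in the high nibble */
        printf("fused for POP_JUMP_IF_TRUE:  0x%02x\n", toy_fuse_oparg(oparg, TOY_POP_JUMP_IF_TRUE));
        printf("fused for POP_JUMP_IF_FALSE: 0x%02x\n", toy_fuse_oparg(oparg, TOY_POP_JUMP_IF_FALSE));
        return 0;
    }

Running the toy prints two fused opargs that share the same high nibble and carry complementary low nibbles, which is the split the rewritten instruction encodes: comparison kind above, branch condition below.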