path: root/Python
author     Steve Dower <steve.dower@python.org>  2023-02-20 14:56:48 (GMT)
committer  GitHub <noreply@github.com>           2023-02-20 14:56:48 (GMT)
commit     a99eb5cd9947629a6745a4ad99cb07af1c287b5d (patch)
tree       2d2b82cef5aa1c514eda32eaca115e2723fd7f1c /Python
parent     c00faf79438cc7f0d98af2679c695f747e4369a3 (diff)
gh-101907: Stop using `_Py_OPCODE` and `_Py_OPARG` macros (GH-101912)
* gh-101907: Removes use of non-standard C++ extension from Include/cpython/code.h
* Make cases_generator correct on Windows
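In broad terms, the commit replaces the `_Py_OPCODE(word)` and `_Py_OPARG(word)` macros with direct member access on the `_Py_CODEUNIT` union (`word.op.code`, `word.op.arg`), and gives the embedded struct a named member so the header no longer relies on a non-standard C++ extension. The sketch below is only an illustration of that access pattern, with a hypothetical `example_codeunit_t` standing in for the real union; see Include/cpython/code.h for the actual definition.

```c
#include <stdint.h>

/* Illustrative approximation of the _Py_CODEUNIT layout after this change;
   the real definition lives in Include/cpython/code.h. */
typedef union {
    uint16_t cache;      /* whole 16-bit code unit, reused for inline caches */
    struct {
        uint8_t code;    /* opcode */
        uint8_t arg;     /* oparg */
    } op;                /* named member: valid in both C and standard C++ */
} example_codeunit_t;

static void
show_access(example_codeunit_t word)
{
    /* Old style (removed by this commit):
     *     int opcode = _Py_OPCODE(word);
     *     int oparg  = _Py_OPARG(word);
     * New style: plain struct member access. */
    int opcode = word.op.code;
    int oparg  = word.op.arg;
    (void)opcode;
    (void)oparg;
}
```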
Diffstat (limited to 'Python')
-rw-r--r--  Python/bytecodes.c            34
-rw-r--r--  Python/ceval.c                12
-rw-r--r--  Python/ceval_macros.h         18
-rw-r--r--  Python/compile.c              20
-rw-r--r--  Python/generated_cases.c.h    34
-rw-r--r--  Python/specialize.c          172
6 files changed, 145 insertions, 145 deletions
diff --git a/Python/bytecodes.c b/Python/bytecodes.c
index 84747f1..c5959f2 100644
--- a/Python/bytecodes.c
+++ b/Python/bytecodes.c
@@ -246,9 +246,9 @@ dummy_func(
DEOPT_IF(!PyUnicode_CheckExact(left), BINARY_OP);
DEOPT_IF(Py_TYPE(right) != Py_TYPE(left), BINARY_OP);
_Py_CODEUNIT true_next = next_instr[INLINE_CACHE_ENTRIES_BINARY_OP];
- assert(_Py_OPCODE(true_next) == STORE_FAST ||
- _Py_OPCODE(true_next) == STORE_FAST__LOAD_FAST);
- PyObject **target_local = &GETLOCAL(_Py_OPARG(true_next));
+ assert(true_next.op.code == STORE_FAST ||
+ true_next.op.code == STORE_FAST__LOAD_FAST);
+ PyObject **target_local = &GETLOCAL(true_next.op.arg);
DEOPT_IF(*target_local != left, BINARY_OP);
STAT_INC(BINARY_OP, hit);
/* Handle `left = left + right` or `left += right` for str.
@@ -1748,10 +1748,10 @@ dummy_func(
Py_DECREF(left);
Py_DECREF(right);
ERROR_IF(cond == NULL, error);
- assert(_Py_OPCODE(next_instr[1]) == POP_JUMP_IF_FALSE ||
- _Py_OPCODE(next_instr[1]) == POP_JUMP_IF_TRUE);
- bool jump_on_true = _Py_OPCODE(next_instr[1]) == POP_JUMP_IF_TRUE;
- int offset = _Py_OPARG(next_instr[1]);
+ assert(next_instr[1].op.code == POP_JUMP_IF_FALSE ||
+ next_instr[1].op.code == POP_JUMP_IF_TRUE);
+ bool jump_on_true = next_instr[1].op.code == POP_JUMP_IF_TRUE;
+ int offset = next_instr[1].op.arg;
int err = PyObject_IsTrue(cond);
Py_DECREF(cond);
if (err < 0) {
@@ -1774,7 +1774,7 @@ dummy_func(
_Py_DECREF_SPECIALIZED(left, _PyFloat_ExactDealloc);
_Py_DECREF_SPECIALIZED(right, _PyFloat_ExactDealloc);
if (sign_ish & oparg) {
- int offset = _Py_OPARG(next_instr[1]);
+ int offset = next_instr[1].op.arg;
JUMPBY(offset);
}
}
@@ -1795,7 +1795,7 @@ dummy_func(
_Py_DECREF_SPECIALIZED(left, (destructor)PyObject_Free);
_Py_DECREF_SPECIALIZED(right, (destructor)PyObject_Free);
if (sign_ish & oparg) {
- int offset = _Py_OPARG(next_instr[1]);
+ int offset = next_instr[1].op.arg;
JUMPBY(offset);
}
}
@@ -1814,7 +1814,7 @@ dummy_func(
assert((oparg & 0xf) == COMPARISON_NOT_EQUALS || (oparg & 0xf) == COMPARISON_EQUALS);
assert(COMPARISON_NOT_EQUALS + 1 == COMPARISON_EQUALS);
if ((res + COMPARISON_NOT_EQUALS) & oparg) {
- int offset = _Py_OPARG(next_instr[1]);
+ int offset = next_instr[1].op.arg;
JUMPBY(offset);
}
}
@@ -2122,7 +2122,7 @@ dummy_func(
_PyErr_Clear(tstate);
}
/* iterator ended normally */
- assert(_Py_OPCODE(next_instr[INLINE_CACHE_ENTRIES_FOR_ITER + oparg]) == END_FOR);
+ assert(next_instr[INLINE_CACHE_ENTRIES_FOR_ITER + oparg].op.code == END_FOR);
Py_DECREF(iter);
STACK_SHRINK(1);
/* Jump forward oparg, then skip following END_FOR instruction */
@@ -2186,7 +2186,7 @@ dummy_func(
DEOPT_IF(Py_TYPE(r) != &PyRangeIter_Type, FOR_ITER);
STAT_INC(FOR_ITER, hit);
_Py_CODEUNIT next = next_instr[INLINE_CACHE_ENTRIES_FOR_ITER];
- assert(_PyOpcode_Deopt[_Py_OPCODE(next)] == STORE_FAST);
+ assert(_PyOpcode_Deopt[next.op.code] == STORE_FAST);
if (r->len <= 0) {
STACK_SHRINK(1);
Py_DECREF(r);
@@ -2197,7 +2197,7 @@ dummy_func(
long value = r->start;
r->start = value + r->step;
r->len--;
- if (_PyLong_AssignValue(&GETLOCAL(_Py_OPARG(next)), value) < 0) {
+ if (_PyLong_AssignValue(&GETLOCAL(next.op.arg), value) < 0) {
goto error;
}
// The STORE_FAST is already done.
@@ -2220,7 +2220,7 @@ dummy_func(
gen->gi_exc_state.previous_item = tstate->exc_info;
tstate->exc_info = &gen->gi_exc_state;
JUMPBY(INLINE_CACHE_ENTRIES_FOR_ITER + oparg);
- assert(_Py_OPCODE(*next_instr) == END_FOR);
+ assert(next_instr->op.code == END_FOR);
DISPATCH_INLINED(gen_frame);
}
@@ -2809,7 +2809,7 @@ dummy_func(
STACK_SHRINK(3);
// CALL + POP_TOP
JUMPBY(INLINE_CACHE_ENTRIES_CALL + 1);
- assert(_Py_OPCODE(next_instr[-1]) == POP_TOP);
+ assert(next_instr[-1].op.code == POP_TOP);
DISPATCH();
}
@@ -3118,8 +3118,8 @@ dummy_func(
inst(EXTENDED_ARG, (--)) {
assert(oparg);
assert(cframe.use_tracing == 0);
- opcode = _Py_OPCODE(*next_instr);
- oparg = oparg << 8 | _Py_OPARG(*next_instr);
+ opcode = next_instr->op.code;
+ oparg = oparg << 8 | next_instr->op.arg;
PRE_DISPATCH_GOTO();
DISPATCH_GOTO();
}
diff --git a/Python/ceval.c b/Python/ceval.c
index 308ef52..b85231a 100644
--- a/Python/ceval.c
+++ b/Python/ceval.c
@@ -132,8 +132,8 @@ lltrace_instruction(_PyInterpreterFrame *frame,
objects enters the interpreter recursively. It is also slow.
So you might want to comment it out. */
dump_stack(frame, stack_pointer);
- int oparg = _Py_OPARG(*next_instr);
- int opcode = _Py_OPCODE(*next_instr);
+ int oparg = next_instr->op.arg;
+ int opcode = next_instr->op.code;
const char *opname = _PyOpcode_OpName[opcode];
assert(opname != NULL);
int offset = (int)(next_instr - _PyCode_CODE(frame->f_code));
@@ -920,8 +920,8 @@ handle_eval_breaker:
// CPython hasn't ever traced the instruction after an EXTENDED_ARG.
// Inline the EXTENDED_ARG here, so we can avoid branching there:
INSTRUCTION_START(EXTENDED_ARG);
- opcode = _Py_OPCODE(*next_instr);
- oparg = oparg << 8 | _Py_OPARG(*next_instr);
+ opcode = next_instr->op.code;
+ oparg = oparg << 8 | next_instr->op.arg;
// Make sure the next instruction isn't a RESUME, since that needs
// to trace properly (and shouldn't have an EXTENDED_ARG, anyways):
assert(opcode != RESUME);
@@ -946,7 +946,7 @@ handle_eval_breaker:
#endif
/* Tell C compilers not to hold the opcode variable in the loop.
next_instr points the current instruction without TARGET(). */
- opcode = _Py_OPCODE(*next_instr);
+ opcode = next_instr->op.code;
_PyErr_Format(tstate, PyExc_SystemError,
"%U:%d: unknown opcode %d",
frame->f_code->co_filename,
@@ -2196,7 +2196,7 @@ maybe_call_line_trace(Py_tracefunc func, PyObject *obj,
(_PyInterpreterFrame_LASTI(frame) < instr_prev &&
// SEND has no quickened forms, so no need to use _PyOpcode_Deopt
// here:
- _Py_OPCODE(*frame->prev_instr) != SEND);
+ frame->prev_instr->op.code != SEND);
if (trace) {
result = call_trace(func, obj, tstate, frame, PyTrace_LINE, Py_None);
}
diff --git a/Python/ceval_macros.h b/Python/ceval_macros.h
index 691bf8e..ac1fec7 100644
--- a/Python/ceval_macros.h
+++ b/Python/ceval_macros.h
@@ -100,7 +100,7 @@
#define DISPATCH_SAME_OPARG() \
{ \
- opcode = _Py_OPCODE(*next_instr); \
+ opcode = next_instr->op.code; \
PRE_DISPATCH_GOTO(); \
opcode |= cframe.use_tracing OR_DTRACE_LINE; \
DISPATCH_GOTO(); \
@@ -143,8 +143,8 @@ GETITEM(PyObject *v, Py_ssize_t i) {
#define INSTR_OFFSET() ((int)(next_instr - _PyCode_CODE(frame->f_code)))
#define NEXTOPARG() do { \
_Py_CODEUNIT word = *next_instr; \
- opcode = _Py_OPCODE(word); \
- oparg = _Py_OPARG(word); \
+ opcode = word.op.code; \
+ oparg = word.op.arg; \
} while (0)
#define JUMPTO(x) (next_instr = _PyCode_CODE(frame->f_code) + (x))
#define JUMPBY(x) (next_instr += (x))
@@ -180,14 +180,14 @@ GETITEM(PyObject *v, Py_ssize_t i) {
#if USE_COMPUTED_GOTOS
#define PREDICT(op) if (0) goto PREDICT_ID(op)
#else
-#define PREDICT(op) \
+#define PREDICT(next_op) \
do { \
_Py_CODEUNIT word = *next_instr; \
- opcode = _Py_OPCODE(word) | cframe.use_tracing OR_DTRACE_LINE; \
- if (opcode == op) { \
- oparg = _Py_OPARG(word); \
- INSTRUCTION_START(op); \
- goto PREDICT_ID(op); \
+ opcode = word.op.code | cframe.use_tracing OR_DTRACE_LINE; \
+ if (opcode == next_op) { \
+ oparg = word.op.arg; \
+ INSTRUCTION_START(next_op); \
+ goto PREDICT_ID(next_op); \
} \
} while(0)
#endif
diff --git a/Python/compile.c b/Python/compile.c
index c3b344c..3f620be 100644
--- a/Python/compile.c
+++ b/Python/compile.c
@@ -274,31 +274,31 @@ write_instr(_Py_CODEUNIT *codestr, struct instr *instruction, int ilen)
int caches = _PyOpcode_Caches[opcode];
switch (ilen - caches) {
case 4:
- codestr->opcode = EXTENDED_ARG;
- codestr->oparg = (oparg >> 24) & 0xFF;
+ codestr->op.code = EXTENDED_ARG;
+ codestr->op.arg = (oparg >> 24) & 0xFF;
codestr++;
/* fall through */
case 3:
- codestr->opcode = EXTENDED_ARG;
- codestr->oparg = (oparg >> 16) & 0xFF;
+ codestr->op.code = EXTENDED_ARG;
+ codestr->op.arg = (oparg >> 16) & 0xFF;
codestr++;
/* fall through */
case 2:
- codestr->opcode = EXTENDED_ARG;
- codestr->oparg = (oparg >> 8) & 0xFF;
+ codestr->op.code = EXTENDED_ARG;
+ codestr->op.arg = (oparg >> 8) & 0xFF;
codestr++;
/* fall through */
case 1:
- codestr->opcode = opcode;
- codestr->oparg = oparg & 0xFF;
+ codestr->op.code = opcode;
+ codestr->op.arg = oparg & 0xFF;
codestr++;
break;
default:
Py_UNREACHABLE();
}
while (caches--) {
- codestr->opcode = CACHE;
- codestr->oparg = 0;
+ codestr->op.code = CACHE;
+ codestr->op.arg = 0;
codestr++;
}
}
diff --git a/Python/generated_cases.c.h b/Python/generated_cases.c.h
index 730dfb7..487e63d 100644
--- a/Python/generated_cases.c.h
+++ b/Python/generated_cases.c.h
@@ -339,9 +339,9 @@
DEOPT_IF(!PyUnicode_CheckExact(left), BINARY_OP);
DEOPT_IF(Py_TYPE(right) != Py_TYPE(left), BINARY_OP);
_Py_CODEUNIT true_next = next_instr[INLINE_CACHE_ENTRIES_BINARY_OP];
- assert(_Py_OPCODE(true_next) == STORE_FAST ||
- _Py_OPCODE(true_next) == STORE_FAST__LOAD_FAST);
- PyObject **target_local = &GETLOCAL(_Py_OPARG(true_next));
+ assert(true_next.op.code == STORE_FAST ||
+ true_next.op.code == STORE_FAST__LOAD_FAST);
+ PyObject **target_local = &GETLOCAL(true_next.op.arg);
DEOPT_IF(*target_local != left, BINARY_OP);
STAT_INC(BINARY_OP, hit);
/* Handle `left = left + right` or `left += right` for str.
@@ -2199,10 +2199,10 @@
Py_DECREF(left);
Py_DECREF(right);
if (cond == NULL) goto pop_2_error;
- assert(_Py_OPCODE(next_instr[1]) == POP_JUMP_IF_FALSE ||
- _Py_OPCODE(next_instr[1]) == POP_JUMP_IF_TRUE);
- bool jump_on_true = _Py_OPCODE(next_instr[1]) == POP_JUMP_IF_TRUE;
- int offset = _Py_OPARG(next_instr[1]);
+ assert(next_instr[1].op.code == POP_JUMP_IF_FALSE ||
+ next_instr[1].op.code == POP_JUMP_IF_TRUE);
+ bool jump_on_true = next_instr[1].op.code == POP_JUMP_IF_TRUE;
+ int offset = next_instr[1].op.arg;
int err = PyObject_IsTrue(cond);
Py_DECREF(cond);
if (err < 0) {
@@ -2230,7 +2230,7 @@
_Py_DECREF_SPECIALIZED(left, _PyFloat_ExactDealloc);
_Py_DECREF_SPECIALIZED(right, _PyFloat_ExactDealloc);
if (sign_ish & oparg) {
- int offset = _Py_OPARG(next_instr[1]);
+ int offset = next_instr[1].op.arg;
JUMPBY(offset);
}
STACK_SHRINK(2);
@@ -2255,7 +2255,7 @@
_Py_DECREF_SPECIALIZED(left, (destructor)PyObject_Free);
_Py_DECREF_SPECIALIZED(right, (destructor)PyObject_Free);
if (sign_ish & oparg) {
- int offset = _Py_OPARG(next_instr[1]);
+ int offset = next_instr[1].op.arg;
JUMPBY(offset);
}
STACK_SHRINK(2);
@@ -2278,7 +2278,7 @@
assert((oparg & 0xf) == COMPARISON_NOT_EQUALS || (oparg & 0xf) == COMPARISON_EQUALS);
assert(COMPARISON_NOT_EQUALS + 1 == COMPARISON_EQUALS);
if ((res + COMPARISON_NOT_EQUALS) & oparg) {
- int offset = _Py_OPARG(next_instr[1]);
+ int offset = next_instr[1].op.arg;
JUMPBY(offset);
}
STACK_SHRINK(2);
@@ -2682,7 +2682,7 @@
_PyErr_Clear(tstate);
}
/* iterator ended normally */
- assert(_Py_OPCODE(next_instr[INLINE_CACHE_ENTRIES_FOR_ITER + oparg]) == END_FOR);
+ assert(next_instr[INLINE_CACHE_ENTRIES_FOR_ITER + oparg].op.code == END_FOR);
Py_DECREF(iter);
STACK_SHRINK(1);
/* Jump forward oparg, then skip following END_FOR instruction */
@@ -2761,7 +2761,7 @@
DEOPT_IF(Py_TYPE(r) != &PyRangeIter_Type, FOR_ITER);
STAT_INC(FOR_ITER, hit);
_Py_CODEUNIT next = next_instr[INLINE_CACHE_ENTRIES_FOR_ITER];
- assert(_PyOpcode_Deopt[_Py_OPCODE(next)] == STORE_FAST);
+ assert(_PyOpcode_Deopt[next.op.code] == STORE_FAST);
if (r->len <= 0) {
STACK_SHRINK(1);
Py_DECREF(r);
@@ -2772,7 +2772,7 @@
long value = r->start;
r->start = value + r->step;
r->len--;
- if (_PyLong_AssignValue(&GETLOCAL(_Py_OPARG(next)), value) < 0) {
+ if (_PyLong_AssignValue(&GETLOCAL(next.op.arg), value) < 0) {
goto error;
}
// The STORE_FAST is already done.
@@ -2795,7 +2795,7 @@
gen->gi_exc_state.previous_item = tstate->exc_info;
tstate->exc_info = &gen->gi_exc_state;
JUMPBY(INLINE_CACHE_ENTRIES_FOR_ITER + oparg);
- assert(_Py_OPCODE(*next_instr) == END_FOR);
+ assert(next_instr->op.code == END_FOR);
DISPATCH_INLINED(gen_frame);
}
@@ -3516,7 +3516,7 @@
STACK_SHRINK(3);
// CALL + POP_TOP
JUMPBY(INLINE_CACHE_ENTRIES_CALL + 1);
- assert(_Py_OPCODE(next_instr[-1]) == POP_TOP);
+ assert(next_instr[-1].op.code == POP_TOP);
DISPATCH();
}
@@ -3903,8 +3903,8 @@
TARGET(EXTENDED_ARG) {
assert(oparg);
assert(cframe.use_tracing == 0);
- opcode = _Py_OPCODE(*next_instr);
- oparg = oparg << 8 | _Py_OPARG(*next_instr);
+ opcode = next_instr->op.code;
+ oparg = oparg << 8 | next_instr->op.arg;
PRE_DISPATCH_GOTO();
DISPATCH_GOTO();
}
diff --git a/Python/specialize.c b/Python/specialize.c
index 4ede312..c9555f8 100644
--- a/Python/specialize.c
+++ b/Python/specialize.c
@@ -282,7 +282,7 @@ _PyCode_Quicken(PyCodeObject *code)
_Py_CODEUNIT *instructions = _PyCode_CODE(code);
for (int i = 0; i < Py_SIZE(code); i++) {
int previous_opcode = opcode;
- opcode = _PyOpcode_Deopt[_Py_OPCODE(instructions[i])];
+ opcode = _PyOpcode_Deopt[instructions[i].op.code];
int caches = _PyOpcode_Caches[opcode];
if (caches) {
instructions[i + 1].cache = adaptive_counter_warmup();
@@ -291,31 +291,31 @@ _PyCode_Quicken(PyCodeObject *code)
}
switch (previous_opcode << 8 | opcode) {
case LOAD_CONST << 8 | LOAD_FAST:
- instructions[i - 1].opcode = LOAD_CONST__LOAD_FAST;
+ instructions[i - 1].op.code = LOAD_CONST__LOAD_FAST;
break;
case LOAD_FAST << 8 | LOAD_CONST:
- instructions[i - 1].opcode = LOAD_FAST__LOAD_CONST;
+ instructions[i - 1].op.code = LOAD_FAST__LOAD_CONST;
break;
case LOAD_FAST << 8 | LOAD_FAST:
- instructions[i - 1].opcode = LOAD_FAST__LOAD_FAST;
+ instructions[i - 1].op.code = LOAD_FAST__LOAD_FAST;
break;
case STORE_FAST << 8 | LOAD_FAST:
- instructions[i - 1].opcode = STORE_FAST__LOAD_FAST;
+ instructions[i - 1].op.code = STORE_FAST__LOAD_FAST;
break;
case STORE_FAST << 8 | STORE_FAST:
- instructions[i - 1].opcode = STORE_FAST__STORE_FAST;
+ instructions[i - 1].op.code = STORE_FAST__STORE_FAST;
break;
case COMPARE_OP << 8 | POP_JUMP_IF_TRUE:
case COMPARE_OP << 8 | POP_JUMP_IF_FALSE:
{
- int oparg = instructions[i - 1 - INLINE_CACHE_ENTRIES_COMPARE_OP].oparg;
+ int oparg = instructions[i - 1 - INLINE_CACHE_ENTRIES_COMPARE_OP].op.arg;
assert((oparg >> 4) <= Py_GE);
int mask = compare_masks[oparg >> 4];
if (opcode == POP_JUMP_IF_FALSE) {
mask = mask ^ 0xf;
}
- instructions[i - 1 - INLINE_CACHE_ENTRIES_COMPARE_OP].opcode = COMPARE_AND_BRANCH;
- instructions[i - 1 - INLINE_CACHE_ENTRIES_COMPARE_OP].oparg = (oparg & 0xf0) | mask;
+ instructions[i - 1 - INLINE_CACHE_ENTRIES_COMPARE_OP].op.code = COMPARE_AND_BRANCH;
+ instructions[i - 1 - INLINE_CACHE_ENTRIES_COMPARE_OP].op.arg = (oparg & 0xf0) | mask;
break;
}
}
@@ -519,7 +519,7 @@ specialize_module_load_attr(
}
write_u32(cache->version, keys_version);
cache->index = (uint16_t)index;
- _py_set_opcode(instr, LOAD_ATTR_MODULE);
+ instr->op.code = LOAD_ATTR_MODULE;
return 0;
}
@@ -674,7 +674,7 @@ specialize_dict_access(
}
write_u32(cache->version, type->tp_version_tag);
cache->index = (uint16_t)index;
- _py_set_opcode(instr, values_op);
+ instr->op.code = values_op;
}
else {
PyDictObject *dict = (PyDictObject *)_PyDictOrValues_GetDict(dorv);
@@ -694,7 +694,7 @@ specialize_dict_access(
}
cache->index = (uint16_t)index;
write_u32(cache->version, type->tp_version_tag);
- _py_set_opcode(instr, hint_op);
+ instr->op.code = hint_op;
}
return 1;
}
@@ -739,7 +739,7 @@ _Py_Specialize_LoadAttr(PyObject *owner, _Py_CODEUNIT *instr, PyObject *name)
goto fail;
case METHOD:
{
- int oparg = _Py_OPARG(*instr);
+ int oparg = instr->op.arg;
if (oparg & 1) {
if (specialize_attr_loadmethod(owner, instr, name, descr, kind)) {
goto success;
@@ -775,7 +775,7 @@ _Py_Specialize_LoadAttr(PyObject *owner, _Py_CODEUNIT *instr, PyObject *name)
write_u32(lm_cache->type_version, type->tp_version_tag);
/* borrowed */
write_obj(lm_cache->descr, fget);
- _py_set_opcode(instr, LOAD_ATTR_PROPERTY);
+ instr->op.code = LOAD_ATTR_PROPERTY;
goto success;
}
case OBJECT_SLOT:
@@ -799,7 +799,7 @@ _Py_Specialize_LoadAttr(PyObject *owner, _Py_CODEUNIT *instr, PyObject *name)
assert(offset > 0);
cache->index = (uint16_t)offset;
write_u32(cache->version, type->tp_version_tag);
- _py_set_opcode(instr, LOAD_ATTR_SLOT);
+ instr->op.code = LOAD_ATTR_SLOT;
goto success;
}
case DUNDER_CLASS:
@@ -808,7 +808,7 @@ _Py_Specialize_LoadAttr(PyObject *owner, _Py_CODEUNIT *instr, PyObject *name)
assert(offset == (uint16_t)offset);
cache->index = (uint16_t)offset;
write_u32(cache->version, type->tp_version_tag);
- _py_set_opcode(instr, LOAD_ATTR_SLOT);
+ instr->op.code = LOAD_ATTR_SLOT;
goto success;
}
case OTHER_SLOT:
@@ -836,7 +836,7 @@ _Py_Specialize_LoadAttr(PyObject *owner, _Py_CODEUNIT *instr, PyObject *name)
/* borrowed */
write_obj(lm_cache->descr, descr);
write_u32(lm_cache->type_version, type->tp_version_tag);
- _py_set_opcode(instr, LOAD_ATTR_GETATTRIBUTE_OVERRIDDEN);
+ instr->op.code = LOAD_ATTR_GETATTRIBUTE_OVERRIDDEN;
goto success;
}
case BUILTIN_CLASSMETHOD:
@@ -867,7 +867,7 @@ _Py_Specialize_LoadAttr(PyObject *owner, _Py_CODEUNIT *instr, PyObject *name)
fail:
STAT_INC(LOAD_ATTR, failure);
assert(!PyErr_Occurred());
- _py_set_opcode(instr, LOAD_ATTR);
+ instr->op.code = LOAD_ATTR;
cache->counter = adaptive_counter_backoff(cache->counter);
return;
success:
@@ -927,7 +927,7 @@ _Py_Specialize_StoreAttr(PyObject *owner, _Py_CODEUNIT *instr, PyObject *name)
assert(offset > 0);
cache->index = (uint16_t)offset;
write_u32(cache->version, type->tp_version_tag);
- _py_set_opcode(instr, STORE_ATTR_SLOT);
+ instr->op.code = STORE_ATTR_SLOT;
goto success;
}
case DUNDER_CLASS:
@@ -963,7 +963,7 @@ _Py_Specialize_StoreAttr(PyObject *owner, _Py_CODEUNIT *instr, PyObject *name)
fail:
STAT_INC(STORE_ATTR, failure);
assert(!PyErr_Occurred());
- _py_set_opcode(instr, STORE_ATTR);
+ instr->op.code = STORE_ATTR;
cache->counter = adaptive_counter_backoff(cache->counter);
return;
success:
@@ -1027,7 +1027,7 @@ specialize_class_load_attr(PyObject *owner, _Py_CODEUNIT *instr,
case NON_DESCRIPTOR:
write_u32(cache->type_version, ((PyTypeObject *)owner)->tp_version_tag);
write_obj(cache->descr, descr);
- _py_set_opcode(instr, LOAD_ATTR_CLASS);
+ instr->op.code = LOAD_ATTR_CLASS;
return 0;
#ifdef Py_STATS
case ABSENT:
@@ -1069,7 +1069,7 @@ PyObject *descr, DescriptorClassification kind)
return 0;
}
write_u32(cache->keys_version, keys_version);
- _py_set_opcode(instr, LOAD_ATTR_METHOD_WITH_VALUES);
+ instr->op.code = LOAD_ATTR_METHOD_WITH_VALUES;
}
else {
Py_ssize_t dictoffset = owner_cls->tp_dictoffset;
@@ -1078,7 +1078,7 @@ PyObject *descr, DescriptorClassification kind)
return 0;
}
if (dictoffset == 0) {
- _py_set_opcode(instr, LOAD_ATTR_METHOD_NO_DICT);
+ instr->op.code = LOAD_ATTR_METHOD_NO_DICT;
}
else {
PyObject *dict = *(PyObject **) ((char *)owner + dictoffset);
@@ -1088,7 +1088,7 @@ PyObject *descr, DescriptorClassification kind)
}
assert(owner_cls->tp_dictoffset > 0);
assert(owner_cls->tp_dictoffset <= INT16_MAX);
- _py_set_opcode(instr, LOAD_ATTR_METHOD_LAZY_DICT);
+ instr->op.code = LOAD_ATTR_METHOD_LAZY_DICT;
}
}
/* `descr` is borrowed. This is safe for methods (even inherited ones from
@@ -1146,7 +1146,7 @@ _Py_Specialize_LoadGlobal(
}
cache->index = (uint16_t)index;
write_u32(cache->module_keys_version, keys_version);
- _py_set_opcode(instr, LOAD_GLOBAL_MODULE);
+ instr->op.code = LOAD_GLOBAL_MODULE;
goto success;
}
if (!PyDict_CheckExact(builtins)) {
@@ -1184,12 +1184,12 @@ _Py_Specialize_LoadGlobal(
cache->index = (uint16_t)index;
write_u32(cache->module_keys_version, globals_version);
cache->builtin_keys_version = (uint16_t)builtins_version;
- _py_set_opcode(instr, LOAD_GLOBAL_BUILTIN);
+ instr->op.code = LOAD_GLOBAL_BUILTIN;
goto success;
fail:
STAT_INC(LOAD_GLOBAL, failure);
assert(!PyErr_Occurred());
- _py_set_opcode(instr, LOAD_GLOBAL);
+ instr->op.code = LOAD_GLOBAL;
cache->counter = adaptive_counter_backoff(cache->counter);
return;
success:
@@ -1295,7 +1295,7 @@ _Py_Specialize_BinarySubscr(
if (container_type == &PyList_Type) {
if (PyLong_CheckExact(sub)) {
if (Py_SIZE(sub) == 0 || Py_SIZE(sub) == 1) {
- _py_set_opcode(instr, BINARY_SUBSCR_LIST_INT);
+ instr->op.code = BINARY_SUBSCR_LIST_INT;
goto success;
}
SPECIALIZATION_FAIL(BINARY_SUBSCR, SPEC_FAIL_OUT_OF_RANGE);
@@ -1308,7 +1308,7 @@ _Py_Specialize_BinarySubscr(
if (container_type == &PyTuple_Type) {
if (PyLong_CheckExact(sub)) {
if (Py_SIZE(sub) == 0 || Py_SIZE(sub) == 1) {
- _py_set_opcode(instr, BINARY_SUBSCR_TUPLE_INT);
+ instr->op.code = BINARY_SUBSCR_TUPLE_INT;
goto success;
}
SPECIALIZATION_FAIL(BINARY_SUBSCR, SPEC_FAIL_OUT_OF_RANGE);
@@ -1319,7 +1319,7 @@ _Py_Specialize_BinarySubscr(
goto fail;
}
if (container_type == &PyDict_Type) {
- _py_set_opcode(instr, BINARY_SUBSCR_DICT);
+ instr->op.code = BINARY_SUBSCR_DICT;
goto success;
}
PyTypeObject *cls = Py_TYPE(container);
@@ -1350,7 +1350,7 @@ _Py_Specialize_BinarySubscr(
}
cache->func_version = version;
((PyHeapTypeObject *)container_type)->_spec_cache.getitem = descriptor;
- _py_set_opcode(instr, BINARY_SUBSCR_GETITEM);
+ instr->op.code = BINARY_SUBSCR_GETITEM;
goto success;
}
SPECIALIZATION_FAIL(BINARY_SUBSCR,
@@ -1358,7 +1358,7 @@ _Py_Specialize_BinarySubscr(
fail:
STAT_INC(BINARY_SUBSCR, failure);
assert(!PyErr_Occurred());
- _py_set_opcode(instr, BINARY_SUBSCR);
+ instr->op.code = BINARY_SUBSCR;
cache->counter = adaptive_counter_backoff(cache->counter);
return;
success:
@@ -1378,7 +1378,7 @@ _Py_Specialize_StoreSubscr(PyObject *container, PyObject *sub, _Py_CODEUNIT *ins
if ((Py_SIZE(sub) == 0 || Py_SIZE(sub) == 1)
&& ((PyLongObject *)sub)->long_value.ob_digit[0] < (size_t)PyList_GET_SIZE(container))
{
- _py_set_opcode(instr, STORE_SUBSCR_LIST_INT);
+ instr->op.code = STORE_SUBSCR_LIST_INT;
goto success;
}
else {
@@ -1396,8 +1396,8 @@ _Py_Specialize_StoreSubscr(PyObject *container, PyObject *sub, _Py_CODEUNIT *ins
}
}
if (container_type == &PyDict_Type) {
- _py_set_opcode(instr, STORE_SUBSCR_DICT);
- goto success;
+ instr->op.code = STORE_SUBSCR_DICT;
+ goto success;
}
#ifdef Py_STATS
PyMappingMethods *as_mapping = container_type->tp_as_mapping;
@@ -1463,7 +1463,7 @@ _Py_Specialize_StoreSubscr(PyObject *container, PyObject *sub, _Py_CODEUNIT *ins
fail:
STAT_INC(STORE_SUBSCR, failure);
assert(!PyErr_Occurred());
- _py_set_opcode(instr, STORE_SUBSCR);
+ instr->op.code = STORE_SUBSCR;
cache->counter = adaptive_counter_backoff(cache->counter);
return;
success:
@@ -1482,23 +1482,23 @@ specialize_class_call(PyObject *callable, _Py_CODEUNIT *instr, int nargs,
return -1;
}
if (tp->tp_flags & Py_TPFLAGS_IMMUTABLETYPE) {
- int oparg = _Py_OPARG(*instr);
+ int oparg = instr->op.arg;
if (nargs == 1 && kwnames == NULL && oparg == 1) {
if (tp == &PyUnicode_Type) {
- _py_set_opcode(instr, CALL_NO_KW_STR_1);
+ instr->op.code = CALL_NO_KW_STR_1;
return 0;
}
else if (tp == &PyType_Type) {
- _py_set_opcode(instr, CALL_NO_KW_TYPE_1);
+ instr->op.code = CALL_NO_KW_TYPE_1;
return 0;
}
else if (tp == &PyTuple_Type) {
- _py_set_opcode(instr, CALL_NO_KW_TUPLE_1);
+ instr->op.code = CALL_NO_KW_TUPLE_1;
return 0;
}
}
if (tp->tp_vectorcall != NULL) {
- _py_set_opcode(instr, CALL_BUILTIN_CLASS);
+ instr->op.code = CALL_BUILTIN_CLASS;
return 0;
}
SPECIALIZATION_FAIL(CALL, tp == &PyUnicode_Type ?
@@ -1573,7 +1573,7 @@ specialize_method_descriptor(PyMethodDescrObject *descr, _Py_CODEUNIT *instr,
SPECIALIZATION_FAIL(CALL, SPEC_FAIL_WRONG_NUMBER_ARGUMENTS);
return -1;
}
- _py_set_opcode(instr, CALL_NO_KW_METHOD_DESCRIPTOR_NOARGS);
+ instr->op.code = CALL_NO_KW_METHOD_DESCRIPTOR_NOARGS;
return 0;
}
case METH_O: {
@@ -1584,21 +1584,21 @@ specialize_method_descriptor(PyMethodDescrObject *descr, _Py_CODEUNIT *instr,
PyInterpreterState *interp = _PyInterpreterState_GET();
PyObject *list_append = interp->callable_cache.list_append;
_Py_CODEUNIT next = instr[INLINE_CACHE_ENTRIES_CALL + 1];
- bool pop = (_Py_OPCODE(next) == POP_TOP);
- int oparg = _Py_OPARG(*instr);
+ bool pop = (next.op.code == POP_TOP);
+ int oparg = instr->op.arg;
if ((PyObject *)descr == list_append && oparg == 1 && pop) {
- _py_set_opcode(instr, CALL_NO_KW_LIST_APPEND);
+ instr->op.code = CALL_NO_KW_LIST_APPEND;
return 0;
}
- _py_set_opcode(instr, CALL_NO_KW_METHOD_DESCRIPTOR_O);
+ instr->op.code = CALL_NO_KW_METHOD_DESCRIPTOR_O;
return 0;
}
case METH_FASTCALL: {
- _py_set_opcode(instr, CALL_NO_KW_METHOD_DESCRIPTOR_FAST);
+ instr->op.code = CALL_NO_KW_METHOD_DESCRIPTOR_FAST;
return 0;
}
case METH_FASTCALL | METH_KEYWORDS: {
- _py_set_opcode(instr, CALL_METHOD_DESCRIPTOR_FAST_WITH_KEYWORDS);
+ instr->op.code = CALL_METHOD_DESCRIPTOR_FAST_WITH_KEYWORDS;
return 0;
}
}
@@ -1649,14 +1649,14 @@ specialize_py_call(PyFunctionObject *func, _Py_CODEUNIT *instr, int nargs,
write_u32(cache->func_version, version);
cache->min_args = min_args;
if (argcount == nargs) {
- _py_set_opcode(instr, bound_method ? CALL_BOUND_METHOD_EXACT_ARGS : CALL_PY_EXACT_ARGS);
+ instr->op.code = bound_method ? CALL_BOUND_METHOD_EXACT_ARGS : CALL_PY_EXACT_ARGS;
}
else if (bound_method) {
SPECIALIZATION_FAIL(CALL, SPEC_FAIL_CALL_BOUND_METHOD);
return -1;
}
else {
- _py_set_opcode(instr, CALL_PY_WITH_DEFAULTS);
+ instr->op.code = CALL_PY_WITH_DEFAULTS;
}
return 0;
}
@@ -1683,10 +1683,10 @@ specialize_c_call(PyObject *callable, _Py_CODEUNIT *instr, int nargs,
/* len(o) */
PyInterpreterState *interp = _PyInterpreterState_GET();
if (callable == interp->callable_cache.len) {
- _py_set_opcode(instr, CALL_NO_KW_LEN);
+ instr->op.code = CALL_NO_KW_LEN;
return 0;
}
- _py_set_opcode(instr, CALL_NO_KW_BUILTIN_O);
+ instr->op.code = CALL_NO_KW_BUILTIN_O;
return 0;
}
case METH_FASTCALL: {
@@ -1698,15 +1698,15 @@ specialize_c_call(PyObject *callable, _Py_CODEUNIT *instr, int nargs,
/* isinstance(o1, o2) */
PyInterpreterState *interp = _PyInterpreterState_GET();
if (callable == interp->callable_cache.isinstance) {
- _py_set_opcode(instr, CALL_NO_KW_ISINSTANCE);
+ instr->op.code = CALL_NO_KW_ISINSTANCE;
return 0;
}
}
- _py_set_opcode(instr, CALL_NO_KW_BUILTIN_FAST);
+ instr->op.code = CALL_NO_KW_BUILTIN_FAST;
return 0;
}
case METH_FASTCALL | METH_KEYWORDS: {
- _py_set_opcode(instr, CALL_BUILTIN_FAST_WITH_KEYWORDS);
+ instr->op.code = CALL_BUILTIN_FAST_WITH_KEYWORDS;
return 0;
}
default:
@@ -1785,7 +1785,7 @@ _Py_Specialize_Call(PyObject *callable, _Py_CODEUNIT *instr, int nargs,
if (fail) {
STAT_INC(CALL, failure);
assert(!PyErr_Occurred());
- _py_set_opcode(instr, CALL);
+ instr->op.code = CALL;
cache->counter = adaptive_counter_backoff(cache->counter);
}
else {
@@ -1880,21 +1880,21 @@ _Py_Specialize_BinaryOp(PyObject *lhs, PyObject *rhs, _Py_CODEUNIT *instr,
}
if (PyUnicode_CheckExact(lhs)) {
_Py_CODEUNIT next = instr[INLINE_CACHE_ENTRIES_BINARY_OP + 1];
- bool to_store = (_Py_OPCODE(next) == STORE_FAST ||
- _Py_OPCODE(next) == STORE_FAST__LOAD_FAST);
- if (to_store && locals[_Py_OPARG(next)] == lhs) {
- _py_set_opcode(instr, BINARY_OP_INPLACE_ADD_UNICODE);
+ bool to_store = (next.op.code == STORE_FAST ||
+ next.op.code == STORE_FAST__LOAD_FAST);
+ if (to_store && locals[next.op.arg] == lhs) {
+ instr->op.code = BINARY_OP_INPLACE_ADD_UNICODE;
goto success;
}
- _py_set_opcode(instr, BINARY_OP_ADD_UNICODE);
+ instr->op.code = BINARY_OP_ADD_UNICODE;
goto success;
}
if (PyLong_CheckExact(lhs)) {
- _py_set_opcode(instr, BINARY_OP_ADD_INT);
+ instr->op.code = BINARY_OP_ADD_INT;
goto success;
}
if (PyFloat_CheckExact(lhs)) {
- _py_set_opcode(instr, BINARY_OP_ADD_FLOAT);
+ instr->op.code = BINARY_OP_ADD_FLOAT;
goto success;
}
break;
@@ -1904,11 +1904,11 @@ _Py_Specialize_BinaryOp(PyObject *lhs, PyObject *rhs, _Py_CODEUNIT *instr,
break;
}
if (PyLong_CheckExact(lhs)) {
- _py_set_opcode(instr, BINARY_OP_MULTIPLY_INT);
+ instr->op.code = BINARY_OP_MULTIPLY_INT;
goto success;
}
if (PyFloat_CheckExact(lhs)) {
- _py_set_opcode(instr, BINARY_OP_MULTIPLY_FLOAT);
+ instr->op.code = BINARY_OP_MULTIPLY_FLOAT;
goto success;
}
break;
@@ -1918,18 +1918,18 @@ _Py_Specialize_BinaryOp(PyObject *lhs, PyObject *rhs, _Py_CODEUNIT *instr,
break;
}
if (PyLong_CheckExact(lhs)) {
- _py_set_opcode(instr, BINARY_OP_SUBTRACT_INT);
+ instr->op.code = BINARY_OP_SUBTRACT_INT;
goto success;
}
if (PyFloat_CheckExact(lhs)) {
- _py_set_opcode(instr, BINARY_OP_SUBTRACT_FLOAT);
+ instr->op.code = BINARY_OP_SUBTRACT_FLOAT;
goto success;
}
break;
}
SPECIALIZATION_FAIL(BINARY_OP, binary_op_fail_kind(oparg, lhs, rhs));
STAT_INC(BINARY_OP, failure);
- _py_set_opcode(instr, BINARY_OP);
+ instr->op.code = BINARY_OP;
cache->counter = adaptive_counter_backoff(cache->counter);
return;
success:
@@ -1981,7 +1981,7 @@ _Py_Specialize_CompareAndBranch(PyObject *lhs, PyObject *rhs, _Py_CODEUNIT *inst
assert(_PyOpcode_Caches[COMPARE_AND_BRANCH] == INLINE_CACHE_ENTRIES_COMPARE_OP);
_PyCompareOpCache *cache = (_PyCompareOpCache *)(instr + 1);
#ifndef NDEBUG
- int next_opcode = _Py_OPCODE(instr[INLINE_CACHE_ENTRIES_COMPARE_OP + 1]);
+ int next_opcode = instr[INLINE_CACHE_ENTRIES_COMPARE_OP + 1].op.code;
assert(next_opcode == POP_JUMP_IF_FALSE || next_opcode == POP_JUMP_IF_TRUE);
#endif
if (Py_TYPE(lhs) != Py_TYPE(rhs)) {
@@ -1989,12 +1989,12 @@ _Py_Specialize_CompareAndBranch(PyObject *lhs, PyObject *rhs, _Py_CODEUNIT *inst
goto failure;
}
if (PyFloat_CheckExact(lhs)) {
- _py_set_opcode(instr, COMPARE_AND_BRANCH_FLOAT);
+ instr->op.code = COMPARE_AND_BRANCH_FLOAT;
goto success;
}
if (PyLong_CheckExact(lhs)) {
if (Py_ABS(Py_SIZE(lhs)) <= 1 && Py_ABS(Py_SIZE(rhs)) <= 1) {
- _py_set_opcode(instr, COMPARE_AND_BRANCH_INT);
+ instr->op.code = COMPARE_AND_BRANCH_INT;
goto success;
}
else {
@@ -2009,14 +2009,14 @@ _Py_Specialize_CompareAndBranch(PyObject *lhs, PyObject *rhs, _Py_CODEUNIT *inst
goto failure;
}
else {
- _py_set_opcode(instr, COMPARE_AND_BRANCH_STR);
+ instr->op.code = COMPARE_AND_BRANCH_STR;
goto success;
}
}
SPECIALIZATION_FAIL(COMPARE_AND_BRANCH, compare_op_fail_kind(lhs, rhs));
failure:
STAT_INC(COMPARE_AND_BRANCH, failure);
- _py_set_opcode(instr, COMPARE_AND_BRANCH);
+ instr->op.code = COMPARE_AND_BRANCH;
cache->counter = adaptive_counter_backoff(cache->counter);
return;
success:
@@ -2051,10 +2051,10 @@ _Py_Specialize_UnpackSequence(PyObject *seq, _Py_CODEUNIT *instr, int oparg)
goto failure;
}
if (PyTuple_GET_SIZE(seq) == 2) {
- _py_set_opcode(instr, UNPACK_SEQUENCE_TWO_TUPLE);
+ instr->op.code = UNPACK_SEQUENCE_TWO_TUPLE;
goto success;
}
- _py_set_opcode(instr, UNPACK_SEQUENCE_TUPLE);
+ instr->op.code = UNPACK_SEQUENCE_TUPLE;
goto success;
}
if (PyList_CheckExact(seq)) {
@@ -2062,13 +2062,13 @@ _Py_Specialize_UnpackSequence(PyObject *seq, _Py_CODEUNIT *instr, int oparg)
SPECIALIZATION_FAIL(UNPACK_SEQUENCE, SPEC_FAIL_EXPECTED_ERROR);
goto failure;
}
- _py_set_opcode(instr, UNPACK_SEQUENCE_LIST);
+ instr->op.code = UNPACK_SEQUENCE_LIST;
goto success;
}
SPECIALIZATION_FAIL(UNPACK_SEQUENCE, unpack_sequence_fail_kind(seq));
failure:
STAT_INC(UNPACK_SEQUENCE, failure);
- _py_set_opcode(instr, UNPACK_SEQUENCE);
+ instr->op.code = UNPACK_SEQUENCE;
cache->counter = adaptive_counter_backoff(cache->counter);
return;
success:
@@ -2156,28 +2156,28 @@ _Py_Specialize_ForIter(PyObject *iter, _Py_CODEUNIT *instr, int oparg)
_PyForIterCache *cache = (_PyForIterCache *)(instr + 1);
PyTypeObject *tp = Py_TYPE(iter);
_Py_CODEUNIT next = instr[1+INLINE_CACHE_ENTRIES_FOR_ITER];
- int next_op = _PyOpcode_Deopt[_Py_OPCODE(next)];
+ int next_op = _PyOpcode_Deopt[next.op.code];
if (tp == &PyListIter_Type) {
- _py_set_opcode(instr, FOR_ITER_LIST);
+ instr->op.code = FOR_ITER_LIST;
goto success;
}
else if (tp == &PyTupleIter_Type) {
- _py_set_opcode(instr, FOR_ITER_TUPLE);
+ instr->op.code = FOR_ITER_TUPLE;
goto success;
}
else if (tp == &PyRangeIter_Type && next_op == STORE_FAST) {
- _py_set_opcode(instr, FOR_ITER_RANGE);
+ instr->op.code = FOR_ITER_RANGE;
goto success;
}
else if (tp == &PyGen_Type && oparg <= SHRT_MAX) {
- assert(_Py_OPCODE(instr[oparg + INLINE_CACHE_ENTRIES_FOR_ITER + 1]) == END_FOR);
- _py_set_opcode(instr, FOR_ITER_GEN);
+ assert(instr[oparg + INLINE_CACHE_ENTRIES_FOR_ITER + 1].op.code == END_FOR);
+ instr->op.code = FOR_ITER_GEN;
goto success;
}
SPECIALIZATION_FAIL(FOR_ITER,
_PySpecialization_ClassifyIterator(iter));
STAT_INC(FOR_ITER, failure);
- _py_set_opcode(instr, FOR_ITER);
+ instr->op.code = FOR_ITER;
cache->counter = adaptive_counter_backoff(cache->counter);
return;
success:
@@ -2193,13 +2193,13 @@ _Py_Specialize_Send(PyObject *receiver, _Py_CODEUNIT *instr)
_PySendCache *cache = (_PySendCache *)(instr + 1);
PyTypeObject *tp = Py_TYPE(receiver);
if (tp == &PyGen_Type || tp == &PyCoro_Type) {
- _py_set_opcode(instr, SEND_GEN);
+ instr->op.code = SEND_GEN;
goto success;
}
SPECIALIZATION_FAIL(SEND,
_PySpecialization_ClassifyIterator(receiver));
STAT_INC(SEND, failure);
- _py_set_opcode(instr, SEND);
+ instr->op.code = SEND;
cache->counter = adaptive_counter_backoff(cache->counter);
return;
success: