Diffstat (limited to 'Python/bytecodes.c')
-rw-r--r--  Python/bytecodes.c | 120
1 file changed, 58 insertions(+), 62 deletions(-)
diff --git a/Python/bytecodes.c b/Python/bytecodes.c
index 0fc0b3b..b43625f 100644
--- a/Python/bytecodes.c
+++ b/Python/bytecodes.c
@@ -859,13 +859,11 @@ dummy_func(
}
}
- // stack effect: (__0 -- __array[oparg])
- inst(UNPACK_SEQUENCE) {
+ inst(UNPACK_SEQUENCE, (unused/1, seq -- unused[oparg])) {
#if ENABLE_SPECIALIZATION
_PyUnpackSequenceCache *cache = (_PyUnpackSequenceCache *)next_instr;
if (ADAPTIVE_COUNTER_IS_ZERO(cache->counter)) {
assert(cframe.use_tracing == 0);
- PyObject *seq = TOP();
next_instr--;
_Py_Specialize_UnpackSequence(seq, next_instr, oparg);
DISPATCH_SAME_OPARG();
@@ -873,27 +871,19 @@ dummy_func(
STAT_INC(UNPACK_SEQUENCE, deferred);
DECREMENT_ADAPTIVE_COUNTER(cache->counter);
#endif /* ENABLE_SPECIALIZATION */
- PyObject *seq = POP();
- PyObject **top = stack_pointer + oparg;
- if (!unpack_iterable(tstate, seq, oparg, -1, top)) {
- Py_DECREF(seq);
- goto error;
- }
- STACK_GROW(oparg);
+ PyObject **top = stack_pointer + oparg - 1;
+ int res = unpack_iterable(tstate, seq, oparg, -1, top);
Py_DECREF(seq);
- JUMPBY(INLINE_CACHE_ENTRIES_UNPACK_SEQUENCE);
+ ERROR_IF(res == 0, error);
}
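As a rough illustration of what the new declaration buys: (unused/1, seq -- unused[oparg]) tells the code generator there is one inline cache entry, one input (seq, left on the stack while the body runs, which is why 'top' becomes stack_pointer + oparg - 1), and oparg result slots. The stack bookkeeping the old body wrote by hand then comes out of the generator in roughly this shape (an illustrative sketch, not the actual generated_cases.c.h output):

TARGET(UNPACK_SEQUENCE) {
    PyObject *seq = PEEK(1);   /* the input named in the stack effect */
    /* ... specialization check and unpack_iterable() body from above ... */
    STACK_SHRINK(1);           /* consume seq */
    STACK_GROW(oparg);         /* expose the oparg values written via 'top' */
    next_instr += 1;           /* skip the unused/1 inline cache entry */
    DISPATCH();
}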
- // stack effect: (__0 -- __array[oparg])
- inst(UNPACK_SEQUENCE_TWO_TUPLE) {
- PyObject *seq = TOP();
+ inst(UNPACK_SEQUENCE_TWO_TUPLE, (unused/1, seq -- v1, v0)) {
DEOPT_IF(!PyTuple_CheckExact(seq), UNPACK_SEQUENCE);
DEOPT_IF(PyTuple_GET_SIZE(seq) != 2, UNPACK_SEQUENCE);
STAT_INC(UNPACK_SEQUENCE, hit);
- SET_TOP(Py_NewRef(PyTuple_GET_ITEM(seq, 1)));
- PUSH(Py_NewRef(PyTuple_GET_ITEM(seq, 0)));
+ v1 = Py_NewRef(PyTuple_GET_ITEM(seq, 1));
+ v0 = Py_NewRef(PyTuple_GET_ITEM(seq, 0));
Py_DECREF(seq);
- JUMPBY(INLINE_CACHE_ENTRIES_UNPACK_SEQUENCE);
}
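Note on the output order: in the DSL the rightmost output is the top of stack, so (seq -- v1, v0) preserves UNPACK_SEQUENCE's contract that item 0 ends up on top. A minimal sketch for a two-element unpack (variable names are illustrative):

/* a, b = pair
 *   v1 = Py_NewRef(PyTuple_GET_ITEM(pair, 1));   // deeper slot
 *   v0 = Py_NewRef(PyTuple_GET_ITEM(pair, 0));   // top of stack, stored into 'a' first
 */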
// stack effect: (__0 -- __array[oparg])
@@ -926,17 +916,12 @@ dummy_func(
JUMPBY(INLINE_CACHE_ENTRIES_UNPACK_SEQUENCE);
}
- // error: UNPACK_EX has irregular stack effect
- inst(UNPACK_EX) {
+ inst(UNPACK_EX, (seq -- unused[oparg & 0xFF], unused, unused[oparg >> 8])) {
int totalargs = 1 + (oparg & 0xFF) + (oparg >> 8);
- PyObject *seq = POP();
- PyObject **top = stack_pointer + totalargs;
- if (!unpack_iterable(tstate, seq, oparg & 0xFF, oparg >> 8, top)) {
- Py_DECREF(seq);
- goto error;
- }
- STACK_GROW(totalargs);
+ PyObject **top = stack_pointer + totalargs - 1;
+ int res = unpack_iterable(tstate, seq, oparg & 0xFF, oparg >> 8, top);
Py_DECREF(seq);
+ ERROR_IF(res == 0, error);
}
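A worked example of the oparg split used above, assuming the documented UNPACK_EX encoding (low byte: targets before the starred name, high byte: targets after it):

/* a, *rest, y, z = seq
 *   oparg        == 1 | (2 << 8) == 513
 *   oparg & 0xFF == 1                   targets before the star
 *   oparg >> 8   == 2                   targets after the star
 *   totalargs    == 1 + 1 + 2 == 4      values pushed: a, rest (a list), y, z
 */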
family(store_attr, INLINE_CACHE_ENTRIES_STORE_ATTR) = {
@@ -2066,27 +2051,35 @@ dummy_func(
PREDICT(LOAD_CONST);
}
- // stack effect: ( -- __0)
- inst(FOR_ITER) {
+ // Most members of this family are "secretly" super-instructions.
+ // When the loop is exhausted, they jump, and the jump target is
+ // always END_FOR, which pops two values off the stack.
+ // This is optimized by skipping that instruction and combining
+ // its effect (popping 'iter' instead of pushing 'next').
+
+ family(for_iter, INLINE_CACHE_ENTRIES_FOR_ITER) = {
+ FOR_ITER,
+ FOR_ITER_LIST,
+ FOR_ITER_TUPLE,
+ FOR_ITER_RANGE,
+ FOR_ITER_GEN,
+ };
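To make the comment above concrete, a plain for-loop compiles to roughly the following layout (schematic only; offsets and cache entries are abbreviated):

/* for x in seq: <body>
 *
 *           GET_ITER
 *   iter:   FOR_ITER        end        ; followed by 1 inline cache entry
 *           STORE_FAST      x
 *           <body>
 *           JUMP_BACKWARD   iter
 *   end:    END_FOR                    ; pops two values
 *
 * On exhaustion, the instructions in this family pop 'iter' themselves and
 * jump oparg + 1 code units past the cache, landing just after 'end', so
 * END_FOR is normally only reached through FOR_ITER_GEN (which, as noted
 * below, is not a super-instruction).
 */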
+
+ inst(FOR_ITER, (unused/1, iter -- iter, next)) {
#if ENABLE_SPECIALIZATION
_PyForIterCache *cache = (_PyForIterCache *)next_instr;
if (ADAPTIVE_COUNTER_IS_ZERO(cache->counter)) {
assert(cframe.use_tracing == 0);
next_instr--;
- _Py_Specialize_ForIter(TOP(), next_instr, oparg);
+ _Py_Specialize_ForIter(iter, next_instr, oparg);
DISPATCH_SAME_OPARG();
}
STAT_INC(FOR_ITER, deferred);
DECREMENT_ADAPTIVE_COUNTER(cache->counter);
#endif /* ENABLE_SPECIALIZATION */
- /* before: [iter]; after: [iter, iter()] *or* [] */
- PyObject *iter = TOP();
- PyObject *next = (*Py_TYPE(iter)->tp_iternext)(iter);
- if (next != NULL) {
- PUSH(next);
- JUMPBY(INLINE_CACHE_ENTRIES_FOR_ITER);
- }
- else {
+ /* before: [iter]; after: [iter, iter()] *or* [] (and jump over END_FOR.) */
+ next = (*Py_TYPE(iter)->tp_iternext)(iter);
+ if (next == NULL) {
if (_PyErr_Occurred(tstate)) {
if (!_PyErr_ExceptionMatches(tstate, PyExc_StopIteration)) {
goto error;
@@ -2098,63 +2091,66 @@ dummy_func(
}
/* iterator ended normally */
assert(_Py_OPCODE(next_instr[INLINE_CACHE_ENTRIES_FOR_ITER + oparg]) == END_FOR);
- STACK_SHRINK(1);
Py_DECREF(iter);
- /* Skip END_FOR */
+ STACK_SHRINK(1);
+ /* Jump forward oparg, then skip following END_FOR instruction */
JUMPBY(INLINE_CACHE_ENTRIES_FOR_ITER + oparg + 1);
+ DISPATCH();
}
+ // Common case: no jump, leave it to the code generator
}
- // stack effect: ( -- __0)
- inst(FOR_ITER_LIST) {
+ inst(FOR_ITER_LIST, (unused/1, iter -- iter, next)) {
assert(cframe.use_tracing == 0);
- _PyListIterObject *it = (_PyListIterObject *)TOP();
- DEOPT_IF(Py_TYPE(it) != &PyListIter_Type, FOR_ITER);
+ DEOPT_IF(Py_TYPE(iter) != &PyListIter_Type, FOR_ITER);
+ _PyListIterObject *it = (_PyListIterObject *)iter;
STAT_INC(FOR_ITER, hit);
PyListObject *seq = it->it_seq;
if (seq) {
if (it->it_index < PyList_GET_SIZE(seq)) {
- PyObject *next = PyList_GET_ITEM(seq, it->it_index++);
- PUSH(Py_NewRef(next));
- JUMPBY(INLINE_CACHE_ENTRIES_FOR_ITER);
+ next = Py_NewRef(PyList_GET_ITEM(seq, it->it_index++));
goto end_for_iter_list; // End of this instruction
}
it->it_seq = NULL;
Py_DECREF(seq);
}
+ Py_DECREF(iter);
STACK_SHRINK(1);
- Py_DECREF(it);
+ /* Jump forward oparg, then skip following END_FOR instruction */
JUMPBY(INLINE_CACHE_ENTRIES_FOR_ITER + oparg + 1);
+ DISPATCH();
end_for_iter_list:
+ // Common case: no jump, leave it to the code generator
}
- // stack effect: ( -- __0)
- inst(FOR_ITER_TUPLE) {
+ inst(FOR_ITER_TUPLE, (unused/1, iter -- iter, next)) {
assert(cframe.use_tracing == 0);
- _PyTupleIterObject *it = (_PyTupleIterObject *)TOP();
+ _PyTupleIterObject *it = (_PyTupleIterObject *)iter;
DEOPT_IF(Py_TYPE(it) != &PyTupleIter_Type, FOR_ITER);
STAT_INC(FOR_ITER, hit);
PyTupleObject *seq = it->it_seq;
if (seq) {
if (it->it_index < PyTuple_GET_SIZE(seq)) {
- PyObject *next = PyTuple_GET_ITEM(seq, it->it_index++);
- PUSH(Py_NewRef(next));
- JUMPBY(INLINE_CACHE_ENTRIES_FOR_ITER);
+ next = Py_NewRef(PyTuple_GET_ITEM(seq, it->it_index++));
goto end_for_iter_tuple; // End of this instruction
}
it->it_seq = NULL;
Py_DECREF(seq);
}
+ Py_DECREF(iter);
STACK_SHRINK(1);
- Py_DECREF(it);
+ /* Jump forward oparg, then skip following END_FOR instruction */
JUMPBY(INLINE_CACHE_ENTRIES_FOR_ITER + oparg + 1);
+ DISPATCH();
end_for_iter_tuple:
+ // Common case: no jump, leave it to the code generator
}
- // stack effect: ( -- __0)
- inst(FOR_ITER_RANGE) {
+ // This is slightly different: when the loop isn't terminated we
+ // jump over the immediately following STORE_FAST instruction.
+ inst(FOR_ITER_RANGE, (unused/1, iter -- iter, unused)) {
assert(cframe.use_tracing == 0);
- _PyRangeIterObject *r = (_PyRangeIterObject *)TOP();
+ _PyRangeIterObject *r = (_PyRangeIterObject *)iter;
DEOPT_IF(Py_TYPE(r) != &PyRangeIter_Type, FOR_ITER);
STAT_INC(FOR_ITER, hit);
_Py_CODEUNIT next = next_instr[INLINE_CACHE_ENTRIES_FOR_ITER];
@@ -2162,6 +2158,7 @@ dummy_func(
if (r->len <= 0) {
STACK_SHRINK(1);
Py_DECREF(r);
+ // Jump over END_FOR instruction.
JUMPBY(INLINE_CACHE_ENTRIES_FOR_ITER + oparg + 1);
}
else {
@@ -2174,11 +2171,13 @@ dummy_func(
// The STORE_FAST is already done.
JUMPBY(INLINE_CACHE_ENTRIES_FOR_ITER + 1);
}
+ DISPATCH();
}
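The lines elided from this hunk are where the range specialization pays off: it reads the oparg of the STORE_FAST that follows and writes the next range value straight into that local, which is why the taken path skips only one extra code unit. A sketch of that path, inferred from the surrounding context (the exact helper calls in the real body may differ):

/* assert(_Py_OPCODE(next) == STORE_FAST);      // 'next' is the loop variable's store
 * // advance the range (r->start += r->step, r->len--) and assign the produced
 * // value directly into GETLOCAL(_Py_OPARG(next))
 * JUMPBY(INLINE_CACHE_ENTRIES_FOR_ITER + 1);   // the STORE_FAST is already done
 */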
- inst(FOR_ITER_GEN) {
+ // This is *not* a super-instruction, unique in the family.
+ inst(FOR_ITER_GEN, (unused/1, iter -- iter, unused)) {
assert(cframe.use_tracing == 0);
- PyGenObject *gen = (PyGenObject *)TOP();
+ PyGenObject *gen = (PyGenObject *)iter;
DEOPT_IF(Py_TYPE(gen) != &PyGen_Type, FOR_ITER);
DEOPT_IF(gen->gi_frame_state >= FRAME_EXECUTING, FOR_ITER);
STAT_INC(FOR_ITER, hit);
@@ -3168,9 +3167,6 @@ family(call, INLINE_CACHE_ENTRIES_CALL) = {
CALL_NO_KW_LIST_APPEND, CALL_NO_KW_METHOD_DESCRIPTOR_FAST, CALL_NO_KW_METHOD_DESCRIPTOR_NOARGS,
CALL_NO_KW_METHOD_DESCRIPTOR_O, CALL_NO_KW_STR_1, CALL_NO_KW_TUPLE_1,
CALL_NO_KW_TYPE_1 };
-family(for_iter, INLINE_CACHE_ENTRIES_FOR_ITER) = {
- FOR_ITER, FOR_ITER_LIST,
- FOR_ITER_RANGE };
family(store_fast) = { STORE_FAST, STORE_FAST__LOAD_FAST, STORE_FAST__STORE_FAST };
family(unpack_sequence, INLINE_CACHE_ENTRIES_UNPACK_SEQUENCE) = {
UNPACK_SEQUENCE, UNPACK_SEQUENCE_LIST,