Diffstat (limited to 'Python')
-rw-r--r--  Python/bytecodes.c            32
-rw-r--r--  Python/codegen.c              11
-rw-r--r--  Python/executor_cases.c.h     76
-rw-r--r--  Python/flowgraph.c            50
-rw-r--r--  Python/generated_cases.c.h   119
-rw-r--r--  Python/instrumentation.c       5
-rw-r--r--  Python/opcode_targets.h        4
-rw-r--r--  Python/optimizer_bytecodes.c  11
-rw-r--r--  Python/optimizer_cases.c.h    21
-rw-r--r--  Python/specialize.c           14
10 files changed, 204 insertions, 139 deletions
diff --git a/Python/bytecodes.c b/Python/bytecodes.c
index 057ee0a..b7469c2 100644
--- a/Python/bytecodes.c
+++ b/Python/bytecodes.c
@@ -255,10 +255,26 @@ dummy_func(
value2 = PyStackRef_DUP(GETLOCAL(oparg2));
}
+ family(LOAD_CONST, 0) = {
+ LOAD_CONST_IMMORTAL,
+ };
+
pure inst(LOAD_CONST, (-- value)) {
value = PyStackRef_FromPyObjectNew(GETITEM(FRAME_CO_CONSTS, oparg));
}
+ inst(LOAD_CONST_IMMORTAL, (-- value)) {
+ PyObject *obj = GETITEM(FRAME_CO_CONSTS, oparg);
+ assert(_Py_IsImmortal(obj));
+ value = PyStackRef_FromPyObjectImmortal(obj);
+ }
+
+ replicate(4) inst(LOAD_SMALL_INT, (-- value)) {
+ assert(oparg < _PY_NSMALLPOSINTS);
+ PyObject *obj = (PyObject *)&_PyLong_SMALL_INTS[_PY_NSMALLNEGINTS + oparg];
+ value = PyStackRef_FromPyObjectImmortal(obj);
+ }
+
replicate(8) inst(STORE_FAST, (value --)) {
SETLOCAL(oparg, value);
DEAD(value);
@@ -979,10 +995,9 @@ dummy_func(
return result;
}
- // The stack effect here is ambiguous.
- // We definitely pop the return value off the stack on entry.
- // We also push it onto the stack on exit, but that's a
- // different frame, and it's accounted for by _PUSH_FRAME.
+ // The stack effect here is a bit misleading.
+ // retval is popped from the stack, but res
+ // is pushed to a different frame: the caller's frame.
inst(RETURN_VALUE, (retval -- res)) {
#if TIER_ONE
assert(frame != &entry_frame);
@@ -1013,15 +1028,6 @@ dummy_func(
_RETURN_VALUE_EVENT +
RETURN_VALUE;
- macro(RETURN_CONST) =
- LOAD_CONST +
- RETURN_VALUE;
-
- macro(INSTRUMENTED_RETURN_CONST) =
- LOAD_CONST +
- _RETURN_VALUE_EVENT +
- RETURN_VALUE;
-
inst(GET_AITER, (obj -- iter)) {
unaryfunc getter = NULL;
PyObject *obj_o = PyStackRef_AsPyObjectBorrow(obj);
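
The two new instructions trade generality for cheaper paths: LOAD_CONST_IMMORTAL skips reference counting because the object can never be freed, and LOAD_SMALL_INT skips the consts tuple entirely because CPython preallocates the small integers. A minimal self-contained sketch of the small-int idea (the table and names below are illustrative, not CPython's actual internals):

#include <stdio.h>

#define NSMALLINTS 4

static const long small_ints[NSMALLINTS] = {0, 1, 2, 3};

/* stand-in for the LOAD_SMALL_INT handler body: no refcounting and no
 * consts-tuple lookup, just an index into a preallocated table */
static const long *load_small_int(int oparg) {
    return &small_ints[oparg];
}

int main(void) {
    for (int oparg = 0; oparg < NSMALLINTS; oparg++) {
        printf("LOAD_SMALL_INT %d -> %ld\n", oparg, *load_small_int(oparg));
    }
    return 0;
}
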
diff --git a/Python/codegen.c b/Python/codegen.c
index bfacc6f..976c942 100644
--- a/Python/codegen.c
+++ b/Python/codegen.c
@@ -280,6 +280,14 @@ codegen_addop_noarg(instr_sequence *seq, int opcode, location loc)
static int
codegen_addop_load_const(compiler *c, location loc, PyObject *o)
{
+ if (PyLong_CheckExact(o)) {
+ int overflow;
+ long val = PyLong_AsLongAndOverflow(o, &overflow);
+ if (!overflow && val >= 0 && val < 256 && val < _PY_NSMALLPOSINTS) {
+ ADDOP_I(c, loc, LOAD_SMALL_INT, val);
+ return SUCCESS;
+ }
+ }
Py_ssize_t arg = _PyCompile_AddConst(c, o);
if (arg < 0) {
return ERROR;
@@ -656,6 +664,9 @@ codegen_setup_annotations_scope(compiler *c, location loc,
codegen_enter_scope(c, name, COMPILE_SCOPE_ANNOTATIONS,
key, loc.lineno, NULL, &umd));
+ // Insert None into consts to prevent an annotation
+ // from being mistaken for a docstring
+ _PyCompile_AddConst(c, Py_None);
// if .format != 1: raise NotImplementedError
_Py_DECLARE_STR(format, ".format");
ADDOP_I(c, loc, LOAD_FAST, 0);
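
The guard in codegen_addop_load_const() decides at compile time whether a constant can become a LOAD_SMALL_INT: it must be an exact int, non-negative, small enough for a one-byte oparg, and within the preallocated table (_PY_NSMALLPOSINTS is 257 in current CPython, so the last bound is defensive at the default size). A standalone sketch of that predicate, with an assumed table size:

#include <stdbool.h>
#include <stdio.h>

#define NSMALLPOSINTS 257   /* assumed table size; 257 in current CPython */

/* mirrors the guard above: exact non-negative int that fits a one-byte
 * oparg and is actually in the preallocated table (redundant at the
 * default table size, but kept for safety) */
static bool fits_small_int(long val, bool overflowed) {
    return !overflowed && val >= 0 && val < 256 && val < NSMALLPOSINTS;
}

int main(void) {
    printf("5   -> %s\n", fits_small_int(5, false)   ? "LOAD_SMALL_INT" : "LOAD_CONST");
    printf("300 -> %s\n", fits_small_int(300, false) ? "LOAD_SMALL_INT" : "LOAD_CONST");
    printf("-1  -> %s\n", fits_small_int(-1, false)  ? "LOAD_SMALL_INT" : "LOAD_CONST");
    return 0;
}
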
diff --git a/Python/executor_cases.c.h b/Python/executor_cases.c.h
index 3a7015c..27b7e32 100644
--- a/Python/executor_cases.c.h
+++ b/Python/executor_cases.c.h
@@ -210,6 +210,82 @@
break;
}
+ case _LOAD_CONST_IMMORTAL: {
+ _PyStackRef value;
+ oparg = CURRENT_OPARG();
+ PyObject *obj = GETITEM(FRAME_CO_CONSTS, oparg);
+ assert(_Py_IsImmortal(obj));
+ value = PyStackRef_FromPyObjectImmortal(obj);
+ stack_pointer[0] = value;
+ stack_pointer += 1;
+ assert(WITHIN_STACK_BOUNDS());
+ break;
+ }
+
+ case _LOAD_SMALL_INT_0: {
+ _PyStackRef value;
+ oparg = 0;
+ assert(oparg == CURRENT_OPARG());
+ assert(oparg < _PY_NSMALLPOSINTS);
+ PyObject *obj = (PyObject *)&_PyLong_SMALL_INTS[_PY_NSMALLNEGINTS + oparg];
+ value = PyStackRef_FromPyObjectImmortal(obj);
+ stack_pointer[0] = value;
+ stack_pointer += 1;
+ assert(WITHIN_STACK_BOUNDS());
+ break;
+ }
+
+ case _LOAD_SMALL_INT_1: {
+ _PyStackRef value;
+ oparg = 1;
+ assert(oparg == CURRENT_OPARG());
+ assert(oparg < _PY_NSMALLPOSINTS);
+ PyObject *obj = (PyObject *)&_PyLong_SMALL_INTS[_PY_NSMALLNEGINTS + oparg];
+ value = PyStackRef_FromPyObjectImmortal(obj);
+ stack_pointer[0] = value;
+ stack_pointer += 1;
+ assert(WITHIN_STACK_BOUNDS());
+ break;
+ }
+
+ case _LOAD_SMALL_INT_2: {
+ _PyStackRef value;
+ oparg = 2;
+ assert(oparg == CURRENT_OPARG());
+ assert(oparg < _PY_NSMALLPOSINTS);
+ PyObject *obj = (PyObject *)&_PyLong_SMALL_INTS[_PY_NSMALLNEGINTS + oparg];
+ value = PyStackRef_FromPyObjectImmortal(obj);
+ stack_pointer[0] = value;
+ stack_pointer += 1;
+ assert(WITHIN_STACK_BOUNDS());
+ break;
+ }
+
+ case _LOAD_SMALL_INT_3: {
+ _PyStackRef value;
+ oparg = 3;
+ assert(oparg == CURRENT_OPARG());
+ assert(oparg < _PY_NSMALLPOSINTS);
+ PyObject *obj = (PyObject *)&_PyLong_SMALL_INTS[_PY_NSMALLNEGINTS + oparg];
+ value = PyStackRef_FromPyObjectImmortal(obj);
+ stack_pointer[0] = value;
+ stack_pointer += 1;
+ assert(WITHIN_STACK_BOUNDS());
+ break;
+ }
+
+ case _LOAD_SMALL_INT: {
+ _PyStackRef value;
+ oparg = CURRENT_OPARG();
+ assert(oparg < _PY_NSMALLPOSINTS);
+ PyObject *obj = (PyObject *)&_PyLong_SMALL_INTS[_PY_NSMALLNEGINTS + oparg];
+ value = PyStackRef_FromPyObjectImmortal(obj);
+ stack_pointer[0] = value;
+ stack_pointer += 1;
+ assert(WITHIN_STACK_BOUNDS());
+ break;
+ }
+
case _STORE_FAST_0: {
_PyStackRef value;
oparg = 0;
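
The replicate(4) marker in bytecodes.c expands here into _LOAD_SMALL_INT_0 through _LOAD_SMALL_INT_3, each with the oparg hardcoded, plus a generic fallback. Baking the oparg in lets the compiler fold the table index into the handler and gives the dispatch loop a distinct jump target per value. A toy illustration of the difference (all names made up):

#include <stdio.h>

static const long small_ints[4] = {0, 1, 2, 3};

/* generic handler: oparg is fetched from the instruction at run time */
static long load_small_int(int oparg) { return small_ints[oparg]; }

/* replicated handlers: oparg is a compile-time constant, so the load
 * becomes a fixed address and each case is its own dispatch target */
static long load_small_int_0(void) { return small_ints[0]; }
static long load_small_int_1(void) { return small_ints[1]; }

int main(void) {
    printf("%ld %ld %ld\n", load_small_int(3), load_small_int_0(), load_small_int_1());
    return 0;
}
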
diff --git a/Python/flowgraph.c b/Python/flowgraph.c
index 3888629..5418131 100644
--- a/Python/flowgraph.c
+++ b/Python/flowgraph.c
@@ -283,7 +283,7 @@ dump_instr(cfg_instr *i)
static inline int
basicblock_returns(const basicblock *b) {
cfg_instr *last = basicblock_last_instr(b);
- return last && (last->i_opcode == RETURN_VALUE || last->i_opcode == RETURN_CONST);
+ return last && last->i_opcode == RETURN_VALUE;
}
static void
@@ -515,22 +515,6 @@ no_redundant_jumps(cfg_builder *g) {
}
return true;
}
-
-static bool
-all_exits_have_lineno(basicblock *entryblock) {
- for (basicblock *b = entryblock; b != NULL; b = b->b_next) {
- for (int i = 0; i < b->b_iused; i++) {
- cfg_instr *instr = &b->b_instr[i];
- if (instr->i_opcode == RETURN_VALUE) {
- if (instr->i_loc.lineno < 0) {
- assert(0);
- return false;
- }
- }
- }
- }
- return true;
-}
#endif
/***** CFG preprocessing (jump targets and exceptions) *****/
@@ -1131,7 +1115,7 @@ remove_redundant_nops_and_pairs(basicblock *entryblock)
int opcode = instr->i_opcode;
bool is_redundant_pair = false;
if (opcode == POP_TOP) {
- if (prev_opcode == LOAD_CONST) {
+ if (prev_opcode == LOAD_CONST || prev_opcode == LOAD_SMALL_INT) {
is_redundant_pair = true;
}
else if (prev_opcode == COPY && prev_oparg == 1) {
@@ -1280,14 +1264,23 @@ jump_thread(basicblock *bb, cfg_instr *inst, cfg_instr *target, int opcode)
return false;
}
+static int
+loads_const(int opcode)
+{
+ return OPCODE_HAS_CONST(opcode) || opcode == LOAD_SMALL_INT;
+}
+
static PyObject*
get_const_value(int opcode, int oparg, PyObject *co_consts)
{
PyObject *constant = NULL;
- assert(OPCODE_HAS_CONST(opcode));
+ assert(loads_const(opcode));
if (opcode == LOAD_CONST) {
constant = PyList_GET_ITEM(co_consts, oparg);
}
+ if (opcode == LOAD_SMALL_INT) {
+ return PyLong_FromLong(oparg);
+ }
if (constant == NULL) {
PyErr_SetString(PyExc_SystemError,
@@ -1345,7 +1338,7 @@ fold_tuple_on_constants(PyObject *const_cache,
assert(inst[n].i_oparg == n);
for (int i = 0; i < n; i++) {
- if (!OPCODE_HAS_CONST(inst[i].i_opcode)) {
+ if (!loads_const(inst[i].i_opcode)) {
return SUCCESS;
}
}
@@ -1583,7 +1576,7 @@ basicblock_optimize_load_const(PyObject *const_cache, basicblock *bb, PyObject *
oparg = inst->i_oparg;
}
assert(!IS_ASSEMBLER_OPCODE(opcode));
- if (opcode != LOAD_CONST) {
+ if (opcode != LOAD_CONST && opcode != LOAD_SMALL_INT) {
continue;
}
int nextop = i+1 < bb->b_iused ? bb->b_instr[i+1].i_opcode : 0;
@@ -1662,12 +1655,6 @@ basicblock_optimize_load_const(PyObject *const_cache, basicblock *bb, PyObject *
: POP_JUMP_IF_NONE;
break;
}
- case RETURN_VALUE:
- {
- INSTR_SET_OP0(inst, NOP);
- INSTR_SET_OP1(&bb->b_instr[++i], RETURN_CONST, oparg);
- break;
- }
case TO_BOOL:
{
PyObject *cnt = get_const_value(opcode, oparg, consts);
@@ -2120,7 +2107,8 @@ remove_unused_consts(basicblock *entryblock, PyObject *consts)
/* mark used consts */
for (basicblock *b = entryblock; b != NULL; b = b->b_next) {
for (int i = 0; i < b->b_iused; i++) {
- if (OPCODE_HAS_CONST(b->b_instr[i].i_opcode)) {
+ int opcode = b->b_instr[i].i_opcode;
+ if (OPCODE_HAS_CONST(opcode)) {
int index = b->b_instr[i].i_oparg;
index_map[index] = index;
}
@@ -2173,7 +2161,8 @@ remove_unused_consts(basicblock *entryblock, PyObject *consts)
for (basicblock *b = entryblock; b != NULL; b = b->b_next) {
for (int i = 0; i < b->b_iused; i++) {
- if (OPCODE_HAS_CONST(b->b_instr[i].i_opcode)) {
+ int opcode = b->b_instr[i].i_opcode;
+ if (OPCODE_HAS_CONST(opcode)) {
int index = b->b_instr[i].i_oparg;
assert(reverse_index_map[index] >= 0);
assert(reverse_index_map[index] < n_used_consts);
@@ -2594,8 +2583,9 @@ _PyCfg_OptimizeCodeUnit(cfg_builder *g, PyObject *consts, PyObject *const_cache,
RETURN_IF_ERROR(insert_superinstructions(g));
RETURN_IF_ERROR(push_cold_blocks_to_end(g));
- assert(all_exits_have_lineno(g->g_entryblock));
RETURN_IF_ERROR(resolve_line_numbers(g, firstlineno));
+ // Temporarily removed assert; see https://github.com/python/cpython/issues/125845
+ // assert(all_exits_have_lineno(g->g_entryblock));
return SUCCESS;
}
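
For the constant folder, LOAD_SMALL_INT is a constant load whose operand is the value itself rather than an index into co_consts, which is why get_const_value() materializes it with PyLong_FromLong() instead of indexing. A self-contained sketch of that split, using toy opcodes and a toy consts table:

#include <stdio.h>

enum { LOAD_CONST, LOAD_SMALL_INT };

static const long consts[] = {100, 200, 300};

static long const_value(int opcode, int oparg) {
    if (opcode == LOAD_CONST)
        return consts[oparg];   /* oparg indexes the consts table */
    return (long)oparg;         /* oparg is the value itself */
}

int main(void) {
    printf("LOAD_CONST 1     -> %ld\n", const_value(LOAD_CONST, 1));
    printf("LOAD_SMALL_INT 7 -> %ld\n", const_value(LOAD_SMALL_INT, 7));
    return 0;
}
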
diff --git a/Python/generated_cases.c.h b/Python/generated_cases.c.h
index 35af1dc..a615f65 100644
--- a/Python/generated_cases.c.h
+++ b/Python/generated_cases.c.h
@@ -4879,59 +4879,6 @@
DISPATCH();
}
- TARGET(INSTRUMENTED_RETURN_CONST) {
- _Py_CODEUNIT* const this_instr = frame->instr_ptr = next_instr;
- (void)this_instr;
- next_instr += 1;
- INSTRUCTION_STATS(INSTRUMENTED_RETURN_CONST);
- _PyStackRef value;
- _PyStackRef val;
- _PyStackRef retval;
- _PyStackRef res;
- // _LOAD_CONST
- {
- value = PyStackRef_FromPyObjectNew(GETITEM(FRAME_CO_CONSTS, oparg));
- }
- // _RETURN_VALUE_EVENT
- {
- val = value;
- stack_pointer[0] = val;
- stack_pointer += 1;
- assert(WITHIN_STACK_BOUNDS());
- _PyFrame_SetStackPointer(frame, stack_pointer);
- int err = _Py_call_instrumentation_arg(
- tstate, PY_MONITORING_EVENT_PY_RETURN,
- frame, this_instr, PyStackRef_AsPyObjectBorrow(val));
- stack_pointer = _PyFrame_GetStackPointer(frame);
- if (err) goto error;
- }
- // _RETURN_VALUE
- {
- retval = val;
- #if TIER_ONE
- assert(frame != &entry_frame);
- #endif
- _PyStackRef temp = retval;
- stack_pointer += -1;
- assert(WITHIN_STACK_BOUNDS());
- _PyFrame_SetStackPointer(frame, stack_pointer);
- assert(EMPTY());
- _Py_LeaveRecursiveCallPy(tstate);
- // GH-99729: We need to unlink the frame *before* clearing it:
- _PyInterpreterFrame *dying = frame;
- frame = tstate->current_frame = dying->previous;
- _PyEval_FrameClearAndPop(tstate, dying);
- stack_pointer = _PyFrame_GetStackPointer(frame);
- LOAD_IP(frame->return_offset);
- res = temp;
- LLTRACE_RESUME_FRAME();
- }
- stack_pointer[0] = res;
- stack_pointer += 1;
- assert(WITHIN_STACK_BOUNDS());
- DISPATCH();
- }
-
TARGET(INSTRUMENTED_RETURN_VALUE) {
_Py_CODEUNIT* const this_instr = frame->instr_ptr = next_instr;
(void)this_instr;
@@ -5901,6 +5848,7 @@
frame->instr_ptr = next_instr;
next_instr += 1;
INSTRUCTION_STATS(LOAD_CONST);
+ PREDICTED(LOAD_CONST);
_PyStackRef value;
value = PyStackRef_FromPyObjectNew(GETITEM(FRAME_CO_CONSTS, oparg));
stack_pointer[0] = value;
@@ -5909,6 +5857,21 @@
DISPATCH();
}
+ TARGET(LOAD_CONST_IMMORTAL) {
+ frame->instr_ptr = next_instr;
+ next_instr += 1;
+ INSTRUCTION_STATS(LOAD_CONST_IMMORTAL);
+ static_assert(0 == 0, "incorrect cache size");
+ _PyStackRef value;
+ PyObject *obj = GETITEM(FRAME_CO_CONSTS, oparg);
+ assert(_Py_IsImmortal(obj));
+ value = PyStackRef_FromPyObjectImmortal(obj);
+ stack_pointer[0] = value;
+ stack_pointer += 1;
+ assert(WITHIN_STACK_BOUNDS());
+ DISPATCH();
+ }
+
TARGET(LOAD_DEREF) {
frame->instr_ptr = next_instr;
next_instr += 1;
@@ -6263,6 +6226,20 @@
DISPATCH();
}
+ TARGET(LOAD_SMALL_INT) {
+ frame->instr_ptr = next_instr;
+ next_instr += 1;
+ INSTRUCTION_STATS(LOAD_SMALL_INT);
+ _PyStackRef value;
+ assert(oparg < _PY_NSMALLPOSINTS);
+ PyObject *obj = (PyObject *)&_PyLong_SMALL_INTS[_PY_NSMALLNEGINTS + oparg];
+ value = PyStackRef_FromPyObjectImmortal(obj);
+ stack_pointer[0] = value;
+ stack_pointer += 1;
+ assert(WITHIN_STACK_BOUNDS());
+ DISPATCH();
+ }
+
TARGET(LOAD_SPECIAL) {
frame->instr_ptr = next_instr;
next_instr += 1;
@@ -6951,42 +6928,6 @@
DISPATCH();
}
- TARGET(RETURN_CONST) {
- frame->instr_ptr = next_instr;
- next_instr += 1;
- INSTRUCTION_STATS(RETURN_CONST);
- _PyStackRef value;
- _PyStackRef retval;
- _PyStackRef res;
- // _LOAD_CONST
- {
- value = PyStackRef_FromPyObjectNew(GETITEM(FRAME_CO_CONSTS, oparg));
- }
- // _RETURN_VALUE
- {
- retval = value;
- #if TIER_ONE
- assert(frame != &entry_frame);
- #endif
- _PyStackRef temp = retval;
- _PyFrame_SetStackPointer(frame, stack_pointer);
- assert(EMPTY());
- _Py_LeaveRecursiveCallPy(tstate);
- // GH-99729: We need to unlink the frame *before* clearing it:
- _PyInterpreterFrame *dying = frame;
- frame = tstate->current_frame = dying->previous;
- _PyEval_FrameClearAndPop(tstate, dying);
- stack_pointer = _PyFrame_GetStackPointer(frame);
- LOAD_IP(frame->return_offset);
- res = temp;
- LLTRACE_RESUME_FRAME();
- }
- stack_pointer[0] = res;
- stack_pointer += 1;
- assert(WITHIN_STACK_BOUNDS());
- DISPATCH();
- }
-
TARGET(RETURN_GENERATOR) {
frame->instr_ptr = next_instr;
next_instr += 1;
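
With RETURN_CONST and its instrumented twin removed, `return <constant>` now compiles to an ordinary constant load (LOAD_CONST, LOAD_CONST_IMMORTAL, or LOAD_SMALL_INT) followed by RETURN_VALUE, so only one return path needs instrumentation support. A toy stack machine showing the resulting two-instruction sequence (everything here is illustrative):

#include <stdio.h>

enum { LOAD_SMALL_INT, RETURN_VALUE };

int main(void) {
    int code[] = {LOAD_SMALL_INT, 2, RETURN_VALUE, 0};   /* "return 2" */
    long stack[8];
    int sp = 0;
    for (int pc = 0; ; pc += 2) {
        int opcode = code[pc], oparg = code[pc + 1];
        switch (opcode) {
        case LOAD_SMALL_INT:
            stack[sp++] = oparg;   /* push the small int */
            break;
        case RETURN_VALUE:
            printf("returned %ld\n", stack[--sp]);
            return 0;
        }
    }
}
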
diff --git a/Python/instrumentation.c b/Python/instrumentation.c
index e1e494c..d456876 100644
--- a/Python/instrumentation.c
+++ b/Python/instrumentation.c
@@ -56,8 +56,6 @@ PyObject _PyInstrumentation_DISABLE = _PyObject_HEAD_INIT(&PyBaseObject_Type);
PyObject _PyInstrumentation_MISSING = _PyObject_HEAD_INIT(&PyBaseObject_Type);
static const int8_t EVENT_FOR_OPCODE[256] = {
- [RETURN_CONST] = PY_MONITORING_EVENT_PY_RETURN,
- [INSTRUMENTED_RETURN_CONST] = PY_MONITORING_EVENT_PY_RETURN,
[RETURN_VALUE] = PY_MONITORING_EVENT_PY_RETURN,
[INSTRUMENTED_RETURN_VALUE] = PY_MONITORING_EVENT_PY_RETURN,
[CALL] = PY_MONITORING_EVENT_CALL,
@@ -94,7 +92,6 @@ static const int8_t EVENT_FOR_OPCODE[256] = {
static const uint8_t DE_INSTRUMENT[256] = {
[INSTRUMENTED_RESUME] = RESUME,
[INSTRUMENTED_RETURN_VALUE] = RETURN_VALUE,
- [INSTRUMENTED_RETURN_CONST] = RETURN_CONST,
[INSTRUMENTED_CALL] = CALL,
[INSTRUMENTED_CALL_KW] = CALL_KW,
[INSTRUMENTED_CALL_FUNCTION_EX] = CALL_FUNCTION_EX,
@@ -112,8 +109,6 @@ static const uint8_t DE_INSTRUMENT[256] = {
};
static const uint8_t INSTRUMENTED_OPCODES[256] = {
- [RETURN_CONST] = INSTRUMENTED_RETURN_CONST,
- [INSTRUMENTED_RETURN_CONST] = INSTRUMENTED_RETURN_CONST,
[RETURN_VALUE] = INSTRUMENTED_RETURN_VALUE,
[INSTRUMENTED_RETURN_VALUE] = INSTRUMENTED_RETURN_VALUE,
[CALL] = INSTRUMENTED_CALL,
diff --git a/Python/opcode_targets.h b/Python/opcode_targets.h
index 3fc9d31..c93941d 100644
--- a/Python/opcode_targets.h
+++ b/Python/opcode_targets.h
@@ -88,6 +88,7 @@ static void *opcode_targets[256] = {
&&TARGET_LOAD_FROM_DICT_OR_GLOBALS,
&&TARGET_LOAD_GLOBAL,
&&TARGET_LOAD_NAME,
+ &&TARGET_LOAD_SMALL_INT,
&&TARGET_LOAD_SPECIAL,
&&TARGET_LOAD_SUPER_ATTR,
&&TARGET_MAKE_CELL,
@@ -99,7 +100,6 @@ static void *opcode_targets[256] = {
&&TARGET_POP_JUMP_IF_TRUE,
&&TARGET_RAISE_VARARGS,
&&TARGET_RERAISE,
- &&TARGET_RETURN_CONST,
&&TARGET_SEND,
&&TARGET_SET_ADD,
&&TARGET_SET_FUNCTION_ATTRIBUTE,
@@ -206,6 +206,7 @@ static void *opcode_targets[256] = {
&&TARGET_LOAD_ATTR_PROPERTY,
&&TARGET_LOAD_ATTR_SLOT,
&&TARGET_LOAD_ATTR_WITH_HINT,
+ &&TARGET_LOAD_CONST_IMMORTAL,
&&TARGET_LOAD_GLOBAL_BUILTIN,
&&TARGET_LOAD_GLOBAL_MODULE,
&&TARGET_LOAD_SUPER_ATTR_ATTR,
@@ -249,7 +250,6 @@ static void *opcode_targets[256] = {
&&TARGET_INSTRUMENTED_POP_JUMP_IF_NOT_NONE,
&&TARGET_INSTRUMENTED_RESUME,
&&TARGET_INSTRUMENTED_RETURN_VALUE,
- &&TARGET_INSTRUMENTED_RETURN_CONST,
&&TARGET_INSTRUMENTED_YIELD_VALUE,
&&TARGET_INSTRUMENTED_CALL,
&&TARGET_INSTRUMENTED_JUMP_BACKWARD,
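
opcode_targets[] is the computed-goto dispatch table: slot N holds the label address for opcode N, which is why adding LOAD_SMALL_INT and dropping RETURN_CONST reshuffles rows rather than appending entries. The table relies on GCC/Clang's labels-as-values extension; a self-contained miniature of the technique:

#include <stdio.h>

int main(void) {
    /* one label address per opcode, mirroring opcode_targets[] */
    static void *targets[] = {&&op_push, &&op_halt};
    int code[] = {0, 0, 1};
    int pc = 0, count = 0;

#define DISPATCH() goto *targets[code[pc++]]
    DISPATCH();

op_push:
    count++;   /* stand-in for real instruction work */
    DISPATCH();
op_halt:
    printf("executed %d pushes\n", count);
    return 0;
}
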
diff --git a/Python/optimizer_bytecodes.c b/Python/optimizer_bytecodes.c
index f40ad1e..71904c1 100644
--- a/Python/optimizer_bytecodes.c
+++ b/Python/optimizer_bytecodes.c
@@ -445,6 +445,17 @@ dummy_func(void) {
value = sym_new_const(ctx, val);
}
+ op(_LOAD_CONST_IMMORTAL, (-- value)) {
+ PyObject *val = PyTuple_GET_ITEM(co->co_consts, this_instr->oparg);
+ REPLACE_OP(this_instr, _LOAD_CONST_INLINE_BORROW, 0, (uintptr_t)val);
+ value = sym_new_const(ctx, val);
+ }
+
+ op(_LOAD_SMALL_INT, (-- value)) {
+ PyObject *val = PyLong_FromLong(this_instr->oparg);
+ value = sym_new_const(ctx, val);
+ }
+
op(_LOAD_CONST_INLINE, (ptr/4 -- value)) {
value = sym_new_const(ctx, ptr);
}
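
In the tier-2 abstract interpreter, a _LOAD_CONST_IMMORTAL whose result is known gets strength-reduced to _LOAD_CONST_INLINE_BORROW with the object pointer stored as the instruction's operand, so the trace never touches co_consts at run time; borrowing is safe precisely because the object is immortal. A sketch of that rewrite with made-up types and a toy consts table:

#include <stdint.h>
#include <stdio.h>

enum { LOAD_CONST_IMMORTAL, LOAD_CONST_INLINE_BORROW };

typedef struct {
    int opcode;
    int oparg;
    uintptr_t operand;   /* inlined payload for the rewritten form */
} TraceInstr;

static const char *consts[] = {"spam", "eggs"};

static void optimize(TraceInstr *instr) {
    const char *obj = consts[instr->oparg];   /* resolved at optimize time */
    instr->opcode = LOAD_CONST_INLINE_BORROW;
    instr->operand = (uintptr_t)obj;          /* pointer baked into the trace */
}

int main(void) {
    TraceInstr instr = {LOAD_CONST_IMMORTAL, 1, 0};
    optimize(&instr);
    printf("opcode=%d operand=%s\n", instr.opcode, (const char *)instr.operand);
    return 0;
}
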
diff --git a/Python/optimizer_cases.c.h b/Python/optimizer_cases.c.h
index 243b3ef..0a7e44e 100644
--- a/Python/optimizer_cases.c.h
+++ b/Python/optimizer_cases.c.h
@@ -68,6 +68,27 @@
break;
}
+ case _LOAD_CONST_IMMORTAL: {
+ _Py_UopsSymbol *value;
+ PyObject *val = PyTuple_GET_ITEM(co->co_consts, this_instr->oparg);
+ REPLACE_OP(this_instr, _LOAD_CONST_INLINE_BORROW, 0, (uintptr_t)val);
+ value = sym_new_const(ctx, val);
+ stack_pointer[0] = value;
+ stack_pointer += 1;
+ assert(WITHIN_STACK_BOUNDS());
+ break;
+ }
+
+ case _LOAD_SMALL_INT: {
+ _Py_UopsSymbol *value;
+ PyObject *val = PyLong_FromLong(this_instr->oparg);
+ value = sym_new_const(ctx, val);
+ stack_pointer[0] = value;
+ stack_pointer += 1;
+ assert(WITHIN_STACK_BOUNDS());
+ break;
+ }
+
case _STORE_FAST: {
_Py_UopsSymbol *value;
value = stack_pointer[-1];
diff --git a/Python/specialize.c b/Python/specialize.c
index 4b33a46..ae47809 100644
--- a/Python/specialize.c
+++ b/Python/specialize.c
@@ -442,11 +442,13 @@ _PyCode_Quicken(PyCodeObject *code)
{
#if ENABLE_SPECIALIZATION
int opcode = 0;
+ int oparg = 0;
_Py_CODEUNIT *instructions = _PyCode_CODE(code);
/* The last code unit cannot have a cache, so we don't need to check it */
for (int i = 0; i < Py_SIZE(code)-1; i++) {
opcode = instructions[i].op.code;
int caches = _PyOpcode_Caches[opcode];
+ oparg = (oparg << 8) | instructions[i].op.arg;
if (caches) {
// The initial value depends on the opcode
switch (opcode) {
@@ -465,6 +467,18 @@ _PyCode_Quicken(PyCodeObject *code)
}
i += caches;
}
+ else if (opcode == LOAD_CONST) {
+ /* We can't do this in the bytecode compiler as
+ * marshalling can intern strings and make them immortal. */
+
+ PyObject *obj = PyTuple_GET_ITEM(code->co_consts, oparg);
+ if (_Py_IsImmortal(obj)) {
+ instructions[i].op.code = LOAD_CONST_IMMORTAL;
+ }
+ }
+ if (opcode != EXTENDED_ARG) {
+ oparg = 0;
+ }
}
#endif /* ENABLE_SPECIALIZATION */
}
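
_PyCode_Quicken() now carries an oparg accumulator through the loop because a LOAD_CONST may be prefixed by EXTENDED_ARG: each prefix contributes the next 8 bits, and the accumulator must reset after any non-prefix instruction. A standalone decoder sketch of that scheme, with toy opcode numbers:

#include <stdio.h>

enum { EXTENDED_ARG, LOAD_CONST };

int main(void) {
    /* EXTENDED_ARG 1; LOAD_CONST 44  ->  effective oparg (1 << 8) | 44 == 300 */
    unsigned char code[] = {EXTENDED_ARG, 1, LOAD_CONST, 44};
    int oparg = 0;
    for (int i = 0; i < 4; i += 2) {
        int opcode = code[i];
        oparg = (oparg << 8) | code[i + 1];
        if (opcode != EXTENDED_ARG) {
            printf("opcode %d, effective oparg %d\n", opcode, oparg);
            oparg = 0;   /* reset once a real instruction has consumed it */
        }
    }
    return 0;
}
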