author     Raymond Hettinger <python@rcn.com>    2005-02-07 19:32:38 (GMT)
committer  Raymond Hettinger <python@rcn.com>    2005-02-07 19:32:38 (GMT)
commit     7fcb7869ba82586f68e0cf28c3a25e78457fa0e0 (patch)
tree       aa9eef07f6653ca56676489cd68c4d8bbbbc0bc5 /Python/compile.c
parent     fe59dc1bd83d769acb47336f3570ec99e6ba16b6 (diff)
Adopt Skip's idea to optimize lists of constants in the context
of an "in" or "not in" test.
Diffstat (limited to 'Python/compile.c')
-rw-r--r--  Python/compile.c | 20
1 file changed, 13 insertions(+), 7 deletions(-)
diff --git a/Python/compile.c b/Python/compile.c
index de07c97..dd58aa6 100644
--- a/Python/compile.c
+++ b/Python/compile.c
@@ -397,7 +397,9 @@ intern_strings(PyObject *tuple)
The consts table must still be in list form so that the
new constant (c1, c2, ... cn) can be appended.
Called with codestr pointing to the first LOAD_CONST.
- Bails out with no change if one or more of the LOAD_CONSTs is missing. */
+ Bails out with no change if one or more of the LOAD_CONSTs is missing.
+ Also works for BUILD_LIST when followed by an "in" or "not in" test.
+*/
static int
tuple_of_constants(unsigned char *codestr, int n, PyObject *consts)
{
@@ -406,7 +408,7 @@ tuple_of_constants(unsigned char *codestr, int n, PyObject *consts)
/* Pre-conditions */
assert(PyList_CheckExact(consts));
- assert(codestr[n*3] == BUILD_TUPLE);
+ assert(codestr[n*3] == BUILD_TUPLE || codestr[n*3] == BUILD_LIST);
assert(GETARG(codestr, (n*3)) == n);
for (i=0 ; i<n ; i++)
assert(codestr[i*3] == LOAD_CONST);
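
For reference, this is the byte layout that the assertions above (and the comment over tuple_of_constants) describe, in the 2.x-era encoding where every argument-taking instruction is 3 bytes. The macro and diagram below are an illustration of that decoding, not code from the patch:

#define GETARG(arr, i)  ((int)(((arr)[(i) + 2] << 8) + (arr)[(i) + 1]))  /* sketch of the decoder */

/* Layout seen by tuple_of_constants(codestr, n, consts):
 *
 *   codestr + 0        LOAD_CONST   c1
 *   codestr + 3        LOAD_CONST   c2
 *   ...
 *   codestr + (n-1)*3  LOAD_CONST   cn
 *   codestr + n*3      BUILD_TUPLE n    -- or, after this patch, BUILD_LIST n
 *   codestr + (n+1)*3  COMPARE_OP  6|7  -- only checked in the BUILD_LIST
 *                                          ("in" / "not in") case
 */
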
@@ -753,24 +755,28 @@ optimize_code(PyObject *code, PyObject* consts, PyObject *names, PyObject *linen
cumlc = 0;
break;
- /* Try to fold tuples of constants.
+ /* Try to fold tuples of constants (includes a case for lists
+ which are only used for "in" and "not in" tests).
Skip over BUILD_SEQN 1 UNPACK_SEQN 1.
Replace BUILD_SEQN 2 UNPACK_SEQN 2 with ROT2.
Replace BUILD_SEQN 3 UNPACK_SEQN 3 with ROT3 ROT2. */
case BUILD_TUPLE:
+ case BUILD_LIST:
j = GETARG(codestr, i);
h = i - 3 * j;
if (h >= 0 &&
j <= lastlc &&
- ISBASICBLOCK(blocks, h, 3*(j+1)) &&
+ (opcode == BUILD_TUPLE &&
+ ISBASICBLOCK(blocks, h, 3*(j+1)) ||
+ opcode == BUILD_LIST &&
+ codestr[i+3]==COMPARE_OP &&
+ ISBASICBLOCK(blocks, h, 3*(j+2)) &&
+ (GETARG(codestr,i+3)==6 || GETARG(codestr,i+3)==7)) &&
tuple_of_constants(&codestr[h], j, consts)) {
assert(codestr[i] == LOAD_CONST);
cumlc = 1;
break;
}
- /* Intentional fallthrough */
- case BUILD_LIST:
- j = GETARG(codestr, i);
if (codestr[i+3] != UNPACK_SEQUENCE ||
!ISBASICBLOCK(blocks,i,6) ||
j != GETARG(codestr, i+3))
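
Two notes that may help when reading the new condition in this hunk: the COMPARE_OP argument selects an entry in the interpreter's comparison table, and 6 and 7 are the "in" and "not in" slots (PyCmp_IN / PyCmp_NOT_IN in the opcode headers of that era), while the basic-block check widens from 3*(j+1) to 3*(j+2) bytes so that the COMPARE_OP following the BUILD_LIST is covered as well. The mixed && / || expression relies on && binding tighter than ||; the fully parenthesized form below is only a reading aid, not part of the patch:

if (h >= 0 &&
    j <= lastlc &&
    ((opcode == BUILD_TUPLE &&
      ISBASICBLOCK(blocks, h, 3*(j+1)))
     ||
     (opcode == BUILD_LIST &&
      codestr[i+3] == COMPARE_OP &&
      ISBASICBLOCK(blocks, h, 3*(j+2)) &&
      (GETARG(codestr, i+3) == 6 ||      /* "in"     */
       GETARG(codestr, i+3) == 7))) &&   /* "not in" */
    tuple_of_constants(&codestr[h], j, consts)) {
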