summary refs log tree commit diff stats
diff options
context:
space:
mode:
author    Georg Brandl <georg@python.org>  2007-03-18 19:01:53 (GMT)
committer Georg Brandl <georg@python.org>  2007-03-18 19:01:53 (GMT)
commit   dde002899db8d04ac25d630fcc3a27e8bbf282ea (patch)
tree     336d26b7a0e0da705cc729688de862bea896b251
parent   428f0641ec34902b0cce2cfdca833c79e6fdab7c (diff)
download cpython-dde002899db8d04ac25d630fcc3a27e8bbf282ea.zip
         cpython-dde002899db8d04ac25d630fcc3a27e8bbf282ea.tar.gz
         cpython-dde002899db8d04ac25d630fcc3a27e8bbf282ea.tar.bz2
Make ELLIPSIS a separate token. This makes it a syntax error to write ". . ." for Ellipsis.
-rw-r--r--  Grammar/Grammar                2
-rw-r--r--  Include/token.h                7
-rw-r--r--  Lib/compiler/transformer.py    2
-rw-r--r--  Lib/test/test_grammar.py       1
-rwxr-xr-x  Lib/token.py                   7
-rw-r--r--  Lib/tokenize.py                6
-rw-r--r--  Parser/tokenizer.c             30
-rw-r--r--  Python/ast.c                   2
-rw-r--r--  Python/graminit.c              149
9 files changed, 111 insertions, 95 deletions
diff --git a/Grammar/Grammar b/Grammar/Grammar
index 0e459ca..e4cd3e0 100644
--- a/Grammar/Grammar
+++ b/Grammar/Grammar
@@ -107,7 +107,7 @@ power: atom trailer* ['**' factor]
atom: ('(' [yield_expr|testlist_gexp] ')' |
'[' [listmaker] ']' |
'{' [dictsetmaker] '}' |
- NAME | NUMBER | STRING+ | '.' '.' '.')
+ NAME | NUMBER | STRING+ | '...')
listmaker: test ( list_for | (',' test)* [','] )
testlist_gexp: test ( gen_for | (',' test)* [','] )
lambdef: 'lambda' [varargslist] ':' test
diff --git a/Include/token.h b/Include/token.h
index cdbc965..772a85e 100644
--- a/Include/token.h
+++ b/Include/token.h
@@ -59,10 +59,11 @@ extern "C" {
#define DOUBLESLASHEQUAL 49
#define AT 50
#define RARROW 51
+#define ELLIPSIS 52
/* Don't forget to update the table _PyParser_TokenNames in tokenizer.c! */
-#define OP 52
-#define ERRORTOKEN 53
-#define N_TOKENS 54
+#define OP 53
+#define ERRORTOKEN 54
+#define N_TOKENS 55
/* Special definitions for cooperation with parser */
diff --git a/Lib/compiler/transformer.py b/Lib/compiler/transformer.py
index 79b702c..f07ec97 100644
--- a/Lib/compiler/transformer.py
+++ b/Lib/compiler/transformer.py
@@ -113,7 +113,7 @@ class Transformer:
token.LBRACE: self.atom_lbrace,
token.NUMBER: self.atom_number,
token.STRING: self.atom_string,
- token.DOT: self.atom_ellipsis,
+ token.ELLIPSIS: self.atom_ellipsis,
token.NAME: self.atom_name,
}
self.encoding = None
diff --git a/Lib/test/test_grammar.py b/Lib/test/test_grammar.py
index 1a14756..bd80db6 100644
--- a/Lib/test/test_grammar.py
+++ b/Lib/test/test_grammar.py
@@ -121,6 +121,7 @@ the \'lazy\' dog.\n\
def testEllipsis(self):
x = ...
self.assert_(x is Ellipsis)
+ self.assertRaises(SyntaxError, eval, ".. .")
class GrammarTests(unittest.TestCase):
diff --git a/Lib/token.py b/Lib/token.py
index 147536c..eb48e76 100755
--- a/Lib/token.py
+++ b/Lib/token.py
@@ -61,9 +61,10 @@ DOUBLESLASH = 48
DOUBLESLASHEQUAL = 49
AT = 50
RARROW = 51
-OP = 52
-ERRORTOKEN = 53
-N_TOKENS = 54
+ELLIPSIS = 52
+OP = 53
+ERRORTOKEN = 54
+N_TOKENS = 55
NT_OFFSET = 256
#--end constants--
diff --git a/Lib/tokenize.py b/Lib/tokenize.py
index e502da9..cda82ca 100644
--- a/Lib/tokenize.py
+++ b/Lib/tokenize.py
@@ -83,7 +83,7 @@ Operator = group(r"\*\*=?", r">>=?", r"<<=?", r"!=",
r"~")
Bracket = '[][(){}]'
-Special = group(r'\r?\n', r'[:;.,@]')
+Special = group(r'\r?\n', r'\.\.\.', r'[:;.,@]')
Funny = group(Operator, Bracket, Special)
PlainToken = group(Number, Funny, String, Name)
@@ -334,8 +334,8 @@ def generate_tokens(readline):
spos, epos, pos = (lnum, start), (lnum, end), end
token, initial = line[start:end], line[start]
- if initial in numchars or \
- (initial == '.' and token != '.'): # ordinary number
+ if (initial in numchars or # ordinary number
+ (initial == '.' and token != '.' and token != '...')):
yield (NUMBER, token, spos, epos, line)
elif initial in '\r\n':
yield (NL if parenlev > 0 else NEWLINE,
diff --git a/Parser/tokenizer.c b/Parser/tokenizer.c
index 84bd60e..ec3c5db 100644
--- a/Parser/tokenizer.c
+++ b/Parser/tokenizer.c
@@ -93,6 +93,7 @@ char *_PyParser_TokenNames[] = {
"DOUBLESLASHEQUAL",
"AT",
"RARROW",
+ "ELLIPSIS",
/* This table must match the #defines in token.h! */
"OP",
"<ERRORTOKEN>",
@@ -1082,6 +1083,16 @@ PyToken_ThreeChars(int c1, int c2, int c3)
break;
}
break;
+ case '.':
+ switch (c2) {
+ case '.':
+ switch (c3) {
+ case '.':
+ return ELLIPSIS;
+ }
+ break;
+ }
+ break;
}
return OP;
}
@@ -1278,13 +1289,22 @@ tok_get(register struct tok_state *tok, char **p_start, char **p_end)
c = tok_nextc(tok);
if (isdigit(c)) {
goto fraction;
- }
- else {
+ } else if (c == '.') {
+ c = tok_nextc(tok);
+ if (c == '.') {
+ *p_start = tok->start;
+ *p_end = tok->cur;
+ return ELLIPSIS;
+ } else {
+ tok_backup(tok, c);
+ }
+ tok_backup(tok, '.');
+ } else {
tok_backup(tok, c);
- *p_start = tok->start;
- *p_end = tok->cur;
- return DOT;
}
+ *p_start = tok->start;
+ *p_end = tok->cur;
+ return DOT;
}
/* Number */
diff --git a/Python/ast.c b/Python/ast.c
index d4c8967..8180b42 100644
--- a/Python/ast.c
+++ b/Python/ast.c
@@ -1410,7 +1410,7 @@ ast_for_atom(struct compiling *c, const node *n)
PyArena_AddPyObject(c->c_arena, pynum);
return Num(pynum, LINENO(n), n->n_col_offset, c->c_arena);
}
- case DOT: /* Ellipsis */
+ case ELLIPSIS: /* Ellipsis */
return Ellipsis(LINENO(n), n->n_col_offset, c->c_arena);
case LPAR: /* some parenthesized expressions */
ch = CHILD(n, 1);
diff --git a/Python/graminit.c b/Python/graminit.c
index 1287219..cece6e5 100644
--- a/Python/graminit.c
+++ b/Python/graminit.c
@@ -1336,19 +1336,19 @@ static arc arcs_65_0[7] = {
{19, 4},
{154, 4},
{155, 5},
- {78, 6},
+ {156, 4},
};
static arc arcs_65_1[3] = {
- {48, 7},
- {147, 7},
+ {48, 6},
+ {147, 6},
{15, 4},
};
static arc arcs_65_2[2] = {
- {149, 8},
+ {149, 7},
{150, 4},
};
static arc arcs_65_3[2] = {
- {152, 9},
+ {152, 8},
{153, 4},
};
static arc arcs_65_4[1] = {
@@ -1359,21 +1359,15 @@ static arc arcs_65_5[2] = {
{0, 5},
};
static arc arcs_65_6[1] = {
- {78, 10},
-};
-static arc arcs_65_7[1] = {
{15, 4},
};
-static arc arcs_65_8[1] = {
+static arc arcs_65_7[1] = {
{150, 4},
};
-static arc arcs_65_9[1] = {
+static arc arcs_65_8[1] = {
{153, 4},
};
-static arc arcs_65_10[1] = {
- {78, 4},
-};
-static state states_65[11] = {
+static state states_65[9] = {
{7, arcs_65_0},
{3, arcs_65_1},
{2, arcs_65_2},
@@ -1383,14 +1377,12 @@ static state states_65[11] = {
{1, arcs_65_6},
{1, arcs_65_7},
{1, arcs_65_8},
- {1, arcs_65_9},
- {1, arcs_65_10},
};
static arc arcs_66_0[1] = {
{22, 1},
};
static arc arcs_66_1[3] = {
- {156, 2},
+ {157, 2},
{28, 3},
{0, 1},
};
@@ -1416,7 +1408,7 @@ static arc arcs_67_0[1] = {
{22, 1},
};
static arc arcs_67_1[3] = {
- {157, 2},
+ {158, 2},
{28, 3},
{0, 1},
};
@@ -1471,7 +1463,7 @@ static arc arcs_69_1[2] = {
{15, 5},
};
static arc arcs_69_2[1] = {
- {158, 6},
+ {159, 6},
};
static arc arcs_69_3[1] = {
{19, 5},
@@ -1495,14 +1487,14 @@ static state states_69[7] = {
{1, arcs_69_6},
};
static arc arcs_70_0[1] = {
- {159, 1},
+ {160, 1},
};
static arc arcs_70_1[2] = {
{28, 2},
{0, 1},
};
static arc arcs_70_2[2] = {
- {159, 1},
+ {160, 1},
{0, 2},
};
static state states_70[3] = {
@@ -1520,11 +1512,11 @@ static arc arcs_71_1[2] = {
};
static arc arcs_71_2[3] = {
{22, 3},
- {160, 4},
+ {161, 4},
{0, 2},
};
static arc arcs_71_3[2] = {
- {160, 4},
+ {161, 4},
{0, 3},
};
static arc arcs_71_4[1] = {
@@ -1625,7 +1617,7 @@ static state states_75[8] = {
{1, arcs_75_7},
};
static arc arcs_76_0[1] = {
- {161, 1},
+ {162, 1},
};
static arc arcs_76_1[1] = {
{19, 2},
@@ -1661,7 +1653,7 @@ static state states_76[8] = {
{1, arcs_76_7},
};
static arc arcs_77_0[3] = {
- {162, 1},
+ {163, 1},
{29, 2},
{31, 3},
};
@@ -1676,7 +1668,7 @@ static arc arcs_77_3[1] = {
{22, 6},
};
static arc arcs_77_4[4] = {
- {162, 1},
+ {163, 1},
{29, 2},
{31, 3},
{0, 4},
@@ -1705,7 +1697,7 @@ static arc arcs_78_0[1] = {
{22, 1},
};
static arc arcs_78_1[3] = {
- {157, 2},
+ {158, 2},
{27, 3},
{0, 1},
};
@@ -1722,8 +1714,8 @@ static state states_78[4] = {
{1, arcs_78_3},
};
static arc arcs_79_0[2] = {
- {156, 1},
- {164, 1},
+ {157, 1},
+ {165, 1},
};
static arc arcs_79_1[1] = {
{0, 1},
@@ -1745,7 +1737,7 @@ static arc arcs_80_3[1] = {
{107, 4},
};
static arc arcs_80_4[2] = {
- {163, 5},
+ {164, 5},
{0, 4},
};
static arc arcs_80_5[1] = {
@@ -1766,7 +1758,7 @@ static arc arcs_81_1[1] = {
{108, 2},
};
static arc arcs_81_2[2] = {
- {163, 3},
+ {164, 3},
{0, 2},
};
static arc arcs_81_3[1] = {
@@ -1779,8 +1771,8 @@ static state states_81[4] = {
{1, arcs_81_3},
};
static arc arcs_82_0[2] = {
- {157, 1},
- {166, 1},
+ {158, 1},
+ {167, 1},
};
static arc arcs_82_1[1] = {
{0, 1},
@@ -1802,7 +1794,7 @@ static arc arcs_83_3[1] = {
{109, 4},
};
static arc arcs_83_4[2] = {
- {165, 5},
+ {166, 5},
{0, 4},
};
static arc arcs_83_5[1] = {
@@ -1823,7 +1815,7 @@ static arc arcs_84_1[1] = {
{108, 2},
};
static arc arcs_84_2[2] = {
- {165, 3},
+ {166, 3},
{0, 2},
};
static arc arcs_84_3[1] = {
@@ -1857,7 +1849,7 @@ static state states_86[2] = {
{1, arcs_86_1},
};
static arc arcs_87_0[1] = {
- {169, 1},
+ {170, 1},
};
static arc arcs_87_1[2] = {
{9, 2},
@@ -1873,11 +1865,11 @@ static state states_87[3] = {
};
static dfa dfas[88] = {
{256, "single_input", 0, 3, states_0,
- "\004\050\014\000\000\000\000\240\340\151\070\220\045\200\040\000\000\206\220\014\002\002"},
+ "\004\050\014\000\000\000\000\240\340\051\070\220\045\200\040\000\000\206\220\034\004\004"},
{257, "file_input", 0, 2, states_1,
- "\204\050\014\000\000\000\000\240\340\151\070\220\045\200\040\000\000\206\220\014\002\002"},
+ "\204\050\014\000\000\000\000\240\340\051\070\220\045\200\040\000\000\206\220\034\004\004"},
{258, "eval_input", 0, 3, states_2,
- "\000\040\010\000\000\000\000\000\000\100\000\000\000\200\040\000\000\206\220\014\000\000"},
+ "\000\040\010\000\000\000\000\000\000\000\000\000\000\200\040\000\000\206\220\034\000\000"},
{259, "decorator", 0, 7, states_3,
"\000\010\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
{260, "decorators", 0, 2, states_4,
@@ -1903,13 +1895,13 @@ static dfa dfas[88] = {
{270, "vfplist", 0, 3, states_14,
"\000\040\010\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
{271, "stmt", 0, 2, states_15,
- "\000\050\014\000\000\000\000\240\340\151\070\220\045\200\040\000\000\206\220\014\002\002"},
+ "\000\050\014\000\000\000\000\240\340\051\070\220\045\200\040\000\000\206\220\034\004\004"},
{272, "simple_stmt", 0, 4, states_16,
- "\000\040\010\000\000\000\000\240\340\151\070\000\000\200\040\000\000\206\220\014\000\002"},
+ "\000\040\010\000\000\000\000\240\340\051\070\000\000\200\040\000\000\206\220\034\000\004"},
{273, "small_stmt", 0, 2, states_17,
- "\000\040\010\000\000\000\000\240\340\151\070\000\000\200\040\000\000\206\220\014\000\002"},
+ "\000\040\010\000\000\000\000\240\340\051\070\000\000\200\040\000\000\206\220\034\000\004"},
{274, "expr_stmt", 0, 6, states_18,
- "\000\040\010\000\000\000\000\000\000\100\000\000\000\200\040\000\000\206\220\014\000\000"},
+ "\000\040\010\000\000\000\000\000\000\000\000\000\000\200\040\000\000\206\220\034\000\000"},
{275, "augassign", 0, 2, states_19,
"\000\000\000\000\000\000\376\037\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
{276, "del_stmt", 0, 3, states_20,
@@ -1917,7 +1909,7 @@ static dfa dfas[88] = {
{277, "pass_stmt", 0, 2, states_21,
"\000\000\000\000\000\000\000\200\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
{278, "flow_stmt", 0, 2, states_22,
- "\000\000\000\000\000\000\000\000\340\001\000\000\000\000\000\000\000\000\000\000\000\002"},
+ "\000\000\000\000\000\000\000\000\340\001\000\000\000\000\000\000\000\000\000\000\000\004"},
{279, "break_stmt", 0, 2, states_23,
"\000\000\000\000\000\000\000\000\040\000\000\000\000\000\000\000\000\000\000\000\000\000"},
{280, "continue_stmt", 0, 2, states_24,
@@ -1925,7 +1917,7 @@ static dfa dfas[88] = {
{281, "return_stmt", 0, 3, states_25,
"\000\000\000\000\000\000\000\000\200\000\000\000\000\000\000\000\000\000\000\000\000\000"},
{282, "yield_stmt", 0, 2, states_26,
- "\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\002"},
+ "\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\004"},
{283, "raise_stmt", 0, 7, states_27,
"\000\000\000\000\000\000\000\000\000\001\000\000\000\000\000\000\000\000\000\000\000\000"},
{284, "import_stmt", 0, 2, states_28,
@@ -1951,7 +1943,7 @@ static dfa dfas[88] = {
{294, "assert_stmt", 0, 5, states_38,
"\000\000\000\000\000\000\000\000\000\000\040\000\000\000\000\000\000\000\000\000\000\000"},
{295, "compound_stmt", 0, 2, states_39,
- "\000\010\004\000\000\000\000\000\000\000\000\220\045\000\000\000\000\000\000\000\002\000"},
+ "\000\010\004\000\000\000\000\000\000\000\000\220\045\000\000\000\000\000\000\000\004\000"},
{296, "if_stmt", 0, 8, states_40,
"\000\000\000\000\000\000\000\000\000\000\000\020\000\000\000\000\000\000\000\000\000\000"},
{297, "while_stmt", 0, 8, states_41,
@@ -1967,69 +1959,69 @@ static dfa dfas[88] = {
{302, "except_clause", 0, 5, states_46,
"\000\000\000\000\000\000\000\000\000\000\000\000\000\001\000\000\000\000\000\000\000\000"},
{303, "suite", 0, 5, states_47,
- "\004\040\010\000\000\000\000\240\340\151\070\000\000\200\040\000\000\206\220\014\000\002"},
+ "\004\040\010\000\000\000\000\240\340\051\070\000\000\200\040\000\000\206\220\034\000\004"},
{304, "testlist_safe", 0, 5, states_48,
- "\000\040\010\000\000\000\000\000\000\100\000\000\000\200\040\000\000\206\220\014\000\000"},
+ "\000\040\010\000\000\000\000\000\000\000\000\000\000\200\040\000\000\206\220\034\000\000"},
{305, "old_test", 0, 2, states_49,
- "\000\040\010\000\000\000\000\000\000\100\000\000\000\200\040\000\000\206\220\014\000\000"},
+ "\000\040\010\000\000\000\000\000\000\000\000\000\000\200\040\000\000\206\220\034\000\000"},
{306, "old_lambdef", 0, 5, states_50,
"\000\000\000\000\000\000\000\000\000\000\000\000\000\200\000\000\000\000\000\000\000\000"},
{307, "test", 0, 6, states_51,
- "\000\040\010\000\000\000\000\000\000\100\000\000\000\200\040\000\000\206\220\014\000\000"},
+ "\000\040\010\000\000\000\000\000\000\000\000\000\000\200\040\000\000\206\220\034\000\000"},
{308, "or_test", 0, 2, states_52,
- "\000\040\010\000\000\000\000\000\000\100\000\000\000\000\040\000\000\206\220\014\000\000"},
+ "\000\040\010\000\000\000\000\000\000\000\000\000\000\000\040\000\000\206\220\034\000\000"},
{309, "and_test", 0, 2, states_53,
- "\000\040\010\000\000\000\000\000\000\100\000\000\000\000\040\000\000\206\220\014\000\000"},
+ "\000\040\010\000\000\000\000\000\000\000\000\000\000\000\040\000\000\206\220\034\000\000"},
{310, "not_test", 0, 3, states_54,
- "\000\040\010\000\000\000\000\000\000\100\000\000\000\000\040\000\000\206\220\014\000\000"},
+ "\000\040\010\000\000\000\000\000\000\000\000\000\000\000\040\000\000\206\220\034\000\000"},
{311, "comparison", 0, 2, states_55,
- "\000\040\010\000\000\000\000\000\000\100\000\000\000\000\000\000\000\206\220\014\000\000"},
+ "\000\040\010\000\000\000\000\000\000\000\000\000\000\000\000\000\000\206\220\034\000\000"},
{312, "comp_op", 0, 4, states_56,
"\000\000\000\000\000\000\000\000\000\000\000\000\002\000\040\177\000\000\000\000\000\000"},
{313, "expr", 0, 2, states_57,
- "\000\040\010\000\000\000\000\000\000\100\000\000\000\000\000\000\000\206\220\014\000\000"},
+ "\000\040\010\000\000\000\000\000\000\000\000\000\000\000\000\000\000\206\220\034\000\000"},
{314, "xor_expr", 0, 2, states_58,
- "\000\040\010\000\000\000\000\000\000\100\000\000\000\000\000\000\000\206\220\014\000\000"},
+ "\000\040\010\000\000\000\000\000\000\000\000\000\000\000\000\000\000\206\220\034\000\000"},
{315, "and_expr", 0, 2, states_59,
- "\000\040\010\000\000\000\000\000\000\100\000\000\000\000\000\000\000\206\220\014\000\000"},
+ "\000\040\010\000\000\000\000\000\000\000\000\000\000\000\000\000\000\206\220\034\000\000"},
{316, "shift_expr", 0, 2, states_60,
- "\000\040\010\000\000\000\000\000\000\100\000\000\000\000\000\000\000\206\220\014\000\000"},
+ "\000\040\010\000\000\000\000\000\000\000\000\000\000\000\000\000\000\206\220\034\000\000"},
{317, "arith_expr", 0, 2, states_61,
- "\000\040\010\000\000\000\000\000\000\100\000\000\000\000\000\000\000\206\220\014\000\000"},
+ "\000\040\010\000\000\000\000\000\000\000\000\000\000\000\000\000\000\206\220\034\000\000"},
{318, "term", 0, 2, states_62,
- "\000\040\010\000\000\000\000\000\000\100\000\000\000\000\000\000\000\206\220\014\000\000"},
+ "\000\040\010\000\000\000\000\000\000\000\000\000\000\000\000\000\000\206\220\034\000\000"},
{319, "factor", 0, 3, states_63,
- "\000\040\010\000\000\000\000\000\000\100\000\000\000\000\000\000\000\206\220\014\000\000"},
+ "\000\040\010\000\000\000\000\000\000\000\000\000\000\000\000\000\000\206\220\034\000\000"},
{320, "power", 0, 4, states_64,
- "\000\040\010\000\000\000\000\000\000\100\000\000\000\000\000\000\000\000\220\014\000\000"},
- {321, "atom", 0, 11, states_65,
- "\000\040\010\000\000\000\000\000\000\100\000\000\000\000\000\000\000\000\220\014\000\000"},
+ "\000\040\010\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\220\034\000\000"},
+ {321, "atom", 0, 9, states_65,
+ "\000\040\010\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\220\034\000\000"},
{322, "listmaker", 0, 5, states_66,
- "\000\040\010\000\000\000\000\000\000\100\000\000\000\200\040\000\000\206\220\014\000\000"},
+ "\000\040\010\000\000\000\000\000\000\000\000\000\000\200\040\000\000\206\220\034\000\000"},
{323, "testlist_gexp", 0, 5, states_67,
- "\000\040\010\000\000\000\000\000\000\100\000\000\000\200\040\000\000\206\220\014\000\000"},
+ "\000\040\010\000\000\000\000\000\000\000\000\000\000\200\040\000\000\206\220\034\000\000"},
{324, "lambdef", 0, 5, states_68,
"\000\000\000\000\000\000\000\000\000\000\000\000\000\200\000\000\000\000\000\000\000\000"},
{325, "trailer", 0, 7, states_69,
"\000\040\000\000\000\000\000\000\000\100\000\000\000\000\000\000\000\000\020\000\000\000"},
{326, "subscriptlist", 0, 3, states_70,
- "\000\040\210\000\000\000\000\000\000\100\000\000\000\200\040\000\000\206\220\014\000\000"},
+ "\000\040\210\000\000\000\000\000\000\000\000\000\000\200\040\000\000\206\220\034\000\000"},
{327, "subscript", 0, 5, states_71,
- "\000\040\210\000\000\000\000\000\000\100\000\000\000\200\040\000\000\206\220\014\000\000"},
+ "\000\040\210\000\000\000\000\000\000\000\000\000\000\200\040\000\000\206\220\034\000\000"},
{328, "sliceop", 0, 3, states_72,
"\000\000\200\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
{329, "exprlist", 0, 3, states_73,
- "\000\040\010\000\000\000\000\000\000\100\000\000\000\000\000\000\000\206\220\014\000\000"},
+ "\000\040\010\000\000\000\000\000\000\000\000\000\000\000\000\000\000\206\220\034\000\000"},
{330, "testlist", 0, 3, states_74,
- "\000\040\010\000\000\000\000\000\000\100\000\000\000\200\040\000\000\206\220\014\000\000"},
+ "\000\040\010\000\000\000\000\000\000\000\000\000\000\200\040\000\000\206\220\034\000\000"},
{331, "dictsetmaker", 0, 8, states_75,
- "\000\040\010\000\000\000\000\000\000\100\000\000\000\200\040\000\000\206\220\014\000\000"},
+ "\000\040\010\000\000\000\000\000\000\000\000\000\000\200\040\000\000\206\220\034\000\000"},
{332, "classdef", 0, 8, states_76,
- "\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\002\000"},
+ "\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\004\000"},
{333, "arglist", 0, 8, states_77,
- "\000\040\010\240\000\000\000\000\000\100\000\000\000\200\040\000\000\206\220\014\000\000"},
+ "\000\040\010\240\000\000\000\000\000\000\000\000\000\200\040\000\000\206\220\034\000\000"},
{334, "argument", 0, 4, states_78,
- "\000\040\010\000\000\000\000\000\000\100\000\000\000\200\040\000\000\206\220\014\000\000"},
+ "\000\040\010\000\000\000\000\000\000\000\000\000\000\200\040\000\000\206\220\034\000\000"},
{335, "list_iter", 0, 2, states_79,
"\000\000\000\000\000\000\000\000\000\000\000\020\001\000\000\000\000\000\000\000\000\000"},
{336, "list_for", 0, 6, states_80,
@@ -2043,13 +2035,13 @@ static dfa dfas[88] = {
{340, "gen_if", 0, 4, states_84,
"\000\000\000\000\000\000\000\000\000\000\000\020\000\000\000\000\000\000\000\000\000\000"},
{341, "testlist1", 0, 2, states_85,
- "\000\040\010\000\000\000\000\000\000\100\000\000\000\200\040\000\000\206\220\014\000\000"},
+ "\000\040\010\000\000\000\000\000\000\000\000\000\000\200\040\000\000\206\220\034\000\000"},
{342, "encoding_decl", 0, 2, states_86,
"\000\000\010\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
{343, "yield_expr", 0, 3, states_87,
- "\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\002"},
+ "\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\004"},
};
-static label labels[170] = {
+static label labels[171] = {
{0, "EMPTY"},
{256, 0},
{4, 0},
@@ -2206,6 +2198,7 @@ static label labels[170] = {
{27, 0},
{2, 0},
{3, 0},
+ {52, 0},
{336, 0},
{339, 0},
{326, 0},
@@ -2224,6 +2217,6 @@ static label labels[170] = {
grammar _PyParser_Grammar = {
88,
dfas,
- {170, labels},
+ {171, labels},
256
};