 Grammar/Grammar        |   2
 Include/code.h         |   4
 Include/compile.h      |   1
 Include/parsetok.h     |   1
 Include/pythonrun.h    |   2
 Lib/__future__.py      |   5
 Lib/test/test_flufl.py |  27
 Parser/parser.c        |   8
 Parser/parsetok.c      |  38
 Parser/tokenizer.c     |   1
 Python/future.c        |   2
 Python/graminit.c      | 196
 Python/pythonrun.c     |   2
 13 files changed, 168 insertions(+), 121 deletions(-)
diff --git a/Grammar/Grammar b/Grammar/Grammar
index 64816d9..e9922c1 100644
--- a/Grammar/Grammar
+++ b/Grammar/Grammar
@@ -87,7 +87,7 @@ or_test: and_test ('or' and_test)*
and_test: not_test ('and' not_test)*
not_test: 'not' not_test | comparison
comparison: star_expr (comp_op star_expr)*
-comp_op: '<'|'>'|'=='|'>='|'<='|'!='|'in'|'not' 'in'|'is'|'is' 'not'
+comp_op: '<'|'>'|'=='|'>='|'<='|'<>'|'!='|'in'|'not' 'in'|'is'|'is' 'not'
star_expr: ['*'] expr
expr: xor_expr ('|' xor_expr)*
xor_expr: and_expr ('^' and_expr)*
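
The grammar change only adds '<>' as an alternative spelling of the not-equal
operator; whether '<>' is accepted and '!=' rejected is decided later, in
Parser/parsetok.c. A minimal sketch of the resulting Python-level behaviour,
assuming an interpreter built with this patch:

    import __future__

    # With the future import in the compiled source, '<>' means "not equal".
    src = "from __future__ import barry_as_FLUFL\nresult = 2 <> 3\n"
    ns = {}
    exec(compile(src, "<flufl demo>", "exec"), ns)
    print(ns["result"])        # True

    # Under the same future import, '!=' is now a syntax error.
    try:
        compile("from __future__ import barry_as_FLUFL\n2 != 3\n",
                "<flufl demo>", "exec")
    except SyntaxError as exc:
        print(exc.msg)
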
diff --git a/Include/code.h b/Include/code.h
index d738d8d..145a9a1 100644
--- a/Include/code.h
+++ b/Include/code.h
@@ -52,10 +52,12 @@ typedef struct {
#define CO_FUTURE_UNICODE_LITERALS 0x20000
#endif
+#define CO_FUTURE_BARRY_AS_BDFL 0x40000
+
/* This should be defined if a future statement modifies the syntax.
For example, when a keyword is added.
*/
-/* #define PY_PARSER_REQUIRES_FUTURE_KEYWORD */
+#define PY_PARSER_REQUIRES_FUTURE_KEYWORD
#define CO_MAXBLOCKS 20 /* Max static block nesting within a function */
diff --git a/Include/compile.h b/Include/compile.h
index d78f824..7c329b3 100644
--- a/Include/compile.h
+++ b/Include/compile.h
@@ -26,6 +26,7 @@ typedef struct {
#define FUTURE_WITH_STATEMENT "with_statement"
#define FUTURE_PRINT_FUNCTION "print_function"
#define FUTURE_UNICODE_LITERALS "unicode_literals"
+#define FUTURE_BARRY_AS_BDFL "barry_as_FLUFL"
struct _mod; /* Declare the existence of this type */
PyAPI_FUNC(PyCodeObject *) PyAST_Compile(struct _mod *, const char *,
diff --git a/Include/parsetok.h b/Include/parsetok.h
index fa402f8..af80570 100644
--- a/Include/parsetok.h
+++ b/Include/parsetok.h
@@ -30,6 +30,7 @@ typedef struct {
#endif
#define PyPARSE_IGNORE_COOKIE 0x0010
+#define PyPARSE_BARRY_AS_BDFL 0x0020
PyAPI_FUNC(node *) PyParser_ParseString(const char *, grammar *, int,
perrdetail *);
diff --git a/Include/pythonrun.h b/Include/pythonrun.h
index c909e1a..86d9fe2 100644
--- a/Include/pythonrun.h
+++ b/Include/pythonrun.h
@@ -7,7 +7,7 @@
extern "C" {
#endif
-#define PyCF_MASK 0
+#define PyCF_MASK CO_FUTURE_BARRY_AS_BDFL
#define PyCF_MASK_OBSOLETE 0
#define PyCF_SOURCE_IS_UTF8 0x0100
#define PyCF_DONT_IMPLY_DEDENT 0x0200
diff --git a/Lib/__future__.py b/Lib/__future__.py
index 9156459..5ff282f 100644
--- a/Lib/__future__.py
+++ b/Lib/__future__.py
@@ -70,6 +70,7 @@ CO_FUTURE_ABSOLUTE_IMPORT = 0x4000 # perform absolute imports by default
CO_FUTURE_WITH_STATEMENT = 0x8000 # with statement
CO_FUTURE_PRINT_FUNCTION = 0x10000 # print function
CO_FUTURE_UNICODE_LITERALS = 0x20000 # unicode string literals
+CO_FUTURE_BARRY_AS_BDFL = 0x40000
class _Feature:
def __init__(self, optionalRelease, mandatoryRelease, compiler_flag):
@@ -126,3 +127,7 @@ print_function = _Feature((2, 6, 0, "alpha", 2),
unicode_literals = _Feature((2, 6, 0, "alpha", 2),
(3, 0, 0, "alpha", 0),
CO_FUTURE_UNICODE_LITERALS)
+
+barry_as_FLUFL = _Feature((3, 1, 0, "alpha", 2),
+ (3, 9, 0, "alpha", 0),
+ CO_FUTURE_BARRY_AS_BDFL)
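
For reference, the new entry can be inspected like any other feature in the
__future__ module; this sketch uses only the existing _Feature API, nothing
added by this patch:

    import __future__

    feat = __future__.barry_as_FLUFL
    print(feat.getOptionalRelease())    # (3, 1, 0, 'alpha', 2)
    print(feat.getMandatoryRelease())   # (3, 9, 0, 'alpha', 0)
    print(hex(feat.compiler_flag))      # 0x40000, i.e. CO_FUTURE_BARRY_AS_BDFL
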
diff --git a/Lib/test/test_flufl.py b/Lib/test/test_flufl.py
new file mode 100644
index 0000000..5a709bc
--- /dev/null
+++ b/Lib/test/test_flufl.py
@@ -0,0 +1,27 @@
+import __future__
+import unittest
+
+class FLUFLTests(unittest.TestCase):
+
+ def test_barry_as_bdfl(self):
+ code = "from __future__ import barry_as_FLUFL; 2 {0} 3"
+ compile(code.format('<>'), '<BDFL test>', 'exec',
+ __future__.CO_FUTURE_BARRY_AS_BDFL)
+ self.assertRaises(SyntaxError, compile, code.format('!='),
+ '<FLUFL test>', 'exec',
+ __future__.CO_FUTURE_BARRY_AS_BDFL)
+
+ def test_guido_as_bdfl(self):
+ code = '2 {0} 3'
+ compile(code.format('!='), '<BDFL test>', 'exec')
+ self.assertRaises(SyntaxError, compile, code.format('<>'),
+ '<FLUFL test>', 'exec')
+
+
+def test_main():
+ from test.support import run_unittest
+ run_unittest(FLUFLTests)
+
+
+if __name__ == '__main__':
+ test_main()
diff --git a/Parser/parser.c b/Parser/parser.c
index ff4ce16..83e5e6d 100644
--- a/Parser/parser.c
+++ b/Parser/parser.c
@@ -149,6 +149,7 @@ classify(parser_state *ps, int type, char *str)
strcmp(l->lb_str, s) != 0)
continue;
#ifdef PY_PARSER_REQUIRES_FUTURE_KEYWORD
+#if 0
/* Leaving this in as an example */
if (!(ps->p_flags & CO_FUTURE_WITH_STATEMENT)) {
if (s[0] == 'w' && strcmp(s, "with") == 0)
@@ -157,6 +158,7 @@ classify(parser_state *ps, int type, char *str)
break; /* not a keyword yet */
}
#endif
+#endif
D(printf("It's a keyword\n"));
return n - i;
}
@@ -178,6 +180,7 @@ classify(parser_state *ps, int type, char *str)
}
#ifdef PY_PARSER_REQUIRES_FUTURE_KEYWORD
+#if 0
/* Leaving this in as an example */
static void
future_hack(parser_state *ps)
@@ -218,6 +221,7 @@ future_hack(parser_state *ps)
}
}
}
+#endif
#endif /* future keyword */
int
@@ -278,11 +282,13 @@ PyParser_AddToken(register parser_state *ps, register int type, char *str,
d->d_name,
ps->p_stack.s_top->s_state));
#ifdef PY_PARSER_REQUIRES_FUTURE_KEYWORD
+#if 0
if (d->d_name[0] == 'i' &&
strcmp(d->d_name,
"import_stmt") == 0)
future_hack(ps);
#endif
+#endif
s_pop(&ps->p_stack);
if (s_empty(&ps->p_stack)) {
D(printf(" ACCEPT.\n"));
@@ -296,10 +302,12 @@ PyParser_AddToken(register parser_state *ps, register int type, char *str,
if (s->s_accept) {
#ifdef PY_PARSER_REQUIRES_FUTURE_KEYWORD
+#if 0
if (d->d_name[0] == 'i' &&
strcmp(d->d_name, "import_stmt") == 0)
future_hack(ps);
#endif
+#endif
/* Pop this dfa and try again */
s_pop(&ps->p_stack);
D(printf(" Pop ...\n"));
diff --git a/Parser/parsetok.c b/Parser/parsetok.c
index 4c3b506..1470327 100644
--- a/Parser/parsetok.c
+++ b/Parser/parsetok.c
@@ -100,6 +100,7 @@ PyParser_ParseFileFlagsEx(FILE *fp, const char *filename,
}
#ifdef PY_PARSER_REQUIRES_FUTURE_KEYWORD
+#if 0
static char with_msg[] =
"%s:%d: Warning: 'with' will become a reserved keyword in Python 2.6\n";
@@ -114,6 +115,7 @@ warn(const char *msg, const char *filename, int lineno)
PySys_WriteStderr(msg, filename, lineno);
}
#endif
+#endif
/* Parse input coming from the given tokenizer structure.
Return error code. */
@@ -133,8 +135,8 @@ parsetok(struct tok_state *tok, grammar *g, int start, perrdetail *err_ret,
return NULL;
}
#ifdef PY_PARSER_REQUIRES_FUTURE_KEYWORD
- if (*flags & PyPARSE_WITH_IS_KEYWORD)
- ps->p_flags |= CO_FUTURE_WITH_STATEMENT;
+ if (*flags & PyPARSE_BARRY_AS_BDFL)
+ ps->p_flags |= CO_FUTURE_BARRY_AS_BDFL;
#endif
for (;;) {
@@ -177,26 +179,20 @@ parsetok(struct tok_state *tok, grammar *g, int start, perrdetail *err_ret,
str[len] = '\0';
#ifdef PY_PARSER_REQUIRES_FUTURE_KEYWORD
- /* This is only necessary to support the "as" warning, but
- we don't want to warn about "as" in import statements. */
- if (type == NAME &&
- len == 6 && str[0] == 'i' && strcmp(str, "import") == 0)
- handling_import = 1;
-
- /* Warn about with as NAME */
- if (type == NAME &&
- !(ps->p_flags & CO_FUTURE_WITH_STATEMENT)) {
- if (len == 4 && str[0] == 'w' && strcmp(str, "with") == 0)
- warn(with_msg, err_ret->filename, tok->lineno);
- else if (!(handling_import || handling_with) &&
- len == 2 && str[0] == 'a' &&
- strcmp(str, "as") == 0)
- warn(as_msg, err_ret->filename, tok->lineno);
+ if (type == NOTEQUAL) {
+ if (!(ps->p_flags & CO_FUTURE_BARRY_AS_BDFL) &&
+ strcmp(str, "!=")) {
+ err_ret->error = E_SYNTAX;
+ break;
+ }
+ else if ((ps->p_flags & CO_FUTURE_BARRY_AS_BDFL) &&
+ strcmp(str, "<>")) {
+ err_ret->text = "with Barry as BDFL, use '<>' "
+ "instead of '!='";
+ err_ret->error = E_SYNTAX;
+ break;
+ }
}
- else if (type == NAME &&
- (ps->p_flags & CO_FUTURE_WITH_STATEMENT) &&
- len == 4 && str[0] == 'w' && strcmp(str, "with") == 0)
- handling_with = 1;
#endif
if (a >= tok->line_start)
col_offset = a - tok->line_start;
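
The two branches above produce the user-visible errors: without the future
flag a '<>' token is a plain syntax error, and with the flag a '!=' token is
rejected with the dedicated message taken from err_ret->text. A sketch of both
paths as seen from Python, assuming an interpreter built with this patch:

    # Flag not set: '<>' is rejected as ordinary invalid syntax.
    try:
        compile("2 <> 3", "<demo>", "eval")
    except SyntaxError as exc:
        print("no future:", exc.msg)

    # Flag set via the in-source future statement: '!=' is rejected with
    # the new message, "with Barry as BDFL, use '<>' instead of '!='".
    try:
        compile("from __future__ import barry_as_FLUFL\n2 != 3\n",
                "<demo>", "exec")
    except SyntaxError as exc:
        print("with future:", exc.msg)
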
diff --git a/Parser/tokenizer.c b/Parser/tokenizer.c
index c4f447d..15e8185 100644
--- a/Parser/tokenizer.c
+++ b/Parser/tokenizer.c
@@ -1040,6 +1040,7 @@ PyToken_TwoChars(int c1, int c2)
break;
case '<':
switch (c2) {
+ case '>': return NOTEQUAL;
case '=': return LESSEQUAL;
case '<': return LEFTSHIFT;
}
diff --git a/Python/future.c b/Python/future.c
index 1a2db1b..4178541 100644
--- a/Python/future.c
+++ b/Python/future.c
@@ -39,6 +39,8 @@ future_check_features(PyFutureFeatures *ff, stmt_ty s, const char *filename)
continue;
} else if (strcmp(feature, FUTURE_UNICODE_LITERALS) == 0) {
continue;
+ } else if (strcmp(feature, FUTURE_BARRY_AS_BDFL) == 0) {
+ ff->ff_features |= CO_FUTURE_BARRY_AS_BDFL;
} else if (strcmp(feature, "braces") == 0) {
PyErr_SetString(PyExc_SyntaxError,
"not a chance");
diff --git a/Python/graminit.c b/Python/graminit.c
index c503a32..4c159bc 100644
--- a/Python/graminit.c
+++ b/Python/graminit.c
@@ -1129,16 +1129,17 @@ static state states_52[2] = {
{1, arcs_52_0},
{2, arcs_52_1},
};
-static arc arcs_53_0[9] = {
+static arc arcs_53_0[10] = {
{118, 1},
{119, 1},
{120, 1},
{121, 1},
{122, 1},
{123, 1},
+ {124, 1},
{95, 1},
{114, 2},
- {124, 3},
+ {125, 3},
};
static arc arcs_53_1[1] = {
{0, 1},
@@ -1151,7 +1152,7 @@ static arc arcs_53_3[2] = {
{0, 3},
};
static state states_53[4] = {
- {9, arcs_53_0},
+ {10, arcs_53_0},
{1, arcs_53_1},
{1, arcs_53_2},
{2, arcs_53_3},
@@ -1172,10 +1173,10 @@ static state states_54[3] = {
{1, arcs_54_2},
};
static arc arcs_55_0[1] = {
- {125, 1},
+ {126, 1},
};
static arc arcs_55_1[2] = {
- {126, 0},
+ {127, 0},
{0, 1},
};
static state states_55[2] = {
@@ -1183,10 +1184,10 @@ static state states_55[2] = {
{2, arcs_55_1},
};
static arc arcs_56_0[1] = {
- {127, 1},
+ {128, 1},
};
static arc arcs_56_1[2] = {
- {128, 0},
+ {129, 0},
{0, 1},
};
static state states_56[2] = {
@@ -1194,10 +1195,10 @@ static state states_56[2] = {
{2, arcs_56_1},
};
static arc arcs_57_0[1] = {
- {129, 1},
+ {130, 1},
};
static arc arcs_57_1[2] = {
- {130, 0},
+ {131, 0},
{0, 1},
};
static state states_57[2] = {
@@ -1205,11 +1206,11 @@ static state states_57[2] = {
{2, arcs_57_1},
};
static arc arcs_58_0[1] = {
- {131, 1},
+ {132, 1},
};
static arc arcs_58_1[3] = {
- {132, 0},
{133, 0},
+ {134, 0},
{0, 1},
};
static state states_58[2] = {
@@ -1217,11 +1218,11 @@ static state states_58[2] = {
{3, arcs_58_1},
};
static arc arcs_59_0[1] = {
- {134, 1},
+ {135, 1},
};
static arc arcs_59_1[3] = {
- {135, 0},
{136, 0},
+ {137, 0},
{0, 1},
};
static state states_59[2] = {
@@ -1229,13 +1230,13 @@ static state states_59[2] = {
{3, arcs_59_1},
};
static arc arcs_60_0[1] = {
- {137, 1},
+ {138, 1},
};
static arc arcs_60_1[5] = {
{31, 0},
- {138, 0},
{139, 0},
{140, 0},
+ {141, 0},
{0, 1},
};
static state states_60[2] = {
@@ -1243,13 +1244,13 @@ static state states_60[2] = {
{5, arcs_60_1},
};
static arc arcs_61_0[4] = {
- {135, 1},
{136, 1},
- {141, 1},
- {142, 2},
+ {137, 1},
+ {142, 1},
+ {143, 2},
};
static arc arcs_61_1[1] = {
- {137, 2},
+ {138, 2},
};
static arc arcs_61_2[1] = {
{0, 2},
@@ -1260,15 +1261,15 @@ static state states_61[3] = {
{1, arcs_61_2},
};
static arc arcs_62_0[1] = {
- {143, 1},
+ {144, 1},
};
static arc arcs_62_1[3] = {
- {144, 1},
+ {145, 1},
{32, 2},
{0, 1},
};
static arc arcs_62_2[1] = {
- {137, 3},
+ {138, 3},
};
static arc arcs_62_3[1] = {
{0, 3},
@@ -1281,44 +1282,44 @@ static state states_62[4] = {
};
static arc arcs_63_0[10] = {
{13, 1},
- {146, 2},
- {148, 3},
+ {147, 2},
+ {149, 3},
{21, 4},
- {151, 4},
- {152, 5},
+ {152, 4},
+ {153, 5},
{77, 4},
- {153, 4},
{154, 4},
{155, 4},
+ {156, 4},
};
static arc arcs_63_1[3] = {
{46, 6},
- {145, 6},
+ {146, 6},
{15, 4},
};
static arc arcs_63_2[2] = {
- {145, 7},
- {147, 4},
+ {146, 7},
+ {148, 4},
};
static arc arcs_63_3[2] = {
- {149, 8},
- {150, 4},
+ {150, 8},
+ {151, 4},
};
static arc arcs_63_4[1] = {
{0, 4},
};
static arc arcs_63_5[2] = {
- {152, 5},
+ {153, 5},
{0, 5},
};
static arc arcs_63_6[1] = {
{15, 4},
};
static arc arcs_63_7[1] = {
- {147, 4},
+ {148, 4},
};
static arc arcs_63_8[1] = {
- {150, 4},
+ {151, 4},
};
static state states_63[9] = {
{10, arcs_63_0},
@@ -1335,7 +1336,7 @@ static arc arcs_64_0[1] = {
{24, 1},
};
static arc arcs_64_1[3] = {
- {156, 2},
+ {157, 2},
{30, 3},
{0, 1},
};
@@ -1359,7 +1360,7 @@ static state states_64[5] = {
};
static arc arcs_65_0[3] = {
{13, 1},
- {146, 2},
+ {147, 2},
{76, 3},
};
static arc arcs_65_1[2] = {
@@ -1367,7 +1368,7 @@ static arc arcs_65_1[2] = {
{15, 5},
};
static arc arcs_65_2[1] = {
- {157, 6},
+ {158, 6},
};
static arc arcs_65_3[1] = {
{21, 5},
@@ -1379,7 +1380,7 @@ static arc arcs_65_5[1] = {
{0, 5},
};
static arc arcs_65_6[1] = {
- {147, 5},
+ {148, 5},
};
static state states_65[7] = {
{3, arcs_65_0},
@@ -1391,14 +1392,14 @@ static state states_65[7] = {
{1, arcs_65_6},
};
static arc arcs_66_0[1] = {
- {158, 1},
+ {159, 1},
};
static arc arcs_66_1[2] = {
{30, 2},
{0, 1},
};
static arc arcs_66_2[2] = {
- {158, 1},
+ {159, 1},
{0, 2},
};
static state states_66[3] = {
@@ -1416,11 +1417,11 @@ static arc arcs_67_1[2] = {
};
static arc arcs_67_2[3] = {
{24, 3},
- {159, 4},
+ {160, 4},
{0, 2},
};
static arc arcs_67_3[2] = {
- {159, 4},
+ {160, 4},
{0, 3},
};
static arc arcs_67_4[1] = {
@@ -1485,7 +1486,7 @@ static arc arcs_71_0[1] = {
};
static arc arcs_71_1[4] = {
{25, 2},
- {156, 3},
+ {157, 3},
{30, 4},
{0, 1},
};
@@ -1500,7 +1501,7 @@ static arc arcs_71_4[2] = {
{0, 4},
};
static arc arcs_71_5[3] = {
- {156, 3},
+ {157, 3},
{30, 7},
{0, 5},
};
@@ -1536,7 +1537,7 @@ static state states_71[11] = {
{2, arcs_71_10},
};
static arc arcs_72_0[1] = {
- {160, 1},
+ {161, 1},
};
static arc arcs_72_1[1] = {
{21, 2},
@@ -1572,7 +1573,7 @@ static state states_72[8] = {
{1, arcs_72_7},
};
static arc arcs_73_0[3] = {
- {161, 1},
+ {162, 1},
{31, 2},
{32, 3},
};
@@ -1587,7 +1588,7 @@ static arc arcs_73_3[1] = {
{24, 6},
};
static arc arcs_73_4[4] = {
- {161, 1},
+ {162, 1},
{31, 2},
{32, 3},
{0, 4},
@@ -1600,7 +1601,7 @@ static arc arcs_73_6[1] = {
{0, 6},
};
static arc arcs_73_7[2] = {
- {161, 5},
+ {162, 5},
{32, 3},
};
static state states_73[8] = {
@@ -1617,7 +1618,7 @@ static arc arcs_74_0[1] = {
{24, 1},
};
static arc arcs_74_1[3] = {
- {156, 2},
+ {157, 2},
{29, 3},
{0, 1},
};
@@ -1634,8 +1635,8 @@ static state states_74[4] = {
{1, arcs_74_3},
};
static arc arcs_75_0[2] = {
- {156, 1},
- {163, 1},
+ {157, 1},
+ {164, 1},
};
static arc arcs_75_1[1] = {
{0, 1},
@@ -1657,7 +1658,7 @@ static arc arcs_76_3[1] = {
{105, 4},
};
static arc arcs_76_4[2] = {
- {162, 5},
+ {163, 5},
{0, 4},
};
static arc arcs_76_5[1] = {
@@ -1678,7 +1679,7 @@ static arc arcs_77_1[1] = {
{107, 2},
};
static arc arcs_77_2[2] = {
- {162, 3},
+ {163, 3},
{0, 2},
};
static arc arcs_77_3[1] = {
@@ -1712,7 +1713,7 @@ static state states_79[2] = {
{1, arcs_79_1},
};
static arc arcs_80_0[1] = {
- {166, 1},
+ {167, 1},
};
static arc arcs_80_1[2] = {
{9, 2},
@@ -1728,11 +1729,11 @@ static state states_80[3] = {
};
static dfa dfas[81] = {
{256, "single_input", 0, 3, states_0,
- "\004\050\060\200\000\000\000\050\370\044\034\144\011\040\004\000\200\041\224\017\101"},
+ "\004\050\060\200\000\000\000\050\370\044\034\144\011\040\004\000\000\103\050\037\202"},
{257, "file_input", 0, 2, states_1,
- "\204\050\060\200\000\000\000\050\370\044\034\144\011\040\004\000\200\041\224\017\101"},
+ "\204\050\060\200\000\000\000\050\370\044\034\144\011\040\004\000\000\103\050\037\202"},
{258, "eval_input", 0, 3, states_2,
- "\000\040\040\200\000\000\000\000\000\040\000\000\000\040\004\000\200\041\224\017\000"},
+ "\000\040\040\200\000\000\000\000\000\040\000\000\000\040\004\000\000\103\050\037\000"},
{259, "decorator", 0, 7, states_3,
"\000\010\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
{260, "decorators", 0, 2, states_4,
@@ -1752,13 +1753,13 @@ static dfa dfas[81] = {
{267, "vfpdef", 0, 2, states_11,
"\000\000\040\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
{268, "stmt", 0, 2, states_12,
- "\000\050\060\200\000\000\000\050\370\044\034\144\011\040\004\000\200\041\224\017\101"},
+ "\000\050\060\200\000\000\000\050\370\044\034\144\011\040\004\000\000\103\050\037\202"},
{269, "simple_stmt", 0, 4, states_13,
- "\000\040\040\200\000\000\000\050\370\044\034\000\000\040\004\000\200\041\224\017\100"},
+ "\000\040\040\200\000\000\000\050\370\044\034\000\000\040\004\000\000\103\050\037\200"},
{270, "small_stmt", 0, 2, states_14,
- "\000\040\040\200\000\000\000\050\370\044\034\000\000\040\004\000\200\041\224\017\100"},
+ "\000\040\040\200\000\000\000\050\370\044\034\000\000\040\004\000\000\103\050\037\200"},
{271, "expr_stmt", 0, 6, states_15,
- "\000\040\040\200\000\000\000\000\000\040\000\000\000\040\004\000\200\041\224\017\000"},
+ "\000\040\040\200\000\000\000\000\000\040\000\000\000\040\004\000\000\103\050\037\000"},
{272, "augassign", 0, 2, states_16,
"\000\000\000\000\000\200\377\007\000\000\000\000\000\000\000\000\000\000\000\000\000"},
{273, "del_stmt", 0, 3, states_17,
@@ -1766,7 +1767,7 @@ static dfa dfas[81] = {
{274, "pass_stmt", 0, 2, states_18,
"\000\000\000\000\000\000\000\040\000\000\000\000\000\000\000\000\000\000\000\000\000"},
{275, "flow_stmt", 0, 2, states_19,
- "\000\000\000\000\000\000\000\000\170\000\000\000\000\000\000\000\000\000\000\000\100"},
+ "\000\000\000\000\000\000\000\000\170\000\000\000\000\000\000\000\000\000\000\000\200"},
{276, "break_stmt", 0, 2, states_20,
"\000\000\000\000\000\000\000\000\010\000\000\000\000\000\000\000\000\000\000\000\000"},
{277, "continue_stmt", 0, 2, states_21,
@@ -1774,7 +1775,7 @@ static dfa dfas[81] = {
{278, "return_stmt", 0, 3, states_22,
"\000\000\000\000\000\000\000\000\040\000\000\000\000\000\000\000\000\000\000\000\000"},
{279, "yield_stmt", 0, 2, states_23,
- "\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\100"},
+ "\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\200"},
{280, "raise_stmt", 0, 5, states_24,
"\000\000\000\000\000\000\000\000\100\000\000\000\000\000\000\000\000\000\000\000\000"},
{281, "import_stmt", 0, 2, states_25,
@@ -1800,7 +1801,7 @@ static dfa dfas[81] = {
{291, "assert_stmt", 0, 5, states_35,
"\000\000\000\000\000\000\000\000\000\000\020\000\000\000\000\000\000\000\000\000\000"},
{292, "compound_stmt", 0, 2, states_36,
- "\000\010\020\000\000\000\000\000\000\000\000\144\011\000\000\000\000\000\000\000\001"},
+ "\000\010\020\000\000\000\000\000\000\000\000\144\011\000\000\000\000\000\000\000\002"},
{293, "if_stmt", 0, 8, states_37,
"\000\000\000\000\000\000\000\000\000\000\000\004\000\000\000\000\000\000\000\000\000"},
{294, "while_stmt", 0, 8, states_38,
@@ -1816,67 +1817,67 @@ static dfa dfas[81] = {
{299, "except_clause", 0, 5, states_43,
"\000\000\000\000\000\000\000\000\000\000\000\000\100\000\000\000\000\000\000\000\000"},
{300, "suite", 0, 5, states_44,
- "\004\040\040\200\000\000\000\050\370\044\034\000\000\040\004\000\200\041\224\017\100"},
+ "\004\040\040\200\000\000\000\050\370\044\034\000\000\040\004\000\000\103\050\037\200"},
{301, "test", 0, 6, states_45,
- "\000\040\040\200\000\000\000\000\000\040\000\000\000\040\004\000\200\041\224\017\000"},
+ "\000\040\040\200\000\000\000\000\000\040\000\000\000\040\004\000\000\103\050\037\000"},
{302, "test_nocond", 0, 2, states_46,
- "\000\040\040\200\000\000\000\000\000\040\000\000\000\040\004\000\200\041\224\017\000"},
+ "\000\040\040\200\000\000\000\000\000\040\000\000\000\040\004\000\000\103\050\037\000"},
{303, "lambdef", 0, 5, states_47,
"\000\000\000\000\000\000\000\000\000\000\000\000\000\040\000\000\000\000\000\000\000"},
{304, "lambdef_nocond", 0, 5, states_48,
"\000\000\000\000\000\000\000\000\000\000\000\000\000\040\000\000\000\000\000\000\000"},
{305, "or_test", 0, 2, states_49,
- "\000\040\040\200\000\000\000\000\000\040\000\000\000\000\004\000\200\041\224\017\000"},
+ "\000\040\040\200\000\000\000\000\000\040\000\000\000\000\004\000\000\103\050\037\000"},
{306, "and_test", 0, 2, states_50,
- "\000\040\040\200\000\000\000\000\000\040\000\000\000\000\004\000\200\041\224\017\000"},
+ "\000\040\040\200\000\000\000\000\000\040\000\000\000\000\004\000\000\103\050\037\000"},
{307, "not_test", 0, 3, states_51,
- "\000\040\040\200\000\000\000\000\000\040\000\000\000\000\004\000\200\041\224\017\000"},
+ "\000\040\040\200\000\000\000\000\000\040\000\000\000\000\004\000\000\103\050\037\000"},
{308, "comparison", 0, 2, states_52,
- "\000\040\040\200\000\000\000\000\000\040\000\000\000\000\000\000\200\041\224\017\000"},
+ "\000\040\040\200\000\000\000\000\000\040\000\000\000\000\000\000\000\103\050\037\000"},
{309, "comp_op", 0, 4, states_53,
- "\000\000\000\000\000\000\000\000\000\000\000\200\000\000\304\037\000\000\000\000\000"},
+ "\000\000\000\000\000\000\000\000\000\000\000\200\000\000\304\077\000\000\000\000\000"},
{310, "star_expr", 0, 3, states_54,
- "\000\040\040\200\000\000\000\000\000\040\000\000\000\000\000\000\200\041\224\017\000"},
+ "\000\040\040\200\000\000\000\000\000\040\000\000\000\000\000\000\000\103\050\037\000"},
{311, "expr", 0, 2, states_55,
- "\000\040\040\000\000\000\000\000\000\040\000\000\000\000\000\000\200\041\224\017\000"},
+ "\000\040\040\000\000\000\000\000\000\040\000\000\000\000\000\000\000\103\050\037\000"},
{312, "xor_expr", 0, 2, states_56,
- "\000\040\040\000\000\000\000\000\000\040\000\000\000\000\000\000\200\041\224\017\000"},
+ "\000\040\040\000\000\000\000\000\000\040\000\000\000\000\000\000\000\103\050\037\000"},
{313, "and_expr", 0, 2, states_57,
- "\000\040\040\000\000\000\000\000\000\040\000\000\000\000\000\000\200\041\224\017\000"},
+ "\000\040\040\000\000\000\000\000\000\040\000\000\000\000\000\000\000\103\050\037\000"},
{314, "shift_expr", 0, 2, states_58,
- "\000\040\040\000\000\000\000\000\000\040\000\000\000\000\000\000\200\041\224\017\000"},
+ "\000\040\040\000\000\000\000\000\000\040\000\000\000\000\000\000\000\103\050\037\000"},
{315, "arith_expr", 0, 2, states_59,
- "\000\040\040\000\000\000\000\000\000\040\000\000\000\000\000\000\200\041\224\017\000"},
+ "\000\040\040\000\000\000\000\000\000\040\000\000\000\000\000\000\000\103\050\037\000"},
{316, "term", 0, 2, states_60,
- "\000\040\040\000\000\000\000\000\000\040\000\000\000\000\000\000\200\041\224\017\000"},
+ "\000\040\040\000\000\000\000\000\000\040\000\000\000\000\000\000\000\103\050\037\000"},
{317, "factor", 0, 3, states_61,
- "\000\040\040\000\000\000\000\000\000\040\000\000\000\000\000\000\200\041\224\017\000"},
+ "\000\040\040\000\000\000\000\000\000\040\000\000\000\000\000\000\000\103\050\037\000"},
{318, "power", 0, 4, states_62,
- "\000\040\040\000\000\000\000\000\000\040\000\000\000\000\000\000\000\000\224\017\000"},
+ "\000\040\040\000\000\000\000\000\000\040\000\000\000\000\000\000\000\000\050\037\000"},
{319, "atom", 0, 9, states_63,
- "\000\040\040\000\000\000\000\000\000\040\000\000\000\000\000\000\000\000\224\017\000"},
+ "\000\040\040\000\000\000\000\000\000\040\000\000\000\000\000\000\000\000\050\037\000"},
{320, "testlist_comp", 0, 5, states_64,
- "\000\040\040\200\000\000\000\000\000\040\000\000\000\040\004\000\200\041\224\017\000"},
+ "\000\040\040\200\000\000\000\000\000\040\000\000\000\040\004\000\000\103\050\037\000"},
{321, "trailer", 0, 7, states_65,
- "\000\040\000\000\000\000\000\000\000\020\000\000\000\000\000\000\000\000\004\000\000"},
+ "\000\040\000\000\000\000\000\000\000\020\000\000\000\000\000\000\000\000\010\000\000"},
{322, "subscriptlist", 0, 3, states_66,
- "\000\040\040\202\000\000\000\000\000\040\000\000\000\040\004\000\200\041\224\017\000"},
+ "\000\040\040\202\000\000\000\000\000\040\000\000\000\040\004\000\000\103\050\037\000"},
{323, "subscript", 0, 5, states_67,
- "\000\040\040\202\000\000\000\000\000\040\000\000\000\040\004\000\200\041\224\017\000"},
+ "\000\040\040\202\000\000\000\000\000\040\000\000\000\040\004\000\000\103\050\037\000"},
{324, "sliceop", 0, 3, states_68,
"\000\000\000\002\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
{325, "exprlist", 0, 3, states_69,
- "\000\040\040\200\000\000\000\000\000\040\000\000\000\000\000\000\200\041\224\017\000"},
+ "\000\040\040\200\000\000\000\000\000\040\000\000\000\000\000\000\000\103\050\037\000"},
{326, "testlist", 0, 3, states_70,
- "\000\040\040\200\000\000\000\000\000\040\000\000\000\040\004\000\200\041\224\017\000"},
+ "\000\040\040\200\000\000\000\000\000\040\000\000\000\040\004\000\000\103\050\037\000"},
{327, "dictorsetmaker", 0, 11, states_71,
- "\000\040\040\200\000\000\000\000\000\040\000\000\000\040\004\000\200\041\224\017\000"},
+ "\000\040\040\200\000\000\000\000\000\040\000\000\000\040\004\000\000\103\050\037\000"},
{328, "classdef", 0, 8, states_72,
- "\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\001"},
+ "\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\002"},
{329, "arglist", 0, 8, states_73,
- "\000\040\040\200\001\000\000\000\000\040\000\000\000\040\004\000\200\041\224\017\000"},
+ "\000\040\040\200\001\000\000\000\000\040\000\000\000\040\004\000\000\103\050\037\000"},
{330, "argument", 0, 4, states_74,
- "\000\040\040\200\000\000\000\000\000\040\000\000\000\040\004\000\200\041\224\017\000"},
+ "\000\040\040\200\000\000\000\000\000\040\000\000\000\040\004\000\000\103\050\037\000"},
{331, "comp_iter", 0, 2, states_75,
"\000\000\000\000\000\000\000\000\000\000\000\104\000\000\000\000\000\000\000\000\000"},
{332, "comp_for", 0, 6, states_76,
@@ -1884,13 +1885,13 @@ static dfa dfas[81] = {
{333, "comp_if", 0, 4, states_77,
"\000\000\000\000\000\000\000\000\000\000\000\004\000\000\000\000\000\000\000\000\000"},
{334, "testlist1", 0, 2, states_78,
- "\000\040\040\200\000\000\000\000\000\040\000\000\000\040\004\000\200\041\224\017\000"},
+ "\000\040\040\200\000\000\000\000\000\040\000\000\000\040\004\000\000\103\050\037\000"},
{335, "encoding_decl", 0, 2, states_79,
"\000\000\040\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
{336, "yield_expr", 0, 3, states_80,
- "\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\100"},
+ "\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\200"},
};
-static label labels[167] = {
+static label labels[168] = {
{0, "EMPTY"},
{256, 0},
{4, 0},
@@ -2015,6 +2016,7 @@ static label labels[167] = {
{31, 0},
{30, 0},
{29, 0},
+ {29, 0},
{1, "is"},
{312, 0},
{18, 0},
@@ -2062,6 +2064,6 @@ static label labels[167] = {
grammar _PyParser_Grammar = {
81,
dfas,
- {167, labels},
+ {168, labels},
256
};
diff --git a/Python/pythonrun.c b/Python/pythonrun.c
index 27c9d23..9159b4c 100644
--- a/Python/pythonrun.c
+++ b/Python/pythonrun.c
@@ -1011,6 +1011,8 @@ static int PARSER_FLAGS(PyCompilerFlags *flags)
parser_flags |= PyPARSE_DONT_IMPLY_DEDENT;
if (flags->cf_flags & PyCF_IGNORE_COOKIE)
parser_flags |= PyPARSE_IGNORE_COOKIE;
+ if (flags->cf_flags & CO_FUTURE_BARRY_AS_BDFL)
+ parser_flags |= PyPARSE_BARRY_AS_BDFL;
return parser_flags;
}
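
Because PARSER_FLAGS now forwards CO_FUTURE_BARRY_AS_BDFL as
PyPARSE_BARRY_AS_BDFL, passing the compiler flag directly to compile() should
behave the same as the in-source future statement (this is the path the new
test exercises); a sketch under that assumption:

    import __future__

    # The flag alone enables '<>' for this compilation unit.
    code = compile("2 <> 3", "<flag demo>", "eval",
                   __future__.CO_FUTURE_BARRY_AS_BDFL)
    print(eval(code))   # expected: True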