summaryrefslogtreecommitdiffstats
diff options
context:
space:
mode:
-rw-r--r--Doc/library/ast.rst5
-rw-r--r--Doc/library/token-list.inc4
-rw-r--r--Doc/library/token.rst10
-rw-r--r--Grammar/Tokens2
-rw-r--r--Grammar/python.gram34
-rw-r--r--Include/internal/pycore_token.h22
-rw-r--r--Lib/test/test_peg_generator/test_c_parser.py2
-rw-r--r--Lib/test/test_tokenize.py68
-rw-r--r--Lib/test/test_type_comments.py4
-rw-r--r--Lib/token.py24
-rw-r--r--Misc/NEWS.d/next/Core and Builtins/2023-07-22-14-35-38.gh-issue-107015.Ghp58t.rst3
-rw-r--r--Parser/parser.c624
-rw-r--r--Parser/pegen.c4
-rw-r--r--Parser/pegen.h1
-rw-r--r--Parser/token.c2
-rw-r--r--Parser/tokenizer.c78
-rw-r--r--Parser/tokenizer.h6
-rw-r--r--Python/Python-tokenize.c3
-rw-r--r--Tools/peg_generator/pegen/keywordgen.py5
-rw-r--r--Tools/peg_generator/pegen/python_generator.py2
20 files changed, 404 insertions, 499 deletions
diff --git a/Doc/library/ast.rst b/Doc/library/ast.rst
index 530cf30..cd657ae 100644
--- a/Doc/library/ast.rst
+++ b/Doc/library/ast.rst
@@ -2146,7 +2146,7 @@ and classes for traversing abstract syntax trees:
Currently ``major`` must equal to ``3``. For example, setting
``feature_version=(3, 4)`` will allow the use of ``async`` and
``await`` as variable names. The lowest supported version is
- ``(3, 4)``; the highest is ``sys.version_info[0:2]``.
+ ``(3, 7)``; the highest is ``sys.version_info[0:2]``.
If source contains a null character ('\0'), :exc:`ValueError` is raised.
@@ -2169,6 +2169,9 @@ and classes for traversing abstract syntax trees:
.. versionchanged:: 3.8
Added ``type_comments``, ``mode='func_type'`` and ``feature_version``.
+ .. versionchanged:: 3.13
+ The minimum supported version for ``feature_version`` is now ``(3, 7)``.
+
.. function:: unparse(ast_obj)
diff --git a/Doc/library/token-list.inc b/Doc/library/token-list.inc
index e885de8..39df292 100644
--- a/Doc/library/token-list.inc
+++ b/Doc/library/token-list.inc
@@ -207,10 +207,6 @@
.. data:: OP
-.. data:: AWAIT
-
-.. data:: ASYNC
-
.. data:: TYPE_IGNORE
.. data:: TYPE_COMMENT
diff --git a/Doc/library/token.rst b/Doc/library/token.rst
index 903847b..e6dc37d 100644
--- a/Doc/library/token.rst
+++ b/Doc/library/token.rst
@@ -80,17 +80,21 @@ the :mod:`tokenize` module.
.. versionchanged:: 3.5
- Added :data:`AWAIT` and :data:`ASYNC` tokens.
+ Added :data:`!AWAIT` and :data:`!ASYNC` tokens.
.. versionchanged:: 3.7
Added :data:`COMMENT`, :data:`NL` and :data:`ENCODING` tokens.
.. versionchanged:: 3.7
- Removed :data:`AWAIT` and :data:`ASYNC` tokens. "async" and "await" are
+ Removed :data:`!AWAIT` and :data:`!ASYNC` tokens. "async" and "await" are
now tokenized as :data:`NAME` tokens.
.. versionchanged:: 3.8
Added :data:`TYPE_COMMENT`, :data:`TYPE_IGNORE`, :data:`COLONEQUAL`.
- Added :data:`AWAIT` and :data:`ASYNC` tokens back (they're needed
+ Added :data:`!AWAIT` and :data:`!ASYNC` tokens back (they're needed
to support parsing older Python versions for :func:`ast.parse` with
``feature_version`` set to 6 or lower).
+
+.. versionchanged:: 3.13
+ Removed :data:`!AWAIT` and :data:`!ASYNC` tokens again.
+
diff --git a/Grammar/Tokens b/Grammar/Tokens
index 618ae81..20bb803 100644
--- a/Grammar/Tokens
+++ b/Grammar/Tokens
@@ -56,8 +56,6 @@ COLONEQUAL ':='
EXCLAMATION '!'
OP
-AWAIT
-ASYNC
TYPE_IGNORE
TYPE_COMMENT
SOFT_KEYWORD
diff --git a/Grammar/python.gram b/Grammar/python.gram
index c1863ae..e7c8178 100644
--- a/Grammar/python.gram
+++ b/Grammar/python.gram
@@ -127,11 +127,11 @@ simple_stmt[stmt_ty] (memo):
| &'nonlocal' nonlocal_stmt
compound_stmt[stmt_ty]:
- | &('def' | '@' | ASYNC) function_def
+ | &('def' | '@' | 'async') function_def
| &'if' if_stmt
| &('class' | '@') class_def
- | &('with' | ASYNC) with_stmt
- | &('for' | ASYNC) for_stmt
+ | &('with' | 'async') with_stmt
+ | &('for' | 'async') for_stmt
| &'try' try_stmt
| &'while' while_stmt
| match_stmt
@@ -272,7 +272,7 @@ function_def_raw[stmt_ty]:
_PyAST_FunctionDef(n->v.Name.id,
(params) ? params : CHECK(arguments_ty, _PyPegen_empty_arguments(p)),
b, NULL, a, NEW_TYPE_COMMENT(p, tc), t, EXTRA) }
- | ASYNC 'def' n=NAME t=[type_params] &&'(' params=[params] ')' a=['->' z=expression { z }] &&':' tc=[func_type_comment] b=block {
+ | 'async' 'def' n=NAME t=[type_params] &&'(' params=[params] ')' a=['->' z=expression { z }] &&':' tc=[func_type_comment] b=block {
CHECK_VERSION(
stmt_ty,
5,
@@ -385,7 +385,7 @@ for_stmt[stmt_ty]:
| invalid_for_stmt
| 'for' t=star_targets 'in' ~ ex=star_expressions ':' tc=[TYPE_COMMENT] b=block el=[else_block] {
_PyAST_For(t, ex, b, el, NEW_TYPE_COMMENT(p, tc), EXTRA) }
- | ASYNC 'for' t=star_targets 'in' ~ ex=star_expressions ':' tc=[TYPE_COMMENT] b=block el=[else_block] {
+ | 'async' 'for' t=star_targets 'in' ~ ex=star_expressions ':' tc=[TYPE_COMMENT] b=block el=[else_block] {
CHECK_VERSION(stmt_ty, 5, "Async for loops are", _PyAST_AsyncFor(t, ex, b, el, NEW_TYPE_COMMENT(p, tc), EXTRA)) }
| invalid_for_target
@@ -398,9 +398,9 @@ with_stmt[stmt_ty]:
CHECK_VERSION(stmt_ty, 9, "Parenthesized context managers are", _PyAST_With(a, b, NULL, EXTRA)) }
| 'with' a[asdl_withitem_seq*]=','.with_item+ ':' tc=[TYPE_COMMENT] b=block {
_PyAST_With(a, b, NEW_TYPE_COMMENT(p, tc), EXTRA) }
- | ASYNC 'with' '(' a[asdl_withitem_seq*]=','.with_item+ ','? ')' ':' b=block {
+ | 'async' 'with' '(' a[asdl_withitem_seq*]=','.with_item+ ','? ')' ':' b=block {
CHECK_VERSION(stmt_ty, 5, "Async with statements are", _PyAST_AsyncWith(a, b, NULL, EXTRA)) }
- | ASYNC 'with' a[asdl_withitem_seq*]=','.with_item+ ':' tc=[TYPE_COMMENT] b=block {
+ | 'async' 'with' a[asdl_withitem_seq*]=','.with_item+ ':' tc=[TYPE_COMMENT] b=block {
CHECK_VERSION(stmt_ty, 5, "Async with statements are", _PyAST_AsyncWith(a, b, NEW_TYPE_COMMENT(p, tc), EXTRA)) }
| invalid_with_stmt
@@ -814,7 +814,7 @@ power[expr_ty]:
# Primary elements are things like "obj.something.something", "obj[something]", "obj(something)", "obj" ...
await_primary[expr_ty] (memo):
- | AWAIT a=primary { CHECK_VERSION(expr_ty, 5, "Await expressions are", _PyAST_Await(a, EXTRA)) }
+ | 'await' a=primary { CHECK_VERSION(expr_ty, 5, "Await expressions are", _PyAST_Await(a, EXTRA)) }
| primary
primary[expr_ty]:
@@ -966,7 +966,7 @@ for_if_clauses[asdl_comprehension_seq*]:
| a[asdl_comprehension_seq*]=for_if_clause+ { a }
for_if_clause[comprehension_ty]:
- | ASYNC 'for' a=star_targets 'in' ~ b=disjunction c[asdl_expr_seq*]=('if' z=disjunction { z })* {
+ | 'async' 'for' a=star_targets 'in' ~ b=disjunction c[asdl_expr_seq*]=('if' z=disjunction { z })* {
CHECK_VERSION(comprehension_ty, 6, "Async comprehensions are", _PyAST_comprehension(a, b, c, 1, p->arena)) }
| 'for' a=star_targets 'in' ~ b=disjunction c[asdl_expr_seq*]=('if' z=disjunction { z })* {
_PyAST_comprehension(a, b, c, 0, p->arena) }
@@ -1284,7 +1284,7 @@ invalid_with_item:
RAISE_SYNTAX_ERROR_INVALID_TARGET(STAR_TARGETS, a) }
invalid_for_target:
- | ASYNC? 'for' a=star_expressions {
+ | 'async'? 'for' a=star_expressions {
RAISE_SYNTAX_ERROR_INVALID_TARGET(FOR_TARGETS, a) }
invalid_group:
@@ -1301,12 +1301,12 @@ invalid_import_from_targets:
RAISE_SYNTAX_ERROR("trailing comma not allowed without surrounding parentheses") }
invalid_with_stmt:
- | [ASYNC] 'with' ','.(expression ['as' star_target])+ NEWLINE { RAISE_SYNTAX_ERROR("expected ':'") }
- | [ASYNC] 'with' '(' ','.(expressions ['as' star_target])+ ','? ')' NEWLINE { RAISE_SYNTAX_ERROR("expected ':'") }
+ | ['async'] 'with' ','.(expression ['as' star_target])+ NEWLINE { RAISE_SYNTAX_ERROR("expected ':'") }
+ | ['async'] 'with' '(' ','.(expressions ['as' star_target])+ ','? ')' NEWLINE { RAISE_SYNTAX_ERROR("expected ':'") }
invalid_with_stmt_indent:
- | [ASYNC] a='with' ','.(expression ['as' star_target])+ ':' NEWLINE !INDENT {
+ | ['async'] a='with' ','.(expression ['as' star_target])+ ':' NEWLINE !INDENT {
RAISE_INDENTATION_ERROR("expected an indented block after 'with' statement on line %d", a->lineno) }
- | [ASYNC] a='with' '(' ','.(expressions ['as' star_target])+ ','? ')' ':' NEWLINE !INDENT {
+ | ['async'] a='with' '(' ','.(expressions ['as' star_target])+ ','? ')' ':' NEWLINE !INDENT {
RAISE_INDENTATION_ERROR("expected an indented block after 'with' statement on line %d", a->lineno) }
invalid_try_stmt:
@@ -1367,11 +1367,11 @@ invalid_while_stmt:
| a='while' named_expression ':' NEWLINE !INDENT {
RAISE_INDENTATION_ERROR("expected an indented block after 'while' statement on line %d", a->lineno) }
invalid_for_stmt:
- | [ASYNC] 'for' star_targets 'in' star_expressions NEWLINE { RAISE_SYNTAX_ERROR("expected ':'") }
- | [ASYNC] a='for' star_targets 'in' star_expressions ':' NEWLINE !INDENT {
+ | ['async'] 'for' star_targets 'in' star_expressions NEWLINE { RAISE_SYNTAX_ERROR("expected ':'") }
+ | ['async'] a='for' star_targets 'in' star_expressions ':' NEWLINE !INDENT {
RAISE_INDENTATION_ERROR("expected an indented block after 'for' statement on line %d", a->lineno) }
invalid_def_raw:
- | [ASYNC] a='def' NAME '(' [params] ')' ['->' expression] ':' NEWLINE !INDENT {
+ | ['async'] a='def' NAME '(' [params] ')' ['->' expression] ':' NEWLINE !INDENT {
RAISE_INDENTATION_ERROR("expected an indented block after function definition on line %d", a->lineno) }
invalid_class_def_raw:
| 'class' NAME ['(' [arguments] ')'] NEWLINE { RAISE_SYNTAX_ERROR("expected ':'") }
diff --git a/Include/internal/pycore_token.h b/Include/internal/pycore_token.h
index c02e637..9c65cd8 100644
--- a/Include/internal/pycore_token.h
+++ b/Include/internal/pycore_token.h
@@ -69,18 +69,16 @@ extern "C" {
#define COLONEQUAL 53
#define EXCLAMATION 54
#define OP 55
-#define AWAIT 56
-#define ASYNC 57
-#define TYPE_IGNORE 58
-#define TYPE_COMMENT 59
-#define SOFT_KEYWORD 60
-#define FSTRING_START 61
-#define FSTRING_MIDDLE 62
-#define FSTRING_END 63
-#define COMMENT 64
-#define NL 65
-#define ERRORTOKEN 66
-#define N_TOKENS 68
+#define TYPE_IGNORE 56
+#define TYPE_COMMENT 57
+#define SOFT_KEYWORD 58
+#define FSTRING_START 59
+#define FSTRING_MIDDLE 60
+#define FSTRING_END 61
+#define COMMENT 62
+#define NL 63
+#define ERRORTOKEN 64
+#define N_TOKENS 66
#define NT_OFFSET 256
/* Special definitions for cooperation with parser */
diff --git a/Lib/test/test_peg_generator/test_c_parser.py b/Lib/test/test_peg_generator/test_c_parser.py
index f9105a9..9e273e9 100644
--- a/Lib/test/test_peg_generator/test_c_parser.py
+++ b/Lib/test/test_peg_generator/test_c_parser.py
@@ -404,7 +404,7 @@ class TestCParser(unittest.TestCase):
a='[' b=NAME c=for_if_clauses d=']' { _PyAST_ListComp(b, c, EXTRA) }
)
for_if_clauses[asdl_comprehension_seq*]: (
- a[asdl_comprehension_seq*]=(y=[ASYNC] 'for' a=NAME 'in' b=NAME c[asdl_expr_seq*]=('if' z=NAME { z })*
+ a[asdl_comprehension_seq*]=(y=['async'] 'for' a=NAME 'in' b=NAME c[asdl_expr_seq*]=('if' z=NAME { z })*
{ _PyAST_comprehension(_PyAST_Name(((expr_ty) a)->v.Name.id, Store, EXTRA), b, c, (y == NULL) ? 0 : 1, p->arena) })+ { a }
)
"""
diff --git a/Lib/test/test_tokenize.py b/Lib/test/test_tokenize.py
index d1552d8..7863e27 100644
--- a/Lib/test/test_tokenize.py
+++ b/Lib/test/test_tokenize.py
@@ -2521,7 +2521,7 @@ def"', """\
def test_async(self):
self.check_tokenize('async = 1', """\
- ASYNC 'async' (1, 0) (1, 5)
+ NAME 'async' (1, 0) (1, 5)
EQUAL '=' (1, 6) (1, 7)
NUMBER '1' (1, 8) (1, 9)
""")
@@ -2530,21 +2530,21 @@ def"', """\
NAME 'a' (1, 0) (1, 1)
EQUAL '=' (1, 2) (1, 3)
LPAR '(' (1, 4) (1, 5)
- ASYNC 'async' (1, 5) (1, 10)
+ NAME 'async' (1, 5) (1, 10)
EQUAL '=' (1, 11) (1, 12)
NUMBER '1' (1, 13) (1, 14)
RPAR ')' (1, 14) (1, 15)
""")
self.check_tokenize('async()', """\
- ASYNC 'async' (1, 0) (1, 5)
+ NAME 'async' (1, 0) (1, 5)
LPAR '(' (1, 5) (1, 6)
RPAR ')' (1, 6) (1, 7)
""")
self.check_tokenize('class async(Bar):pass', """\
NAME 'class' (1, 0) (1, 5)
- ASYNC 'async' (1, 6) (1, 11)
+ NAME 'async' (1, 6) (1, 11)
LPAR '(' (1, 11) (1, 12)
NAME 'Bar' (1, 12) (1, 15)
RPAR ')' (1, 15) (1, 16)
@@ -2554,13 +2554,13 @@ def"', """\
self.check_tokenize('class async:pass', """\
NAME 'class' (1, 0) (1, 5)
- ASYNC 'async' (1, 6) (1, 11)
+ NAME 'async' (1, 6) (1, 11)
COLON ':' (1, 11) (1, 12)
NAME 'pass' (1, 12) (1, 16)
""")
self.check_tokenize('await = 1', """\
- AWAIT 'await' (1, 0) (1, 5)
+ NAME 'await' (1, 0) (1, 5)
EQUAL '=' (1, 6) (1, 7)
NUMBER '1' (1, 8) (1, 9)
""")
@@ -2568,11 +2568,11 @@ def"', """\
self.check_tokenize('foo.async', """\
NAME 'foo' (1, 0) (1, 3)
DOT '.' (1, 3) (1, 4)
- ASYNC 'async' (1, 4) (1, 9)
+ NAME 'async' (1, 4) (1, 9)
""")
self.check_tokenize('async for a in b: pass', """\
- ASYNC 'async' (1, 0) (1, 5)
+ NAME 'async' (1, 0) (1, 5)
NAME 'for' (1, 6) (1, 9)
NAME 'a' (1, 10) (1, 11)
NAME 'in' (1, 12) (1, 14)
@@ -2582,7 +2582,7 @@ def"', """\
""")
self.check_tokenize('async with a as b: pass', """\
- ASYNC 'async' (1, 0) (1, 5)
+ NAME 'async' (1, 0) (1, 5)
NAME 'with' (1, 6) (1, 10)
NAME 'a' (1, 11) (1, 12)
NAME 'as' (1, 13) (1, 15)
@@ -2592,45 +2592,45 @@ def"', """\
""")
self.check_tokenize('async.foo', """\
- ASYNC 'async' (1, 0) (1, 5)
+ NAME 'async' (1, 0) (1, 5)
DOT '.' (1, 5) (1, 6)
NAME 'foo' (1, 6) (1, 9)
""")
self.check_tokenize('async', """\
- ASYNC 'async' (1, 0) (1, 5)
+ NAME 'async' (1, 0) (1, 5)
""")
self.check_tokenize('async\n#comment\nawait', """\
- ASYNC 'async' (1, 0) (1, 5)
+ NAME 'async' (1, 0) (1, 5)
NEWLINE '' (1, 5) (1, 5)
- AWAIT 'await' (3, 0) (3, 5)
+ NAME 'await' (3, 0) (3, 5)
""")
self.check_tokenize('async\n...\nawait', """\
- ASYNC 'async' (1, 0) (1, 5)
+ NAME 'async' (1, 0) (1, 5)
NEWLINE '' (1, 5) (1, 5)
ELLIPSIS '...' (2, 0) (2, 3)
NEWLINE '' (2, 3) (2, 3)
- AWAIT 'await' (3, 0) (3, 5)
+ NAME 'await' (3, 0) (3, 5)
""")
self.check_tokenize('async\nawait', """\
- ASYNC 'async' (1, 0) (1, 5)
+ NAME 'async' (1, 0) (1, 5)
NEWLINE '' (1, 5) (1, 5)
- AWAIT 'await' (2, 0) (2, 5)
+ NAME 'await' (2, 0) (2, 5)
""")
self.check_tokenize('foo.async + 1', """\
NAME 'foo' (1, 0) (1, 3)
DOT '.' (1, 3) (1, 4)
- ASYNC 'async' (1, 4) (1, 9)
+ NAME 'async' (1, 4) (1, 9)
PLUS '+' (1, 10) (1, 11)
NUMBER '1' (1, 12) (1, 13)
""")
self.check_tokenize('async def foo(): pass', """\
- ASYNC 'async' (1, 0) (1, 5)
+ NAME 'async' (1, 0) (1, 5)
NAME 'def' (1, 6) (1, 9)
NAME 'foo' (1, 10) (1, 13)
LPAR '(' (1, 13) (1, 14)
@@ -2647,7 +2647,7 @@ async def foo():
await
async += 1
''', """\
- ASYNC 'async' (1, 0) (1, 5)
+ NAME 'async' (1, 0) (1, 5)
NAME 'def' (1, 6) (1, 9)
NAME 'foo' (1, 10) (1, 13)
LPAR '(' (1, 13) (1, 14)
@@ -2658,12 +2658,12 @@ async += 1
NAME 'def' (2, 2) (2, 5)
NAME 'foo' (2, 6) (2, 9)
LPAR '(' (2, 9) (2, 10)
- AWAIT 'await' (2, 10) (2, 15)
+ NAME 'await' (2, 10) (2, 15)
RPAR ')' (2, 15) (2, 16)
COLON ':' (2, 16) (2, 17)
NEWLINE '' (2, 17) (2, 17)
INDENT '' (3, -1) (3, -1)
- AWAIT 'await' (3, 4) (3, 9)
+ NAME 'await' (3, 4) (3, 9)
EQUAL '=' (3, 10) (3, 11)
NUMBER '1' (3, 12) (3, 13)
NEWLINE '' (3, 13) (3, 13)
@@ -2673,18 +2673,18 @@ async += 1
COLON ':' (4, 6) (4, 7)
NEWLINE '' (4, 7) (4, 7)
INDENT '' (5, -1) (5, -1)
- AWAIT 'await' (5, 4) (5, 9)
+ NAME 'await' (5, 4) (5, 9)
NEWLINE '' (5, 9) (5, 9)
DEDENT '' (6, -1) (6, -1)
DEDENT '' (6, -1) (6, -1)
- ASYNC 'async' (6, 0) (6, 5)
+ NAME 'async' (6, 0) (6, 5)
PLUSEQUAL '+=' (6, 6) (6, 8)
NUMBER '1' (6, 9) (6, 10)
NEWLINE '' (6, 10) (6, 10)
""")
self.check_tokenize('async def foo():\n async for i in 1: pass', """\
- ASYNC 'async' (1, 0) (1, 5)
+ NAME 'async' (1, 0) (1, 5)
NAME 'def' (1, 6) (1, 9)
NAME 'foo' (1, 10) (1, 13)
LPAR '(' (1, 13) (1, 14)
@@ -2692,7 +2692,7 @@ async += 1
COLON ':' (1, 15) (1, 16)
NEWLINE '' (1, 16) (1, 16)
INDENT '' (2, -1) (2, -1)
- ASYNC 'async' (2, 2) (2, 7)
+ NAME 'async' (2, 2) (2, 7)
NAME 'for' (2, 8) (2, 11)
NAME 'i' (2, 12) (2, 13)
NAME 'in' (2, 14) (2, 16)
@@ -2703,14 +2703,14 @@ async += 1
""")
self.check_tokenize('async def foo(async): await', """\
- ASYNC 'async' (1, 0) (1, 5)
+ NAME 'async' (1, 0) (1, 5)
NAME 'def' (1, 6) (1, 9)
NAME 'foo' (1, 10) (1, 13)
LPAR '(' (1, 13) (1, 14)
- ASYNC 'async' (1, 14) (1, 19)
+ NAME 'async' (1, 14) (1, 19)
RPAR ')' (1, 19) (1, 20)
COLON ':' (1, 20) (1, 21)
- AWAIT 'await' (1, 22) (1, 27)
+ NAME 'await' (1, 22) (1, 27)
""")
self.check_tokenize('''\
@@ -2734,7 +2734,7 @@ def f():
COLON ':' (3, 11) (3, 12)
NAME 'pass' (3, 13) (3, 17)
NEWLINE '' (3, 17) (3, 17)
- ASYNC 'async' (4, 2) (4, 7)
+ NAME 'async' (4, 2) (4, 7)
NAME 'def' (4, 8) (4, 11)
NAME 'bar' (4, 12) (4, 15)
LPAR '(' (4, 15) (4, 16)
@@ -2742,7 +2742,7 @@ def f():
COLON ':' (4, 17) (4, 18)
NAME 'pass' (4, 19) (4, 23)
NEWLINE '' (4, 23) (4, 23)
- AWAIT 'await' (6, 2) (6, 7)
+ NAME 'await' (6, 2) (6, 7)
EQUAL '=' (6, 8) (6, 9)
NUMBER '2' (6, 10) (6, 11)
DEDENT '' (6, -1) (6, -1)
@@ -2755,7 +2755,7 @@ async def f():
async def bar(): pass
await = 2''', """\
- ASYNC 'async' (1, 0) (1, 5)
+ NAME 'async' (1, 0) (1, 5)
NAME 'def' (1, 6) (1, 9)
NAME 'f' (1, 10) (1, 11)
LPAR '(' (1, 11) (1, 12)
@@ -2770,7 +2770,7 @@ async def f():
COLON ':' (3, 11) (3, 12)
NAME 'pass' (3, 13) (3, 17)
NEWLINE '' (3, 17) (3, 17)
- ASYNC 'async' (4, 2) (4, 7)
+ NAME 'async' (4, 2) (4, 7)
NAME 'def' (4, 8) (4, 11)
NAME 'bar' (4, 12) (4, 15)
LPAR '(' (4, 15) (4, 16)
@@ -2778,7 +2778,7 @@ async def f():
COLON ':' (4, 17) (4, 18)
NAME 'pass' (4, 19) (4, 23)
NEWLINE '' (4, 23) (4, 23)
- AWAIT 'await' (6, 2) (6, 7)
+ NAME 'await' (6, 2) (6, 7)
EQUAL '=' (6, 8) (6, 9)
NUMBER '2' (6, 10) (6, 11)
DEDENT '' (6, -1) (6, -1)
diff --git a/Lib/test/test_type_comments.py b/Lib/test/test_type_comments.py
index aba4a44..9a11fab 100644
--- a/Lib/test/test_type_comments.py
+++ b/Lib/test/test_type_comments.py
@@ -260,8 +260,8 @@ class TypeCommentTests(unittest.TestCase):
self.assertEqual(tree.body[1].type_comment, None)
def test_asyncvar(self):
- for tree in self.parse_all(asyncvar, maxver=6):
- pass
+ with self.assertRaises(SyntaxError):
+ self.classic_parse(asyncvar)
def test_asynccomp(self):
for tree in self.parse_all(asynccomp, minver=6):
diff --git a/Lib/token.py b/Lib/token.py
index 487f6ed..b620317 100644
--- a/Lib/token.py
+++ b/Lib/token.py
@@ -59,20 +59,18 @@ ELLIPSIS = 52
COLONEQUAL = 53
EXCLAMATION = 54
OP = 55
-AWAIT = 56
-ASYNC = 57
-TYPE_IGNORE = 58
-TYPE_COMMENT = 59
-SOFT_KEYWORD = 60
-FSTRING_START = 61
-FSTRING_MIDDLE = 62
-FSTRING_END = 63
-COMMENT = 64
-NL = 65
+TYPE_IGNORE = 56
+TYPE_COMMENT = 57
+SOFT_KEYWORD = 58
+FSTRING_START = 59
+FSTRING_MIDDLE = 60
+FSTRING_END = 61
+COMMENT = 62
+NL = 63
# These aren't used by the C tokenizer but are needed for tokenize.py
-ERRORTOKEN = 66
-ENCODING = 67
-N_TOKENS = 68
+ERRORTOKEN = 64
+ENCODING = 65
+N_TOKENS = 66
# Special definitions for cooperation with parser
NT_OFFSET = 256
diff --git a/Misc/NEWS.d/next/Core and Builtins/2023-07-22-14-35-38.gh-issue-107015.Ghp58t.rst b/Misc/NEWS.d/next/Core and Builtins/2023-07-22-14-35-38.gh-issue-107015.Ghp58t.rst
new file mode 100644
index 0000000..77618a5
--- /dev/null
+++ b/Misc/NEWS.d/next/Core and Builtins/2023-07-22-14-35-38.gh-issue-107015.Ghp58t.rst
@@ -0,0 +1,3 @@
+The ASYNC and AWAIT tokens are removed from the Grammar, which removes the
+possibility of making ``async`` and ``await`` soft keywords when using
+``feature_version<7`` in :func:`ast.parse`.
diff --git a/Parser/parser.c b/Parser/parser.c
index f2ea8f5..44312cf 100644
--- a/Parser/parser.c
+++ b/Parser/parser.c
@@ -17,57 +17,59 @@ static KeywordToken *reserved_keywords[] = {
(KeywordToken[]) {{NULL, -1}},
(KeywordToken[]) {{NULL, -1}},
(KeywordToken[]) {
- {"if", 642},
- {"as", 640},
- {"in", 651},
- {"or", 574},
- {"is", 582},
+ {"if", 656},
+ {"as", 654},
+ {"in", 667},
+ {"or", 581},
+ {"is", 589},
{NULL, -1},
},
(KeywordToken[]) {
- {"del", 604},
- {"def", 652},
- {"for", 650},
- {"try", 624},
- {"and", 575},
- {"not", 581},
+ {"del", 613},
+ {"def", 669},
+ {"for", 666},
+ {"try", 638},
+ {"and", 582},
+ {"not", 588},
{NULL, -1},
},
(KeywordToken[]) {
- {"from", 608},
+ {"from", 618},
{"pass", 504},
- {"with", 615},
- {"elif", 644},
- {"else", 645},
- {"None", 602},
- {"True", 601},
+ {"with", 629},
+ {"elif", 658},
+ {"else", 659},
+ {"None", 611},
+ {"True", 610},
{NULL, -1},
},
(KeywordToken[]) {
- {"raise", 522},
- {"yield", 573},
+ {"raise", 525},
+ {"yield", 580},
{"break", 508},
- {"class", 654},
- {"while", 647},
- {"False", 603},
+ {"async", 668},
+ {"class", 671},
+ {"while", 661},
+ {"False", 612},
+ {"await", 590},
{NULL, -1},
},
(KeywordToken[]) {
- {"return", 519},
- {"import", 607},
- {"assert", 526},
- {"global", 523},
- {"except", 637},
- {"lambda", 600},
+ {"return", 522},
+ {"import", 617},
+ {"assert", 529},
+ {"global", 526},
+ {"except", 651},
+ {"lambda", 609},
{NULL, -1},
},
(KeywordToken[]) {
- {"finally", 633},
+ {"finally", 647},
{NULL, -1},
},
(KeywordToken[]) {
{"continue", 509},
- {"nonlocal", 524},
+ {"nonlocal", 527},
{NULL, -1},
},
};
@@ -1820,7 +1822,7 @@ simple_stmt_rule(Parser *p)
D(fprintf(stderr, "%*c> simple_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "&'return' return_stmt"));
stmt_ty return_stmt_var;
if (
- _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 519) // token='return'
+ _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 522) // token='return'
&&
(return_stmt_var = return_stmt_rule(p)) // return_stmt
)
@@ -1862,7 +1864,7 @@ simple_stmt_rule(Parser *p)
D(fprintf(stderr, "%*c> simple_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "&'raise' raise_stmt"));
stmt_ty raise_stmt_var;
if (
- _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 522) // token='raise'
+ _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 525) // token='raise'
&&
(raise_stmt_var = raise_stmt_rule(p)) // raise_stmt
)
@@ -1916,7 +1918,7 @@ simple_stmt_rule(Parser *p)
D(fprintf(stderr, "%*c> simple_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "&'del' del_stmt"));
stmt_ty del_stmt_var;
if (
- _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 604) // token='del'
+ _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 613) // token='del'
&&
(del_stmt_var = del_stmt_rule(p)) // del_stmt
)
@@ -1937,7 +1939,7 @@ simple_stmt_rule(Parser *p)
D(fprintf(stderr, "%*c> simple_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "&'yield' yield_stmt"));
stmt_ty yield_stmt_var;
if (
- _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 573) // token='yield'
+ _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 580) // token='yield'
&&
(yield_stmt_var = yield_stmt_rule(p)) // yield_stmt
)
@@ -1958,7 +1960,7 @@ simple_stmt_rule(Parser *p)
D(fprintf(stderr, "%*c> simple_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "&'assert' assert_stmt"));
stmt_ty assert_stmt_var;
if (
- _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 526) // token='assert'
+ _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 529) // token='assert'
&&
(assert_stmt_var = assert_stmt_rule(p)) // assert_stmt
)
@@ -2045,7 +2047,7 @@ simple_stmt_rule(Parser *p)
D(fprintf(stderr, "%*c> simple_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "&'global' global_stmt"));
stmt_ty global_stmt_var;
if (
- _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 523) // token='global'
+ _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 526) // token='global'
&&
(global_stmt_var = global_stmt_rule(p)) // global_stmt
)
@@ -2066,7 +2068,7 @@ simple_stmt_rule(Parser *p)
D(fprintf(stderr, "%*c> simple_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "&'nonlocal' nonlocal_stmt"));
stmt_ty nonlocal_stmt_var;
if (
- _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 524) // token='nonlocal'
+ _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 527) // token='nonlocal'
&&
(nonlocal_stmt_var = nonlocal_stmt_rule(p)) // nonlocal_stmt
)
@@ -2087,11 +2089,11 @@ simple_stmt_rule(Parser *p)
}
// compound_stmt:
-// | &('def' | '@' | ASYNC) function_def
+// | &('def' | '@' | 'async') function_def
// | &'if' if_stmt
// | &('class' | '@') class_def
-// | &('with' | ASYNC) with_stmt
-// | &('for' | ASYNC) for_stmt
+// | &('with' | 'async') with_stmt
+// | &('for' | 'async') for_stmt
// | &'try' try_stmt
// | &'while' while_stmt
// | match_stmt
@@ -2108,12 +2110,12 @@ compound_stmt_rule(Parser *p)
}
stmt_ty _res = NULL;
int _mark = p->mark;
- { // &('def' | '@' | ASYNC) function_def
+ { // &('def' | '@' | 'async') function_def
if (p->error_indicator) {
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> compound_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "&('def' | '@' | ASYNC) function_def"));
+ D(fprintf(stderr, "%*c> compound_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "&('def' | '@' | 'async') function_def"));
stmt_ty function_def_var;
if (
_PyPegen_lookahead(1, _tmp_8_rule, p)
@@ -2121,13 +2123,13 @@ compound_stmt_rule(Parser *p)
(function_def_var = function_def_rule(p)) // function_def
)
{
- D(fprintf(stderr, "%*c+ compound_stmt[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "&('def' | '@' | ASYNC) function_def"));
+ D(fprintf(stderr, "%*c+ compound_stmt[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "&('def' | '@' | 'async') function_def"));
_res = function_def_var;
goto done;
}
p->mark = _mark;
D(fprintf(stderr, "%*c%s compound_stmt[%d-%d]: %s failed!\n", p->level, ' ',
- p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "&('def' | '@' | ASYNC) function_def"));
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "&('def' | '@' | 'async') function_def"));
}
{ // &'if' if_stmt
if (p->error_indicator) {
@@ -2137,7 +2139,7 @@ compound_stmt_rule(Parser *p)
D(fprintf(stderr, "%*c> compound_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "&'if' if_stmt"));
stmt_ty if_stmt_var;
if (
- _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 642) // token='if'
+ _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 656) // token='if'
&&
(if_stmt_var = if_stmt_rule(p)) // if_stmt
)
@@ -2171,12 +2173,12 @@ compound_stmt_rule(Parser *p)
D(fprintf(stderr, "%*c%s compound_stmt[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "&('class' | '@') class_def"));
}
- { // &('with' | ASYNC) with_stmt
+ { // &('with' | 'async') with_stmt
if (p->error_indicator) {
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> compound_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "&('with' | ASYNC) with_stmt"));
+ D(fprintf(stderr, "%*c> compound_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "&('with' | 'async') with_stmt"));
stmt_ty with_stmt_var;
if (
_PyPegen_lookahead(1, _tmp_10_rule, p)
@@ -2184,20 +2186,20 @@ compound_stmt_rule(Parser *p)
(with_stmt_var = with_stmt_rule(p)) // with_stmt
)
{
- D(fprintf(stderr, "%*c+ compound_stmt[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "&('with' | ASYNC) with_stmt"));
+ D(fprintf(stderr, "%*c+ compound_stmt[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "&('with' | 'async') with_stmt"));
_res = with_stmt_var;
goto done;
}
p->mark = _mark;
D(fprintf(stderr, "%*c%s compound_stmt[%d-%d]: %s failed!\n", p->level, ' ',
- p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "&('with' | ASYNC) with_stmt"));
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "&('with' | 'async') with_stmt"));
}
- { // &('for' | ASYNC) for_stmt
+ { // &('for' | 'async') for_stmt
if (p->error_indicator) {
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> compound_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "&('for' | ASYNC) for_stmt"));
+ D(fprintf(stderr, "%*c> compound_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "&('for' | 'async') for_stmt"));
stmt_ty for_stmt_var;
if (
_PyPegen_lookahead(1, _tmp_11_rule, p)
@@ -2205,13 +2207,13 @@ compound_stmt_rule(Parser *p)
(for_stmt_var = for_stmt_rule(p)) // for_stmt
)
{
- D(fprintf(stderr, "%*c+ compound_stmt[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "&('for' | ASYNC) for_stmt"));
+ D(fprintf(stderr, "%*c+ compound_stmt[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "&('for' | 'async') for_stmt"));
_res = for_stmt_var;
goto done;
}
p->mark = _mark;
D(fprintf(stderr, "%*c%s compound_stmt[%d-%d]: %s failed!\n", p->level, ' ',
- p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "&('for' | ASYNC) for_stmt"));
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "&('for' | 'async') for_stmt"));
}
{ // &'try' try_stmt
if (p->error_indicator) {
@@ -2221,7 +2223,7 @@ compound_stmt_rule(Parser *p)
D(fprintf(stderr, "%*c> compound_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "&'try' try_stmt"));
stmt_ty try_stmt_var;
if (
- _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 624) // token='try'
+ _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 638) // token='try'
&&
(try_stmt_var = try_stmt_rule(p)) // try_stmt
)
@@ -2242,7 +2244,7 @@ compound_stmt_rule(Parser *p)
D(fprintf(stderr, "%*c> compound_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "&'while' while_stmt"));
stmt_ty while_stmt_var;
if (
- _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 647) // token='while'
+ _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 661) // token='while'
&&
(while_stmt_var = while_stmt_rule(p)) // while_stmt
)
@@ -2939,7 +2941,7 @@ return_stmt_rule(Parser *p)
Token * _keyword;
void *a;
if (
- (_keyword = _PyPegen_expect_token(p, 519)) // token='return'
+ (_keyword = _PyPegen_expect_token(p, 522)) // token='return'
&&
(a = star_expressions_rule(p), !p->error_indicator) // star_expressions?
)
@@ -3005,7 +3007,7 @@ raise_stmt_rule(Parser *p)
expr_ty a;
void *b;
if (
- (_keyword = _PyPegen_expect_token(p, 522)) // token='raise'
+ (_keyword = _PyPegen_expect_token(p, 525)) // token='raise'
&&
(a = expression_rule(p)) // expression
&&
@@ -3042,7 +3044,7 @@ raise_stmt_rule(Parser *p)
D(fprintf(stderr, "%*c> raise_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'raise'"));
Token * _keyword;
if (
- (_keyword = _PyPegen_expect_token(p, 522)) // token='raise'
+ (_keyword = _PyPegen_expect_token(p, 525)) // token='raise'
)
{
D(fprintf(stderr, "%*c+ raise_stmt[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'raise'"));
@@ -3105,7 +3107,7 @@ global_stmt_rule(Parser *p)
Token * _keyword;
asdl_expr_seq* a;
if (
- (_keyword = _PyPegen_expect_token(p, 523)) // token='global'
+ (_keyword = _PyPegen_expect_token(p, 526)) // token='global'
&&
(a = (asdl_expr_seq*)_gather_19_rule(p)) // ','.NAME+
)
@@ -3170,7 +3172,7 @@ nonlocal_stmt_rule(Parser *p)
Token * _keyword;
asdl_expr_seq* a;
if (
- (_keyword = _PyPegen_expect_token(p, 524)) // token='nonlocal'
+ (_keyword = _PyPegen_expect_token(p, 527)) // token='nonlocal'
&&
(a = (asdl_expr_seq*)_gather_21_rule(p)) // ','.NAME+
)
@@ -3235,7 +3237,7 @@ del_stmt_rule(Parser *p)
Token * _keyword;
asdl_expr_seq* a;
if (
- (_keyword = _PyPegen_expect_token(p, 604)) // token='del'
+ (_keyword = _PyPegen_expect_token(p, 613)) // token='del'
&&
(a = del_targets_rule(p)) // del_targets
&&
@@ -3384,7 +3386,7 @@ assert_stmt_rule(Parser *p)
expr_ty a;
void *b;
if (
- (_keyword = _PyPegen_expect_token(p, 526)) // token='assert'
+ (_keyword = _PyPegen_expect_token(p, 529)) // token='assert'
&&
(a = expression_rule(p)) // expression
&&
@@ -3528,7 +3530,7 @@ import_name_rule(Parser *p)
Token * _keyword;
asdl_alias_seq* a;
if (
- (_keyword = _PyPegen_expect_token(p, 607)) // token='import'
+ (_keyword = _PyPegen_expect_token(p, 617)) // token='import'
&&
(a = dotted_as_names_rule(p)) // dotted_as_names
)
@@ -3598,13 +3600,13 @@ import_from_rule(Parser *p)
expr_ty b;
asdl_alias_seq* c;
if (
- (_keyword = _PyPegen_expect_token(p, 608)) // token='from'
+ (_keyword = _PyPegen_expect_token(p, 618)) // token='from'
&&
(a = _loop0_25_rule(p)) // (('.' | '...'))*
&&
(b = dotted_name_rule(p)) // dotted_name
&&
- (_keyword_1 = _PyPegen_expect_token(p, 607)) // token='import'
+ (_keyword_1 = _PyPegen_expect_token(p, 617)) // token='import'
&&
(c = import_from_targets_rule(p)) // import_from_targets
)
@@ -3642,11 +3644,11 @@ import_from_rule(Parser *p)
asdl_seq * a;
asdl_alias_seq* b;
if (
- (_keyword = _PyPegen_expect_token(p, 608)) // token='from'
+ (_keyword = _PyPegen_expect_token(p, 618)) // token='from'
&&
(a = _loop1_26_rule(p)) // (('.' | '...'))+
&&
- (_keyword_1 = _PyPegen_expect_token(p, 607)) // token='import'
+ (_keyword_1 = _PyPegen_expect_token(p, 617)) // token='import'
&&
(b = import_from_targets_rule(p)) // import_from_targets
)
@@ -4406,7 +4408,7 @@ class_def_raw_rule(Parser *p)
asdl_stmt_seq* c;
void *t;
if (
- (_keyword = _PyPegen_expect_token(p, 654)) // token='class'
+ (_keyword = _PyPegen_expect_token(p, 671)) // token='class'
&&
(a = _PyPegen_name_token(p)) // NAME
&&
@@ -4516,7 +4518,7 @@ function_def_rule(Parser *p)
// function_def_raw:
// | invalid_def_raw
// | 'def' NAME type_params? &&'(' params? ')' ['->' expression] &&':' func_type_comment? block
-// | ASYNC 'def' NAME type_params? &&'(' params? ')' ['->' expression] &&':' func_type_comment? block
+// | 'async' 'def' NAME type_params? &&'(' params? ')' ['->' expression] &&':' func_type_comment? block
static stmt_ty
function_def_raw_rule(Parser *p)
{
@@ -4575,7 +4577,7 @@ function_def_raw_rule(Parser *p)
void *t;
void *tc;
if (
- (_keyword = _PyPegen_expect_token(p, 652)) // token='def'
+ (_keyword = _PyPegen_expect_token(p, 669)) // token='def'
&&
(n = _PyPegen_name_token(p)) // NAME
&&
@@ -4618,27 +4620,27 @@ function_def_raw_rule(Parser *p)
D(fprintf(stderr, "%*c%s function_def_raw[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'def' NAME type_params? &&'(' params? ')' ['->' expression] &&':' func_type_comment? block"));
}
- { // ASYNC 'def' NAME type_params? &&'(' params? ')' ['->' expression] &&':' func_type_comment? block
+ { // 'async' 'def' NAME type_params? &&'(' params? ')' ['->' expression] &&':' func_type_comment? block
if (p->error_indicator) {
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> function_def_raw[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "ASYNC 'def' NAME type_params? &&'(' params? ')' ['->' expression] &&':' func_type_comment? block"));
+ D(fprintf(stderr, "%*c> function_def_raw[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'async' 'def' NAME type_params? &&'(' params? ')' ['->' expression] &&':' func_type_comment? block"));
Token * _keyword;
+ Token * _keyword_1;
Token * _literal;
Token * _literal_1;
Token * _literal_2;
void *a;
- Token * async_var;
asdl_stmt_seq* b;
expr_ty n;
void *params;
void *t;
void *tc;
if (
- (async_var = _PyPegen_expect_token(p, ASYNC)) // token='ASYNC'
+ (_keyword = _PyPegen_expect_token(p, 668)) // token='async'
&&
- (_keyword = _PyPegen_expect_token(p, 652)) // token='def'
+ (_keyword_1 = _PyPegen_expect_token(p, 669)) // token='def'
&&
(n = _PyPegen_name_token(p)) // NAME
&&
@@ -4659,7 +4661,7 @@ function_def_raw_rule(Parser *p)
(b = block_rule(p)) // block
)
{
- D(fprintf(stderr, "%*c+ function_def_raw[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "ASYNC 'def' NAME type_params? &&'(' params? ')' ['->' expression] &&':' func_type_comment? block"));
+ D(fprintf(stderr, "%*c+ function_def_raw[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'async' 'def' NAME type_params? &&'(' params? ')' ['->' expression] &&':' func_type_comment? block"));
Token *_token = _PyPegen_get_last_nonnwhitespace_token(p);
if (_token == NULL) {
p->level--;
@@ -4679,7 +4681,7 @@ function_def_raw_rule(Parser *p)
}
p->mark = _mark;
D(fprintf(stderr, "%*c%s function_def_raw[%d-%d]: %s failed!\n", p->level, ' ',
- p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "ASYNC 'def' NAME type_params? &&'(' params? ')' ['->' expression] &&':' func_type_comment? block"));
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'async' 'def' NAME type_params? &&'(' params? ')' ['->' expression] &&':' func_type_comment? block"));
}
_res = NULL;
done:
@@ -5992,7 +5994,7 @@ if_stmt_rule(Parser *p)
asdl_stmt_seq* b;
stmt_ty c;
if (
- (_keyword = _PyPegen_expect_token(p, 642)) // token='if'
+ (_keyword = _PyPegen_expect_token(p, 656)) // token='if'
&&
(a = named_expression_rule(p)) // named_expression
&&
@@ -6037,7 +6039,7 @@ if_stmt_rule(Parser *p)
asdl_stmt_seq* b;
void *c;
if (
- (_keyword = _PyPegen_expect_token(p, 642)) // token='if'
+ (_keyword = _PyPegen_expect_token(p, 656)) // token='if'
&&
(a = named_expression_rule(p)) // named_expression
&&
@@ -6133,7 +6135,7 @@ elif_stmt_rule(Parser *p)
asdl_stmt_seq* b;
stmt_ty c;
if (
- (_keyword = _PyPegen_expect_token(p, 644)) // token='elif'
+ (_keyword = _PyPegen_expect_token(p, 658)) // token='elif'
&&
(a = named_expression_rule(p)) // named_expression
&&
@@ -6178,7 +6180,7 @@ elif_stmt_rule(Parser *p)
asdl_stmt_seq* b;
void *c;
if (
- (_keyword = _PyPegen_expect_token(p, 644)) // token='elif'
+ (_keyword = _PyPegen_expect_token(p, 658)) // token='elif'
&&
(a = named_expression_rule(p)) // named_expression
&&
@@ -6260,7 +6262,7 @@ else_block_rule(Parser *p)
Token * _literal;
asdl_stmt_seq* b;
if (
- (_keyword = _PyPegen_expect_token(p, 645)) // token='else'
+ (_keyword = _PyPegen_expect_token(p, 659)) // token='else'
&&
(_literal = _PyPegen_expect_forced_token(p, 11, ":")) // forced_token=':'
&&
@@ -6340,7 +6342,7 @@ while_stmt_rule(Parser *p)
asdl_stmt_seq* b;
void *c;
if (
- (_keyword = _PyPegen_expect_token(p, 647)) // token='while'
+ (_keyword = _PyPegen_expect_token(p, 661)) // token='while'
&&
(a = named_expression_rule(p)) // named_expression
&&
@@ -6382,7 +6384,7 @@ while_stmt_rule(Parser *p)
// for_stmt:
// | invalid_for_stmt
// | 'for' star_targets 'in' ~ star_expressions ':' TYPE_COMMENT? block else_block?
-// | ASYNC 'for' star_targets 'in' ~ star_expressions ':' TYPE_COMMENT? block else_block?
+// | 'async' 'for' star_targets 'in' ~ star_expressions ':' TYPE_COMMENT? block else_block?
// | invalid_for_target
static stmt_ty
for_stmt_rule(Parser *p)
@@ -6441,11 +6443,11 @@ for_stmt_rule(Parser *p)
expr_ty t;
void *tc;
if (
- (_keyword = _PyPegen_expect_token(p, 650)) // token='for'
+ (_keyword = _PyPegen_expect_token(p, 666)) // token='for'
&&
(t = star_targets_rule(p)) // star_targets
&&
- (_keyword_1 = _PyPegen_expect_token(p, 651)) // token='in'
+ (_keyword_1 = _PyPegen_expect_token(p, 667)) // token='in'
&&
(_cut_var = 1)
&&
@@ -6486,30 +6488,30 @@ for_stmt_rule(Parser *p)
return NULL;
}
}
- { // ASYNC 'for' star_targets 'in' ~ star_expressions ':' TYPE_COMMENT? block else_block?
+ { // 'async' 'for' star_targets 'in' ~ star_expressions ':' TYPE_COMMENT? block else_block?
if (p->error_indicator) {
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> for_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "ASYNC 'for' star_targets 'in' ~ star_expressions ':' TYPE_COMMENT? block else_block?"));
+ D(fprintf(stderr, "%*c> for_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'async' 'for' star_targets 'in' ~ star_expressions ':' TYPE_COMMENT? block else_block?"));
int _cut_var = 0;
Token * _keyword;
Token * _keyword_1;
+ Token * _keyword_2;
Token * _literal;
- Token * async_var;
asdl_stmt_seq* b;
void *el;
expr_ty ex;
expr_ty t;
void *tc;
if (
- (async_var = _PyPegen_expect_token(p, ASYNC)) // token='ASYNC'
+ (_keyword = _PyPegen_expect_token(p, 668)) // token='async'
&&
- (_keyword = _PyPegen_expect_token(p, 650)) // token='for'
+ (_keyword_1 = _PyPegen_expect_token(p, 666)) // token='for'
&&
(t = star_targets_rule(p)) // star_targets
&&
- (_keyword_1 = _PyPegen_expect_token(p, 651)) // token='in'
+ (_keyword_2 = _PyPegen_expect_token(p, 667)) // token='in'
&&
(_cut_var = 1)
&&
@@ -6524,7 +6526,7 @@ for_stmt_rule(Parser *p)
(el = else_block_rule(p), !p->error_indicator) // else_block?
)
{
- D(fprintf(stderr, "%*c+ for_stmt[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "ASYNC 'for' star_targets 'in' ~ star_expressions ':' TYPE_COMMENT? block else_block?"));
+ D(fprintf(stderr, "%*c+ for_stmt[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'async' 'for' star_targets 'in' ~ star_expressions ':' TYPE_COMMENT? block else_block?"));
Token *_token = _PyPegen_get_last_nonnwhitespace_token(p);
if (_token == NULL) {
p->level--;
@@ -6544,7 +6546,7 @@ for_stmt_rule(Parser *p)
}
p->mark = _mark;
D(fprintf(stderr, "%*c%s for_stmt[%d-%d]: %s failed!\n", p->level, ' ',
- p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "ASYNC 'for' star_targets 'in' ~ star_expressions ':' TYPE_COMMENT? block else_block?"));
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'async' 'for' star_targets 'in' ~ star_expressions ':' TYPE_COMMENT? block else_block?"));
if (_cut_var) {
p->level--;
return NULL;
@@ -6579,8 +6581,8 @@ for_stmt_rule(Parser *p)
// | invalid_with_stmt_indent
// | 'with' '(' ','.with_item+ ','? ')' ':' block
// | 'with' ','.with_item+ ':' TYPE_COMMENT? block
-// | ASYNC 'with' '(' ','.with_item+ ','? ')' ':' block
-// | ASYNC 'with' ','.with_item+ ':' TYPE_COMMENT? block
+// | 'async' 'with' '(' ','.with_item+ ','? ')' ':' block
+// | 'async' 'with' ','.with_item+ ':' TYPE_COMMENT? block
// | invalid_with_stmt
static stmt_ty
with_stmt_rule(Parser *p)
@@ -6638,7 +6640,7 @@ with_stmt_rule(Parser *p)
asdl_withitem_seq* a;
asdl_stmt_seq* b;
if (
- (_keyword = _PyPegen_expect_token(p, 615)) // token='with'
+ (_keyword = _PyPegen_expect_token(p, 629)) // token='with'
&&
(_literal = _PyPegen_expect_token(p, 7)) // token='('
&&
@@ -6687,7 +6689,7 @@ with_stmt_rule(Parser *p)
asdl_stmt_seq* b;
void *tc;
if (
- (_keyword = _PyPegen_expect_token(p, 615)) // token='with'
+ (_keyword = _PyPegen_expect_token(p, 629)) // token='with'
&&
(a = (asdl_withitem_seq*)_gather_54_rule(p)) // ','.with_item+
&&
@@ -6720,25 +6722,25 @@ with_stmt_rule(Parser *p)
D(fprintf(stderr, "%*c%s with_stmt[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'with' ','.with_item+ ':' TYPE_COMMENT? block"));
}
- { // ASYNC 'with' '(' ','.with_item+ ','? ')' ':' block
+ { // 'async' 'with' '(' ','.with_item+ ','? ')' ':' block
if (p->error_indicator) {
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> with_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "ASYNC 'with' '(' ','.with_item+ ','? ')' ':' block"));
+ D(fprintf(stderr, "%*c> with_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'async' 'with' '(' ','.with_item+ ','? ')' ':' block"));
Token * _keyword;
+ Token * _keyword_1;
Token * _literal;
Token * _literal_1;
Token * _literal_2;
void *_opt_var;
UNUSED(_opt_var); // Silence compiler warnings
asdl_withitem_seq* a;
- Token * async_var;
asdl_stmt_seq* b;
if (
- (async_var = _PyPegen_expect_token(p, ASYNC)) // token='ASYNC'
+ (_keyword = _PyPegen_expect_token(p, 668)) // token='async'
&&
- (_keyword = _PyPegen_expect_token(p, 615)) // token='with'
+ (_keyword_1 = _PyPegen_expect_token(p, 629)) // token='with'
&&
(_literal = _PyPegen_expect_token(p, 7)) // token='('
&&
@@ -6753,7 +6755,7 @@ with_stmt_rule(Parser *p)
(b = block_rule(p)) // block
)
{
- D(fprintf(stderr, "%*c+ with_stmt[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "ASYNC 'with' '(' ','.with_item+ ','? ')' ':' block"));
+ D(fprintf(stderr, "%*c+ with_stmt[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'async' 'with' '(' ','.with_item+ ','? ')' ':' block"));
Token *_token = _PyPegen_get_last_nonnwhitespace_token(p);
if (_token == NULL) {
p->level--;
@@ -6773,24 +6775,24 @@ with_stmt_rule(Parser *p)
}
p->mark = _mark;
D(fprintf(stderr, "%*c%s with_stmt[%d-%d]: %s failed!\n", p->level, ' ',
- p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "ASYNC 'with' '(' ','.with_item+ ','? ')' ':' block"));
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'async' 'with' '(' ','.with_item+ ','? ')' ':' block"));
}
- { // ASYNC 'with' ','.with_item+ ':' TYPE_COMMENT? block
+ { // 'async' 'with' ','.with_item+ ':' TYPE_COMMENT? block
if (p->error_indicator) {
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> with_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "ASYNC 'with' ','.with_item+ ':' TYPE_COMMENT? block"));
+ D(fprintf(stderr, "%*c> with_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'async' 'with' ','.with_item+ ':' TYPE_COMMENT? block"));
Token * _keyword;
+ Token * _keyword_1;
Token * _literal;
asdl_withitem_seq* a;
- Token * async_var;
asdl_stmt_seq* b;
void *tc;
if (
- (async_var = _PyPegen_expect_token(p, ASYNC)) // token='ASYNC'
+ (_keyword = _PyPegen_expect_token(p, 668)) // token='async'
&&
- (_keyword = _PyPegen_expect_token(p, 615)) // token='with'
+ (_keyword_1 = _PyPegen_expect_token(p, 629)) // token='with'
&&
(a = (asdl_withitem_seq*)_gather_58_rule(p)) // ','.with_item+
&&
@@ -6801,7 +6803,7 @@ with_stmt_rule(Parser *p)
(b = block_rule(p)) // block
)
{
- D(fprintf(stderr, "%*c+ with_stmt[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "ASYNC 'with' ','.with_item+ ':' TYPE_COMMENT? block"));
+ D(fprintf(stderr, "%*c+ with_stmt[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'async' 'with' ','.with_item+ ':' TYPE_COMMENT? block"));
Token *_token = _PyPegen_get_last_nonnwhitespace_token(p);
if (_token == NULL) {
p->level--;
@@ -6821,7 +6823,7 @@ with_stmt_rule(Parser *p)
}
p->mark = _mark;
D(fprintf(stderr, "%*c%s with_stmt[%d-%d]: %s failed!\n", p->level, ' ',
- p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "ASYNC 'with' ','.with_item+ ':' TYPE_COMMENT? block"));
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'async' 'with' ','.with_item+ ':' TYPE_COMMENT? block"));
}
if (p->call_invalid_rules) { // invalid_with_stmt
if (p->error_indicator) {
@@ -6877,7 +6879,7 @@ with_item_rule(Parser *p)
if (
(e = expression_rule(p)) // expression
&&
- (_keyword = _PyPegen_expect_token(p, 640)) // token='as'
+ (_keyword = _PyPegen_expect_token(p, 654)) // token='as'
&&
(t = star_target_rule(p)) // star_target
&&
@@ -7003,7 +7005,7 @@ try_stmt_rule(Parser *p)
asdl_stmt_seq* b;
asdl_stmt_seq* f;
if (
- (_keyword = _PyPegen_expect_token(p, 624)) // token='try'
+ (_keyword = _PyPegen_expect_token(p, 638)) // token='try'
&&
(_literal = _PyPegen_expect_forced_token(p, 11, ":")) // forced_token=':'
&&
@@ -7047,7 +7049,7 @@ try_stmt_rule(Parser *p)
asdl_excepthandler_seq* ex;
void *f;
if (
- (_keyword = _PyPegen_expect_token(p, 624)) // token='try'
+ (_keyword = _PyPegen_expect_token(p, 638)) // token='try'
&&
(_literal = _PyPegen_expect_forced_token(p, 11, ":")) // forced_token=':'
&&
@@ -7095,7 +7097,7 @@ try_stmt_rule(Parser *p)
asdl_excepthandler_seq* ex;
void *f;
if (
- (_keyword = _PyPegen_expect_token(p, 624)) // token='try'
+ (_keyword = _PyPegen_expect_token(p, 638)) // token='try'
&&
(_literal = _PyPegen_expect_forced_token(p, 11, ":")) // forced_token=':'
&&
@@ -7194,7 +7196,7 @@ except_block_rule(Parser *p)
expr_ty e;
void *t;
if (
- (_keyword = _PyPegen_expect_token(p, 637)) // token='except'
+ (_keyword = _PyPegen_expect_token(p, 651)) // token='except'
&&
(e = expression_rule(p)) // expression
&&
@@ -7237,7 +7239,7 @@ except_block_rule(Parser *p)
Token * _literal;
asdl_stmt_seq* b;
if (
- (_keyword = _PyPegen_expect_token(p, 637)) // token='except'
+ (_keyword = _PyPegen_expect_token(p, 651)) // token='except'
&&
(_literal = _PyPegen_expect_token(p, 11)) // token=':'
&&
@@ -7349,7 +7351,7 @@ except_star_block_rule(Parser *p)
expr_ty e;
void *t;
if (
- (_keyword = _PyPegen_expect_token(p, 637)) // token='except'
+ (_keyword = _PyPegen_expect_token(p, 651)) // token='except'
&&
(_literal = _PyPegen_expect_token(p, 16)) // token='*'
&&
@@ -7452,7 +7454,7 @@ finally_block_rule(Parser *p)
Token * _literal;
asdl_stmt_seq* a;
if (
- (_keyword = _PyPegen_expect_token(p, 633)) // token='finally'
+ (_keyword = _PyPegen_expect_token(p, 647)) // token='finally'
&&
(_literal = _PyPegen_expect_forced_token(p, 11, ":")) // forced_token=':'
&&
@@ -7764,7 +7766,7 @@ guard_rule(Parser *p)
Token * _keyword;
expr_ty guard;
if (
- (_keyword = _PyPegen_expect_token(p, 642)) // token='if'
+ (_keyword = _PyPegen_expect_token(p, 656)) // token='if'
&&
(guard = named_expression_rule(p)) // named_expression
)
@@ -7962,7 +7964,7 @@ as_pattern_rule(Parser *p)
if (
(pattern = or_pattern_rule(p)) // or_pattern
&&
- (_keyword = _PyPegen_expect_token(p, 640)) // token='as'
+ (_keyword = _PyPegen_expect_token(p, 654)) // token='as'
&&
(target = pattern_capture_target_rule(p)) // pattern_capture_target
)
@@ -8399,7 +8401,7 @@ literal_pattern_rule(Parser *p)
D(fprintf(stderr, "%*c> literal_pattern[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'None'"));
Token * _keyword;
if (
- (_keyword = _PyPegen_expect_token(p, 602)) // token='None'
+ (_keyword = _PyPegen_expect_token(p, 611)) // token='None'
)
{
D(fprintf(stderr, "%*c+ literal_pattern[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'None'"));
@@ -8432,7 +8434,7 @@ literal_pattern_rule(Parser *p)
D(fprintf(stderr, "%*c> literal_pattern[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'True'"));
Token * _keyword;
if (
- (_keyword = _PyPegen_expect_token(p, 601)) // token='True'
+ (_keyword = _PyPegen_expect_token(p, 610)) // token='True'
)
{
D(fprintf(stderr, "%*c+ literal_pattern[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'True'"));
@@ -8465,7 +8467,7 @@ literal_pattern_rule(Parser *p)
D(fprintf(stderr, "%*c> literal_pattern[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'False'"));
Token * _keyword;
if (
- (_keyword = _PyPegen_expect_token(p, 603)) // token='False'
+ (_keyword = _PyPegen_expect_token(p, 612)) // token='False'
)
{
D(fprintf(stderr, "%*c+ literal_pattern[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'False'"));
@@ -8592,7 +8594,7 @@ literal_expr_rule(Parser *p)
D(fprintf(stderr, "%*c> literal_expr[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'None'"));
Token * _keyword;
if (
- (_keyword = _PyPegen_expect_token(p, 602)) // token='None'
+ (_keyword = _PyPegen_expect_token(p, 611)) // token='None'
)
{
D(fprintf(stderr, "%*c+ literal_expr[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'None'"));
@@ -8625,7 +8627,7 @@ literal_expr_rule(Parser *p)
D(fprintf(stderr, "%*c> literal_expr[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'True'"));
Token * _keyword;
if (
- (_keyword = _PyPegen_expect_token(p, 601)) // token='True'
+ (_keyword = _PyPegen_expect_token(p, 610)) // token='True'
)
{
D(fprintf(stderr, "%*c+ literal_expr[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'True'"));
@@ -8658,7 +8660,7 @@ literal_expr_rule(Parser *p)
D(fprintf(stderr, "%*c> literal_expr[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'False'"));
Token * _keyword;
if (
- (_keyword = _PyPegen_expect_token(p, 603)) // token='False'
+ (_keyword = _PyPegen_expect_token(p, 612)) // token='False'
)
{
D(fprintf(stderr, "%*c+ literal_expr[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'False'"));
@@ -11222,11 +11224,11 @@ expression_rule(Parser *p)
if (
(a = disjunction_rule(p)) // disjunction
&&
- (_keyword = _PyPegen_expect_token(p, 642)) // token='if'
+ (_keyword = _PyPegen_expect_token(p, 656)) // token='if'
&&
(b = disjunction_rule(p)) // disjunction
&&
- (_keyword_1 = _PyPegen_expect_token(p, 645)) // token='else'
+ (_keyword_1 = _PyPegen_expect_token(p, 659)) // token='else'
&&
(c = expression_rule(p)) // expression
)
@@ -11331,9 +11333,9 @@ yield_expr_rule(Parser *p)
Token * _keyword_1;
expr_ty a;
if (
- (_keyword = _PyPegen_expect_token(p, 573)) // token='yield'
+ (_keyword = _PyPegen_expect_token(p, 580)) // token='yield'
&&
- (_keyword_1 = _PyPegen_expect_token(p, 608)) // token='from'
+ (_keyword_1 = _PyPegen_expect_token(p, 618)) // token='from'
&&
(a = expression_rule(p)) // expression
)
@@ -11369,7 +11371,7 @@ yield_expr_rule(Parser *p)
Token * _keyword;
void *a;
if (
- (_keyword = _PyPegen_expect_token(p, 573)) // token='yield'
+ (_keyword = _PyPegen_expect_token(p, 580)) // token='yield'
&&
(a = star_expressions_rule(p), !p->error_indicator) // star_expressions?
)
@@ -12118,7 +12120,7 @@ inversion_rule(Parser *p)
Token * _keyword;
expr_ty a;
if (
- (_keyword = _PyPegen_expect_token(p, 581)) // token='not'
+ (_keyword = _PyPegen_expect_token(p, 588)) // token='not'
&&
(a = inversion_rule(p)) // inversion
)
@@ -12781,9 +12783,9 @@ notin_bitwise_or_rule(Parser *p)
Token * _keyword_1;
expr_ty a;
if (
- (_keyword = _PyPegen_expect_token(p, 581)) // token='not'
+ (_keyword = _PyPegen_expect_token(p, 588)) // token='not'
&&
- (_keyword_1 = _PyPegen_expect_token(p, 651)) // token='in'
+ (_keyword_1 = _PyPegen_expect_token(p, 667)) // token='in'
&&
(a = bitwise_or_rule(p)) // bitwise_or
)
@@ -12830,7 +12832,7 @@ in_bitwise_or_rule(Parser *p)
Token * _keyword;
expr_ty a;
if (
- (_keyword = _PyPegen_expect_token(p, 651)) // token='in'
+ (_keyword = _PyPegen_expect_token(p, 667)) // token='in'
&&
(a = bitwise_or_rule(p)) // bitwise_or
)
@@ -12878,9 +12880,9 @@ isnot_bitwise_or_rule(Parser *p)
Token * _keyword_1;
expr_ty a;
if (
- (_keyword = _PyPegen_expect_token(p, 582)) // token='is'
+ (_keyword = _PyPegen_expect_token(p, 589)) // token='is'
&&
- (_keyword_1 = _PyPegen_expect_token(p, 581)) // token='not'
+ (_keyword_1 = _PyPegen_expect_token(p, 588)) // token='not'
&&
(a = bitwise_or_rule(p)) // bitwise_or
)
@@ -12927,7 +12929,7 @@ is_bitwise_or_rule(Parser *p)
Token * _keyword;
expr_ty a;
if (
- (_keyword = _PyPegen_expect_token(p, 582)) // token='is'
+ (_keyword = _PyPegen_expect_token(p, 589)) // token='is'
&&
(a = bitwise_or_rule(p)) // bitwise_or
)
@@ -14183,7 +14185,7 @@ power_rule(Parser *p)
return _res;
}
-// await_primary: AWAIT primary | primary
+// await_primary: 'await' primary | primary
static expr_ty
await_primary_rule(Parser *p)
{
@@ -14210,21 +14212,21 @@ await_primary_rule(Parser *p)
UNUSED(_start_lineno); // Only used by EXTRA macro
int _start_col_offset = p->tokens[_mark]->col_offset;
UNUSED(_start_col_offset); // Only used by EXTRA macro
- { // AWAIT primary
+ { // 'await' primary
if (p->error_indicator) {
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> await_primary[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "AWAIT primary"));
+ D(fprintf(stderr, "%*c> await_primary[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'await' primary"));
+ Token * _keyword;
expr_ty a;
- Token * await_var;
if (
- (await_var = _PyPegen_expect_token(p, AWAIT)) // token='AWAIT'
+ (_keyword = _PyPegen_expect_token(p, 590)) // token='await'
&&
(a = primary_rule(p)) // primary
)
{
- D(fprintf(stderr, "%*c+ await_primary[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "AWAIT primary"));
+ D(fprintf(stderr, "%*c+ await_primary[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'await' primary"));
Token *_token = _PyPegen_get_last_nonnwhitespace_token(p);
if (_token == NULL) {
p->level--;
@@ -14244,7 +14246,7 @@ await_primary_rule(Parser *p)
}
p->mark = _mark;
D(fprintf(stderr, "%*c%s await_primary[%d-%d]: %s failed!\n", p->level, ' ',
- p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "AWAIT primary"));
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'await' primary"));
}
{ // primary
if (p->error_indicator) {
@@ -14768,7 +14770,7 @@ atom_rule(Parser *p)
D(fprintf(stderr, "%*c> atom[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'True'"));
Token * _keyword;
if (
- (_keyword = _PyPegen_expect_token(p, 601)) // token='True'
+ (_keyword = _PyPegen_expect_token(p, 610)) // token='True'
)
{
D(fprintf(stderr, "%*c+ atom[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'True'"));
@@ -14801,7 +14803,7 @@ atom_rule(Parser *p)
D(fprintf(stderr, "%*c> atom[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'False'"));
Token * _keyword;
if (
- (_keyword = _PyPegen_expect_token(p, 603)) // token='False'
+ (_keyword = _PyPegen_expect_token(p, 612)) // token='False'
)
{
D(fprintf(stderr, "%*c+ atom[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'False'"));
@@ -14834,7 +14836,7 @@ atom_rule(Parser *p)
D(fprintf(stderr, "%*c> atom[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'None'"));
Token * _keyword;
if (
- (_keyword = _PyPegen_expect_token(p, 602)) // token='None'
+ (_keyword = _PyPegen_expect_token(p, 611)) // token='None'
)
{
D(fprintf(stderr, "%*c+ atom[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'None'"));
@@ -15104,7 +15106,7 @@ lambdef_rule(Parser *p)
void *a;
expr_ty b;
if (
- (_keyword = _PyPegen_expect_token(p, 600)) // token='lambda'
+ (_keyword = _PyPegen_expect_token(p, 609)) // token='lambda'
&&
(a = lambda_params_rule(p), !p->error_indicator) // lambda_params?
&&
@@ -16981,7 +16983,7 @@ for_if_clauses_rule(Parser *p)
}
// for_if_clause:
-// | ASYNC 'for' star_targets 'in' ~ disjunction (('if' disjunction))*
+// | 'async' 'for' star_targets 'in' ~ disjunction (('if' disjunction))*
// | 'for' star_targets 'in' ~ disjunction (('if' disjunction))*
// | invalid_for_target
static comprehension_ty
@@ -16997,27 +16999,27 @@ for_if_clause_rule(Parser *p)
}
comprehension_ty _res = NULL;
int _mark = p->mark;
- { // ASYNC 'for' star_targets 'in' ~ disjunction (('if' disjunction))*
+ { // 'async' 'for' star_targets 'in' ~ disjunction (('if' disjunction))*
if (p->error_indicator) {
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> for_if_clause[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "ASYNC 'for' star_targets 'in' ~ disjunction (('if' disjunction))*"));
+ D(fprintf(stderr, "%*c> for_if_clause[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'async' 'for' star_targets 'in' ~ disjunction (('if' disjunction))*"));
int _cut_var = 0;
Token * _keyword;
Token * _keyword_1;
+ Token * _keyword_2;
expr_ty a;
- Token * async_var;
expr_ty b;
asdl_expr_seq* c;
if (
- (async_var = _PyPegen_expect_token(p, ASYNC)) // token='ASYNC'
+ (_keyword = _PyPegen_expect_token(p, 668)) // token='async'
&&
- (_keyword = _PyPegen_expect_token(p, 650)) // token='for'
+ (_keyword_1 = _PyPegen_expect_token(p, 666)) // token='for'
&&
(a = star_targets_rule(p)) // star_targets
&&
- (_keyword_1 = _PyPegen_expect_token(p, 651)) // token='in'
+ (_keyword_2 = _PyPegen_expect_token(p, 667)) // token='in'
&&
(_cut_var = 1)
&&
@@ -17026,7 +17028,7 @@ for_if_clause_rule(Parser *p)
(c = (asdl_expr_seq*)_loop0_120_rule(p)) // (('if' disjunction))*
)
{
- D(fprintf(stderr, "%*c+ for_if_clause[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "ASYNC 'for' star_targets 'in' ~ disjunction (('if' disjunction))*"));
+ D(fprintf(stderr, "%*c+ for_if_clause[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'async' 'for' star_targets 'in' ~ disjunction (('if' disjunction))*"));
_res = CHECK_VERSION ( comprehension_ty , 6 , "Async comprehensions are" , _PyAST_comprehension ( a , b , c , 1 , p -> arena ) );
if (_res == NULL && PyErr_Occurred()) {
p->error_indicator = 1;
@@ -17037,7 +17039,7 @@ for_if_clause_rule(Parser *p)
}
p->mark = _mark;
D(fprintf(stderr, "%*c%s for_if_clause[%d-%d]: %s failed!\n", p->level, ' ',
- p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "ASYNC 'for' star_targets 'in' ~ disjunction (('if' disjunction))*"));
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'async' 'for' star_targets 'in' ~ disjunction (('if' disjunction))*"));
if (_cut_var) {
p->level--;
return NULL;
@@ -17056,11 +17058,11 @@ for_if_clause_rule(Parser *p)
expr_ty b;
asdl_expr_seq* c;
if (
- (_keyword = _PyPegen_expect_token(p, 650)) // token='for'
+ (_keyword = _PyPegen_expect_token(p, 666)) // token='for'
&&
(a = star_targets_rule(p)) // star_targets
&&
- (_keyword_1 = _PyPegen_expect_token(p, 651)) // token='in'
+ (_keyword_1 = _PyPegen_expect_token(p, 667)) // token='in'
&&
(_cut_var = 1)
&&
@@ -20349,11 +20351,11 @@ expression_without_invalid_rule(Parser *p)
if (
(a = disjunction_rule(p)) // disjunction
&&
- (_keyword = _PyPegen_expect_token(p, 642)) // token='if'
+ (_keyword = _PyPegen_expect_token(p, 656)) // token='if'
&&
(b = disjunction_rule(p)) // disjunction
&&
- (_keyword_1 = _PyPegen_expect_token(p, 645)) // token='else'
+ (_keyword_1 = _PyPegen_expect_token(p, 659)) // token='else'
&&
(c = expression_rule(p)) // expression
)
@@ -20536,7 +20538,7 @@ invalid_expression_rule(Parser *p)
if (
(a = disjunction_rule(p)) // disjunction
&&
- (_keyword = _PyPegen_expect_token(p, 642)) // token='if'
+ (_keyword = _PyPegen_expect_token(p, 656)) // token='if'
&&
(b = disjunction_rule(p)) // disjunction
&&
@@ -20567,7 +20569,7 @@ invalid_expression_rule(Parser *p)
Token * a;
Token * b;
if (
- (a = _PyPegen_expect_token(p, 600)) // token='lambda'
+ (a = _PyPegen_expect_token(p, 609)) // token='lambda'
&&
(_opt_var = lambda_params_rule(p), !p->error_indicator) // lambda_params?
&&
@@ -21042,7 +21044,7 @@ invalid_del_stmt_rule(Parser *p)
Token * _keyword;
expr_ty a;
if (
- (_keyword = _PyPegen_expect_token(p, 604)) // token='del'
+ (_keyword = _PyPegen_expect_token(p, 613)) // token='del'
&&
(a = star_expressions_rule(p)) // star_expressions
)
@@ -22492,7 +22494,7 @@ invalid_with_item_rule(Parser *p)
if (
(expression_var = expression_rule(p)) // expression
&&
- (_keyword = _PyPegen_expect_token(p, 640)) // token='as'
+ (_keyword = _PyPegen_expect_token(p, 654)) // token='as'
&&
(a = expression_rule(p)) // expression
&&
@@ -22518,7 +22520,7 @@ invalid_with_item_rule(Parser *p)
return _res;
}
-// invalid_for_target: ASYNC? 'for' star_expressions
+// invalid_for_target: 'async'? 'for' star_expressions
static void *
invalid_for_target_rule(Parser *p)
{
@@ -22532,25 +22534,25 @@ invalid_for_target_rule(Parser *p)
}
void * _res = NULL;
int _mark = p->mark;
- { // ASYNC? 'for' star_expressions
+ { // 'async'? 'for' star_expressions
if (p->error_indicator) {
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> invalid_for_target[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "ASYNC? 'for' star_expressions"));
+ D(fprintf(stderr, "%*c> invalid_for_target[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'async'? 'for' star_expressions"));
Token * _keyword;
void *_opt_var;
UNUSED(_opt_var); // Silence compiler warnings
expr_ty a;
if (
- (_opt_var = _PyPegen_expect_token(p, ASYNC), !p->error_indicator) // ASYNC?
+ (_opt_var = _PyPegen_expect_token(p, 668), !p->error_indicator) // 'async'?
&&
- (_keyword = _PyPegen_expect_token(p, 650)) // token='for'
+ (_keyword = _PyPegen_expect_token(p, 666)) // token='for'
&&
(a = star_expressions_rule(p)) // star_expressions
)
{
- D(fprintf(stderr, "%*c+ invalid_for_target[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "ASYNC? 'for' star_expressions"));
+ D(fprintf(stderr, "%*c+ invalid_for_target[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'async'? 'for' star_expressions"));
_res = RAISE_SYNTAX_ERROR_INVALID_TARGET ( FOR_TARGETS , a );
if (_res == NULL && PyErr_Occurred()) {
p->error_indicator = 1;
@@ -22561,7 +22563,7 @@ invalid_for_target_rule(Parser *p)
}
p->mark = _mark;
D(fprintf(stderr, "%*c%s invalid_for_target[%d-%d]: %s failed!\n", p->level, ' ',
- p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "ASYNC? 'for' star_expressions"));
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'async'? 'for' star_expressions"));
}
_res = NULL;
done:
@@ -22677,11 +22679,11 @@ invalid_import_rule(Parser *p)
Token * a;
expr_ty dotted_name_var;
if (
- (a = _PyPegen_expect_token(p, 607)) // token='import'
+ (a = _PyPegen_expect_token(p, 617)) // token='import'
&&
(_gather_203_var = _gather_203_rule(p)) // ','.dotted_name+
&&
- (_keyword = _PyPegen_expect_token(p, 608)) // token='from'
+ (_keyword = _PyPegen_expect_token(p, 618)) // token='from'
&&
(dotted_name_var = dotted_name_rule(p)) // dotted_name
)
@@ -22756,8 +22758,8 @@ invalid_import_from_targets_rule(Parser *p)
}
// invalid_with_stmt:
-// | ASYNC? 'with' ','.(expression ['as' star_target])+ NEWLINE
-// | ASYNC? 'with' '(' ','.(expressions ['as' star_target])+ ','? ')' NEWLINE
+// | 'async'? 'with' ','.(expression ['as' star_target])+ NEWLINE
+// | 'async'? 'with' '(' ','.(expressions ['as' star_target])+ ','? ')' NEWLINE
static void *
invalid_with_stmt_rule(Parser *p)
{
@@ -22771,28 +22773,28 @@ invalid_with_stmt_rule(Parser *p)
}
void * _res = NULL;
int _mark = p->mark;
- { // ASYNC? 'with' ','.(expression ['as' star_target])+ NEWLINE
+ { // 'async'? 'with' ','.(expression ['as' star_target])+ NEWLINE
if (p->error_indicator) {
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> invalid_with_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "ASYNC? 'with' ','.(expression ['as' star_target])+ NEWLINE"));
+ D(fprintf(stderr, "%*c> invalid_with_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'async'? 'with' ','.(expression ['as' star_target])+ NEWLINE"));
asdl_seq * _gather_205_var;
Token * _keyword;
void *_opt_var;
UNUSED(_opt_var); // Silence compiler warnings
Token * newline_var;
if (
- (_opt_var = _PyPegen_expect_token(p, ASYNC), !p->error_indicator) // ASYNC?
+ (_opt_var = _PyPegen_expect_token(p, 668), !p->error_indicator) // 'async'?
&&
- (_keyword = _PyPegen_expect_token(p, 615)) // token='with'
+ (_keyword = _PyPegen_expect_token(p, 629)) // token='with'
&&
(_gather_205_var = _gather_205_rule(p)) // ','.(expression ['as' star_target])+
&&
(newline_var = _PyPegen_expect_token(p, NEWLINE)) // token='NEWLINE'
)
{
- D(fprintf(stderr, "%*c+ invalid_with_stmt[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "ASYNC? 'with' ','.(expression ['as' star_target])+ NEWLINE"));
+ D(fprintf(stderr, "%*c+ invalid_with_stmt[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'async'? 'with' ','.(expression ['as' star_target])+ NEWLINE"));
_res = RAISE_SYNTAX_ERROR ( "expected ':'" );
if (_res == NULL && PyErr_Occurred()) {
p->error_indicator = 1;
@@ -22803,14 +22805,14 @@ invalid_with_stmt_rule(Parser *p)
}
p->mark = _mark;
D(fprintf(stderr, "%*c%s invalid_with_stmt[%d-%d]: %s failed!\n", p->level, ' ',
- p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "ASYNC? 'with' ','.(expression ['as' star_target])+ NEWLINE"));
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'async'? 'with' ','.(expression ['as' star_target])+ NEWLINE"));
}
- { // ASYNC? 'with' '(' ','.(expressions ['as' star_target])+ ','? ')' NEWLINE
+ { // 'async'? 'with' '(' ','.(expressions ['as' star_target])+ ','? ')' NEWLINE
if (p->error_indicator) {
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> invalid_with_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "ASYNC? 'with' '(' ','.(expressions ['as' star_target])+ ','? ')' NEWLINE"));
+ D(fprintf(stderr, "%*c> invalid_with_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'async'? 'with' '(' ','.(expressions ['as' star_target])+ ','? ')' NEWLINE"));
asdl_seq * _gather_207_var;
Token * _keyword;
Token * _literal;
@@ -22821,9 +22823,9 @@ invalid_with_stmt_rule(Parser *p)
UNUSED(_opt_var_1); // Silence compiler warnings
Token * newline_var;
if (
- (_opt_var = _PyPegen_expect_token(p, ASYNC), !p->error_indicator) // ASYNC?
+ (_opt_var = _PyPegen_expect_token(p, 668), !p->error_indicator) // 'async'?
&&
- (_keyword = _PyPegen_expect_token(p, 615)) // token='with'
+ (_keyword = _PyPegen_expect_token(p, 629)) // token='with'
&&
(_literal = _PyPegen_expect_token(p, 7)) // token='('
&&
@@ -22836,7 +22838,7 @@ invalid_with_stmt_rule(Parser *p)
(newline_var = _PyPegen_expect_token(p, NEWLINE)) // token='NEWLINE'
)
{
- D(fprintf(stderr, "%*c+ invalid_with_stmt[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "ASYNC? 'with' '(' ','.(expressions ['as' star_target])+ ','? ')' NEWLINE"));
+ D(fprintf(stderr, "%*c+ invalid_with_stmt[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'async'? 'with' '(' ','.(expressions ['as' star_target])+ ','? ')' NEWLINE"));
_res = RAISE_SYNTAX_ERROR ( "expected ':'" );
if (_res == NULL && PyErr_Occurred()) {
p->error_indicator = 1;
@@ -22847,7 +22849,7 @@ invalid_with_stmt_rule(Parser *p)
}
p->mark = _mark;
D(fprintf(stderr, "%*c%s invalid_with_stmt[%d-%d]: %s failed!\n", p->level, ' ',
- p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "ASYNC? 'with' '(' ','.(expressions ['as' star_target])+ ','? ')' NEWLINE"));
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'async'? 'with' '(' ','.(expressions ['as' star_target])+ ','? ')' NEWLINE"));
}
_res = NULL;
done:
@@ -22856,8 +22858,8 @@ invalid_with_stmt_rule(Parser *p)
}
// invalid_with_stmt_indent:
-// | ASYNC? 'with' ','.(expression ['as' star_target])+ ':' NEWLINE !INDENT
-// | ASYNC? 'with' '(' ','.(expressions ['as' star_target])+ ','? ')' ':' NEWLINE !INDENT
+// | 'async'? 'with' ','.(expression ['as' star_target])+ ':' NEWLINE !INDENT
+// | 'async'? 'with' '(' ','.(expressions ['as' star_target])+ ','? ')' ':' NEWLINE !INDENT
static void *
invalid_with_stmt_indent_rule(Parser *p)
{
@@ -22871,12 +22873,12 @@ invalid_with_stmt_indent_rule(Parser *p)
}
void * _res = NULL;
int _mark = p->mark;
- { // ASYNC? 'with' ','.(expression ['as' star_target])+ ':' NEWLINE !INDENT
+ { // 'async'? 'with' ','.(expression ['as' star_target])+ ':' NEWLINE !INDENT
if (p->error_indicator) {
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> invalid_with_stmt_indent[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "ASYNC? 'with' ','.(expression ['as' star_target])+ ':' NEWLINE !INDENT"));
+ D(fprintf(stderr, "%*c> invalid_with_stmt_indent[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'async'? 'with' ','.(expression ['as' star_target])+ ':' NEWLINE !INDENT"));
asdl_seq * _gather_209_var;
Token * _literal;
void *_opt_var;
@@ -22884,9 +22886,9 @@ invalid_with_stmt_indent_rule(Parser *p)
Token * a;
Token * newline_var;
if (
- (_opt_var = _PyPegen_expect_token(p, ASYNC), !p->error_indicator) // ASYNC?
+ (_opt_var = _PyPegen_expect_token(p, 668), !p->error_indicator) // 'async'?
&&
- (a = _PyPegen_expect_token(p, 615)) // token='with'
+ (a = _PyPegen_expect_token(p, 629)) // token='with'
&&
(_gather_209_var = _gather_209_rule(p)) // ','.(expression ['as' star_target])+
&&
@@ -22897,7 +22899,7 @@ invalid_with_stmt_indent_rule(Parser *p)
_PyPegen_lookahead_with_int(0, _PyPegen_expect_token, p, INDENT) // token=INDENT
)
{
- D(fprintf(stderr, "%*c+ invalid_with_stmt_indent[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "ASYNC? 'with' ','.(expression ['as' star_target])+ ':' NEWLINE !INDENT"));
+ D(fprintf(stderr, "%*c+ invalid_with_stmt_indent[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'async'? 'with' ','.(expression ['as' star_target])+ ':' NEWLINE !INDENT"));
_res = RAISE_INDENTATION_ERROR ( "expected an indented block after 'with' statement on line %d" , a -> lineno );
if (_res == NULL && PyErr_Occurred()) {
p->error_indicator = 1;
@@ -22908,14 +22910,14 @@ invalid_with_stmt_indent_rule(Parser *p)
}
p->mark = _mark;
D(fprintf(stderr, "%*c%s invalid_with_stmt_indent[%d-%d]: %s failed!\n", p->level, ' ',
- p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "ASYNC? 'with' ','.(expression ['as' star_target])+ ':' NEWLINE !INDENT"));
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'async'? 'with' ','.(expression ['as' star_target])+ ':' NEWLINE !INDENT"));
}
- { // ASYNC? 'with' '(' ','.(expressions ['as' star_target])+ ','? ')' ':' NEWLINE !INDENT
+ { // 'async'? 'with' '(' ','.(expressions ['as' star_target])+ ','? ')' ':' NEWLINE !INDENT
if (p->error_indicator) {
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> invalid_with_stmt_indent[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "ASYNC? 'with' '(' ','.(expressions ['as' star_target])+ ','? ')' ':' NEWLINE !INDENT"));
+ D(fprintf(stderr, "%*c> invalid_with_stmt_indent[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'async'? 'with' '(' ','.(expressions ['as' star_target])+ ','? ')' ':' NEWLINE !INDENT"));
asdl_seq * _gather_211_var;
Token * _literal;
Token * _literal_1;
@@ -22927,9 +22929,9 @@ invalid_with_stmt_indent_rule(Parser *p)
Token * a;
Token * newline_var;
if (
- (_opt_var = _PyPegen_expect_token(p, ASYNC), !p->error_indicator) // ASYNC?
+ (_opt_var = _PyPegen_expect_token(p, 668), !p->error_indicator) // 'async'?
&&
- (a = _PyPegen_expect_token(p, 615)) // token='with'
+ (a = _PyPegen_expect_token(p, 629)) // token='with'
&&
(_literal = _PyPegen_expect_token(p, 7)) // token='('
&&
@@ -22946,7 +22948,7 @@ invalid_with_stmt_indent_rule(Parser *p)
_PyPegen_lookahead_with_int(0, _PyPegen_expect_token, p, INDENT) // token=INDENT
)
{
- D(fprintf(stderr, "%*c+ invalid_with_stmt_indent[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "ASYNC? 'with' '(' ','.(expressions ['as' star_target])+ ','? ')' ':' NEWLINE !INDENT"));
+ D(fprintf(stderr, "%*c+ invalid_with_stmt_indent[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'async'? 'with' '(' ','.(expressions ['as' star_target])+ ','? ')' ':' NEWLINE !INDENT"));
_res = RAISE_INDENTATION_ERROR ( "expected an indented block after 'with' statement on line %d" , a -> lineno );
if (_res == NULL && PyErr_Occurred()) {
p->error_indicator = 1;
@@ -22957,7 +22959,7 @@ invalid_with_stmt_indent_rule(Parser *p)
}
p->mark = _mark;
D(fprintf(stderr, "%*c%s invalid_with_stmt_indent[%d-%d]: %s failed!\n", p->level, ' ',
- p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "ASYNC? 'with' '(' ','.(expressions ['as' star_target])+ ','? ')' ':' NEWLINE !INDENT"));
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'async'? 'with' '(' ','.(expressions ['as' star_target])+ ','? ')' ':' NEWLINE !INDENT"));
}
_res = NULL;
done:
@@ -22993,7 +22995,7 @@ invalid_try_stmt_rule(Parser *p)
Token * a;
Token * newline_var;
if (
- (a = _PyPegen_expect_token(p, 624)) // token='try'
+ (a = _PyPegen_expect_token(p, 638)) // token='try'
&&
(_literal = _PyPegen_expect_token(p, 11)) // token=':'
&&
@@ -23025,7 +23027,7 @@ invalid_try_stmt_rule(Parser *p)
Token * _literal;
asdl_stmt_seq* block_var;
if (
- (_keyword = _PyPegen_expect_token(p, 624)) // token='try'
+ (_keyword = _PyPegen_expect_token(p, 638)) // token='try'
&&
(_literal = _PyPegen_expect_token(p, 11)) // token=':'
&&
@@ -23064,7 +23066,7 @@ invalid_try_stmt_rule(Parser *p)
Token * b;
expr_ty expression_var;
if (
- (_keyword = _PyPegen_expect_token(p, 624)) // token='try'
+ (_keyword = _PyPegen_expect_token(p, 638)) // token='try'
&&
(_literal = _PyPegen_expect_token(p, 11)) // token=':'
&&
@@ -23072,7 +23074,7 @@ invalid_try_stmt_rule(Parser *p)
&&
(_loop1_215_var = _loop1_215_rule(p)) // except_block+
&&
- (a = _PyPegen_expect_token(p, 637)) // token='except'
+ (a = _PyPegen_expect_token(p, 651)) // token='except'
&&
(b = _PyPegen_expect_token(p, 16)) // token='*'
&&
@@ -23111,7 +23113,7 @@ invalid_try_stmt_rule(Parser *p)
UNUSED(_opt_var); // Silence compiler warnings
Token * a;
if (
- (_keyword = _PyPegen_expect_token(p, 624)) // token='try'
+ (_keyword = _PyPegen_expect_token(p, 638)) // token='try'
&&
(_literal = _PyPegen_expect_token(p, 11)) // token=':'
&&
@@ -23119,7 +23121,7 @@ invalid_try_stmt_rule(Parser *p)
&&
(_loop1_218_var = _loop1_218_rule(p)) // except_star_block+
&&
- (a = _PyPegen_expect_token(p, 637)) // token='except'
+ (a = _PyPegen_expect_token(p, 651)) // token='except'
&&
(_opt_var = _tmp_219_rule(p), !p->error_indicator) // [expression ['as' NAME]]
&&
@@ -23179,7 +23181,7 @@ invalid_except_stmt_rule(Parser *p)
expr_ty a;
expr_ty expressions_var;
if (
- (_keyword = _PyPegen_expect_token(p, 637)) // token='except'
+ (_keyword = _PyPegen_expect_token(p, 651)) // token='except'
&&
(_opt_var = _PyPegen_expect_token(p, 16), !p->error_indicator) // '*'?
&&
@@ -23221,7 +23223,7 @@ invalid_except_stmt_rule(Parser *p)
expr_ty expression_var;
Token * newline_var;
if (
- (a = _PyPegen_expect_token(p, 637)) // token='except'
+ (a = _PyPegen_expect_token(p, 651)) // token='except'
&&
(_opt_var = _PyPegen_expect_token(p, 16), !p->error_indicator) // '*'?
&&
@@ -23254,7 +23256,7 @@ invalid_except_stmt_rule(Parser *p)
Token * a;
Token * newline_var;
if (
- (a = _PyPegen_expect_token(p, 637)) // token='except'
+ (a = _PyPegen_expect_token(p, 651)) // token='except'
&&
(newline_var = _PyPegen_expect_token(p, NEWLINE)) // token='NEWLINE'
)
@@ -23282,7 +23284,7 @@ invalid_except_stmt_rule(Parser *p)
void *_tmp_222_var;
Token * a;
if (
- (a = _PyPegen_expect_token(p, 637)) // token='except'
+ (a = _PyPegen_expect_token(p, 651)) // token='except'
&&
(_literal = _PyPegen_expect_token(p, 16)) // token='*'
&&
@@ -23332,7 +23334,7 @@ invalid_finally_stmt_rule(Parser *p)
Token * a;
Token * newline_var;
if (
- (a = _PyPegen_expect_token(p, 633)) // token='finally'
+ (a = _PyPegen_expect_token(p, 647)) // token='finally'
&&
(_literal = _PyPegen_expect_token(p, 11)) // token=':'
&&
@@ -23389,7 +23391,7 @@ invalid_except_stmt_indent_rule(Parser *p)
expr_ty expression_var;
Token * newline_var;
if (
- (a = _PyPegen_expect_token(p, 637)) // token='except'
+ (a = _PyPegen_expect_token(p, 651)) // token='except'
&&
(expression_var = expression_rule(p)) // expression
&&
@@ -23425,7 +23427,7 @@ invalid_except_stmt_indent_rule(Parser *p)
Token * a;
Token * newline_var;
if (
- (a = _PyPegen_expect_token(p, 637)) // token='except'
+ (a = _PyPegen_expect_token(p, 651)) // token='except'
&&
(_literal = _PyPegen_expect_token(p, 11)) // token=':'
&&
@@ -23482,7 +23484,7 @@ invalid_except_star_stmt_indent_rule(Parser *p)
expr_ty expression_var;
Token * newline_var;
if (
- (a = _PyPegen_expect_token(p, 637)) // token='except'
+ (a = _PyPegen_expect_token(p, 651)) // token='except'
&&
(_literal = _PyPegen_expect_token(p, 16)) // token='*'
&&
@@ -23724,7 +23726,7 @@ invalid_as_pattern_rule(Parser *p)
if (
(or_pattern_var = or_pattern_rule(p)) // or_pattern
&&
- (_keyword = _PyPegen_expect_token(p, 640)) // token='as'
+ (_keyword = _PyPegen_expect_token(p, 654)) // token='as'
&&
(a = _PyPegen_expect_soft_keyword(p, "_")) // soft_keyword='"_"'
)
@@ -23754,7 +23756,7 @@ invalid_as_pattern_rule(Parser *p)
if (
(or_pattern_var = or_pattern_rule(p)) // or_pattern
&&
- (_keyword = _PyPegen_expect_token(p, 640)) // token='as'
+ (_keyword = _PyPegen_expect_token(p, 654)) // token='as'
&&
_PyPegen_lookahead_with_name(0, _PyPegen_name_token, p)
&&
@@ -23911,7 +23913,7 @@ invalid_if_stmt_rule(Parser *p)
expr_ty named_expression_var;
Token * newline_var;
if (
- (_keyword = _PyPegen_expect_token(p, 642)) // token='if'
+ (_keyword = _PyPegen_expect_token(p, 656)) // token='if'
&&
(named_expression_var = named_expression_rule(p)) // named_expression
&&
@@ -23942,7 +23944,7 @@ invalid_if_stmt_rule(Parser *p)
expr_ty a_1;
Token * newline_var;
if (
- (a = _PyPegen_expect_token(p, 642)) // token='if'
+ (a = _PyPegen_expect_token(p, 656)) // token='if'
&&
(a_1 = named_expression_rule(p)) // named_expression
&&
@@ -23998,7 +24000,7 @@ invalid_elif_stmt_rule(Parser *p)
expr_ty named_expression_var;
Token * newline_var;
if (
- (_keyword = _PyPegen_expect_token(p, 644)) // token='elif'
+ (_keyword = _PyPegen_expect_token(p, 658)) // token='elif'
&&
(named_expression_var = named_expression_rule(p)) // named_expression
&&
@@ -24029,7 +24031,7 @@ invalid_elif_stmt_rule(Parser *p)
expr_ty named_expression_var;
Token * newline_var;
if (
- (a = _PyPegen_expect_token(p, 644)) // token='elif'
+ (a = _PyPegen_expect_token(p, 658)) // token='elif'
&&
(named_expression_var = named_expression_rule(p)) // named_expression
&&
@@ -24083,7 +24085,7 @@ invalid_else_stmt_rule(Parser *p)
Token * a;
Token * newline_var;
if (
- (a = _PyPegen_expect_token(p, 645)) // token='else'
+ (a = _PyPegen_expect_token(p, 659)) // token='else'
&&
(_literal = _PyPegen_expect_token(p, 11)) // token=':'
&&
@@ -24137,7 +24139,7 @@ invalid_while_stmt_rule(Parser *p)
expr_ty named_expression_var;
Token * newline_var;
if (
- (_keyword = _PyPegen_expect_token(p, 647)) // token='while'
+ (_keyword = _PyPegen_expect_token(p, 661)) // token='while'
&&
(named_expression_var = named_expression_rule(p)) // named_expression
&&
@@ -24168,7 +24170,7 @@ invalid_while_stmt_rule(Parser *p)
expr_ty named_expression_var;
Token * newline_var;
if (
- (a = _PyPegen_expect_token(p, 647)) // token='while'
+ (a = _PyPegen_expect_token(p, 661)) // token='while'
&&
(named_expression_var = named_expression_rule(p)) // named_expression
&&
@@ -24199,8 +24201,8 @@ invalid_while_stmt_rule(Parser *p)
}
// invalid_for_stmt:
-// | ASYNC? 'for' star_targets 'in' star_expressions NEWLINE
-// | ASYNC? 'for' star_targets 'in' star_expressions ':' NEWLINE !INDENT
+// | 'async'? 'for' star_targets 'in' star_expressions NEWLINE
+// | 'async'? 'for' star_targets 'in' star_expressions ':' NEWLINE !INDENT
static void *
invalid_for_stmt_rule(Parser *p)
{
@@ -24214,12 +24216,12 @@ invalid_for_stmt_rule(Parser *p)
}
void * _res = NULL;
int _mark = p->mark;
- { // ASYNC? 'for' star_targets 'in' star_expressions NEWLINE
+ { // 'async'? 'for' star_targets 'in' star_expressions NEWLINE
if (p->error_indicator) {
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> invalid_for_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "ASYNC? 'for' star_targets 'in' star_expressions NEWLINE"));
+ D(fprintf(stderr, "%*c> invalid_for_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'async'? 'for' star_targets 'in' star_expressions NEWLINE"));
Token * _keyword;
Token * _keyword_1;
void *_opt_var;
@@ -24228,20 +24230,20 @@ invalid_for_stmt_rule(Parser *p)
expr_ty star_expressions_var;
expr_ty star_targets_var;
if (
- (_opt_var = _PyPegen_expect_token(p, ASYNC), !p->error_indicator) // ASYNC?
+ (_opt_var = _PyPegen_expect_token(p, 668), !p->error_indicator) // 'async'?
&&
- (_keyword = _PyPegen_expect_token(p, 650)) // token='for'
+ (_keyword = _PyPegen_expect_token(p, 666)) // token='for'
&&
(star_targets_var = star_targets_rule(p)) // star_targets
&&
- (_keyword_1 = _PyPegen_expect_token(p, 651)) // token='in'
+ (_keyword_1 = _PyPegen_expect_token(p, 667)) // token='in'
&&
(star_expressions_var = star_expressions_rule(p)) // star_expressions
&&
(newline_var = _PyPegen_expect_token(p, NEWLINE)) // token='NEWLINE'
)
{
- D(fprintf(stderr, "%*c+ invalid_for_stmt[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "ASYNC? 'for' star_targets 'in' star_expressions NEWLINE"));
+ D(fprintf(stderr, "%*c+ invalid_for_stmt[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'async'? 'for' star_targets 'in' star_expressions NEWLINE"));
_res = RAISE_SYNTAX_ERROR ( "expected ':'" );
if (_res == NULL && PyErr_Occurred()) {
p->error_indicator = 1;
@@ -24252,14 +24254,14 @@ invalid_for_stmt_rule(Parser *p)
}
p->mark = _mark;
D(fprintf(stderr, "%*c%s invalid_for_stmt[%d-%d]: %s failed!\n", p->level, ' ',
- p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "ASYNC? 'for' star_targets 'in' star_expressions NEWLINE"));
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'async'? 'for' star_targets 'in' star_expressions NEWLINE"));
}
- { // ASYNC? 'for' star_targets 'in' star_expressions ':' NEWLINE !INDENT
+ { // 'async'? 'for' star_targets 'in' star_expressions ':' NEWLINE !INDENT
if (p->error_indicator) {
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> invalid_for_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "ASYNC? 'for' star_targets 'in' star_expressions ':' NEWLINE !INDENT"));
+ D(fprintf(stderr, "%*c> invalid_for_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'async'? 'for' star_targets 'in' star_expressions ':' NEWLINE !INDENT"));
Token * _keyword;
Token * _literal;
void *_opt_var;
@@ -24269,13 +24271,13 @@ invalid_for_stmt_rule(Parser *p)
expr_ty star_expressions_var;
expr_ty star_targets_var;
if (
- (_opt_var = _PyPegen_expect_token(p, ASYNC), !p->error_indicator) // ASYNC?
+ (_opt_var = _PyPegen_expect_token(p, 668), !p->error_indicator) // 'async'?
&&
- (a = _PyPegen_expect_token(p, 650)) // token='for'
+ (a = _PyPegen_expect_token(p, 666)) // token='for'
&&
(star_targets_var = star_targets_rule(p)) // star_targets
&&
- (_keyword = _PyPegen_expect_token(p, 651)) // token='in'
+ (_keyword = _PyPegen_expect_token(p, 667)) // token='in'
&&
(star_expressions_var = star_expressions_rule(p)) // star_expressions
&&
@@ -24286,7 +24288,7 @@ invalid_for_stmt_rule(Parser *p)
_PyPegen_lookahead_with_int(0, _PyPegen_expect_token, p, INDENT) // token=INDENT
)
{
- D(fprintf(stderr, "%*c+ invalid_for_stmt[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "ASYNC? 'for' star_targets 'in' star_expressions ':' NEWLINE !INDENT"));
+ D(fprintf(stderr, "%*c+ invalid_for_stmt[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'async'? 'for' star_targets 'in' star_expressions ':' NEWLINE !INDENT"));
_res = RAISE_INDENTATION_ERROR ( "expected an indented block after 'for' statement on line %d" , a -> lineno );
if (_res == NULL && PyErr_Occurred()) {
p->error_indicator = 1;
@@ -24297,7 +24299,7 @@ invalid_for_stmt_rule(Parser *p)
}
p->mark = _mark;
D(fprintf(stderr, "%*c%s invalid_for_stmt[%d-%d]: %s failed!\n", p->level, ' ',
- p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "ASYNC? 'for' star_targets 'in' star_expressions ':' NEWLINE !INDENT"));
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'async'? 'for' star_targets 'in' star_expressions ':' NEWLINE !INDENT"));
}
_res = NULL;
done:
@@ -24306,7 +24308,7 @@ invalid_for_stmt_rule(Parser *p)
}
// invalid_def_raw:
-// | ASYNC? 'def' NAME '(' params? ')' ['->' expression] ':' NEWLINE !INDENT
+// | 'async'? 'def' NAME '(' params? ')' ['->' expression] ':' NEWLINE !INDENT
static void *
invalid_def_raw_rule(Parser *p)
{
@@ -24320,12 +24322,12 @@ invalid_def_raw_rule(Parser *p)
}
void * _res = NULL;
int _mark = p->mark;
- { // ASYNC? 'def' NAME '(' params? ')' ['->' expression] ':' NEWLINE !INDENT
+ { // 'async'? 'def' NAME '(' params? ')' ['->' expression] ':' NEWLINE !INDENT
if (p->error_indicator) {
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> invalid_def_raw[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "ASYNC? 'def' NAME '(' params? ')' ['->' expression] ':' NEWLINE !INDENT"));
+ D(fprintf(stderr, "%*c> invalid_def_raw[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'async'? 'def' NAME '(' params? ')' ['->' expression] ':' NEWLINE !INDENT"));
Token * _literal;
Token * _literal_1;
Token * _literal_2;
@@ -24339,9 +24341,9 @@ invalid_def_raw_rule(Parser *p)
expr_ty name_var;
Token * newline_var;
if (
- (_opt_var = _PyPegen_expect_token(p, ASYNC), !p->error_indicator) // ASYNC?
+ (_opt_var = _PyPegen_expect_token(p, 668), !p->error_indicator) // 'async'?
&&
- (a = _PyPegen_expect_token(p, 652)) // token='def'
+ (a = _PyPegen_expect_token(p, 669)) // token='def'
&&
(name_var = _PyPegen_name_token(p)) // NAME
&&
@@ -24360,7 +24362,7 @@ invalid_def_raw_rule(Parser *p)
_PyPegen_lookahead_with_int(0, _PyPegen_expect_token, p, INDENT) // token=INDENT
)
{
- D(fprintf(stderr, "%*c+ invalid_def_raw[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "ASYNC? 'def' NAME '(' params? ')' ['->' expression] ':' NEWLINE !INDENT"));
+ D(fprintf(stderr, "%*c+ invalid_def_raw[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'async'? 'def' NAME '(' params? ')' ['->' expression] ':' NEWLINE !INDENT"));
_res = RAISE_INDENTATION_ERROR ( "expected an indented block after function definition on line %d" , a -> lineno );
if (_res == NULL && PyErr_Occurred()) {
p->error_indicator = 1;
@@ -24371,7 +24373,7 @@ invalid_def_raw_rule(Parser *p)
}
p->mark = _mark;
D(fprintf(stderr, "%*c%s invalid_def_raw[%d-%d]: %s failed!\n", p->level, ' ',
- p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "ASYNC? 'def' NAME '(' params? ')' ['->' expression] ':' NEWLINE !INDENT"));
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'async'? 'def' NAME '(' params? ')' ['->' expression] ':' NEWLINE !INDENT"));
}
_res = NULL;
done:
@@ -24407,7 +24409,7 @@ invalid_class_def_raw_rule(Parser *p)
expr_ty name_var;
Token * newline_var;
if (
- (_keyword = _PyPegen_expect_token(p, 654)) // token='class'
+ (_keyword = _PyPegen_expect_token(p, 671)) // token='class'
&&
(name_var = _PyPegen_name_token(p)) // NAME
&&
@@ -24442,7 +24444,7 @@ invalid_class_def_raw_rule(Parser *p)
expr_ty name_var;
Token * newline_var;
if (
- (a = _PyPegen_expect_token(p, 654)) // token='class'
+ (a = _PyPegen_expect_token(p, 671)) // token='class'
&&
(name_var = _PyPegen_name_token(p)) // NAME
&&
@@ -25610,7 +25612,7 @@ _tmp_7_rule(Parser *p)
D(fprintf(stderr, "%*c> _tmp_7[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'import'"));
Token * _keyword;
if (
- (_keyword = _PyPegen_expect_token(p, 607)) // token='import'
+ (_keyword = _PyPegen_expect_token(p, 617)) // token='import'
)
{
D(fprintf(stderr, "%*c+ _tmp_7[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'import'"));
@@ -25629,7 +25631,7 @@ _tmp_7_rule(Parser *p)
D(fprintf(stderr, "%*c> _tmp_7[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'from'"));
Token * _keyword;
if (
- (_keyword = _PyPegen_expect_token(p, 608)) // token='from'
+ (_keyword = _PyPegen_expect_token(p, 618)) // token='from'
)
{
D(fprintf(stderr, "%*c+ _tmp_7[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'from'"));
@@ -25646,7 +25648,7 @@ _tmp_7_rule(Parser *p)
return _res;
}
-// _tmp_8: 'def' | '@' | ASYNC
+// _tmp_8: 'def' | '@' | 'async'
static void *
_tmp_8_rule(Parser *p)
{
@@ -25668,7 +25670,7 @@ _tmp_8_rule(Parser *p)
D(fprintf(stderr, "%*c> _tmp_8[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'def'"));
Token * _keyword;
if (
- (_keyword = _PyPegen_expect_token(p, 652)) // token='def'
+ (_keyword = _PyPegen_expect_token(p, 669)) // token='def'
)
{
D(fprintf(stderr, "%*c+ _tmp_8[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'def'"));
@@ -25698,24 +25700,24 @@ _tmp_8_rule(Parser *p)
D(fprintf(stderr, "%*c%s _tmp_8[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'@'"));
}
- { // ASYNC
+ { // 'async'
if (p->error_indicator) {
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_8[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "ASYNC"));
- Token * async_var;
+ D(fprintf(stderr, "%*c> _tmp_8[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'async'"));
+ Token * _keyword;
if (
- (async_var = _PyPegen_expect_token(p, ASYNC)) // token='ASYNC'
+ (_keyword = _PyPegen_expect_token(p, 668)) // token='async'
)
{
- D(fprintf(stderr, "%*c+ _tmp_8[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "ASYNC"));
- _res = async_var;
+ D(fprintf(stderr, "%*c+ _tmp_8[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'async'"));
+ _res = _keyword;
goto done;
}
p->mark = _mark;
D(fprintf(stderr, "%*c%s _tmp_8[%d-%d]: %s failed!\n", p->level, ' ',
- p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "ASYNC"));
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'async'"));
}
_res = NULL;
done:
@@ -25745,7 +25747,7 @@ _tmp_9_rule(Parser *p)
D(fprintf(stderr, "%*c> _tmp_9[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'class'"));
Token * _keyword;
if (
- (_keyword = _PyPegen_expect_token(p, 654)) // token='class'
+ (_keyword = _PyPegen_expect_token(p, 671)) // token='class'
)
{
D(fprintf(stderr, "%*c+ _tmp_9[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'class'"));
@@ -25781,7 +25783,7 @@ _tmp_9_rule(Parser *p)
return _res;
}
-// _tmp_10: 'with' | ASYNC
+// _tmp_10: 'with' | 'async'
static void *
_tmp_10_rule(Parser *p)
{
@@ -25803,7 +25805,7 @@ _tmp_10_rule(Parser *p)
D(fprintf(stderr, "%*c> _tmp_10[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'with'"));
Token * _keyword;
if (
- (_keyword = _PyPegen_expect_token(p, 615)) // token='with'
+ (_keyword = _PyPegen_expect_token(p, 629)) // token='with'
)
{
D(fprintf(stderr, "%*c+ _tmp_10[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'with'"));
@@ -25814,24 +25816,24 @@ _tmp_10_rule(Parser *p)
D(fprintf(stderr, "%*c%s _tmp_10[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'with'"));
}
- { // ASYNC
+ { // 'async'
if (p->error_indicator) {
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_10[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "ASYNC"));
- Token * async_var;
+ D(fprintf(stderr, "%*c> _tmp_10[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'async'"));
+ Token * _keyword;
if (
- (async_var = _PyPegen_expect_token(p, ASYNC)) // token='ASYNC'
+ (_keyword = _PyPegen_expect_token(p, 668)) // token='async'
)
{
- D(fprintf(stderr, "%*c+ _tmp_10[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "ASYNC"));
- _res = async_var;
+ D(fprintf(stderr, "%*c+ _tmp_10[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'async'"));
+ _res = _keyword;
goto done;
}
p->mark = _mark;
D(fprintf(stderr, "%*c%s _tmp_10[%d-%d]: %s failed!\n", p->level, ' ',
- p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "ASYNC"));
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'async'"));
}
_res = NULL;
done:
@@ -25839,7 +25841,7 @@ _tmp_10_rule(Parser *p)
return _res;
}
-// _tmp_11: 'for' | ASYNC
+// _tmp_11: 'for' | 'async'
static void *
_tmp_11_rule(Parser *p)
{
@@ -25861,7 +25863,7 @@ _tmp_11_rule(Parser *p)
D(fprintf(stderr, "%*c> _tmp_11[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'for'"));
Token * _keyword;
if (
- (_keyword = _PyPegen_expect_token(p, 650)) // token='for'
+ (_keyword = _PyPegen_expect_token(p, 666)) // token='for'
)
{
D(fprintf(stderr, "%*c+ _tmp_11[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'for'"));
@@ -25872,24 +25874,24 @@ _tmp_11_rule(Parser *p)
D(fprintf(stderr, "%*c%s _tmp_11[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'for'"));
}
- { // ASYNC
+ { // 'async'
if (p->error_indicator) {
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_11[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "ASYNC"));
- Token * async_var;
+ D(fprintf(stderr, "%*c> _tmp_11[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'async'"));
+ Token * _keyword;
if (
- (async_var = _PyPegen_expect_token(p, ASYNC)) // token='ASYNC'
+ (_keyword = _PyPegen_expect_token(p, 668)) // token='async'
)
{
- D(fprintf(stderr, "%*c+ _tmp_11[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "ASYNC"));
- _res = async_var;
+ D(fprintf(stderr, "%*c+ _tmp_11[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'async'"));
+ _res = _keyword;
goto done;
}
p->mark = _mark;
D(fprintf(stderr, "%*c%s _tmp_11[%d-%d]: %s failed!\n", p->level, ' ',
- p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "ASYNC"));
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'async'"));
}
_res = NULL;
done:
@@ -26272,7 +26274,7 @@ _tmp_18_rule(Parser *p)
Token * _keyword;
expr_ty z;
if (
- (_keyword = _PyPegen_expect_token(p, 608)) // token='from'
+ (_keyword = _PyPegen_expect_token(p, 618)) // token='from'
&&
(z = expression_rule(p)) // expression
)
@@ -26922,7 +26924,7 @@ _tmp_29_rule(Parser *p)
Token * _keyword;
expr_ty z;
if (
- (_keyword = _PyPegen_expect_token(p, 640)) // token='as'
+ (_keyword = _PyPegen_expect_token(p, 654)) // token='as'
&&
(z = _PyPegen_name_token(p)) // NAME
)
@@ -27088,7 +27090,7 @@ _tmp_32_rule(Parser *p)
Token * _keyword;
expr_ty z;
if (
- (_keyword = _PyPegen_expect_token(p, 640)) // token='as'
+ (_keyword = _PyPegen_expect_token(p, 654)) // token='as'
&&
(z = _PyPegen_name_token(p)) // NAME
)
@@ -29106,7 +29108,7 @@ _tmp_63_rule(Parser *p)
Token * _keyword;
expr_ty z;
if (
- (_keyword = _PyPegen_expect_token(p, 640)) // token='as'
+ (_keyword = _PyPegen_expect_token(p, 654)) // token='as'
&&
(z = _PyPegen_name_token(p)) // NAME
)
@@ -29153,7 +29155,7 @@ _tmp_64_rule(Parser *p)
Token * _keyword;
expr_ty z;
if (
- (_keyword = _PyPegen_expect_token(p, 640)) // token='as'
+ (_keyword = _PyPegen_expect_token(p, 654)) // token='as'
&&
(z = _PyPegen_name_token(p)) // NAME
)
@@ -34721,7 +34723,7 @@ _tmp_153_rule(Parser *p)
D(fprintf(stderr, "%*c> _tmp_153[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'True'"));
Token * _keyword;
if (
- (_keyword = _PyPegen_expect_token(p, 601)) // token='True'
+ (_keyword = _PyPegen_expect_token(p, 610)) // token='True'
)
{
D(fprintf(stderr, "%*c+ _tmp_153[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'True'"));
@@ -34740,7 +34742,7 @@ _tmp_153_rule(Parser *p)
D(fprintf(stderr, "%*c> _tmp_153[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'False'"));
Token * _keyword;
if (
- (_keyword = _PyPegen_expect_token(p, 603)) // token='False'
+ (_keyword = _PyPegen_expect_token(p, 612)) // token='False'
)
{
D(fprintf(stderr, "%*c+ _tmp_153[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'False'"));
@@ -34759,7 +34761,7 @@ _tmp_153_rule(Parser *p)
D(fprintf(stderr, "%*c> _tmp_153[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'None'"));
Token * _keyword;
if (
- (_keyword = _PyPegen_expect_token(p, 602)) // token='None'
+ (_keyword = _PyPegen_expect_token(p, 611)) // token='None'
)
{
D(fprintf(stderr, "%*c+ _tmp_153[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'None'"));
@@ -34901,7 +34903,7 @@ _tmp_156_rule(Parser *p)
D(fprintf(stderr, "%*c> _tmp_156[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'else'"));
Token * _keyword;
if (
- (_keyword = _PyPegen_expect_token(p, 645)) // token='else'
+ (_keyword = _PyPegen_expect_token(p, 659)) // token='else'
)
{
D(fprintf(stderr, "%*c+ _tmp_156[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'else'"));
@@ -35132,7 +35134,7 @@ _tmp_159_rule(Parser *p)
D(fprintf(stderr, "%*c> _tmp_159[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'True'"));
Token * _keyword;
if (
- (_keyword = _PyPegen_expect_token(p, 601)) // token='True'
+ (_keyword = _PyPegen_expect_token(p, 610)) // token='True'
)
{
D(fprintf(stderr, "%*c+ _tmp_159[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'True'"));
@@ -35151,7 +35153,7 @@ _tmp_159_rule(Parser *p)
D(fprintf(stderr, "%*c> _tmp_159[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'None'"));
Token * _keyword;
if (
- (_keyword = _PyPegen_expect_token(p, 602)) // token='None'
+ (_keyword = _PyPegen_expect_token(p, 611)) // token='None'
)
{
D(fprintf(stderr, "%*c+ _tmp_159[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'None'"));
@@ -35170,7 +35172,7 @@ _tmp_159_rule(Parser *p)
D(fprintf(stderr, "%*c> _tmp_159[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'False'"));
Token * _keyword;
if (
- (_keyword = _PyPegen_expect_token(p, 603)) // token='False'
+ (_keyword = _PyPegen_expect_token(p, 612)) // token='False'
)
{
D(fprintf(stderr, "%*c+ _tmp_159[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'False'"));
@@ -38608,7 +38610,7 @@ _tmp_213_rule(Parser *p)
D(fprintf(stderr, "%*c> _tmp_213[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'except'"));
Token * _keyword;
if (
- (_keyword = _PyPegen_expect_token(p, 637)) // token='except'
+ (_keyword = _PyPegen_expect_token(p, 651)) // token='except'
)
{
D(fprintf(stderr, "%*c+ _tmp_213[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'except'"));
@@ -38627,7 +38629,7 @@ _tmp_213_rule(Parser *p)
D(fprintf(stderr, "%*c> _tmp_213[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'finally'"));
Token * _keyword;
if (
- (_keyword = _PyPegen_expect_token(p, 633)) // token='finally'
+ (_keyword = _PyPegen_expect_token(p, 647)) // token='finally'
)
{
D(fprintf(stderr, "%*c+ _tmp_213[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'finally'"));
@@ -38808,7 +38810,7 @@ _tmp_216_rule(Parser *p)
Token * _keyword;
expr_ty name_var;
if (
- (_keyword = _PyPegen_expect_token(p, 640)) // token='as'
+ (_keyword = _PyPegen_expect_token(p, 654)) // token='as'
&&
(name_var = _PyPegen_name_token(p)) // NAME
)
@@ -39034,7 +39036,7 @@ _tmp_220_rule(Parser *p)
Token * _keyword;
expr_ty name_var;
if (
- (_keyword = _PyPegen_expect_token(p, 640)) // token='as'
+ (_keyword = _PyPegen_expect_token(p, 654)) // token='as'
&&
(name_var = _PyPegen_name_token(p)) // NAME
)
@@ -39076,7 +39078,7 @@ _tmp_221_rule(Parser *p)
Token * _keyword;
expr_ty name_var;
if (
- (_keyword = _PyPegen_expect_token(p, 640)) // token='as'
+ (_keyword = _PyPegen_expect_token(p, 654)) // token='as'
&&
(name_var = _PyPegen_name_token(p)) // NAME
)
@@ -39176,7 +39178,7 @@ _tmp_223_rule(Parser *p)
Token * _keyword;
expr_ty name_var;
if (
- (_keyword = _PyPegen_expect_token(p, 640)) // token='as'
+ (_keyword = _PyPegen_expect_token(p, 654)) // token='as'
&&
(name_var = _PyPegen_name_token(p)) // NAME
)
@@ -39218,7 +39220,7 @@ _tmp_224_rule(Parser *p)
Token * _keyword;
expr_ty name_var;
if (
- (_keyword = _PyPegen_expect_token(p, 640)) // token='as'
+ (_keyword = _PyPegen_expect_token(p, 654)) // token='as'
&&
(name_var = _PyPegen_name_token(p)) // NAME
)
@@ -40867,7 +40869,7 @@ _tmp_254_rule(Parser *p)
Token * _keyword;
expr_ty c;
if (
- (_keyword = _PyPegen_expect_token(p, 574)) // token='or'
+ (_keyword = _PyPegen_expect_token(p, 581)) // token='or'
&&
(c = conjunction_rule(p)) // conjunction
)
@@ -40914,7 +40916,7 @@ _tmp_255_rule(Parser *p)
Token * _keyword;
expr_ty c;
if (
- (_keyword = _PyPegen_expect_token(p, 575)) // token='and'
+ (_keyword = _PyPegen_expect_token(p, 582)) // token='and'
&&
(c = inversion_rule(p)) // inversion
)
@@ -41077,7 +41079,7 @@ _tmp_258_rule(Parser *p)
Token * _keyword;
expr_ty z;
if (
- (_keyword = _PyPegen_expect_token(p, 642)) // token='if'
+ (_keyword = _PyPegen_expect_token(p, 656)) // token='if'
&&
(z = disjunction_rule(p)) // disjunction
)
@@ -41124,7 +41126,7 @@ _tmp_259_rule(Parser *p)
Token * _keyword;
expr_ty z;
if (
- (_keyword = _PyPegen_expect_token(p, 642)) // token='if'
+ (_keyword = _PyPegen_expect_token(p, 656)) // token='if'
&&
(z = disjunction_rule(p)) // disjunction
)
@@ -41697,7 +41699,7 @@ _tmp_271_rule(Parser *p)
Token * _keyword;
expr_ty name_var;
if (
- (_keyword = _PyPegen_expect_token(p, 640)) // token='as'
+ (_keyword = _PyPegen_expect_token(p, 654)) // token='as'
&&
(name_var = _PyPegen_name_token(p)) // NAME
)
@@ -41799,7 +41801,7 @@ _tmp_273_rule(Parser *p)
Token * _keyword;
expr_ty star_target_var;
if (
- (_keyword = _PyPegen_expect_token(p, 640)) // token='as'
+ (_keyword = _PyPegen_expect_token(p, 654)) // token='as'
&&
(star_target_var = star_target_rule(p)) // star_target
)
@@ -41841,7 +41843,7 @@ _tmp_274_rule(Parser *p)
Token * _keyword;
expr_ty star_target_var;
if (
- (_keyword = _PyPegen_expect_token(p, 640)) // token='as'
+ (_keyword = _PyPegen_expect_token(p, 654)) // token='as'
&&
(star_target_var = star_target_rule(p)) // star_target
)
@@ -41883,7 +41885,7 @@ _tmp_275_rule(Parser *p)
Token * _keyword;
expr_ty star_target_var;
if (
- (_keyword = _PyPegen_expect_token(p, 640)) // token='as'
+ (_keyword = _PyPegen_expect_token(p, 654)) // token='as'
&&
(star_target_var = star_target_rule(p)) // star_target
)
@@ -41925,7 +41927,7 @@ _tmp_276_rule(Parser *p)
Token * _keyword;
expr_ty star_target_var;
if (
- (_keyword = _PyPegen_expect_token(p, 640)) // token='as'
+ (_keyword = _PyPegen_expect_token(p, 654)) // token='as'
&&
(star_target_var = star_target_rule(p)) // star_target
)
diff --git a/Parser/pegen.c b/Parser/pegen.c
index 885d423..bfade34 100644
--- a/Parser/pegen.c
+++ b/Parser/pegen.c
@@ -734,9 +734,6 @@ compute_parser_flags(PyCompilerFlags *flags)
if (flags->cf_flags & PyCF_TYPE_COMMENTS) {
parser_flags |= PyPARSE_TYPE_COMMENTS;
}
- if ((flags->cf_flags & PyCF_ONLY_AST) && flags->cf_feature_version < 7) {
- parser_flags |= PyPARSE_ASYNC_HACKS;
- }
if (flags->cf_flags & PyCF_ALLOW_INCOMPLETE_INPUT) {
parser_flags |= PyPARSE_ALLOW_INCOMPLETE_INPUT;
}
@@ -755,7 +752,6 @@ _PyPegen_Parser_New(struct tok_state *tok, int start_rule, int flags,
}
assert(tok != NULL);
tok->type_comments = (flags & PyPARSE_TYPE_COMMENTS) > 0;
- tok->async_hacks = (flags & PyPARSE_ASYNC_HACKS) > 0;
p->tok = tok;
p->keywords = NULL;
p->n_keyword_lists = -1;
diff --git a/Parser/pegen.h b/Parser/pegen.h
index 5f29285..0852bb5 100644
--- a/Parser/pegen.h
+++ b/Parser/pegen.h
@@ -20,7 +20,6 @@
#define PyPARSE_IGNORE_COOKIE 0x0010
#define PyPARSE_BARRY_AS_BDFL 0x0020
#define PyPARSE_TYPE_COMMENTS 0x0040
-#define PyPARSE_ASYNC_HACKS 0x0080
#define PyPARSE_ALLOW_INCOMPLETE_INPUT 0x0100
#define CURRENT_POS (-5)
diff --git a/Parser/token.c b/Parser/token.c
index 2bc963a..4f163f2 100644
--- a/Parser/token.c
+++ b/Parser/token.c
@@ -62,8 +62,6 @@ const char * const _PyParser_TokenNames[] = {
"COLONEQUAL",
"EXCLAMATION",
"OP",
- "AWAIT",
- "ASYNC",
"TYPE_IGNORE",
"TYPE_COMMENT",
"SOFT_KEYWORD",
diff --git a/Parser/tokenizer.c b/Parser/tokenizer.c
index ccff160..5a42f6f 100644
--- a/Parser/tokenizer.c
+++ b/Parser/tokenizer.c
@@ -104,10 +104,6 @@ tok_new(void)
tok->decoding_buffer = NULL;
tok->readline = NULL;
tok->type_comments = 0;
- tok->async_hacks = 0;
- tok->async_def = 0;
- tok->async_def_indent = 0;
- tok->async_def_nl = 0;
tok->interactive_underflow = IUNDERFLOW_NORMAL;
tok->str = NULL;
tok->report_warnings = 1;
@@ -1925,27 +1921,6 @@ tok_get_normal_mode(struct tok_state *tok, tokenizer_mode* current_tok, struct t
/* Peek ahead at the next character */
c = tok_nextc(tok);
tok_backup(tok, c);
- /* Check if we are closing an async function */
- if (tok->async_def
- && !blankline
- /* Due to some implementation artifacts of type comments,
- * a TYPE_COMMENT at the start of a function won't set an
- * indentation level and it will produce a NEWLINE after it.
- * To avoid spuriously ending an async function due to this,
- * wait until we have some non-newline char in front of us. */
- && c != '\n'
- && tok->level == 0
- /* There was a NEWLINE after ASYNC DEF,
- so we're past the signature. */
- && tok->async_def_nl
- /* Current indentation level is less than where
- the async function was defined */
- && tok->async_def_indent >= tok->indent)
- {
- tok->async_def = 0;
- tok->async_def_indent = 0;
- tok->async_def_nl = 0;
- }
again:
tok->start = NULL;
@@ -2094,54 +2069,6 @@ tok_get_normal_mode(struct tok_state *tok, tokenizer_mode* current_tok, struct t
p_start = tok->start;
p_end = tok->cur;
- /* async/await parsing block. */
- if (tok->cur - tok->start == 5 && tok->start[0] == 'a') {
- /* May be an 'async' or 'await' token. For Python 3.7 or
- later we recognize them unconditionally. For Python
- 3.5 or 3.6 we recognize 'async' in front of 'def', and
- either one inside of 'async def'. (Technically we
- shouldn't recognize these at all for 3.4 or earlier,
- but there's no *valid* Python 3.4 code that would be
- rejected, and async functions will be rejected in a
- later phase.) */
- if (!tok->async_hacks || tok->async_def) {
- /* Always recognize the keywords. */
- if (memcmp(tok->start, "async", 5) == 0) {
- return MAKE_TOKEN(ASYNC);
- }
- if (memcmp(tok->start, "await", 5) == 0) {
- return MAKE_TOKEN(AWAIT);
- }
- }
- else if (memcmp(tok->start, "async", 5) == 0) {
- /* The current token is 'async'.
- Look ahead one token to see if that is 'def'. */
-
- struct tok_state ahead_tok;
- struct token ahead_token;
- _PyToken_Init(&ahead_token);
- int ahead_tok_kind;
-
- memcpy(&ahead_tok, tok, sizeof(ahead_tok));
- ahead_tok_kind = tok_get_normal_mode(&ahead_tok,
- current_tok,
- &ahead_token);
-
- if (ahead_tok_kind == NAME
- && ahead_tok.cur - ahead_tok.start == 3
- && memcmp(ahead_tok.start, "def", 3) == 0)
- {
- /* The next token is going to be 'def', so instead of
- returning a plain NAME token, return ASYNC. */
- tok->async_def_indent = tok->indent;
- tok->async_def = 1;
- _PyToken_Free(&ahead_token);
- return MAKE_TOKEN(ASYNC);
- }
- _PyToken_Free(&ahead_token);
- }
- }
-
return MAKE_TOKEN(NAME);
}
@@ -2172,11 +2099,6 @@ tok_get_normal_mode(struct tok_state *tok, tokenizer_mode* current_tok, struct t
p_start = tok->start;
p_end = tok->cur - 1; /* Leave '\n' out of the string */
tok->cont_line = 0;
- if (tok->async_def) {
- /* We're somewhere inside an 'async def' function, and
- we've encountered a NEWLINE after its signature. */
- tok->async_def_nl = 1;
- }
return MAKE_TOKEN(NEWLINE);
}
diff --git a/Parser/tokenizer.h b/Parser/tokenizer.h
index 1e1daa3..11d69fc 100644
--- a/Parser/tokenizer.h
+++ b/Parser/tokenizer.h
@@ -116,12 +116,6 @@ struct tok_state {
int type_comments; /* Whether to look for type comments */
- /* async/await related fields (still needed depending on feature_version) */
- int async_hacks; /* =1 if async/await aren't always keywords */
- int async_def; /* =1 if tokens are inside an 'async def' body. */
- int async_def_indent; /* Indentation level of the outermost 'async def'. */
- int async_def_nl; /* =1 if the outermost 'async def' had at least one
- NEWLINE token after it. */
/* How to proceed when asked for a new token in interactive mode */
enum interactive_underflow_t interactive_underflow;
int report_warnings;
diff --git a/Python/Python-tokenize.c b/Python/Python-tokenize.c
index 1938562..1b02106 100644
--- a/Python/Python-tokenize.c
+++ b/Python/Python-tokenize.c
@@ -237,9 +237,6 @@ tokenizeriter_next(tokenizeriterobject *it)
if (type > DEDENT && type < OP) {
type = OP;
}
- else if (type == ASYNC || type == AWAIT) {
- type = NAME;
- }
else if (type == NEWLINE) {
Py_DECREF(str);
if (!it->tok->implicit_newline) {
diff --git a/Tools/peg_generator/pegen/keywordgen.py b/Tools/peg_generator/pegen/keywordgen.py
index 35a5e1a..bbf1326 100644
--- a/Tools/peg_generator/pegen/keywordgen.py
+++ b/Tools/peg_generator/pegen/keywordgen.py
@@ -35,9 +35,6 @@ iskeyword = frozenset(kwlist).__contains__
issoftkeyword = frozenset(softkwlist).__contains__
'''.lstrip()
-EXTRA_KEYWORDS = ["async", "await"]
-
-
def main() -> None:
parser = argparse.ArgumentParser(
description="Generate the Lib/keywords.py file from the grammar."
@@ -62,7 +59,7 @@ def main() -> None:
gen.collect_rules()
with args.keyword_file as thefile:
- all_keywords = sorted(list(gen.keywords.keys()) + EXTRA_KEYWORDS)
+ all_keywords = sorted(list(gen.keywords.keys()))
all_soft_keywords = sorted(gen.soft_keywords)
keywords = "" if not all_keywords else " " + ",\n ".join(map(repr, all_keywords))
diff --git a/Tools/peg_generator/pegen/python_generator.py b/Tools/peg_generator/pegen/python_generator.py
index 5329d0e..4a2883e 100644
--- a/Tools/peg_generator/pegen/python_generator.py
+++ b/Tools/peg_generator/pegen/python_generator.py
@@ -102,7 +102,7 @@ class PythonCallMakerVisitor(GrammarVisitor):
if name in ("NAME", "NUMBER", "STRING", "OP", "TYPE_COMMENT"):
name = name.lower()
return name, f"self.{name}()"
- if name in ("NEWLINE", "DEDENT", "INDENT", "ENDMARKER", "ASYNC", "AWAIT"):
+ if name in ("NEWLINE", "DEDENT", "INDENT", "ENDMARKER"):
# Avoid using names that can be Python keywords
return "_" + name.lower(), f"self.expect({name!r})"
return name, f"self.{name}()"