author     Jelle Zijlstra <jelle.zijlstra@gmail.com>    2017-10-06 03:24:46 (GMT)
committer  Yury Selivanov <yury@magic.io>               2017-10-06 03:24:46 (GMT)
commit     ac317700ce7439e38a8b420218d9a5035bba92ed (patch)
tree       ddeb7d90f2e90b73a37783b88ef77376d9d996f5 /Lib/tokenize.py
parent     2084b30e540d88b9fc752c5bdcc2f24334af4f2b (diff)
bpo-30406: Make async and await proper keywords (#1669)
Per PEP 492, 'async' and 'await' should become proper keywords in 3.7.
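As a quick illustration of the effect (a minimal sketch, assuming CPython 3.7 or later): both words now register as keywords, and the pure-Python tokenizer reports them as ordinary NAME tokens rather than the dedicated ASYNC/AWAIT types this patch stops emitting.

# Sketch, Python 3.7+: 'async' and 'await' are unconditional keywords.
import io
import keyword
import tokenize

print(keyword.iskeyword("async"), keyword.iskeyword("await"))   # True True

# tokenize now yields plain NAME tokens for both words; the ASYNC/AWAIT
# special-casing removed in the diff below is no longer involved.
source = "async def f():\n    await g()\n"
for tok in tokenize.generate_tokens(io.StringIO(source).readline):
    print(tokenize.tok_name[tok.type], repr(tok.string))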
Diffstat (limited to 'Lib/tokenize.py')
-rw-r--r--  Lib/tokenize.py  62
1 file changed, 1 insertion(+), 61 deletions(-)
diff --git a/Lib/tokenize.py b/Lib/tokenize.py
index 5fa4152..f5c6ac7 100644
--- a/Lib/tokenize.py
+++ b/Lib/tokenize.py
@@ -491,12 +491,6 @@ def _tokenize(readline, encoding):
     contline = None
     indents = [0]
 
-    # 'stashed' and 'async_*' are used for async/await parsing
-    stashed = None
-    async_def = False
-    async_def_indent = 0
-    async_def_nl = False
-
     if encoding is not None:
         if encoding == "utf-8-sig":
             # BOM will already have been stripped.
@@ -571,18 +565,8 @@ def _tokenize(readline, encoding):
                         ("<tokenize>", lnum, pos, line))
                 indents = indents[:-1]
 
-                if async_def and async_def_indent >= indents[-1]:
-                    async_def = False
-                    async_def_nl = False
-                    async_def_indent = 0
-
                 yield TokenInfo(DEDENT, '', (lnum, pos), (lnum, pos), line)
 
-            if async_def and async_def_nl and async_def_indent >= indents[-1]:
-                async_def = False
-                async_def_nl = False
-                async_def_indent = 0
-
         else:                                  # continued statement
             if not line:
                 raise TokenError("EOF in multi-line statement", (lnum, 0))
@@ -601,21 +585,13 @@ def _tokenize(readline, encoding):
                         (initial == '.' and token != '.' and token != '...')):
                     yield TokenInfo(NUMBER, token, spos, epos, line)
                 elif initial in '\r\n':
-                    if stashed:
-                        yield stashed
-                        stashed = None
                     if parenlev > 0:
                         yield TokenInfo(NL, token, spos, epos, line)
                     else:
                         yield TokenInfo(NEWLINE, token, spos, epos, line)
-                        if async_def:
-                            async_def_nl = True
 
                 elif initial == '#':
                     assert not token.endswith("\n")
-                    if stashed:
-                        yield stashed
-                        stashed = None
                     yield TokenInfo(COMMENT, token, spos, epos, line)
                 elif token in triple_quoted:
                     endprog = _compile(endpats[token])
@@ -662,36 +638,7 @@ def _tokenize(readline, encoding):
                     else:                                  # ordinary string
                         yield TokenInfo(STRING, token, spos, epos, line)
                 elif initial.isidentifier():               # ordinary name
-                    if token in ('async', 'await'):
-                        if async_def:
-                            yield TokenInfo(
-                                ASYNC if token == 'async' else AWAIT,
-                                token, spos, epos, line)
-                            continue
-
-                    tok = TokenInfo(NAME, token, spos, epos, line)
-                    if token == 'async' and not stashed:
-                        stashed = tok
-                        continue
-
-                    if token == 'def':
-                        if (stashed
-                                and stashed.type == NAME
-                                and stashed.string == 'async'):
-
-                            async_def = True
-                            async_def_indent = indents[-1]
-
-                            yield TokenInfo(ASYNC, stashed.string,
-                                            stashed.start, stashed.end,
-                                            stashed.line)
-                            stashed = None
-
-                    if stashed:
-                        yield stashed
-                        stashed = None
-
-                    yield tok
+                    yield TokenInfo(NAME, token, spos, epos, line)
                 elif initial == '\\':                      # continued stmt
                     continued = 1
                 else:
@@ -699,19 +646,12 @@ def _tokenize(readline, encoding):
                         parenlev += 1
                     elif initial in ')]}':
                         parenlev -= 1
-                        if stashed:
-                            yield stashed
-                            stashed = None
                     yield TokenInfo(OP, token, spos, epos, line)
             else:
                 yield TokenInfo(ERRORTOKEN, line[pos],
                                 (lnum, pos), (lnum, pos+1), line)
                 pos += 1
 
-    if stashed:
-        yield stashed
-        stashed = None
-
     for indent in indents[1:]:                 # pop remaining indent levels
         yield TokenInfo(DEDENT, '', (lnum, 0), (lnum, 0), '')
     yield TokenInfo(ENDMARKER, '', (lnum, 0), (lnum, 0), '')
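For context on the deleted machinery: under PEP 492's transition scheme (3.5/3.6), 'async' and 'await' were keywords only inside an 'async def' body, so _tokenize had to stash the NAME token for 'async' until it saw whether 'def' followed. With both words reserved everywhere, a short sketch (exact SyntaxError text varies by version) shows the user-visible difference:

# On 3.6 these compiled (with a DeprecationWarning); on 3.7+ both raise
# SyntaxError, which is why the stashed/async_def bookkeeping could go.
for src in ("async = 1", "await = 2"):
    try:
        compile(src, "<demo>", "exec")
        print(src, "-> compiled (pre-3.7 behavior)")
    except SyntaxError as err:
        print(src, "-> SyntaxError:", err.msg)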