 Lib/tokenize.py | 7 ++++---
 1 file changed, 4 insertions(+), 3 deletions(-)
diff --git a/Lib/tokenize.py b/Lib/tokenize.py
index b8ee2c8..f82922b 100644
--- a/Lib/tokenize.py
+++ b/Lib/tokenize.py
@@ -379,10 +379,11 @@ def tokenize(readline):
     """
     # This import is here to avoid problems when the itertools module is not
     # built yet and tokenize is imported.
-    from itertools import chain
+    from itertools import chain, repeat
     encoding, consumed = detect_encoding(readline)
-    rl_iter = iter(readline, "")
-    return _tokenize(chain(consumed, rl_iter).__next__, encoding)
+    rl_gen = iter(readline, b"")
+    empty = repeat(b"")
+    return _tokenize(chain(consumed, rl_gen, empty).__next__, encoding)
 
 
 def _tokenize(readline, encoding):
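
For context (a usage sketch, not part of the commit): tokenize() expects a readline callable that returns bytes, which is why the iterator sentinel must be b"" rather than "", and the trailing repeat(b"") appears to keep supplying empty byte strings if _tokenize reads past the end of input instead of letting StopIteration escape from .__next__. With the fix, a bytes-producing readline such as io.BytesIO(...).readline works as expected:

    import io
    import tokenize

    source = b"x = 1\nprint(x)\n"
    readline = io.BytesIO(source).readline

    # detect_encoding() consumes the first line(s); tokenize() replays them
    # via chain(consumed, ...) before continuing with the rest of the input.
    for tok in tokenize.tokenize(readline):
        print(tok)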