diff options
author | Benjamin Peterson <benjamin@python.org> | 2009-11-14 18:09:17 (GMT) |
---|---|---|
committer | Benjamin Peterson <benjamin@python.org> | 2009-11-14 18:09:17 (GMT) |
commit | 81dd8b9594d88ff1d2c8f5efea687645bbc36d6f (patch) | |
tree | fc9036075850bfd8f96ad4d3b56b7f8bb37f5d63 | |
parent | 21db77e396c00c0490b6344a130bdbcef62bfa73 (diff) | |
download | cpython-81dd8b9594d88ff1d2c8f5efea687645bbc36d6f.zip cpython-81dd8b9594d88ff1d2c8f5efea687645bbc36d6f.tar.gz cpython-81dd8b9594d88ff1d2c8f5efea687645bbc36d6f.tar.bz2 |
use some more itertools magic to make '' be yielded after readline is done
-rw-r--r-- | Lib/tokenize.py | 7 |
1 file changed, 4 insertions, 3 deletions
diff --git a/Lib/tokenize.py b/Lib/tokenize.py index b8ee2c8..f82922b 100644 --- a/Lib/tokenize.py +++ b/Lib/tokenize.py @@ -379,10 +379,11 @@ def tokenize(readline): """ # This import is here to avoid problems when the itertools module is not # built yet and tokenize is imported. - from itertools import chain + from itertools import chain, repeat encoding, consumed = detect_encoding(readline) - rl_iter = iter(readline, "") - return _tokenize(chain(consumed, rl_iter).__next__, encoding) + rl_gen = iter(readline, b"") + empty = repeat(b"") + return _tokenize(chain(consumed, rl_gen, empty).__next__, encoding) def _tokenize(readline, encoding): |