diff options
| author | Benjamin Peterson <benjamin@python.org> | 2011-09-28 14:48:40 (GMT) |
|---|---|---|
| committer | Benjamin Peterson <benjamin@python.org> | 2011-09-28 14:48:40 (GMT) |
| commit | e56717c3d201db2dadacd20f654d6be7f9ed18ed (patch) | |
| tree | e0df9e7bf6ab16052edaa32311ab66278bc73404 /Lib/test/test_tokenize.py | |
| parent | 19f6260710b3d086abcbe400194b203be11b60ab (diff) | |
| parent | d8b509b192a67f0f217ae52ed81fc91bc27a1818 (diff) | |
| download | cpython-e56717c3d201db2dadacd20f654d6be7f9ed18ed.zip cpython-e56717c3d201db2dadacd20f654d6be7f9ed18ed.tar.gz cpython-e56717c3d201db2dadacd20f654d6be7f9ed18ed.tar.bz2 | |
merge heads
Diffstat (limited to 'Lib/test/test_tokenize.py')
| -rw-r--r-- | Lib/test/test_tokenize.py | 2 |
1 file changed, 1 insertion(+), 1 deletion(-)
```diff
diff --git a/Lib/test/test_tokenize.py b/Lib/test/test_tokenize.py
index 9e9656c..af2bbf1 100644
--- a/Lib/test/test_tokenize.py
+++ b/Lib/test/test_tokenize.py
@@ -600,7 +600,7 @@ def roundtrip(f):
     f.close()
     tokens1 = [tok[:2] for tok in token_list]
     new_bytes = untokenize(tokens1)
-    readline = (line for line in new_bytes.splitlines(1)).__next__
+    readline = (line for line in new_bytes.splitlines(keepends=True)).__next__
     tokens2 = [tok[:2] for tok in tokenize(readline)]
     return tokens1 == tokens2
```
