author | Ezio Melotti <ezio.melotti@gmail.com> | 2012-11-03 15:38:43 (GMT)
---|---|---
committer | Ezio Melotti <ezio.melotti@gmail.com> | 2012-11-03 15:38:43 (GMT)
commit | 2cc3b4ba9ffa658784da03f14a0a068e2c61d1b3 (patch) |
tree | 66423060d67ba1f6ad182450edb895cc29ca28f1 | /Lib/test/test_tokenize.py
parent | 4552e3f95c3382a4665cb8adab343521f8898331 (diff) |
#16152: fix tokenize to ignore whitespace at the end of the code when no newline is found. Patch by Ned Batchelder.
Diffstat (limited to 'Lib/test/test_tokenize.py')
-rw-r--r-- | Lib/test/test_tokenize.py | 5 |
1 files changed, 5 insertions, 0 deletions
diff --git a/Lib/test/test_tokenize.py b/Lib/test/test_tokenize.py
index b6a9ca1..f9652ce 100644
--- a/Lib/test/test_tokenize.py
+++ b/Lib/test/test_tokenize.py
@@ -552,6 +552,11 @@ Evil tabs
     DEDENT     ''            (4, 0) (4, 0)
     DEDENT     ''            (4, 0) (4, 0)
 
+Pathological whitespace (http://bugs.python.org/issue16152)
+    >>> dump_tokens("@ ")
+    ENCODING   'utf-8'       (0, 0) (0, 0)
+    OP         '@'           (1, 0) (1, 1)
+
 Non-ascii identifiers
 
     >>> dump_tokens("Örter = 'places'\\ngrün = 'green'")
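For reference, here is a minimal standalone sketch (not part of the patch) that exercises the same pathological input through the public tokenize API. The dump_tokens helper used in the doctest above is defined inside Lib/test/test_tokenize.py, so this sketch simply prints the token stream directly.

```python
import io
import tokenize

# Source text that ends in whitespace with no trailing newline -- the
# pathological case from issue #16152. With the fix, tokenize ignores the
# trailing whitespace and emits the '@' operator token as shown in the
# new doctest.
source = "@ "

# tokenize.tokenize() expects a readline callable that returns bytes.
readline = io.BytesIO(source.encode("utf-8")).readline

for tok in tokenize.tokenize(readline):
    # Print each token's name, string, and start/end positions, roughly
    # mirroring the table format that the test's dump_tokens() produces.
    print(tokenize.tok_name[tok.type], repr(tok.string), tok.start, tok.end)
```

On an interpreter that includes the fix, this prints the ENCODING and OP entries shown in the doctest above, followed by any end-of-stream tokens that the dump_tokens helper does not display.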