diff options
author | Guido van Rossum <guido@python.org> | 2007-11-12 17:40:10 (GMT) |
---|---|---|
committer | Guido van Rossum <guido@python.org> | 2007-11-12 17:40:10 (GMT) |
commit | 4fe72f9b03f92f126bec69d79ca3e9e09018c988 (patch) | |
tree | 7c12e90de06c635e220852dad053ae2848c1a746 /Lib/test/test_tokenize.py | |
parent | 1607278c2612275d21354619c6ef1e2869178ba7 (diff) | |
download | cpython-4fe72f9b03f92f126bec69d79ca3e9e09018c988.zip cpython-4fe72f9b03f92f126bec69d79ca3e9e09018c988.tar.gz cpython-4fe72f9b03f92f126bec69d79ca3e9e09018c988.tar.bz2 |
Patch 1420 by Ron Adam.
This adds support for bytes literals (b'...') to tokenize.py, and
removes support for unicode literals (u'...').
Diffstat (limited to 'Lib/test/test_tokenize.py')
-rw-r--r-- | Lib/test/test_tokenize.py | 13 |
1 file changed, 11 insertions, 2 deletions
diff --git a/Lib/test/test_tokenize.py b/Lib/test/test_tokenize.py index 8ef9000..e59d9c6 100644 --- a/Lib/test/test_tokenize.py +++ b/Lib/test/test_tokenize.py @@ -183,6 +183,13 @@ def test_main(): next_time = time.time() + _PRINT_WORKING_MSG_INTERVAL + # Validate the tokenize_tests.txt file. + # This makes sure it compiles, and displays any errors in it. + f = open(findfile('tokenize_tests.txt')) + sf = f.read() + f.close() + cf = compile(sf, 'tokenize_tests.txt', 'exec') + # This displays the tokenization of tokenize_tests.py to stdout, and # regrtest.py checks that this equals the expected output (in the # test/output/ directory). @@ -190,10 +197,12 @@ def test_main(): tokenize(f.readline) f.close() - # Now run test_roundtrip() over tokenize_test.py too, and over all + # Now run test_roundtrip() over test_tokenize.py too, and over all # (if the "compiler" resource is enabled) or a small random sample (if # "compiler" is not enabled) of the test*.py files. - f = findfile('tokenize_tests.txt') + f = findfile('test_tokenize.py') + if verbose: + print(' round trip: ', f, file=sys.__stdout__) test_roundtrip(f) testdir = os.path.dirname(f) or os.curdir |