author     Jason R. Coombs <jaraco@jaraco.com>   2015-06-20 23:13:50 (GMT)
committer  Jason R. Coombs <jaraco@jaraco.com>   2015-06-20 23:13:50 (GMT)
commit     7cf36387e4a4e7f9686274cdfaeaeddc76ff5902 (patch)
tree       43c3cb23143d5107d9f581c2717a11b8a2a4273c /Lib/test/test_tokenize.py
parent     9fe164364af47d119c4bed420694570ee9a4e6a6 (diff)
Remove unused import and move doctest-only import into the doctests.
Diffstat (limited to 'Lib/test/test_tokenize.py')
-rw-r--r--  Lib/test/test_tokenize.py  4
1 file changed, 3 insertions, 1 deletion
diff --git a/Lib/test/test_tokenize.py b/Lib/test/test_tokenize.py
index 9842207..3e8a654 100644
--- a/Lib/test/test_tokenize.py
+++ b/Lib/test/test_tokenize.py
@@ -5,6 +5,8 @@ The tests can be really simple. Given a small fragment of source
 code, print out a table with tokens. The ENDMARKER is omitted for
 brevity.
 
+    >>> import glob
+
     >>> dump_tokens("1 + 1")
     ENCODING   'utf-8'       (0, 0) (0, 0)
     NUMBER     '1'           (1, 0) (1, 1)
@@ -647,7 +649,7 @@ from tokenize import (tokenize, _tokenize, untokenize, NUMBER, NAME, OP,
                      open as tokenize_open, Untokenizer)
 from io import BytesIO
 from unittest import TestCase, mock
-import os, sys, glob
+import os
 import token
 
 def dump_tokens(s):
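
For context, the dump_tokens helper exercised by the doctest above is defined further
down in test_tokenize.py and is not shown in full by this hunk. The following is only
an illustrative sketch of such a token-dumping helper, reconstructed from the doctest
output; the column widths, the exact format string, and the trailing-NEWLINE caveat
are assumptions, not the verbatim helper from the test file.

    from io import BytesIO
    from tokenize import tokenize, tok_name, ENDMARKER

    def dump_tokens(source):
        # Tokenize a source string and print one row per token:
        # token name, repr of its text, start position, end position.
        readline = BytesIO(source.encode('utf-8')).readline
        for tok in tokenize(readline):
            if tok.type == ENDMARKER:   # omitted from the table for brevity
                break
            print('%-10s %-13r %s %s' % (tok_name[tok.type], tok.string,
                                         tok.start, tok.end))

    dump_tokens("1 + 1")
    # ENCODING   'utf-8'       (0, 0) (0, 0)
    # NUMBER     '1'           (1, 0) (1, 1)
    # OP         '+'           (1, 2) (1, 3)
    # NUMBER     '1'           (1, 4) (1, 5)
    # (recent Python versions may also emit a NEWLINE token before ENDMARKER)

Because glob is used only inside the doctests, importing it within the doctest itself
(as this commit does) keeps the module-level imports limited to what the test code
proper actually needs.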