diff options
author | Nikita Sobolev <mail@sobolevn.me> | 2023-10-19 08:29:45 (GMT) |
---|---|---|
committer | GitHub <noreply@github.com> | 2023-10-19 08:29:45 (GMT) |
commit | e9b5399bee7106beeeb38a45cfef3f0ed3fdd703 (patch) | |
tree | e529260234aa0ba88ce3c5d09ecacbbf0ef0e8a1 /Lib/test/test_tokenize.py | |
parent | 642eb8df951f2f1d4bf4d93ee568707c5bf40a96 (diff) | |
download | cpython-e9b5399bee7106beeeb38a45cfef3f0ed3fdd703.zip cpython-e9b5399bee7106beeeb38a45cfef3f0ed3fdd703.tar.gz cpython-e9b5399bee7106beeeb38a45cfef3f0ed3fdd703.tar.bz2 |
gh-111031: Check more files in `test_tokenize` (#111032)
Diffstat (limited to 'Lib/test/test_tokenize.py')
-rw-r--r-- | Lib/test/test_tokenize.py | 10 |
1 file changed, 0 insertions, 10 deletions
diff --git a/Lib/test/test_tokenize.py b/Lib/test/test_tokenize.py
index 41b9ebe..290f460 100644
--- a/Lib/test/test_tokenize.py
+++ b/Lib/test/test_tokenize.py
@@ -1901,19 +1901,9 @@ class TestRoundtrip(TestCase):
         tempdir = os.path.dirname(__file__) or os.curdir
         testfiles = glob.glob(os.path.join(glob.escape(tempdir), "test*.py"))

-        # Tokenize is broken on test_pep3131.py because regular expressions are
-        # broken on the obscure unicode identifiers in it. *sigh*
-        # With roundtrip extended to test the 5-tuple mode of untokenize,
-        # 7 more testfiles fail. Remove them also until the failure is diagnosed.
-
-        testfiles.remove(os.path.join(tempdir, "test_unicode_identifiers.py"))
-
         # TODO: Remove this once we can untokenize PEP 701 syntax
         testfiles.remove(os.path.join(tempdir, "test_fstring.py"))

-        for f in ('buffer', 'builtin', 'fileio', 'os', 'platform', 'sys'):
-            testfiles.remove(os.path.join(tempdir, "test_%s.py") % f)
-
         if not support.is_resource_enabled("cpu"):
             testfiles = random.sample(testfiles, 10)