author     Eric V. Smith <eric@trueblade.com>   2015-10-17 00:45:53 (GMT)
committer  Eric V. Smith <eric@trueblade.com>   2015-10-17 00:45:53 (GMT)
commit     67317742162dd5b9728672b2ff7ed21e2aa7d2fa (patch)
tree       ef8a2b418eea11da3bbf9cf9ab493198d71bac08 /Lib/test/test_tokenize.py
parent     9b63868f77c6782bba6b3ceed8e2058b471b6fd5 (diff)
Issue 25422: Add tests for multi-line string tokenization. Also remove truncated tokens.
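For context, the behavior the new tests pin down is that a string literal whose physical lines are joined by backslash continuations comes back from tokenize as a single STRING token whose end position lies on a later line. A minimal sketch of this, not part of the patch (the input mirrors the first new test case):

    import io
    from tokenize import tokenize, tok_name

    # One string literal spread over three physical lines via
    # backslash-newline continuations.
    source = '"a\\\nde\\\nfg"\n'

    for tok in tokenize(io.BytesIO(source.encode('utf-8')).readline):
        print(tok_name[tok.type], repr(tok.string), tok.start, tok.end)

    # ENCODING 'utf-8' (0, 0) (0, 0)
    # STRING '"a\\\nde\\\nfg"' (1, 0) (3, 3)   <- one token ending on line 3
    # NEWLINE '\n' (3, 3) (3, 4)
    # ENDMARKER '' (4, 0) (4, 0)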
Diffstat (limited to 'Lib/test/test_tokenize.py')
-rw-r--r--  Lib/test/test_tokenize.py  38
1 file changed, 32 insertions, 6 deletions
diff --git a/Lib/test/test_tokenize.py b/Lib/test/test_tokenize.py
index 3b17ca6..b74396f 100644
--- a/Lib/test/test_tokenize.py
+++ b/Lib/test/test_tokenize.py
@@ -24,8 +24,7 @@ class TokenizeTest(TestCase):
             if type == ENDMARKER:
                 break
             type = tok_name[type]
-            result.append("    %(type)-10.10s %(token)-13.13r %(start)s %(end)s" %
-                          locals())
+            result.append(f"    {type:10} {token!r:13} {start} {end}")
         self.assertEqual(result,
                          ["    ENCODING   'utf-8'       (0, 0) (0, 0)"] +
                          expected.rstrip().splitlines())
@@ -132,18 +131,18 @@ def k(x):
         self.check_tokenize("x = 0xfffffffffff", """\
     NAME       'x'           (1, 0) (1, 1)
     OP         '='           (1, 2) (1, 3)
-    NUMBER     '0xffffffffff (1, 4) (1, 17)
+    NUMBER     '0xfffffffffff' (1, 4) (1, 17)
     """)
         self.check_tokenize("x = 123141242151251616110", """\
     NAME       'x'           (1, 0) (1, 1)
     OP         '='           (1, 2) (1, 3)
-    NUMBER     '123141242151 (1, 4) (1, 25)
+    NUMBER     '123141242151251616110' (1, 4) (1, 25)
     """)
         self.check_tokenize("x = -15921590215012591", """\
     NAME       'x'           (1, 0) (1, 1)
     OP         '='           (1, 2) (1, 3)
     OP         '-'           (1, 4) (1, 5)
-    NUMBER     '159215902150 (1, 5) (1, 22)
+    NUMBER     '15921590215012591' (1, 5) (1, 22)
     """)

     def test_float(self):
@@ -307,6 +306,33 @@ def k(x):
     OP         '+'           (1, 28) (1, 29)
     STRING     'RB"abc"'     (1, 30) (1, 37)
     """)
+        # Check 0, 1, and 2 character string prefixes.
+        self.check_tokenize(r'"a\
+de\
+fg"', """\
+    STRING     '"a\\\\\\nde\\\\\\nfg"\' (1, 0) (3, 3)
+    """)
+        self.check_tokenize(r'u"a\
+de"', """\
+    STRING     'u"a\\\\\\nde"\' (1, 0) (2, 3)
+    """)
+        self.check_tokenize(r'rb"a\
+d"', """\
+    STRING     'rb"a\\\\\\nd"\' (1, 0) (2, 2)
+    """)
+        self.check_tokenize(r'"""a\
+b"""', """\
+    STRING     '\"\""a\\\\\\nb\"\""' (1, 0) (2, 4)
+    """)
+        self.check_tokenize(r'u"""a\
+b"""', """\
+    STRING     'u\"\""a\\\\\\nb\"\""' (1, 0) (2, 4)
+    """)
+        self.check_tokenize(r'rb"""a\
+b\
+c"""', """\
+    STRING     'rb"\""a\\\\\\nb\\\\\\nc"\""' (1, 0) (3, 4)
+    """)

     def test_function(self):
         self.check_tokenize("def d22(a, b, c=2, d=2, *k): pass", """\
@@ -505,7 +531,7 @@ def k(x):
         # Methods
         self.check_tokenize("@staticmethod\ndef foo(x,y): pass", """\
     OP         '@'           (1, 0) (1, 1)
-    NAME       'staticmethod (1, 1) (1, 13)
+    NAME       'staticmethod' (1, 1) (1, 13)
     NEWLINE    '\\n'          (1, 13) (1, 14)
     NAME       'def'         (2, 0) (2, 3)
     NAME       'foo'         (2, 4) (2, 7)
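The check_tokenize change at the top of the diff is what the "remove truncated tokens" half of the commit message refers to: the old %-style spec %(token)-13.13r both pads and truncates the token repr at 13 characters, silently dropping the closing quote from anything longer, whereas the replacement f-string spec {token!r:13} only pads. A standalone illustration, not taken from the test file:

    token = 'staticmethod'

    # Old spec: "-13.13r" left-aligns to a width of 13 but also truncates
    # at 13 characters, cutting the closing quote off longer reprs.
    print("%(token)-13.13r" % {'token': token})   # 'staticmethod

    # New spec: "{token!r:13}" pads to a minimum of 13 characters and
    # never truncates, so the expected-output tables keep the full repr.
    print(f"{token!r:13}")                        # 'staticmethod'

This is why the three NUMBER lines and the staticmethod NAME line in the expected output gain their missing closing quotes.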