author     Benjamin Peterson <benjamin@python.org>    2011-02-26 22:11:02 (GMT)
committer  Benjamin Peterson <benjamin@python.org>    2011-02-26 22:11:02 (GMT)
commit     8059e1e2140d08683429a6731ecf4b1d2385cce3
tree       ea2fede850afc6951800381fb4174b73e4a91b4f /Lib/lib2to3
parent     aeb187a22b2210fcf240a318d1745d0153c5e574
Merged revisions 88535,88661 via svnmerge from
svn+ssh://pythondev@svn.python.org/sandbox/trunk/2to3/lib2to3
........
r88535 | brett.cannon | 2011-02-23 13:46:46 -0600 (Wed, 23 Feb 2011) | 1 line
Add lib2to3.__main__ for easy testing from the console.
........
r88661 | benjamin.peterson | 2011-02-26 16:06:24 -0600 (Sat, 26 Feb 2011) | 6 lines
fix refactoring on formfeed characters #11250
This is because text.splitlines() is not the same as
list(StringIO.StringIO(text)).
........
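A minimal sketch of the difference referred to above (not part of the commit; Python 2 syntax, and assuming the input is a unicode string, as produced when the refactoring tool decodes a source file):

    import StringIO

    text = u"print 1\n\x0cprint 2\n"

    # unicode.splitlines() also breaks on the formfeed (\x0c), so the old
    # generate_lines() helper handed the tokenizer an extra, bogus "line":
    print text.splitlines(True)           # [u'print 1\n', u'\x0c', u'print 2\n']

    # StringIO.readline() breaks only on "\n", which is what the tokenizer expects:
    print list(StringIO.StringIO(text))   # [u'print 1\n', u'\x0cprint 2\n']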
Diffstat (limited to 'Lib/lib2to3')
-rw-r--r--  Lib/lib2to3/__main__.py            4
-rw-r--r--  Lib/lib2to3/patcomp.py             3
-rw-r--r--  Lib/lib2to3/pgen2/driver.py       11
-rw-r--r--  Lib/lib2to3/tests/test_parser.py  10
4 files changed, 18 insertions, 10 deletions
diff --git a/Lib/lib2to3/__main__.py b/Lib/lib2to3/__main__.py
new file mode 100644
index 0000000..80688ba
--- /dev/null
+++ b/Lib/lib2to3/__main__.py
@@ -0,0 +1,4 @@
+import sys
+from .main import main
+
+sys.exit(main("lib2to3.fixes"))
diff --git a/Lib/lib2to3/patcomp.py b/Lib/lib2to3/patcomp.py
index 84fee5b..093e5f9 100644
--- a/Lib/lib2to3/patcomp.py
+++ b/Lib/lib2to3/patcomp.py
@@ -12,6 +12,7 @@ __author__ = "Guido van Rossum <guido@python.org>"
 
 # Python imports
 import os
+import StringIO
 
 # Fairly local imports
 from .pgen2 import driver, literals, token, tokenize, parse, grammar
@@ -32,7 +33,7 @@ class PatternSyntaxError(Exception):
 def tokenize_wrapper(input):
     """Tokenizes a string suppressing significant whitespace."""
     skip = set((token.NEWLINE, token.INDENT, token.DEDENT))
-    tokens = tokenize.generate_tokens(driver.generate_lines(input).next)
+    tokens = tokenize.generate_tokens(StringIO.StringIO(input).readline)
     for quintuple in tokens:
         type, value, start, end, line_text = quintuple
         if type not in skip:
diff --git a/Lib/lib2to3/pgen2/driver.py b/Lib/lib2to3/pgen2/driver.py
index 6b3825e..16adec0 100644
--- a/Lib/lib2to3/pgen2/driver.py
+++ b/Lib/lib2to3/pgen2/driver.py
@@ -19,6 +19,7 @@ __all__ = ["Driver", "load_grammar"]
 import codecs
 import os
 import logging
+import StringIO
 import sys
 
 # Pgen imports
@@ -101,18 +102,10 @@ class Driver(object):
 
     def parse_string(self, text, debug=False):
         """Parse a string and return the syntax tree."""
-        tokens = tokenize.generate_tokens(generate_lines(text).next)
+        tokens = tokenize.generate_tokens(StringIO.StringIO(text).readline)
         return self.parse_tokens(tokens, debug)
 
 
-def generate_lines(text):
-    """Generator that behaves like readline without using StringIO."""
-    for line in text.splitlines(True):
-        yield line
-    while True:
-        yield ""
-
-
 def load_grammar(gt="Grammar.txt", gp=None, save=True,
                  force=False, logger=None):
     """Load the grammar (maybe from a pickle)."""
diff --git a/Lib/lib2to3/tests/test_parser.py b/Lib/lib2to3/tests/test_parser.py
index 703d879..2602381 100644
--- a/Lib/lib2to3/tests/test_parser.py
+++ b/Lib/lib2to3/tests/test_parser.py
@@ -19,6 +19,16 @@ import sys
 # Local imports
 from lib2to3.pgen2 import tokenize
 from ..pgen2.parse import ParseError
+from lib2to3.pygram import python_symbols as syms
+
+
+class TestDriver(support.TestCase):
+
+    def test_formfeed(self):
+        s = """print 1\n\x0Cprint 2\n"""
+        t = driver.parse_string(s)
+        self.assertEqual(t.children[0].children[0].type, syms.print_stmt)
+        self.assertEqual(t.children[1].children[0].type, syms.print_stmt)
 
 
 class GrammarTest(support.TestCase):
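A usage note for r88535: running the package directly should now behave like the 2to3 console script, since __main__.py simply calls lib2to3.main.main("lib2to3.fixes"). A hypothetical sketch (the option names assume the usual 2to3 command-line flags):

    # From a console, e.g.:
    #   python -m lib2to3 -l                 # list the available fixers
    #   python -m lib2to3 -f print foo.py    # preview the 'print' fixer as a diff
    # which is equivalent to calling the entry point by hand:
    import sys
    from lib2to3.main import main

    sys.exit(main("lib2to3.fixes", args=["-l"]))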