Diffstat (limited to 'Lib')
-rw-r--r--  Lib/lib2to3/fixes/fix_import.py        17
-rw-r--r--  Lib/lib2to3/fixes/fix_operator.py       6
-rw-r--r--  Lib/lib2to3/fixes/fix_reduce.py         5
-rw-r--r--  Lib/lib2to3/fixes/fix_tuple_params.py   2
-rw-r--r--  Lib/lib2to3/fixes/fix_xrange.py        12
-rw-r--r--  Lib/lib2to3/main.py                    14
-rw-r--r--  Lib/lib2to3/pgen2/tokenize.py           9
-rw-r--r--  Lib/lib2to3/pytree.py                  15
-rw-r--r--  Lib/lib2to3/refactor.py                22
-rw-r--r--  Lib/lib2to3/tests/test_fixers.py       23
-rw-r--r--  Lib/lib2to3/tests/test_parser.py       15
-rw-r--r--  Lib/lib2to3/tests/test_pytree.py       33
-rw-r--r--  Lib/lib2to3/tests/test_refactor.py     80
13 files changed, 165 insertions, 88 deletions
diff --git a/Lib/lib2to3/fixes/fix_import.py b/Lib/lib2to3/fixes/fix_import.py
index 625611d..ef9b619 100644
--- a/Lib/lib2to3/fixes/fix_import.py
+++ b/Lib/lib2to3/fixes/fix_import.py
@@ -43,7 +43,13 @@ class FixImport(fixer_base.BaseFix):
import_name< 'import' imp=any >
"""
+ def start_tree(self, tree, name):
+ super(FixImport, self).start_tree(tree, name)
+ self.skip = "absolute_import" in tree.future_features
+
def transform(self, node, results):
+ if self.skip:
+ return
imp = results['imp']
if node.type == syms.import_from:
@@ -71,19 +77,22 @@ class FixImport(fixer_base.BaseFix):
self.warning(node, "absolute and local imports together")
return
- new = FromImport('.', [imp])
+ new = FromImport(".", [imp])
new.prefix = node.prefix
return new
def probably_a_local_import(self, imp_name):
- imp_name = imp_name.split('.', 1)[0]
+ if imp_name.startswith("."):
+ # Relative imports are certainly not local imports.
+ return False
+ imp_name = imp_name.split(".", 1)[0]
base_path = dirname(self.filename)
base_path = join(base_path, imp_name)
# If there is no __init__.py next to the file its not in a package
# so can't be a relative import.
- if not exists(join(dirname(base_path), '__init__.py')):
+ if not exists(join(dirname(base_path), "__init__.py")):
return False
- for ext in ['.py', sep, '.pyc', '.so', '.sl', '.pyd']:
+ for ext in [".py", sep, ".pyc", ".so", ".sl", ".pyd"]:
if exists(base_path + ext):
return True
return False
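
The new start_tree() hook makes the import fixer a no-op for modules that already declare absolute_import; it reads tree.future_features, which the refactor.py change later in this patch attaches to every parsed tree. A minimal sketch of the same pattern for a custom fixer follows; the class name is illustrative, and the getattr() fallback is an extra precaution for trees produced by older RefactoringTools that do not set the attribute.

# Sketch: skip a fixer when the module opts into a __future__ feature.
from lib2to3 import fixer_base

class FixExampleImport(fixer_base.BaseFix):   # hypothetical fixer
    PATTERN = "import_name< 'import' imp=any >"

    def start_tree(self, tree, filename):
        super(FixExampleImport, self).start_tree(tree, filename)
        # tree.future_features is set by RefactoringTool as of this patch;
        # getattr() keeps the sketch safe on older trees.
        self.skip = "absolute_import" in getattr(tree, "future_features", ())

    def transform(self, node, results):
        if self.skip:
            return   # the module already uses absolute imports
        # ... rewrite results['imp'] here ...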
diff --git a/Lib/lib2to3/fixes/fix_operator.py b/Lib/lib2to3/fixes/fix_operator.py
index 9b1089c..ded9eee 100644
--- a/Lib/lib2to3/fixes/fix_operator.py
+++ b/Lib/lib2to3/fixes/fix_operator.py
@@ -14,10 +14,10 @@ class FixOperator(fixer_base.BaseFix):
func = "'(' func=any ')'"
PATTERN = """
power< module='operator'
- trailer< '.' {methods} > trailer< {func} > >
+ trailer< '.' %(methods)s > trailer< %(func)s > >
|
- power< {methods} trailer< {func} > >
- """.format(methods=methods, func=func)
+ power< %(methods)s trailer< %(func)s > >
+ """ % dict(methods=methods, func=func)
def transform(self, node, results):
method = results["method"][0]
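
fix_operator also switches its PATTERN template from str.format() to %-interpolation, presumably so the module stays importable on Python 2.5, where str.format() does not exist. A small illustration of the idiom; the pattern fragments below are illustrative rather than the fixer's exact strings.

# Sketch: composing a lib2to3 PATTERN from fragments with %-interpolation,
# which works on Python 2.5 (str.format() was added in 2.6).
fragments = dict(
    methods="method=('isCallable'|'sequenceIncludes')",  # illustrative
    func="'(' func=any ')'",
)
PATTERN = """
    power< module='operator'
           trailer< '.' %(methods)s > trailer< %(func)s > >
    |
    power< %(methods)s trailer< %(func)s > >
""" % fragments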
diff --git a/Lib/lib2to3/fixes/fix_reduce.py b/Lib/lib2to3/fixes/fix_reduce.py
index 89fa2b4..c54c5c1 100644
--- a/Lib/lib2to3/fixes/fix_reduce.py
+++ b/Lib/lib2to3/fixes/fix_reduce.py
@@ -7,9 +7,8 @@ Makes sure reduce() is imported from the functools module if reduce is
used in that module.
"""
-from .. import pytree
-from .. import fixer_base
-from ..fixer_util import Name, Attr, touch_import
+from lib2to3 import fixer_base
+from lib2to3.fixer_util import touch_import
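
The slimmed-down fix_reduce now only needs touch_import() from lib2to3.fixer_util, whose signature is touch_import(package, name, node): it inserts "from package import name" at the top of the module containing node unless an equivalent import is already present. A hedged sketch of that call inside a fixer; the class name and the simplified PATTERN are illustrative (the real fix_reduce matches a more detailed call shape).

# Sketch: ensuring an import exists from inside a fixer via touch_import().
from lib2to3 import fixer_base
from lib2to3.fixer_util import touch_import

class FixReduceSketch(fixer_base.BaseFix):   # hypothetical fixer
    PATTERN = "power< 'reduce' trailer< '(' args=any ')' > >"

    def transform(self, node, results):
        # Adds "from functools import reduce" to the enclosing module
        # if it is not already imported; otherwise this is a no-op.
        touch_import("functools", "reduce", node)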
diff --git a/Lib/lib2to3/fixes/fix_tuple_params.py b/Lib/lib2to3/fixes/fix_tuple_params.py
index 2085935..fad5cad 100644
--- a/Lib/lib2to3/fixes/fix_tuple_params.py
+++ b/Lib/lib2to3/fixes/fix_tuple_params.py
@@ -154,7 +154,7 @@ def map_to_index(param_list, prefix=[], d=None):
if d is None:
d = {}
for i, obj in enumerate(param_list):
- trailer = [Subscript(Number(i))]
+ trailer = [Subscript(Number(str(i)))]
if isinstance(obj, list):
map_to_index(obj, trailer, d=d)
else:
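
The fix here is that Number() builds a NUMBER leaf from a string value, so the loop index has to be converted with str() first. A tiny sketch of the helpers involved (Number and Subscript from lib2to3.fixer_util); the printed result reflects how lib2to3 should render the generated trailer.

# Sketch: Number() takes the literal as a string; Subscript() wraps the node
# in a "[...]" trailer.
from lib2to3.fixer_util import Number, Subscript

trailer = Subscript(Number(str(2)))
print(str(trailer))   # expected to print "[2]"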
diff --git a/Lib/lib2to3/fixes/fix_xrange.py b/Lib/lib2to3/fixes/fix_xrange.py
index 7efeea4..6e2b925 100644
--- a/Lib/lib2to3/fixes/fix_xrange.py
+++ b/Lib/lib2to3/fixes/fix_xrange.py
@@ -17,6 +17,13 @@ class FixXrange(fixer_base.BaseFix):
rest=any* >
"""
+ def start_tree(self, tree, filename):
+ super(FixXrange, self).start_tree(tree, filename)
+ self.transformed_xranges = set()
+
+ def finish_tree(self, tree, filename):
+ self.transformed_xranges = None
+
def transform(self, node, results):
name = results["name"]
if name.value == "xrange":
@@ -29,9 +36,12 @@ class FixXrange(fixer_base.BaseFix):
def transform_xrange(self, node, results):
name = results["name"]
name.replace(Name("range", prefix=name.prefix))
+ # This prevents the new range call from being wrapped in a list later.
+ self.transformed_xranges.add(id(node))
def transform_range(self, node, results):
- if not self.in_special_context(node):
+ if (id(node) not in self.transformed_xranges and
+ not self.in_special_context(node)):
range_call = Call(Name("range"), [results["args"].clone()])
# Encase the range call in list().
list_call = Call(Name("list"), [range_call],
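
fix_xrange now keeps a per-tree set of id()s for calls it has already rewritten from xrange to range, so the range branch does not wrap those same nodes in list(). The sketch below isolates that bookkeeping pattern; the class name is made up and the pattern is trimmed to the xrange branch only.

# Sketch: per-tree bookkeeping keyed by id(node), reset between files, so a
# node rewritten once is never processed a second time.
from lib2to3 import fixer_base
from lib2to3.fixer_util import Name

class FixOnceSketch(fixer_base.BaseFix):   # hypothetical fixer
    PATTERN = "power< name='xrange' trailer< '(' args=any ')' > rest=any* >"

    def start_tree(self, tree, filename):
        super(FixOnceSketch, self).start_tree(tree, filename)
        self.transformed = set()      # ids of nodes handled in this tree

    def finish_tree(self, tree, filename):
        self.transformed = None       # do not carry ids across files

    def transform(self, node, results):
        if id(node) in self.transformed:
            return
        self.transformed.add(id(node))
        name = results["name"]
        name.replace(Name("range", prefix=name.prefix))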
diff --git a/Lib/lib2to3/main.py b/Lib/lib2to3/main.py
index 6c57e67..02bae37 100644
--- a/Lib/lib2to3/main.py
+++ b/Lib/lib2to3/main.py
@@ -2,6 +2,8 @@
Main program for 2to3.
"""
+from __future__ import with_statement
+
import sys
import os
import difflib
@@ -62,8 +64,14 @@ class StdoutRefactoringTool(refactor.MultiprocessRefactoringTool):
if self.show_diffs:
diff_lines = diff_texts(old, new, filename)
try:
- for line in diff_lines:
- print(line)
+ if self.output_lock is not None:
+ with self.output_lock:
+ for line in diff_lines:
+ print(line)
+ sys.stdout.flush()
+ else:
+ for line in diff_lines:
+ print(line)
except UnicodeEncodeError:
warn("couldn't encode %s's diff for your terminal" %
(filename,))
@@ -94,7 +102,7 @@ def main(fixer_pkg, args=None):
parser.add_option("-x", "--nofix", action="append", default=[],
help="Prevent a fixer from being run.")
parser.add_option("-l", "--list-fixes", action="store_true",
- help="List available transformations (fixes/fix_*.py)")
+ help="List available transformations")
parser.add_option("-p", "--print-function", action="store_true",
help="Modify the grammar so that print() is a function")
parser.add_option("-v", "--verbose", action="store_true",
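
With multiple worker processes, diff output is now serialized through self.output_lock (a multiprocessing.Lock created in refactor.py below); in single-process runs the lock stays None and lines are printed directly. A standalone sketch of that guard; the function name is illustrative.

# Sketch: serializing stdout across processes with an optional lock.
from __future__ import with_statement, print_function

import sys
import multiprocessing

def print_lines(lines, lock=None):
    if lock is not None:
        with lock:                 # one process writes at a time
            for line in lines:
                print(line)
            sys.stdout.flush()
    else:                          # single-process mode
        for line in lines:
            print(line)

if __name__ == "__main__":
    print_lines(["--- a/x.py", "+++ b/x.py"], multiprocessing.Lock())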
diff --git a/Lib/lib2to3/pgen2/tokenize.py b/Lib/lib2to3/pgen2/tokenize.py
index 701daf8..31e2969 100644
--- a/Lib/lib2to3/pgen2/tokenize.py
+++ b/Lib/lib2to3/pgen2/tokenize.py
@@ -38,6 +38,13 @@ __all__ = [x for x in dir(token) if x[0] != '_'] + ["tokenize",
"generate_tokens", "untokenize"]
del token
+try:
+ bytes
+except NameError:
+ # Support bytes type in Python <= 2.5, so 2to3 turns itself into
+ # valid Python 3 code.
+ bytes = str
+
def group(*choices): return '(' + '|'.join(choices) + ')'
def any(*choices): return group(*choices) + '*'
def maybe(*choices): return group(*choices) + '?'
@@ -267,7 +274,7 @@ def detect_encoding(readline):
try:
return readline()
except StopIteration:
- return b''
+ return bytes()
def find_cookie(line):
try:
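
The bytes/str shim above keeps tokenize.py importable on Python 2.5 while letting 2to3 translate it into valid Python 3, and detect_encoding() now returns an empty bytes() at EOF for the same reason. detect_encoding() is used the way test_parser.py does further down: it takes a readline callable from a binary file and returns an (encoding, lines_read) pair. A minimal usage sketch, with the helper name being an assumption:

# Sketch: reading a file's declared source encoding.
from lib2to3.pgen2 import tokenize

def source_encoding(path):          # hypothetical helper name
    fp = open(path, "rb")
    try:
        return tokenize.detect_encoding(fp.readline)[0]
    finally:
        fp.close()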
diff --git a/Lib/lib2to3/pytree.py b/Lib/lib2to3/pytree.py
index 15b83f6..c502771 100644
--- a/Lib/lib2to3/pytree.py
+++ b/Lib/lib2to3/pytree.py
@@ -289,8 +289,7 @@ class Node(Base):
for node in child.post_order():
yield node
- @property
- def prefix(self):
+ def _prefix_getter(self):
"""
The whitespace and comments preceding this node in the input.
"""
@@ -298,11 +297,12 @@ class Node(Base):
return ""
return self.children[0].prefix
- @prefix.setter
- def prefix(self, prefix):
+ def _prefix_setter(self, prefix):
if self.children:
self.children[0].prefix = prefix
+ prefix = property(_prefix_getter, _prefix_setter)
+
def set_child(self, i, child):
"""
Equivalent to 'node.children[i] = child'. This method also sets the
@@ -390,18 +390,17 @@ class Leaf(Base):
"""Return a pre-order iterator for the tree."""
yield self
- @property
- def prefix(self):
+ def _prefix_getter(self):
"""
The whitespace and comments preceding this token in the input.
"""
return self._prefix
- @prefix.setter
- def prefix(self, prefix):
+ def _prefix_setter(self, prefix):
self.changed()
self._prefix = prefix
+ prefix = property(_prefix_getter, _prefix_setter)
def convert(gr, raw_node):
"""
diff --git a/Lib/lib2to3/refactor.py b/Lib/lib2to3/refactor.py
index 8bd61ad..d4af834 100644
--- a/Lib/lib2to3/refactor.py
+++ b/Lib/lib2to3/refactor.py
@@ -8,6 +8,8 @@ recursively descend down directories. Imported as a module, this
provides infrastructure to write your own refactoring tool.
"""
+from __future__ import with_statement
+
__author__ = "Guido van Rossum <guido@python.org>"
@@ -122,13 +124,14 @@ else:
_to_system_newlines = _identity
-def _detect_future_print(source):
+def _detect_future_features(source):
have_docstring = False
gen = tokenize.generate_tokens(io.StringIO(source).readline)
def advance():
tok = next(gen)
return tok[0], tok[1]
ignore = frozenset((token.NEWLINE, tokenize.NL, token.COMMENT))
+ features = set()
try:
while True:
tp, value = advance()
@@ -140,26 +143,25 @@ def _detect_future_print(source):
have_docstring = True
elif tp == token.NAME and value == "from":
tp, value = advance()
- if tp != token.NAME and value != "__future__":
+ if tp != token.NAME or value != "__future__":
break
tp, value = advance()
- if tp != token.NAME and value != "import":
+ if tp != token.NAME or value != "import":
break
tp, value = advance()
if tp == token.OP and value == "(":
tp, value = advance()
while tp == token.NAME:
- if value == "print_function":
- return True
+ features.add(value)
tp, value = advance()
- if tp != token.OP and value != ",":
+ if tp != token.OP or value != ",":
break
tp, value = advance()
else:
break
except StopIteration:
pass
- return False
+ return frozenset(features)
class FixerError(Exception):
@@ -341,7 +343,8 @@ class RefactoringTool(object):
An AST corresponding to the refactored input stream; None if
there were errors during the parse.
"""
- if _detect_future_print(data):
+ features = _detect_future_features(data)
+ if "print_function" in features:
self.driver.grammar = pygram.python_grammar_no_print_statement
try:
tree = self.driver.parse_string(data)
@@ -351,6 +354,7 @@ class RefactoringTool(object):
return
finally:
self.driver.grammar = self.grammar
+ tree.future_features = features
self.log_debug("Refactoring %s", name)
self.refactor_tree(tree, name)
return tree
@@ -605,6 +609,7 @@ class MultiprocessRefactoringTool(RefactoringTool):
def __init__(self, *args, **kwargs):
super(MultiprocessRefactoringTool, self).__init__(*args, **kwargs)
self.queue = None
+ self.output_lock = None
def refactor(self, items, write=False, doctests_only=False,
num_processes=1):
@@ -618,6 +623,7 @@ class MultiprocessRefactoringTool(RefactoringTool):
if self.queue is not None:
raise RuntimeError("already doing multiple processes")
self.queue = multiprocessing.JoinableQueue()
+ self.output_lock = multiprocessing.Lock()
processes = [multiprocessing.Process(target=self._child)
for i in range(num_processes)]
try:
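
_detect_future_print() is generalized into _detect_future_features(), which scans the leading "from __future__ import" statements with the tokenizer and returns a frozenset of every feature named; refactor_string() still uses it to pick the print-free grammar and additionally stores the result on the tree as future_features for fixers such as fix_import above. A hedged usage sketch mirroring the tests at the end of this patch (the helper is private, so its name could change):

# Sketch: exercising the new helper directly.
from lib2to3 import refactor

src = "from __future__ import generators, print_function\n"
features = refactor._detect_future_features(src)
assert features == frozenset(["generators", "print_function"])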
diff --git a/Lib/lib2to3/tests/test_fixers.py b/Lib/lib2to3/tests/test_fixers.py
index b28c35f..855b4bb 100644
--- a/Lib/lib2to3/tests/test_fixers.py
+++ b/Lib/lib2to3/tests/test_fixers.py
@@ -1497,6 +1497,17 @@ class Test_xrange(FixerTestCase):
for call in fixer_util.consuming_calls:
self.unchanged("a = %s(range(10))" % call)
+class Test_xrange_with_reduce(FixerTestCase):
+
+ def setUp(self):
+ super(Test_xrange_with_reduce, self).setUp(["xrange", "reduce"])
+
+ def test_double_transform(self):
+ b = """reduce(x, xrange(5))"""
+ a = """from functools import reduce
+reduce(x, range(5))"""
+ self.check(b, a)
+
class Test_raw_input(FixerTestCase):
fixer = "raw_input"
@@ -3679,7 +3690,7 @@ class Test_import(FixerTestCase):
self.files_checked.append(name)
return self.always_exists or (name in self.present_files)
- from ..fixes import fix_import
+ from lib2to3.fixes import fix_import
fix_import.exists = fake_exists
def tearDown(self):
@@ -3722,6 +3733,12 @@ class Test_import(FixerTestCase):
self.present_files = set(["bar.py"])
self.unchanged(s)
+ def test_with_absolute_import_enabled(self):
+ s = "from __future__ import absolute_import\nimport bar"
+ self.always_exists = False
+ self.present_files = set(["__init__.py", "bar.py"])
+ self.unchanged(s)
+
def test_in_package(self):
b = "import bar"
a = "from . import bar"
@@ -3736,6 +3753,10 @@ class Test_import(FixerTestCase):
self.present_files = set(["__init__.py", "bar" + os.path.sep])
self.check(b, a)
+ def test_already_relative_import(self):
+ s = "from . import bar"
+ self.unchanged(s)
+
def test_comments_and_indent(self):
b = "import bar # Foo"
a = "from . import bar # Foo"
diff --git a/Lib/lib2to3/tests/test_parser.py b/Lib/lib2to3/tests/test_parser.py
index 06f3227..ce39e41 100644
--- a/Lib/lib2to3/tests/test_parser.py
+++ b/Lib/lib2to3/tests/test_parser.py
@@ -6,6 +6,8 @@ parts of the grammar we've changed, we also make sure we can parse the
test_grammar.py files from both Python 2 and Python 3.
"""
+from __future__ import with_statement
+
# Testing imports
from . import support
from .support import driver, test_dir
@@ -149,10 +151,11 @@ class TestParserIdempotency(support.TestCase):
for filepath in support.all_project_files():
with open(filepath, "rb") as fp:
encoding = tokenize.detect_encoding(fp.readline)[0]
- fp.seek(0)
+ self.assertTrue(encoding is not None,
+ "can't detect encoding for %s" % filepath)
+ with open(filepath, "r") as fp:
source = fp.read()
- if encoding:
- source = source.decode(encoding)
+ source = source.decode(encoding)
tree = driver.parse_string(source)
new = str(tree)
if encoding:
@@ -199,10 +202,10 @@ class TestLiterals(GrammarTest):
self.validate(s)
-def diff(fn, result):
- f = open("@", "wb")
+def diff(fn, result, encoding):
+ f = open("@", "w")
try:
- f.write(result)
+ f.write(result.encode(encoding))
finally:
f.close()
try:
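
The idempotency test now decodes each project file with its detected encoding before parsing, and diff() re-encodes the regenerated source when writing it out for comparison. The core roundtrip property it checks is that str() of a parsed tree reproduces the input exactly; a minimal sketch using the stock grammar and convert function (the sample source string is arbitrary):

# Sketch: parse-and-regenerate roundtrip with lib2to3's driver.
from lib2to3 import pygram, pytree
from lib2to3.pgen2 import driver

d = driver.Driver(pygram.python_grammar, convert=pytree.convert)
source = "def f(a, b):\n    return a + b\n"
tree = d.parse_string(source)
assert str(tree) == source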
diff --git a/Lib/lib2to3/tests/test_pytree.py b/Lib/lib2to3/tests/test_pytree.py
index 870970c..d31f67d 100644
--- a/Lib/lib2to3/tests/test_pytree.py
+++ b/Lib/lib2to3/tests/test_pytree.py
@@ -9,6 +9,9 @@ more helpful than printing of (the first line of) the docstring,
especially when debugging a test.
"""
+from __future__ import with_statement
+
+import sys
import warnings
# Testing imports
@@ -28,20 +31,22 @@ class TestNodes(support.TestCase):
"""Unit tests for nodes (Base, Leaf, Node)."""
- def test_deprecated_prefix_methods(self):
- l = pytree.Leaf(100, "foo")
- with warnings.catch_warnings(record=True) as w:
- warnings.simplefilter("always", DeprecationWarning)
- self.assertEqual(l.get_prefix(), "")
- l.set_prefix("hi")
- self.assertEqual(l.prefix, "hi")
- self.assertEqual(len(w), 2)
- for warning in w:
- self.assertTrue(warning.category is DeprecationWarning)
- self.assertEqual(str(w[0].message), "get_prefix() is deprecated; " \
- "use the prefix property")
- self.assertEqual(str(w[1].message), "set_prefix() is deprecated; " \
- "use the prefix property")
+ if sys.version_info >= (2,6):
+ # warnings.catch_warnings is new in 2.6.
+ def test_deprecated_prefix_methods(self):
+ l = pytree.Leaf(100, "foo")
+ with warnings.catch_warnings(record=True) as w:
+ warnings.simplefilter("always", DeprecationWarning)
+ self.assertEqual(l.get_prefix(), "")
+ l.set_prefix("hi")
+ self.assertEqual(l.prefix, "hi")
+ self.assertEqual(len(w), 2)
+ for warning in w:
+ self.assertTrue(warning.category is DeprecationWarning)
+ self.assertEqual(str(w[0].message), "get_prefix() is deprecated; " \
+ "use the prefix property")
+ self.assertEqual(str(w[1].message), "set_prefix() is deprecated; " \
+ "use the prefix property")
def test_instantiate_base(self):
if __debug__:
diff --git a/Lib/lib2to3/tests/test_refactor.py b/Lib/lib2to3/tests/test_refactor.py
index 49fb0c0..35efe25 100644
--- a/Lib/lib2to3/tests/test_refactor.py
+++ b/Lib/lib2to3/tests/test_refactor.py
@@ -2,6 +2,8 @@
Unit tests for refactor.py.
"""
+from __future__ import with_statement
+
import sys
import os
import codecs
@@ -61,42 +63,50 @@ class TestRefactoringTool(unittest.TestCase):
self.assertEqual(full_names,
["myfixes.fix_" + name for name in contents])
- def test_detect_future_print(self):
- run = refactor._detect_future_print
- self.assertFalse(run(""))
- self.assertTrue(run("from __future__ import print_function"))
- self.assertFalse(run("from __future__ import generators"))
- self.assertFalse(run("from __future__ import generators, feature"))
- input = "from __future__ import generators, print_function"
- self.assertTrue(run(input))
- input ="from __future__ import print_function, generators"
- self.assertTrue(run(input))
- input = "from __future__ import (print_function,)"
- self.assertTrue(run(input))
- input = "from __future__ import (generators, print_function)"
- self.assertTrue(run(input))
- input = "from __future__ import (generators, nested_scopes)"
- self.assertFalse(run(input))
- input = """from __future__ import generators
+ def test_detect_future_features(self):
+ run = refactor._detect_future_features
+ fs = frozenset
+ empty = fs()
+ self.assertEqual(run(""), empty)
+ self.assertEqual(run("from __future__ import print_function"),
+ fs(("print_function",)))
+ self.assertEqual(run("from __future__ import generators"),
+ fs(("generators",)))
+ self.assertEqual(run("from __future__ import generators, feature"),
+ fs(("generators", "feature")))
+ inp = "from __future__ import generators, print_function"
+ self.assertEqual(run(inp), fs(("generators", "print_function")))
+ inp ="from __future__ import print_function, generators"
+ self.assertEqual(run(inp), fs(("print_function", "generators")))
+ inp = "from __future__ import (print_function,)"
+ self.assertEqual(run(inp), fs(("print_function",)))
+ inp = "from __future__ import (generators, print_function)"
+ self.assertEqual(run(inp), fs(("generators", "print_function")))
+ inp = "from __future__ import (generators, nested_scopes)"
+ self.assertEqual(run(inp), fs(("generators", "nested_scopes")))
+ inp = """from __future__ import generators
from __future__ import print_function"""
- self.assertTrue(run(input))
- self.assertFalse(run("from"))
- self.assertFalse(run("from 4"))
- self.assertFalse(run("from x"))
- self.assertFalse(run("from x 5"))
- self.assertFalse(run("from x im"))
- self.assertFalse(run("from x import"))
- self.assertFalse(run("from x import 4"))
- input = "'docstring'\nfrom __future__ import print_function"
- self.assertTrue(run(input))
- input = "'docstring'\n'somng'\nfrom __future__ import print_function"
- self.assertFalse(run(input))
- input = "# comment\nfrom __future__ import print_function"
- self.assertTrue(run(input))
- input = "# comment\n'doc'\nfrom __future__ import print_function"
- self.assertTrue(run(input))
- input = "class x: pass\nfrom __future__ import print_function"
- self.assertFalse(run(input))
+ self.assertEqual(run(inp), fs(("generators", "print_function")))
+ invalid = ("from",
+ "from 4",
+ "from x",
+ "from x 5",
+ "from x im",
+ "from x import",
+ "from x import 4",
+ )
+ for inp in invalid:
+ self.assertEqual(run(inp), empty)
+ inp = "'docstring'\nfrom __future__ import print_function"
+ self.assertEqual(run(inp), fs(("print_function",)))
+ inp = "'docstring'\n'somng'\nfrom __future__ import print_function"
+ self.assertEqual(run(inp), empty)
+ inp = "# comment\nfrom __future__ import print_function"
+ self.assertEqual(run(inp), fs(("print_function",)))
+ inp = "# comment\n'doc'\nfrom __future__ import print_function"
+ self.assertEqual(run(inp), fs(("print_function",)))
+ inp = "class x: pass\nfrom __future__ import print_function"
+ self.assertEqual(run(inp), empty)
def test_get_headnode_dict(self):
class NoneFix(fixer_base.BaseFix):