From df6dc8f10770f92db68c69d87abe7c89774d128c Mon Sep 17 00:00:00 2001
From: Benjamin Peterson
Date: Sun, 15 Jun 2008 02:57:40 +0000
Subject: Merged revisions 64286 via svnmerge from
 svn+ssh://pythondev@svn.python.org/python/trunk

................
  r64286 | benjamin.peterson | 2008-06-14 21:31:05 -0500 (Sat, 14 Jun 2008) | 49 lines

  Merged revisions 63661,63666,63695,63711,63729,63769,63790,63880,63886 via svnmerge from
  svn+ssh://pythondev@svn.python.org/sandbox/trunk/2to3/lib2to3

  ........
    r63661 | georg.brandl | 2008-05-26 05:26:20 -0500 (Mon, 26 May 2008) | 2 lines

    Add import fixes for dbm package.
  ........
    r63666 | georg.brandl | 2008-05-26 05:49:09 -0500 (Mon, 26 May 2008) | 2 lines

    Add xmlrpc package fixes.
  ........
    r63695 | georg.brandl | 2008-05-26 10:14:33 -0500 (Mon, 26 May 2008) | 2 lines

    Add fixer entries for http package.
  ........
    r63711 | benjamin.peterson | 2008-05-26 13:43:51 -0500 (Mon, 26 May 2008) | 2 lines

    add import mapping for test.test_support -> test.support
  ........
    r63729 | benjamin.peterson | 2008-05-26 16:31:03 -0500 (Mon, 26 May 2008) | 2 lines

    mapping for commands module -> subprocess
  ........
    r63769 | brett.cannon | 2008-05-29 00:13:13 -0500 (Thu, 29 May 2008) | 1 line

    Fixer for UserString.UserString over to the collections module.
  ........
    r63790 | brett.cannon | 2008-05-29 14:13:51 -0500 (Thu, 29 May 2008) | 4 lines

    Add a fixer for UserList.

    Closes issue #2878. Thanks to Quentin Gallet-Gilles for the patch.
  ........
    r63880 | collin.winter | 2008-06-01 18:09:38 -0500 (Sun, 01 Jun 2008) | 6 lines

    Move lib2to3/fixes/{basefix,util}.py down to lib2to3/.

    This is step 1 of turning lib2to3/ into a general-purpose refactoring
    library, reusable by other projects.
  ........
    r63886 | collin.winter | 2008-06-01 22:15:01 -0500 (Sun, 01 Jun 2008) | 5 lines

    Allow refactoring tools to specify a directory for fixer modules.

    This is step 2 of turning lib2to3/ into a general-purpose refactoring
    library, reusable by other projects. Step 1: r63880.
  ........
................
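
The merged revisions above relocate the fixer base class and the helper utilities from lib2to3/fixes/ up to lib2to3/fixer_base.py and lib2to3/fixer_util.py, then update every fix_*.py module to import them from the new location. For orientation, a minimal fixer written against the relocated modules might look roughly like the sketch below; the FixExampleRename class and the 'spam' -> 'ham' rename are invented for illustration and are not part of this patch.

    # Hypothetical fixer sketch (not part of this patch): rename uses of the
    # name 'spam' to 'ham', using the relocated fixer_base / fixer_util modules.
    from lib2to3 import fixer_base
    from lib2to3.fixer_util import Name


    class FixExampleRename(fixer_base.BaseFix):

        # Match any NAME leaf whose value is 'spam' (the same pattern style
        # that fix_basestring uses in this patch).
        PATTERN = "'spam'"

        def transform(self, node, results):
            # Return a replacement leaf, keeping the original whitespace
            # prefix so surrounding formatting is preserved.
            return Name("ham", prefix=node.get_prefix())

Under the previous layout the same fixer would have lived in lib2to3/fixes/ and used "from . import basefix" and "from .util import Name"; the per-file hunks below apply exactly that mechanical substitution.
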
--- Lib/lib2to3/fixer_base.py | 188 +++++++++++++++ Lib/lib2to3/fixer_util.py | 366 +++++++++++++++++++++++++++++ Lib/lib2to3/fixes/basefix.py | 188 --------------- Lib/lib2to3/fixes/fix_apply.py | 6 +- Lib/lib2to3/fixes/fix_basestring.py | 6 +- Lib/lib2to3/fixes/fix_buffer.py | 6 +- Lib/lib2to3/fixes/fix_callable.py | 6 +- Lib/lib2to3/fixes/fix_dict.py | 12 +- Lib/lib2to3/fixes/fix_except.py | 6 +- Lib/lib2to3/fixes/fix_exec.py | 6 +- Lib/lib2to3/fixes/fix_execfile.py | 6 +- Lib/lib2to3/fixes/fix_filter.py | 6 +- Lib/lib2to3/fixes/fix_funcattrs.py | 6 +- Lib/lib2to3/fixes/fix_future.py | 6 +- Lib/lib2to3/fixes/fix_has_key.py | 6 +- Lib/lib2to3/fixes/fix_idioms.py | 6 +- Lib/lib2to3/fixes/fix_import.py | 6 +- Lib/lib2to3/fixes/fix_imports.py | 123 +++++++++- Lib/lib2to3/fixes/fix_input.py | 6 +- Lib/lib2to3/fixes/fix_intern.py | 6 +- Lib/lib2to3/fixes/fix_itertools.py | 6 +- Lib/lib2to3/fixes/fix_itertools_imports.py | 6 +- Lib/lib2to3/fixes/fix_long.py | 6 +- Lib/lib2to3/fixes/fix_map.py | 6 +- Lib/lib2to3/fixes/fix_methodattrs.py | 6 +- Lib/lib2to3/fixes/fix_ne.py | 4 +- Lib/lib2to3/fixes/fix_next.py | 6 +- Lib/lib2to3/fixes/fix_nonzero.py | 6 +- Lib/lib2to3/fixes/fix_numliterals.py | 6 +- Lib/lib2to3/fixes/fix_print.py | 6 +- Lib/lib2to3/fixes/fix_raise.py | 6 +- Lib/lib2to3/fixes/fix_raw_input.py | 6 +- Lib/lib2to3/fixes/fix_renames.py | 6 +- Lib/lib2to3/fixes/fix_repr.py | 6 +- Lib/lib2to3/fixes/fix_standarderror.py | 6 +- Lib/lib2to3/fixes/fix_throw.py | 6 +- Lib/lib2to3/fixes/fix_tuple_params.py | 6 +- Lib/lib2to3/fixes/fix_types.py | 6 +- Lib/lib2to3/fixes/fix_unicode.py | 4 +- Lib/lib2to3/fixes/fix_ws_comma.py | 4 +- Lib/lib2to3/fixes/fix_xrange.py | 6 +- Lib/lib2to3/fixes/fix_xreadlines.py | 6 +- Lib/lib2to3/fixes/fix_zip.py | 6 +- Lib/lib2to3/fixes/util.py | 366 ----------------------------- Lib/lib2to3/refactor.py | 28 ++- Lib/lib2to3/tests/test_all_fixers.py | 2 +- Lib/lib2to3/tests/test_fixers.py | 12 +- Lib/lib2to3/tests/test_util.py | 20 +- 48 files changed, 823 insertions(+), 704 deletions(-) create mode 100644 Lib/lib2to3/fixer_base.py create mode 100644 Lib/lib2to3/fixer_util.py delete mode 100644 Lib/lib2to3/fixes/basefix.py delete mode 100644 Lib/lib2to3/fixes/util.py diff --git a/Lib/lib2to3/fixer_base.py b/Lib/lib2to3/fixer_base.py new file mode 100644 index 0000000..682b215 --- /dev/null +++ b/Lib/lib2to3/fixer_base.py @@ -0,0 +1,188 @@ +# Copyright 2006 Google, Inc. All Rights Reserved. +# Licensed to PSF under a Contributor Agreement. + +"""Base class for fixers (optional, but recommended).""" + +# Python imports +import logging +import itertools + +# Get a usable 'set' constructor +try: + set +except NameError: + from sets import Set as set + +# Local imports +from .patcomp import PatternCompiler +from . import pygram +from .fixer_util import does_tree_import + +class BaseFix(object): + + """Optional base class for fixers. + + The subclass name must be FixFooBar where FooBar is the result of + removing underscores and capitalizing the words of the fix name. + For example, the class name for a fixer named 'has_key' should be + FixHasKey. 
+ """ + + PATTERN = None # Most subclasses should override with a string literal + pattern = None # Compiled pattern, set by compile_pattern() + options = None # Options object passed to initializer + filename = None # The filename (set by set_filename) + logger = None # A logger (set by set_filename) + numbers = itertools.count(1) # For new_name() + used_names = set() # A set of all used NAMEs + order = "post" # Does the fixer prefer pre- or post-order traversal + explicit = False # Is this ignored by refactor.py -f all? + run_order = 5 # Fixers will be sorted by run order before execution + # Lower numbers will be run first. + + # Shortcut for access to Python grammar symbols + syms = pygram.python_symbols + + def __init__(self, options, log): + """Initializer. Subclass may override. + + Args: + options: an optparse.Values instance which can be used + to inspect the command line options. + log: a list to append warnings and other messages to. + """ + self.options = options + self.log = log + self.compile_pattern() + + def compile_pattern(self): + """Compiles self.PATTERN into self.pattern. + + Subclass may override if it doesn't want to use + self.{pattern,PATTERN} in .match(). + """ + if self.PATTERN is not None: + self.pattern = PatternCompiler().compile_pattern(self.PATTERN) + + def set_filename(self, filename): + """Set the filename, and a logger derived from it. + + The main refactoring tool should call this. + """ + self.filename = filename + self.logger = logging.getLogger(filename) + + def match(self, node): + """Returns match for a given parse tree node. + + Should return a true or false object (not necessarily a bool). + It may return a non-empty dict of matching sub-nodes as + returned by a matching pattern. + + Subclass may override. + """ + results = {"node": node} + return self.pattern.match(node, results) and results + + def transform(self, node, results): + """Returns the transformation for a given parse tree node. + + Args: + node: the root of the parse tree that matched the fixer. + results: a dict mapping symbolic names to part of the match. + + Returns: + None, or a node that is a modified copy of the + argument node. The node argument may also be modified in-place to + effect the same change. + + Subclass *must* override. + """ + raise NotImplementedError() + + def parenthesize(self, node): + """Wrapper around pygram.parenthesize().""" + return pygram.parenthesize(node) + + def new_name(self, template="xxx_todo_changeme"): + """Return a string suitable for use as an identifier + + The new name is guaranteed not to conflict with other identifiers. + """ + name = template + while name in self.used_names: + name = template + str(next(self.numbers)) + self.used_names.add(name) + return name + + def log_message(self, message): + if self.first_log: + self.first_log = False + self.log.append("### In file %s ###" % self.filename) + self.log.append(message) + + def cannot_convert(self, node, reason=None): + """Warn the user that a given chunk of code is not valid Python 3, + but that it cannot be converted automatically. + + First argument is the top-level node for the code in question. + Optional second argument is why it can't be converted. + """ + lineno = node.get_lineno() + for_output = node.clone() + for_output.set_prefix("") + msg = "Line %d: could not convert: %s" + self.log_message(msg % (lineno, for_output)) + if reason: + self.log_message(reason) + + def warning(self, node, reason): + """Used for warning the user about possible uncertainty in the + translation. 
+ + First argument is the top-level node for the code in question. + Optional second argument is why it can't be converted. + """ + lineno = node.get_lineno() + self.log_message("Line %d: %s" % (lineno, reason)) + + def start_tree(self, tree, filename): + """Some fixers need to maintain tree-wide state. + This method is called once, at the start of tree fix-up. + + tree - the root node of the tree to be processed. + filename - the name of the file the tree came from. + """ + self.used_names = tree.used_names + self.set_filename(filename) + self.numbers = itertools.count(1) + self.first_log = True + + def finish_tree(self, tree, filename): + """Some fixers need to maintain tree-wide state. + This method is called once, at the conclusion of tree fix-up. + + tree - the root node of the tree to be processed. + filename - the name of the file the tree came from. + """ + pass + + +class ConditionalFix(BaseFix): + """ Base class for fixers which not execute if an import is found. """ + + # This is the name of the import which, if found, will cause the test to be skipped + skip_on = None + + def start_tree(self, *args): + super(ConditionalFix, self).start_tree(*args) + self._should_skip = None + + def should_skip(self, node): + if self._should_skip is not None: + return self._should_skip + pkg = self.skip_on.split(".") + name = pkg[-1] + pkg = ".".join(pkg[:-1]) + self._should_skip = does_tree_import(pkg, name, node) + return self._should_skip diff --git a/Lib/lib2to3/fixer_util.py b/Lib/lib2to3/fixer_util.py new file mode 100644 index 0000000..4eeb868 --- /dev/null +++ b/Lib/lib2to3/fixer_util.py @@ -0,0 +1,366 @@ +"""Utility functions, node construction macros, etc.""" +# Author: Collin Winter + +# Local imports +from .pgen2 import token +from .pytree import Leaf, Node +from .pygram import python_symbols as syms +from . import patcomp + + +########################################################### +### Common node-construction "macros" +########################################################### + +def KeywordArg(keyword, value): + return Node(syms.argument, + [keyword, Leaf(token.EQUAL, '='), value]) + +def LParen(): + return Leaf(token.LPAR, "(") + +def RParen(): + return Leaf(token.RPAR, ")") + +def Assign(target, source): + """Build an assignment statement""" + if not isinstance(target, list): + target = [target] + if not isinstance(source, list): + source.set_prefix(" ") + source = [source] + + return Node(syms.atom, + target + [Leaf(token.EQUAL, "=", prefix=" ")] + source) + +def Name(name, prefix=None): + """Return a NAME leaf""" + return Leaf(token.NAME, name, prefix=prefix) + +def Attr(obj, attr): + """A node tuple for obj.attr""" + return [obj, Node(syms.trailer, [Dot(), attr])] + +def Comma(): + """A comma leaf""" + return Leaf(token.COMMA, ",") + +def Dot(): + """A period (.) 
leaf""" + return Leaf(token.DOT, ".") + +def ArgList(args, lparen=LParen(), rparen=RParen()): + """A parenthesised argument list, used by Call()""" + return Node(syms.trailer, + [lparen.clone(), + Node(syms.arglist, args), + rparen.clone()]) + +def Call(func_name, args, prefix=None): + """A function call""" + node = Node(syms.power, [func_name, ArgList(args)]) + if prefix is not None: + node.set_prefix(prefix) + return node + +def Newline(): + """A newline literal""" + return Leaf(token.NEWLINE, "\n") + +def BlankLine(): + """A blank line""" + return Leaf(token.NEWLINE, "") + +def Number(n, prefix=None): + return Leaf(token.NUMBER, n, prefix=prefix) + +def Subscript(index_node): + """A numeric or string subscript""" + return Node(syms.trailer, [Leaf(token.LBRACE, '['), + index_node, + Leaf(token.RBRACE, ']')]) + +def String(string, prefix=None): + """A string leaf""" + return Leaf(token.STRING, string, prefix=prefix) + +def ListComp(xp, fp, it, test=None): + """A list comprehension of the form [xp for fp in it if test]. + + If test is None, the "if test" part is omitted. + """ + xp.set_prefix("") + fp.set_prefix(" ") + it.set_prefix(" ") + for_leaf = Leaf(token.NAME, "for") + for_leaf.set_prefix(" ") + in_leaf = Leaf(token.NAME, "in") + in_leaf.set_prefix(" ") + inner_args = [for_leaf, fp, in_leaf, it] + if test: + test.set_prefix(" ") + if_leaf = Leaf(token.NAME, "if") + if_leaf.set_prefix(" ") + inner_args.append(Node(syms.comp_if, [if_leaf, test])) + inner = Node(syms.listmaker, [xp, Node(syms.comp_for, inner_args)]) + return Node(syms.atom, + [Leaf(token.LBRACE, "["), + inner, + Leaf(token.RBRACE, "]")]) + +def FromImport(package_name, name_leafs): + """ Return an import statement in the form: + from package import name_leafs""" + # XXX: May not handle dotted imports properly (eg, package_name='foo.bar') + assert package_name == '.' or '.' not in package.name, "FromImport has "\ + "not been tested with dotted package names -- use at your own "\ + "peril!" + + for leaf in name_leafs: + # Pull the leaves out of their old tree + leaf.remove() + + children = [Leaf(token.NAME, 'from'), + Leaf(token.NAME, package_name, prefix=" "), + Leaf(token.NAME, 'import', prefix=" "), + Node(syms.import_as_names, name_leafs)] + imp = Node(syms.import_from, children) + return imp + + +########################################################### +### Determine whether a node represents a given literal +########################################################### + +def is_tuple(node): + """Does the node represent a tuple literal?""" + if isinstance(node, Node) and node.children == [LParen(), RParen()]: + return True + return (isinstance(node, Node) + and len(node.children) == 3 + and isinstance(node.children[0], Leaf) + and isinstance(node.children[1], Node) + and isinstance(node.children[2], Leaf) + and node.children[0].value == "(" + and node.children[2].value == ")") + +def is_list(node): + """Does the node represent a list literal?""" + return (isinstance(node, Node) + and len(node.children) > 1 + and isinstance(node.children[0], Leaf) + and isinstance(node.children[-1], Leaf) + and node.children[0].value == "[" + and node.children[-1].value == "]") + +########################################################### +### Common portability code. This allows fixers to do, eg, +### "from .util import set" and forget about it. 
+########################################################### + +try: + any = any +except NameError: + def any(l): + for o in l: + if o: + return True + return False + +try: + set = set +except NameError: + from sets import Set as set + +try: + reversed = reversed +except NameError: + def reversed(l): + return l[::-1] + +########################################################### +### Misc +########################################################### + + +consuming_calls = set(["sorted", "list", "set", "any", "all", "tuple", "sum", + "min", "max"]) + +def attr_chain(obj, attr): + """Follow an attribute chain. + + If you have a chain of objects where a.foo -> b, b.foo-> c, etc, + use this to iterate over all objects in the chain. Iteration is + terminated by getattr(x, attr) is None. + + Args: + obj: the starting object + attr: the name of the chaining attribute + + Yields: + Each successive object in the chain. + """ + next = getattr(obj, attr) + while next: + yield next + next = getattr(next, attr) + +p0 = """for_stmt< 'for' any 'in' node=any ':' any* > + | comp_for< 'for' any 'in' node=any any* > + """ +p1 = """ +power< + ( 'iter' | 'list' | 'tuple' | 'sorted' | 'set' | 'sum' | + 'any' | 'all' | (any* trailer< '.' 'join' >) ) + trailer< '(' node=any ')' > + any* +> +""" +p2 = """ +power< + 'sorted' + trailer< '(' arglist ')' > + any* +> +""" +pats_built = False +def in_special_context(node): + """ Returns true if node is in an environment where all that is required + of it is being itterable (ie, it doesn't matter if it returns a list + or an itterator). + See test_map_nochange in test_fixers.py for some examples and tests. + """ + global p0, p1, p2, pats_built + if not pats_built: + p1 = patcomp.compile_pattern(p1) + p0 = patcomp.compile_pattern(p0) + p2 = patcomp.compile_pattern(p2) + pats_built = True + patterns = [p0, p1, p2] + for pattern, parent in zip(patterns, attr_chain(node, "parent")): + results = {} + if pattern.match(parent, results) and results["node"] is node: + return True + return False + +########################################################### +### The following functions are to find bindings in a suite +########################################################### + +def make_suite(node): + if node.type == syms.suite: + return node + node = node.clone() + parent, node.parent = node.parent, None + suite = Node(syms.suite, [node]) + suite.parent = parent + return suite + +def does_tree_import(package, name, node): + """ Returns true if name is imported from package at the + top level of the tree which node belongs to. + To cover the case of an import like 'import foo', use + Null for the package and 'foo' for the name. """ + # Scamper up to the top level namespace + while node.type != syms.file_input: + assert node.parent, "Tree is insane! root found before "\ + "file_input node was found." + node = node.parent + + binding = find_binding(name, node, package) + return bool(binding) + +_def_syms = set([syms.classdef, syms.funcdef]) +def find_binding(name, node, package=None): + """ Returns the node which binds variable name, otherwise None. + If optional argument package is supplied, only imports will + be returned. 
+ See test cases for examples.""" + for child in node.children: + ret = None + if child.type == syms.for_stmt: + if _find(name, child.children[1]): + return child + n = find_binding(name, make_suite(child.children[-1]), package) + if n: ret = n + elif child.type in (syms.if_stmt, syms.while_stmt): + n = find_binding(name, make_suite(child.children[-1]), package) + if n: ret = n + elif child.type == syms.try_stmt: + n = find_binding(name, make_suite(child.children[2]), package) + if n: + ret = n + else: + for i, kid in enumerate(child.children[3:]): + if kid.type == token.COLON and kid.value == ":": + # i+3 is the colon, i+4 is the suite + n = find_binding(name, make_suite(child.children[i+4]), package) + if n: ret = n + elif child.type in _def_syms and child.children[1].value == name: + ret = child + elif _is_import_binding(child, name, package): + ret = child + elif child.type == syms.simple_stmt: + ret = find_binding(name, child, package) + elif child.type == syms.expr_stmt: + if _find(name, child.children[0]): + ret = child + + if ret: + if not package: + return ret + if ret.type in (syms.import_name, syms.import_from): + return ret + return None + +_block_syms = set([syms.funcdef, syms.classdef, syms.trailer]) +def _find(name, node): + nodes = [node] + while nodes: + node = nodes.pop() + if node.type > 256 and node.type not in _block_syms: + nodes.extend(node.children) + elif node.type == token.NAME and node.value == name: + return node + return None + +def _is_import_binding(node, name, package=None): + """ Will reuturn node if node will import name, or node + will import * from package. None is returned otherwise. + See test cases for examples. """ + + if node.type == syms.import_name and not package: + imp = node.children[1] + if imp.type == syms.dotted_as_names: + for child in imp.children: + if child.type == syms.dotted_as_name: + if child.children[2].value == name: + return node + elif child.type == token.NAME and child.value == name: + return node + elif imp.type == syms.dotted_as_name: + last = imp.children[-1] + if last.type == token.NAME and last.value == name: + return node + elif imp.type == token.NAME and imp.value == name: + return node + elif node.type == syms.import_from: + # str(...) is used to make life easier here, because + # from a.b import parses to ['import', ['a', '.', 'b'], ...] + if package and str(node.children[1]).strip() != package: + return None + n = node.children[3] + if package and _find('as', n): + # See test_from_import_as for explanation + return None + elif n.type == syms.import_as_names and _find(name, n): + return node + elif n.type == syms.import_as_name: + child = n.children[2] + if child.type == token.NAME and child.value == name: + return node + elif n.type == token.NAME and n.value == name: + return node + elif package and n.type == token.STAR: + return node + return None diff --git a/Lib/lib2to3/fixes/basefix.py b/Lib/lib2to3/fixes/basefix.py deleted file mode 100644 index 937f8c1..0000000 --- a/Lib/lib2to3/fixes/basefix.py +++ /dev/null @@ -1,188 +0,0 @@ -# Copyright 2006 Google, Inc. All Rights Reserved. -# Licensed to PSF under a Contributor Agreement. - -"""Base class for fixers (optional, but recommended).""" - -# Python imports -import logging -import itertools - -# Get a usable 'set' constructor -try: - set -except NameError: - from sets import Set as set - -# Local imports -from ..patcomp import PatternCompiler -from .. import pygram -from .util import does_tree_import - -class BaseFix(object): - - """Optional base class for fixers. 
- - The subclass name must be FixFooBar where FooBar is the result of - removing underscores and capitalizing the words of the fix name. - For example, the class name for a fixer named 'has_key' should be - FixHasKey. - """ - - PATTERN = None # Most subclasses should override with a string literal - pattern = None # Compiled pattern, set by compile_pattern() - options = None # Options object passed to initializer - filename = None # The filename (set by set_filename) - logger = None # A logger (set by set_filename) - numbers = itertools.count(1) # For new_name() - used_names = set() # A set of all used NAMEs - order = "post" # Does the fixer prefer pre- or post-order traversal - explicit = False # Is this ignored by refactor.py -f all? - run_order = 5 # Fixers will be sorted by run order before execution - # Lower numbers will be run first. - - # Shortcut for access to Python grammar symbols - syms = pygram.python_symbols - - def __init__(self, options, log): - """Initializer. Subclass may override. - - Args: - options: an optparse.Values instance which can be used - to inspect the command line options. - log: a list to append warnings and other messages to. - """ - self.options = options - self.log = log - self.compile_pattern() - - def compile_pattern(self): - """Compiles self.PATTERN into self.pattern. - - Subclass may override if it doesn't want to use - self.{pattern,PATTERN} in .match(). - """ - if self.PATTERN is not None: - self.pattern = PatternCompiler().compile_pattern(self.PATTERN) - - def set_filename(self, filename): - """Set the filename, and a logger derived from it. - - The main refactoring tool should call this. - """ - self.filename = filename - self.logger = logging.getLogger(filename) - - def match(self, node): - """Returns match for a given parse tree node. - - Should return a true or false object (not necessarily a bool). - It may return a non-empty dict of matching sub-nodes as - returned by a matching pattern. - - Subclass may override. - """ - results = {"node": node} - return self.pattern.match(node, results) and results - - def transform(self, node, results): - """Returns the transformation for a given parse tree node. - - Args: - node: the root of the parse tree that matched the fixer. - results: a dict mapping symbolic names to part of the match. - - Returns: - None, or a node that is a modified copy of the - argument node. The node argument may also be modified in-place to - effect the same change. - - Subclass *must* override. - """ - raise NotImplementedError() - - def parenthesize(self, node): - """Wrapper around pygram.parenthesize().""" - return pygram.parenthesize(node) - - def new_name(self, template="xxx_todo_changeme"): - """Return a string suitable for use as an identifier - - The new name is guaranteed not to conflict with other identifiers. - """ - name = template - while name in self.used_names: - name = template + str(next(self.numbers)) - self.used_names.add(name) - return name - - def log_message(self, message): - if self.first_log: - self.first_log = False - self.log.append("### In file %s ###" % self.filename) - self.log.append(message) - - def cannot_convert(self, node, reason=None): - """Warn the user that a given chunk of code is not valid Python 3, - but that it cannot be converted automatically. - - First argument is the top-level node for the code in question. - Optional second argument is why it can't be converted. 
- """ - lineno = node.get_lineno() - for_output = node.clone() - for_output.set_prefix("") - msg = "Line %d: could not convert: %s" - self.log_message(msg % (lineno, for_output)) - if reason: - self.log_message(reason) - - def warning(self, node, reason): - """Used for warning the user about possible uncertainty in the - translation. - - First argument is the top-level node for the code in question. - Optional second argument is why it can't be converted. - """ - lineno = node.get_lineno() - self.log_message("Line %d: %s" % (lineno, reason)) - - def start_tree(self, tree, filename): - """Some fixers need to maintain tree-wide state. - This method is called once, at the start of tree fix-up. - - tree - the root node of the tree to be processed. - filename - the name of the file the tree came from. - """ - self.used_names = tree.used_names - self.set_filename(filename) - self.numbers = itertools.count(1) - self.first_log = True - - def finish_tree(self, tree, filename): - """Some fixers need to maintain tree-wide state. - This method is called once, at the conclusion of tree fix-up. - - tree - the root node of the tree to be processed. - filename - the name of the file the tree came from. - """ - pass - - -class ConditionalFix(BaseFix): - """ Base class for fixers which not execute if an import is found. """ - - # This is the name of the import which, if found, will cause the test to be skipped - skip_on = None - - def start_tree(self, *args): - super(ConditionalFix, self).start_tree(*args) - self._should_skip = None - - def should_skip(self, node): - if self._should_skip is not None: - return self._should_skip - pkg = self.skip_on.split(".") - name = pkg[-1] - pkg = ".".join(pkg[:-1]) - self._should_skip = does_tree_import(pkg, name, node) - return self._should_skip diff --git a/Lib/lib2to3/fixes/fix_apply.py b/Lib/lib2to3/fixes/fix_apply.py index f233224..faede68 100644 --- a/Lib/lib2to3/fixes/fix_apply.py +++ b/Lib/lib2to3/fixes/fix_apply.py @@ -8,10 +8,10 @@ This converts apply(func, v, k) into (func)(*v, **k).""" # Local imports from .. import pytree from ..pgen2 import token -from . import basefix -from .util import Call, Comma +from .. import fixer_base +from ..fixer_util import Call, Comma -class FixApply(basefix.BaseFix): +class FixApply(fixer_base.BaseFix): PATTERN = """ power< 'apply' diff --git a/Lib/lib2to3/fixes/fix_basestring.py b/Lib/lib2to3/fixes/fix_basestring.py index 6d753d8..5d84cc7 100644 --- a/Lib/lib2to3/fixes/fix_basestring.py +++ b/Lib/lib2to3/fixes/fix_basestring.py @@ -2,10 +2,10 @@ # Author: Christian Heimes # Local imports -from . import basefix -from .util import Name +from .. import fixer_base +from ..fixer_util import Name -class FixBasestring(basefix.BaseFix): +class FixBasestring(fixer_base.BaseFix): PATTERN = "'basestring'" diff --git a/Lib/lib2to3/fixes/fix_buffer.py b/Lib/lib2to3/fixes/fix_buffer.py index 13168d6..2f6822b 100644 --- a/Lib/lib2to3/fixes/fix_buffer.py +++ b/Lib/lib2to3/fixes/fix_buffer.py @@ -4,11 +4,11 @@ """Fixer that changes buffer(...) into memoryview(...).""" # Local imports -from . import basefix -from .util import Name +from .. 
import fixer_base +from ..fixer_util import Name -class FixBuffer(basefix.BaseFix): +class FixBuffer(fixer_base.BaseFix): explicit = True # The user must ask for this fixer diff --git a/Lib/lib2to3/fixes/fix_callable.py b/Lib/lib2to3/fixes/fix_callable.py index 90b3515..de93792 100644 --- a/Lib/lib2to3/fixes/fix_callable.py +++ b/Lib/lib2to3/fixes/fix_callable.py @@ -7,10 +7,10 @@ This converts callable(obj) into hasattr(obj, '__call__').""" # Local imports from .. import pytree -from . import basefix -from .util import Call, Name, String +from .. import fixer_base +from ..fixer_util import Call, Name, String -class FixCallable(basefix.BaseFix): +class FixCallable(fixer_base.BaseFix): # Ignore callable(*args) or use of keywords. # Either could be a hint that the builtin callable() is not being used. diff --git a/Lib/lib2to3/fixes/fix_dict.py b/Lib/lib2to3/fixes/fix_dict.py index c14a819..485a08c 100644 --- a/Lib/lib2to3/fixes/fix_dict.py +++ b/Lib/lib2to3/fixes/fix_dict.py @@ -27,15 +27,15 @@ as an argument to a function that introspects the argument). from .. import pytree from .. import patcomp from ..pgen2 import token -from . import basefix -from .util import Name, Call, LParen, RParen, ArgList, Dot, set -from . import util +from .. import fixer_base +from ..fixer_util import Name, Call, LParen, RParen, ArgList, Dot, set +from .. import fixer_util -iter_exempt = util.consuming_calls | set(["iter"]) +iter_exempt = fixer_util.consuming_calls | set(["iter"]) -class FixDict(basefix.BaseFix): +class FixDict(fixer_base.BaseFix): PATTERN = """ power< head=any+ trailer< '.' method=('keys'|'items'|'values'| @@ -92,7 +92,7 @@ class FixDict(basefix.BaseFix): return results["func"].value in iter_exempt else: # list(d.keys()) -> list(d.keys()), etc. - return results["func"].value in util.consuming_calls + return results["func"].value in fixer_util.consuming_calls if not isiter: return False # for ... in d.iterkeys() -> for ... in d.keys(), etc. diff --git a/Lib/lib2to3/fixes/fix_except.py b/Lib/lib2to3/fixes/fix_except.py index 5d6d153..8387913 100644 --- a/Lib/lib2to3/fixes/fix_except.py +++ b/Lib/lib2to3/fixes/fix_except.py @@ -24,8 +24,8 @@ The following cases will be converted: # Local imports from .. import pytree from ..pgen2 import token -from . import basefix -from .util import Assign, Attr, Name, is_tuple, is_list, reversed +from .. import fixer_base +from ..fixer_util import Assign, Attr, Name, is_tuple, is_list, reversed def find_excepts(nodes): for i, n in enumerate(nodes): @@ -33,7 +33,7 @@ def find_excepts(nodes): if n.children[0].value == 'except': yield (n, nodes[i+2]) -class FixExcept(basefix.BaseFix): +class FixExcept(fixer_base.BaseFix): PATTERN = """ try_stmt< 'try' ':' suite diff --git a/Lib/lib2to3/fixes/fix_exec.py b/Lib/lib2to3/fixes/fix_exec.py index 2e45cb6..9b47aec 100644 --- a/Lib/lib2to3/fixes/fix_exec.py +++ b/Lib/lib2to3/fixes/fix_exec.py @@ -11,11 +11,11 @@ exec code in ns1, ns2 -> exec(code, ns1, ns2) # Local imports from .. import pytree -from . import basefix -from .util import Comma, Name, Call +from .. import fixer_base +from ..fixer_util import Comma, Name, Call -class FixExec(basefix.BaseFix): +class FixExec(fixer_base.BaseFix): PATTERN = """ exec_stmt< 'exec' a=any 'in' b=any [',' c=any] > diff --git a/Lib/lib2to3/fixes/fix_execfile.py b/Lib/lib2to3/fixes/fix_execfile.py index 0e67f09..5854900 100644 --- a/Lib/lib2to3/fixes/fix_execfile.py +++ b/Lib/lib2to3/fixes/fix_execfile.py @@ -8,11 +8,11 @@ exec() function. """ from .. import pytree -from . 
import basefix -from .util import Comma, Name, Call, LParen, RParen, Dot +from .. import fixer_base +from ..fixer_util import Comma, Name, Call, LParen, RParen, Dot -class FixExecfile(basefix.BaseFix): +class FixExecfile(fixer_base.BaseFix): PATTERN = """ power< 'execfile' trailer< '(' arglist< filename=any [',' globals=any [',' locals=any ] ] > ')' > > diff --git a/Lib/lib2to3/fixes/fix_filter.py b/Lib/lib2to3/fixes/fix_filter.py index 08d3495..51fd02a 100644 --- a/Lib/lib2to3/fixes/fix_filter.py +++ b/Lib/lib2to3/fixes/fix_filter.py @@ -15,10 +15,10 @@ Python 2.6 figure it out. # Local imports from ..pgen2 import token -from . import basefix -from .util import Name, Call, ListComp, in_special_context +from .. import fixer_base +from ..fixer_util import Name, Call, ListComp, in_special_context -class FixFilter(basefix.ConditionalFix): +class FixFilter(fixer_base.ConditionalFix): PATTERN = """ filter_lambda=power< diff --git a/Lib/lib2to3/fixes/fix_funcattrs.py b/Lib/lib2to3/fixes/fix_funcattrs.py index a9ba125..4234993 100644 --- a/Lib/lib2to3/fixes/fix_funcattrs.py +++ b/Lib/lib2to3/fixes/fix_funcattrs.py @@ -2,11 +2,11 @@ # Author: Collin Winter # Local imports -from . import basefix -from .util import Name +from .. import fixer_base +from ..fixer_util import Name -class FixFuncattrs(basefix.BaseFix): +class FixFuncattrs(fixer_base.BaseFix): PATTERN = """ power< any+ trailer< '.' attr=('func_closure' | 'func_doc' | 'func_globals' | 'func_name' | 'func_defaults' | 'func_code' diff --git a/Lib/lib2to3/fixes/fix_future.py b/Lib/lib2to3/fixes/fix_future.py index 62ba6f6..a032e88 100644 --- a/Lib/lib2to3/fixes/fix_future.py +++ b/Lib/lib2to3/fixes/fix_future.py @@ -5,10 +5,10 @@ from __future__ import foo is replaced with an empty line. # Author: Christian Heimes # Local imports -from . import basefix -from .util import BlankLine +from .. import fixer_base +from ..fixer_util import BlankLine -class FixFuture(basefix.BaseFix): +class FixFuture(fixer_base.BaseFix): PATTERN = """import_from< 'from' module_name="__future__" 'import' any >""" # This should be run last -- some things check for the import diff --git a/Lib/lib2to3/fixes/fix_has_key.py b/Lib/lib2to3/fixes/fix_has_key.py index 77684fb..fb7b07b 100644 --- a/Lib/lib2to3/fixes/fix_has_key.py +++ b/Lib/lib2to3/fixes/fix_has_key.py @@ -32,11 +32,11 @@ CAVEATS: # Local imports from .. import pytree from ..pgen2 import token -from . import basefix -from .util import Name +from .. import fixer_base +from ..fixer_util import Name -class FixHasKey(basefix.BaseFix): +class FixHasKey(fixer_base.BaseFix): PATTERN = """ anchor=power< diff --git a/Lib/lib2to3/fixes/fix_idioms.py b/Lib/lib2to3/fixes/fix_idioms.py index 2ca2a83..8bc6397 100644 --- a/Lib/lib2to3/fixes/fix_idioms.py +++ b/Lib/lib2to3/fixes/fix_idioms.py @@ -28,13 +28,13 @@ into # Author: Jacques Frechet, Collin Winter # Local imports -from . import basefix -from .util import Call, Comma, Name, Node, syms +from .. import fixer_base +from ..fixer_util import Call, Comma, Name, Node, syms CMP = "(n='!=' | '==' | 'is' | n=comp_op< 'is' 'not' >)" TYPE = "power< 'type' trailer< '(' x=any ')' > >" -class FixIdioms(basefix.BaseFix): +class FixIdioms(fixer_base.BaseFix): explicit = True # The user must ask for this fixer diff --git a/Lib/lib2to3/fixes/fix_import.py b/Lib/lib2to3/fixes/fix_import.py index c188fce..64397e4 100644 --- a/Lib/lib2to3/fixes/fix_import.py +++ b/Lib/lib2to3/fixes/fix_import.py @@ -11,11 +11,11 @@ Becomes: """ # Local imports -from . import basefix +from .. 
import fixer_base from os.path import dirname, join, exists, pathsep -from .util import FromImport +from ..fixer_util import FromImport -class FixImport(basefix.BaseFix): +class FixImport(fixer_base.BaseFix): PATTERN = """ import_from< type='from' imp=any 'import' any > diff --git a/Lib/lib2to3/fixes/fix_imports.py b/Lib/lib2to3/fixes/fix_imports.py index 11512e1..03a27bc 100644 --- a/Lib/lib2to3/fixes/fix_imports.py +++ b/Lib/lib2to3/fixes/fix_imports.py @@ -8,8 +8,8 @@ Fixes: # Author: Collin Winter # Local imports -from . import basefix -from .util import Name, attr_chain, any, set +from .. import fixer_base +from ..fixer_util import Name, attr_chain, any, set import builtins builtin_names = [name for name in dir(builtins) if name not in ("__name__", "__doc__")] @@ -150,6 +150,123 @@ MAPPING = {"StringIO": ("io", ["StringIO"]), 'error', 'exit', 'exit_thread', 'get_ident', 'interrupt_main', 'stack_size', 'start_new', 'start_new_thread']), + 'whichdb': ('dbm', ['whichdb']), + 'anydbm': ('dbm', ['error', 'open']), + 'dbhash': ('dbm.bsd', ['error', 'open']), + 'dumbdbm': ('dbm.dumb', ['error', 'open', '_Database']), + 'dbm': ('dbm.ndbm', ['error', 'open', 'library']), + 'gdbm': ('dbm.gnu', ['error', 'open', 'open_flags']), + 'xmlrpclib': ('xmlrpc.client', + ['Error', 'ProtocolError', 'ResponseError', 'Fault', + 'ServerProxy', 'Boolean', 'DateTime', 'Binary', + 'ExpatParser', 'FastMarshaller', 'FastParser', + 'FastUnmarshaller', 'MultiCall', 'MultiCallIterator', + 'SlowParser', 'Marshaller', 'Unmarshaller', 'Server', + 'Transport', 'SafeTransport', 'SgmlopParser', + 'boolean', 'getparser', 'dumps', 'loads', 'escape', + 'PARSE_ERROR', 'SERVER_ERROR', 'WRAPPERS', + 'APPLICATION_ERROR', 'SYSTEM_ERROR', + 'TRANSPORT_ERROR', 'NOT_WELLFORMED_ERROR', + 'UNSUPPORTED_ENCODING', 'INVALID_ENCODING_CHAR', + 'INVALID_XMLRPC', 'METHOD_NOT_FOUND', + 'INVALID_METHOD_PARAMS', 'INTERNAL_ERROR', + 'MININT', 'MAXINT']), + 'DocXMLRPCServer': ('xmlrpc.server', + ['CGIXMLRPCRequestHandler', + 'DocCGIXMLRPCRequestHandler', + 'DocXMLRPCRequestHandler', 'DocXMLRPCServer', + 'ServerHTMLDoc', 'SimpleXMLRPCRequestHandler', + 'SimpleXMLRPCServer', 'XMLRPCDocGenerator', + 'resolve_dotted_attribute']), + 'SimpleXMLRPCServer': ('xmlrpc.server', + ['CGIXMLRPCRequestHandler', + 'Fault', 'SimpleXMLRPCDispatcher', + 'SimpleXMLRPCRequestHandler', + 'SimpleXMLRPCServer', 'SocketServer', + 'list_public_methods', + 'remove_duplicates', + 'resolve_dotted_attribute']), + 'httplib': ('http.client', + ['ACCEPTED', 'BAD_GATEWAY', 'BAD_REQUEST', + 'BadStatusLine', 'CONFLICT', 'CONTINUE', 'CREATED', + 'CannotSendHeader', 'CannotSendRequest', + 'EXPECTATION_FAILED', 'FAILED_DEPENDENCY', 'FORBIDDEN', + 'FOUND', 'FakeSocket', 'GATEWAY_TIMEOUT', 'GONE', + 'HTTP', 'HTTPConnection', 'HTTPException', + 'HTTPMessage', 'HTTPResponse', 'HTTPS', + 'HTTPSConnection', 'HTTPS_PORT', 'HTTP_PORT', + 'HTTP_VERSION_NOT_SUPPORTED', 'IM_USED', + 'INSUFFICIENT_STORAGE', 'INTERNAL_SERVER_ERROR', + 'ImproperConnectionState', 'IncompleteRead', + 'InvalidURL', 'LENGTH_REQUIRED', 'LOCKED', + 'LineAndFileWrapper', 'MAXAMOUNT', 'METHOD_NOT_ALLOWED', + 'MOVED_PERMANENTLY', 'MULTIPLE_CHOICES', 'MULTI_STATUS', + 'NON_AUTHORITATIVE_INFORMATION', 'NOT_ACCEPTABLE', + 'NOT_EXTENDED', 'NOT_FOUND', 'NOT_IMPLEMENTED', + 'NOT_MODIFIED', 'NO_CONTENT', 'NotConnected', 'OK', + 'PARTIAL_CONTENT', 'PAYMENT_REQUIRED', + 'PRECONDITION_FAILED', 'PROCESSING', + 'PROXY_AUTHENTICATION_REQUIRED', + 'REQUESTED_RANGE_NOT_SATISFIABLE', + 'REQUEST_ENTITY_TOO_LARGE', 'REQUEST_TIMEOUT', + 
'REQUEST_URI_TOO_LONG', 'RESET_CONTENT', + 'ResponseNotReady', 'SEE_OTHER', 'SERVICE_UNAVAILABLE', + 'SSLFile', 'SWITCHING_PROTOCOLS', 'SharedSocket', + 'SharedSocketClient', 'StringIO', 'TEMPORARY_REDIRECT', + 'UNAUTHORIZED', 'UNPROCESSABLE_ENTITY', + 'UNSUPPORTED_MEDIA_TYPE', 'UPGRADE_REQUIRED', + 'USE_PROXY', 'UnimplementedFileMode', 'UnknownProtocol', + 'UnknownTransferEncoding', 'error', 'responses']), + 'Cookie': ('http.cookies', + ['BaseCookie', 'Cookie', 'CookieError', 'Morsel', + 'SerialCookie', 'SimpleCookie', 'SmartCookie']), + 'cookielib': ('http.cookiejar', + ['Absent', 'Cookie', 'CookieJar', 'CookiePolicy', + 'DAYS', 'DEFAULT_HTTP_PORT', 'DefaultCookiePolicy', + 'EPOCH_YEAR', 'ESCAPED_CHAR_RE', 'FileCookieJar', + 'HEADER_ESCAPE_RE', 'HEADER_JOIN_ESCAPE_RE', + 'HEADER_QUOTED_VALUE_RE', 'HEADER_TOKEN_RE', + 'HEADER_VALUE_RE', 'HTTP_PATH_SAFE', 'IPV4_RE', + 'ISO_DATE_RE', 'LOOSE_HTTP_DATE_RE', 'LWPCookieJar', + 'LoadError', 'MISSING_FILENAME_TEXT', 'MONTHS', + 'MONTHS_LOWER', 'MozillaCookieJar', 'STRICT_DATE_RE', + 'TIMEZONE_RE', 'UTC_ZONES', 'WEEKDAY_RE', + 'cut_port_re', 'deepvalues', 'domain_match', + 'eff_request_host', 'escape_path', 'http2time', + 'is_HDN', 'is_third_party', 'iso2time', + 'join_header_words', 'liberal_is_HDN', 'logger', + 'lwp_cookie_str', 'month', 'offset_from_tz_string', + 'parse_ns_headers', 'reach', 'request_host', + 'request_path', 'request_port', 'split_header_words', + 'time', 'time2isoz', 'time2netscape', 'unmatched', + 'uppercase_escaped_char', 'urllib', + 'user_domain_match', 'vals_sorted_by_key']), + 'BaseHTTPServer': ('http.server', + ['BaseHTTPRequestHandler', + 'DEFAULT_ERROR_MESSAGE', 'HTTPServer']), + 'SimpleHTTPServer': ('http.server', ['SimpleHTTPRequestHandler']), + 'CGIHTTPServer': ('http.server', + ['CGIHTTPRequestHandler', 'executable', + 'nobody_uid', 'nobody']), + 'test.test_support': ('test.support', + ["Error", "TestFailed", "TestSkipped", "ResourceDenied", + "import_module", "verbose", "use_resources", + "max_memuse", "record_original_stdout", + "get_original_stdout", "unload", "unlink", "rmtree", + "forget", "is_resource_enabled", "requires", + "find_unused_port", "bind_port", + "fcmp", "is_jython", "TESTFN", "HOST", + "FUZZ", "findfile", "verify", "vereq", "sortdict", + "check_syntax_error", "open_urlresource", "WarningMessage", + "catch_warning", "CleanImport", "EnvironmentVarGuard", + "TransientResource", "captured_output", "captured_stdout", + "TransientResource", "transient_internet", "run_with_locale", + "set_memlimit", "bigmemtest", "bigaddrspacetest", + "BasicTestRunner", "run_unittest", "run_doctest", + "threading_setup", "threading_cleanup", "reap_children"]), + 'commands': ('subprocess', ['getstatusoutput', 'getoutput']), + 'UserString' : ('collections', ['UserString']), + 'UserList' : ('collections', ['UserList']), } @@ -180,7 +297,7 @@ def build_pattern(): yield """bare_name=%s""" % alternates(bare) -class FixImports(basefix.BaseFix): +class FixImports(fixer_base.BaseFix): PATTERN = "|".join(build_pattern()) order = "pre" # Pre-order tree traversal diff --git a/Lib/lib2to3/fixes/fix_input.py b/Lib/lib2to3/fixes/fix_input.py index 5b88f3a..e0264cf 100644 --- a/Lib/lib2to3/fixes/fix_input.py +++ b/Lib/lib2to3/fixes/fix_input.py @@ -2,15 +2,15 @@ # Author: Andre Roberge # Local imports -from . import basefix -from .util import Call, Name +from .. import fixer_base +from ..fixer_util import Call, Name from .. 
import patcomp context = patcomp.compile_pattern("power< 'eval' trailer< '(' any ')' > >") -class FixInput(basefix.BaseFix): +class FixInput(fixer_base.BaseFix): PATTERN = """ power< 'input' args=trailer< '(' [any] ')' > > diff --git a/Lib/lib2to3/fixes/fix_intern.py b/Lib/lib2to3/fixes/fix_intern.py index 6d33f8c..921ba59 100644 --- a/Lib/lib2to3/fixes/fix_intern.py +++ b/Lib/lib2to3/fixes/fix_intern.py @@ -7,11 +7,11 @@ intern(s) -> sys.intern(s)""" # Local imports from .. import pytree -from . import basefix -from .util import Name, Attr +from .. import fixer_base +from ..fixer_util import Name, Attr -class FixIntern(basefix.BaseFix): +class FixIntern(fixer_base.BaseFix): PATTERN = """ power< 'intern' diff --git a/Lib/lib2to3/fixes/fix_itertools.py b/Lib/lib2to3/fixes/fix_itertools.py index ba10f26..86d6b46 100644 --- a/Lib/lib2to3/fixes/fix_itertools.py +++ b/Lib/lib2to3/fixes/fix_itertools.py @@ -8,10 +8,10 @@ """ # Local imports -from . import basefix -from .util import Name +from .. import fixer_base +from ..fixer_util import Name -class FixItertools(basefix.BaseFix): +class FixItertools(fixer_base.BaseFix): it_funcs = "('imap'|'ifilter'|'izip'|'ifilterfalse')" PATTERN = """ power< it='itertools' diff --git a/Lib/lib2to3/fixes/fix_itertools_imports.py b/Lib/lib2to3/fixes/fix_itertools_imports.py index e60ec20..ede8ad9 100644 --- a/Lib/lib2to3/fixes/fix_itertools_imports.py +++ b/Lib/lib2to3/fixes/fix_itertools_imports.py @@ -1,10 +1,10 @@ """ Fixer for imports of itertools.(imap|ifilter|izip|ifilterfalse) """ # Local imports -from . import basefix -from .util import BlankLine +from .. import fixer_base +from ..fixer_util import BlankLine -class FixItertoolsImports(basefix.BaseFix): +class FixItertoolsImports(fixer_base.BaseFix): PATTERN = """ import_from< 'from' 'itertools' 'import' imports=any > """ %(locals()) diff --git a/Lib/lib2to3/fixes/fix_long.py b/Lib/lib2to3/fixes/fix_long.py index 1987e96..f67f026 100644 --- a/Lib/lib2to3/fixes/fix_long.py +++ b/Lib/lib2to3/fixes/fix_long.py @@ -8,11 +8,11 @@ This also strips the trailing 'L' or 'l' from long loterals. # Local imports from .. import pytree -from . import basefix -from .util import Name, Number +from .. import fixer_base +from ..fixer_util import Name, Number -class FixLong(basefix.BaseFix): +class FixLong(fixer_base.BaseFix): PATTERN = """ (long_type = 'long' | number = NUMBER) diff --git a/Lib/lib2to3/fixes/fix_map.py b/Lib/lib2to3/fixes/fix_map.py index 7b57c50..29578e2 100644 --- a/Lib/lib2to3/fixes/fix_map.py +++ b/Lib/lib2to3/fixes/fix_map.py @@ -21,11 +21,11 @@ soon as the shortest argument is exhausted. # Local imports from ..pgen2 import token -from . import basefix -from .util import Name, Call, ListComp, in_special_context +from .. import fixer_base +from ..fixer_util import Name, Call, ListComp, in_special_context from ..pygram import python_symbols as syms -class FixMap(basefix.ConditionalFix): +class FixMap(fixer_base.ConditionalFix): PATTERN = """ map_none=power< diff --git a/Lib/lib2to3/fixes/fix_methodattrs.py b/Lib/lib2to3/fixes/fix_methodattrs.py index 3d0d7d7..ae4096c 100644 --- a/Lib/lib2to3/fixes/fix_methodattrs.py +++ b/Lib/lib2to3/fixes/fix_methodattrs.py @@ -3,8 +3,8 @@ # Author: Christian Heimes # Local imports -from . import basefix -from .util import Name +from .. 
import fixer_base +from ..fixer_util import Name MAP = { "im_func" : "__func__", @@ -12,7 +12,7 @@ MAP = { "im_class" : "__self__.__class__" } -class FixMethodattrs(basefix.BaseFix): +class FixMethodattrs(fixer_base.BaseFix): PATTERN = """ power< any+ trailer< '.' attr=('im_func' | 'im_self' | 'im_class') > any* > """ diff --git a/Lib/lib2to3/fixes/fix_ne.py b/Lib/lib2to3/fixes/fix_ne.py index f83ca43..382f43d 100644 --- a/Lib/lib2to3/fixes/fix_ne.py +++ b/Lib/lib2to3/fixes/fix_ne.py @@ -6,10 +6,10 @@ # Local imports from .. import pytree from ..pgen2 import token -from . import basefix +from .. import fixer_base -class FixNe(basefix.BaseFix): +class FixNe(fixer_base.BaseFix): # This is so simple that we don't need the pattern compiler. def match(self, node): diff --git a/Lib/lib2to3/fixes/fix_next.py b/Lib/lib2to3/fixes/fix_next.py index 6b54376..9791333 100644 --- a/Lib/lib2to3/fixes/fix_next.py +++ b/Lib/lib2to3/fixes/fix_next.py @@ -8,13 +8,13 @@ # Local imports from ..pgen2 import token from ..pygram import python_symbols as syms -from . import basefix -from .util import Name, Call, find_binding, any +from .. import fixer_base +from ..fixer_util import Name, Call, find_binding, any bind_warning = "Calls to builtin next() possibly shadowed by global binding" -class FixNext(basefix.BaseFix): +class FixNext(fixer_base.BaseFix): PATTERN = """ power< base=any+ trailer< '.' attr='next' > trailer< '(' ')' > > | diff --git a/Lib/lib2to3/fixes/fix_nonzero.py b/Lib/lib2to3/fixes/fix_nonzero.py index 4cf6875..abb1f4e 100644 --- a/Lib/lib2to3/fixes/fix_nonzero.py +++ b/Lib/lib2to3/fixes/fix_nonzero.py @@ -2,10 +2,10 @@ # Author: Collin Winter # Local imports -from .import basefix -from .util import Name, syms +from .. import fixer_base +from ..fixer_util import Name, syms -class FixNonzero(basefix.BaseFix): +class FixNonzero(fixer_base.BaseFix): PATTERN = """ classdef< 'class' any+ ':' suite< any* diff --git a/Lib/lib2to3/fixes/fix_numliterals.py b/Lib/lib2to3/fixes/fix_numliterals.py index f88be60..682aac5 100644 --- a/Lib/lib2to3/fixes/fix_numliterals.py +++ b/Lib/lib2to3/fixes/fix_numliterals.py @@ -5,11 +5,11 @@ # Local imports from ..pgen2 import token -from .import basefix -from .util import Number, set +from .. import fixer_base +from ..fixer_util import Number, set -class FixNumliterals(basefix.BaseFix): +class FixNumliterals(fixer_base.BaseFix): # This is so simple that we don't need the pattern compiler. def match(self, node): diff --git a/Lib/lib2to3/fixes/fix_print.py b/Lib/lib2to3/fixes/fix_print.py index ae91a8d..6d01dfd 100644 --- a/Lib/lib2to3/fixes/fix_print.py +++ b/Lib/lib2to3/fixes/fix_print.py @@ -17,8 +17,8 @@ No changes are applied if print_function is imported from __future__ from .. import patcomp from .. import pytree from ..pgen2 import token -from .import basefix -from .util import Name, Call, Comma, String, is_tuple +from .. import fixer_base +from ..fixer_util import Name, Call, Comma, String, is_tuple parend_expr = patcomp.compile_pattern( @@ -26,7 +26,7 @@ parend_expr = patcomp.compile_pattern( ) -class FixPrint(basefix.ConditionalFix): +class FixPrint(fixer_base.ConditionalFix): PATTERN = """ simple_stmt< bare='print' any > | print_stmt diff --git a/Lib/lib2to3/fixes/fix_raise.py b/Lib/lib2to3/fixes/fix_raise.py index dcf4278..be785f5 100644 --- a/Lib/lib2to3/fixes/fix_raise.py +++ b/Lib/lib2to3/fixes/fix_raise.py @@ -24,10 +24,10 @@ CAVEATS: # Local imports from .. 
import pytree from ..pgen2 import token -from .import basefix -from .util import Name, Call, Attr, ArgList, is_tuple +from .. import fixer_base +from ..fixer_util import Name, Call, Attr, ArgList, is_tuple -class FixRaise(basefix.BaseFix): +class FixRaise(fixer_base.BaseFix): PATTERN = """ raise_stmt< 'raise' exc=any [',' val=any [',' tb=any]] > diff --git a/Lib/lib2to3/fixes/fix_raw_input.py b/Lib/lib2to3/fixes/fix_raw_input.py index e746255..d9130d0 100644 --- a/Lib/lib2to3/fixes/fix_raw_input.py +++ b/Lib/lib2to3/fixes/fix_raw_input.py @@ -2,10 +2,10 @@ # Author: Andre Roberge # Local imports -from .import basefix -from .util import Name +from .. import fixer_base +from ..fixer_util import Name -class FixRawInput(basefix.BaseFix): +class FixRawInput(fixer_base.BaseFix): PATTERN = """ power< name='raw_input' trailer< '(' [any] ')' > > diff --git a/Lib/lib2to3/fixes/fix_renames.py b/Lib/lib2to3/fixes/fix_renames.py index 0b2bbf5..181c625 100644 --- a/Lib/lib2to3/fixes/fix_renames.py +++ b/Lib/lib2to3/fixes/fix_renames.py @@ -7,8 +7,8 @@ Fixes: # based on Collin Winter's fix_import # Local imports -from .import basefix -from .util import Name, attr_chain, any, set +from .. import fixer_base +from ..fixer_util import Name, attr_chain, any, set MAPPING = {"sys": {"maxint" : "maxsize"}, } @@ -39,7 +39,7 @@ def build_pattern(): #yield """bare_name=%s""" % alternates(bare) -class FixRenames(basefix.BaseFix): +class FixRenames(fixer_base.BaseFix): PATTERN = "|".join(build_pattern()) order = "pre" # Pre-order tree traversal diff --git a/Lib/lib2to3/fixes/fix_repr.py b/Lib/lib2to3/fixes/fix_repr.py index 62491df..99e7722 100644 --- a/Lib/lib2to3/fixes/fix_repr.py +++ b/Lib/lib2to3/fixes/fix_repr.py @@ -4,11 +4,11 @@ """Fixer that transforms `xyzzy` into repr(xyzzy).""" # Local imports -from .import basefix -from .util import Call, Name +from .. import fixer_base +from ..fixer_util import Call, Name -class FixRepr(basefix.BaseFix): +class FixRepr(fixer_base.BaseFix): PATTERN = """ atom < '`' expr=any '`' > diff --git a/Lib/lib2to3/fixes/fix_standarderror.py b/Lib/lib2to3/fixes/fix_standarderror.py index 61789d0..4f87014 100644 --- a/Lib/lib2to3/fixes/fix_standarderror.py +++ b/Lib/lib2to3/fixes/fix_standarderror.py @@ -4,11 +4,11 @@ """Fixer for StandardError -> Exception.""" # Local imports -from .import basefix -from .util import Name +from .. import fixer_base +from ..fixer_util import Name -class FixStandarderror(basefix.BaseFix): +class FixStandarderror(fixer_base.BaseFix): PATTERN = """ 'StandardError' diff --git a/Lib/lib2to3/fixes/fix_throw.py b/Lib/lib2to3/fixes/fix_throw.py index ef120bd..bf86d84 100644 --- a/Lib/lib2to3/fixes/fix_throw.py +++ b/Lib/lib2to3/fixes/fix_throw.py @@ -10,10 +10,10 @@ g.throw("foo"[, V[, T]]) will warn about string exceptions.""" # Local imports from .. import pytree from ..pgen2 import token -from .import basefix -from .util import Name, Call, ArgList, Attr, is_tuple +from .. import fixer_base +from ..fixer_util import Name, Call, ArgList, Attr, is_tuple -class FixThrow(basefix.BaseFix): +class FixThrow(fixer_base.BaseFix): PATTERN = """ power< any trailer< '.' 'throw' > diff --git a/Lib/lib2to3/fixes/fix_tuple_params.py b/Lib/lib2to3/fixes/fix_tuple_params.py index 5542ff5..fb29f5c 100644 --- a/Lib/lib2to3/fixes/fix_tuple_params.py +++ b/Lib/lib2to3/fixes/fix_tuple_params.py @@ -21,14 +21,14 @@ It will also support lambdas: # Local imports from .. 
import pytree from ..pgen2 import token -from .import basefix -from .util import Assign, Name, Newline, Number, Subscript, syms +from .. import fixer_base +from ..fixer_util import Assign, Name, Newline, Number, Subscript, syms def is_docstring(stmt): return isinstance(stmt, pytree.Node) and \ stmt.children[0].type == token.STRING -class FixTupleParams(basefix.BaseFix): +class FixTupleParams(fixer_base.BaseFix): PATTERN = """ funcdef< 'def' any parameters< '(' args=any ')' > ['->' any] ':' suite=any+ > diff --git a/Lib/lib2to3/fixes/fix_types.py b/Lib/lib2to3/fixes/fix_types.py index fe7880a..445f1b2 100644 --- a/Lib/lib2to3/fixes/fix_types.py +++ b/Lib/lib2to3/fixes/fix_types.py @@ -21,8 +21,8 @@ There should be another fixer that handles at least the following constants: # Local imports from ..pgen2 import token -from .import basefix -from .util import Name +from .. import fixer_base +from ..fixer_util import Name _TYPE_MAPPING = { 'BooleanType' : 'bool', @@ -51,7 +51,7 @@ _TYPE_MAPPING = { _pats = ["power< 'types' trailer< '.' name='%s' > >" % t for t in _TYPE_MAPPING] -class FixTypes(basefix.BaseFix): +class FixTypes(fixer_base.BaseFix): PATTERN = '|'.join(_pats) diff --git a/Lib/lib2to3/fixes/fix_unicode.py b/Lib/lib2to3/fixes/fix_unicode.py index d7ad3b8..7f5cc80 100644 --- a/Lib/lib2to3/fixes/fix_unicode.py +++ b/Lib/lib2to3/fixes/fix_unicode.py @@ -4,9 +4,9 @@ import re from ..pgen2 import token -from .import basefix +from .. import fixer_base -class FixUnicode(basefix.BaseFix): +class FixUnicode(fixer_base.BaseFix): PATTERN = "STRING | NAME<'unicode' | 'unichr'>" diff --git a/Lib/lib2to3/fixes/fix_ws_comma.py b/Lib/lib2to3/fixes/fix_ws_comma.py index 53e0008..e493498 100644 --- a/Lib/lib2to3/fixes/fix_ws_comma.py +++ b/Lib/lib2to3/fixes/fix_ws_comma.py @@ -7,9 +7,9 @@ uses of colons. It does not touch other uses of whitespace. from .. import pytree from ..pgen2 import token -from .import basefix +from .. import fixer_base -class FixWsComma(basefix.BaseFix): +class FixWsComma(fixer_base.BaseFix): explicit = True # The user must ask for this fixers diff --git a/Lib/lib2to3/fixes/fix_xrange.py b/Lib/lib2to3/fixes/fix_xrange.py index 2e4040e..85efcd0 100644 --- a/Lib/lib2to3/fixes/fix_xrange.py +++ b/Lib/lib2to3/fixes/fix_xrange.py @@ -4,12 +4,12 @@ """Fixer that changes xrange(...) into range(...).""" # Local imports -from .import basefix -from .util import Name, Call, consuming_calls +from .. import fixer_base +from ..fixer_util import Name, Call, consuming_calls from .. import patcomp -class FixXrange(basefix.BaseFix): +class FixXrange(fixer_base.BaseFix): PATTERN = """ power< (name='range'|name='xrange') trailer< '(' [any] ')' > any* > diff --git a/Lib/lib2to3/fixes/fix_xreadlines.py b/Lib/lib2to3/fixes/fix_xreadlines.py index 8857759..2d3f3c8 100644 --- a/Lib/lib2to3/fixes/fix_xreadlines.py +++ b/Lib/lib2to3/fixes/fix_xreadlines.py @@ -4,11 +4,11 @@ This fixer will also convert g(f.xreadlines) into g(f.__iter__).""" # Author: Collin Winter # Local imports -from .import basefix -from .util import Name +from .. import fixer_base +from ..fixer_util import Name -class FixXreadlines(basefix.BaseFix): +class FixXreadlines(fixer_base.BaseFix): PATTERN = """ power< call=any+ trailer< '.' 'xreadlines' > trailer< '(' ')' > > | diff --git a/Lib/lib2to3/fixes/fix_zip.py b/Lib/lib2to3/fixes/fix_zip.py index e319b54..08296c0 100644 --- a/Lib/lib2to3/fixes/fix_zip.py +++ b/Lib/lib2to3/fixes/fix_zip.py @@ -8,10 +8,10 @@ iter(<>), list(<>), tuple(<>), sorted(<>), ...join(<>), or for V in <>:. 
""" # Local imports -from . import basefix -from .util import Name, Call, in_special_context +from .. import fixer_base +from ..fixer_util import Name, Call, in_special_context -class FixZip(basefix.ConditionalFix): +class FixZip(fixer_base.ConditionalFix): PATTERN = """ power< 'zip' args=trailer< '(' [any] ')' > diff --git a/Lib/lib2to3/fixes/util.py b/Lib/lib2to3/fixes/util.py deleted file mode 100644 index c977237..0000000 --- a/Lib/lib2to3/fixes/util.py +++ /dev/null @@ -1,366 +0,0 @@ -"""Utility functions, node construction macros, etc.""" -# Author: Collin Winter - -# Local imports -from ..pgen2 import token -from ..pytree import Leaf, Node -from ..pygram import python_symbols as syms -from .. import patcomp - - -########################################################### -### Common node-construction "macros" -########################################################### - -def KeywordArg(keyword, value): - return Node(syms.argument, - [keyword, Leaf(token.EQUAL, '='), value]) - -def LParen(): - return Leaf(token.LPAR, "(") - -def RParen(): - return Leaf(token.RPAR, ")") - -def Assign(target, source): - """Build an assignment statement""" - if not isinstance(target, list): - target = [target] - if not isinstance(source, list): - source.set_prefix(" ") - source = [source] - - return Node(syms.atom, - target + [Leaf(token.EQUAL, "=", prefix=" ")] + source) - -def Name(name, prefix=None): - """Return a NAME leaf""" - return Leaf(token.NAME, name, prefix=prefix) - -def Attr(obj, attr): - """A node tuple for obj.attr""" - return [obj, Node(syms.trailer, [Dot(), attr])] - -def Comma(): - """A comma leaf""" - return Leaf(token.COMMA, ",") - -def Dot(): - """A period (.) leaf""" - return Leaf(token.DOT, ".") - -def ArgList(args, lparen=LParen(), rparen=RParen()): - """A parenthesised argument list, used by Call()""" - return Node(syms.trailer, - [lparen.clone(), - Node(syms.arglist, args), - rparen.clone()]) - -def Call(func_name, args, prefix=None): - """A function call""" - node = Node(syms.power, [func_name, ArgList(args)]) - if prefix is not None: - node.set_prefix(prefix) - return node - -def Newline(): - """A newline literal""" - return Leaf(token.NEWLINE, "\n") - -def BlankLine(): - """A blank line""" - return Leaf(token.NEWLINE, "") - -def Number(n, prefix=None): - return Leaf(token.NUMBER, n, prefix=prefix) - -def Subscript(index_node): - """A numeric or string subscript""" - return Node(syms.trailer, [Leaf(token.LBRACE, '['), - index_node, - Leaf(token.RBRACE, ']')]) - -def String(string, prefix=None): - """A string leaf""" - return Leaf(token.STRING, string, prefix=prefix) - -def ListComp(xp, fp, it, test=None): - """A list comprehension of the form [xp for fp in it if test]. - - If test is None, the "if test" part is omitted. 
- """ - xp.set_prefix("") - fp.set_prefix(" ") - it.set_prefix(" ") - for_leaf = Leaf(token.NAME, "for") - for_leaf.set_prefix(" ") - in_leaf = Leaf(token.NAME, "in") - in_leaf.set_prefix(" ") - inner_args = [for_leaf, fp, in_leaf, it] - if test: - test.set_prefix(" ") - if_leaf = Leaf(token.NAME, "if") - if_leaf.set_prefix(" ") - inner_args.append(Node(syms.comp_if, [if_leaf, test])) - inner = Node(syms.listmaker, [xp, Node(syms.comp_for, inner_args)]) - return Node(syms.atom, - [Leaf(token.LBRACE, "["), - inner, - Leaf(token.RBRACE, "]")]) - -def FromImport(package_name, name_leafs): - """ Return an import statement in the form: - from package import name_leafs""" - # XXX: May not handle dotted imports properly (eg, package_name='foo.bar') - assert package_name == '.' or '.' not in package.name, "FromImport has "\ - "not been tested with dotted package names -- use at your own "\ - "peril!" - - for leaf in name_leafs: - # Pull the leaves out of their old tree - leaf.remove() - - children = [Leaf(token.NAME, 'from'), - Leaf(token.NAME, package_name, prefix=" "), - Leaf(token.NAME, 'import', prefix=" "), - Node(syms.import_as_names, name_leafs)] - imp = Node(syms.import_from, children) - return imp - - -########################################################### -### Determine whether a node represents a given literal -########################################################### - -def is_tuple(node): - """Does the node represent a tuple literal?""" - if isinstance(node, Node) and node.children == [LParen(), RParen()]: - return True - return (isinstance(node, Node) - and len(node.children) == 3 - and isinstance(node.children[0], Leaf) - and isinstance(node.children[1], Node) - and isinstance(node.children[2], Leaf) - and node.children[0].value == "(" - and node.children[2].value == ")") - -def is_list(node): - """Does the node represent a list literal?""" - return (isinstance(node, Node) - and len(node.children) > 1 - and isinstance(node.children[0], Leaf) - and isinstance(node.children[-1], Leaf) - and node.children[0].value == "[" - and node.children[-1].value == "]") - -########################################################### -### Common portability code. This allows fixers to do, eg, -### "from .util import set" and forget about it. -########################################################### - -try: - any = any -except NameError: - def any(l): - for o in l: - if o: - return True - return False - -try: - set = set -except NameError: - from sets import Set as set - -try: - reversed = reversed -except NameError: - def reversed(l): - return l[::-1] - -########################################################### -### Misc -########################################################### - - -consuming_calls = set(["sorted", "list", "set", "any", "all", "tuple", "sum", - "min", "max"]) - -def attr_chain(obj, attr): - """Follow an attribute chain. - - If you have a chain of objects where a.foo -> b, b.foo-> c, etc, - use this to iterate over all objects in the chain. Iteration is - terminated by getattr(x, attr) is None. - - Args: - obj: the starting object - attr: the name of the chaining attribute - - Yields: - Each successive object in the chain. - """ - next = getattr(obj, attr) - while next: - yield next - next = getattr(next, attr) - -p0 = """for_stmt< 'for' any 'in' node=any ':' any* > - | comp_for< 'for' any 'in' node=any any* > - """ -p1 = """ -power< - ( 'iter' | 'list' | 'tuple' | 'sorted' | 'set' | 'sum' | - 'any' | 'all' | (any* trailer< '.' 
'join' >) ) - trailer< '(' node=any ')' > - any* -> -""" -p2 = """ -power< - 'sorted' - trailer< '(' arglist ')' > - any* -> -""" -pats_built = False -def in_special_context(node): - """ Returns true if node is in an environment where all that is required - of it is being itterable (ie, it doesn't matter if it returns a list - or an itterator). - See test_map_nochange in test_fixers.py for some examples and tests. - """ - global p0, p1, p2, pats_built - if not pats_built: - p1 = patcomp.compile_pattern(p1) - p0 = patcomp.compile_pattern(p0) - p2 = patcomp.compile_pattern(p2) - pats_built = True - patterns = [p0, p1, p2] - for pattern, parent in zip(patterns, attr_chain(node, "parent")): - results = {} - if pattern.match(parent, results) and results["node"] is node: - return True - return False - -########################################################### -### The following functions are to find bindings in a suite -########################################################### - -def make_suite(node): - if node.type == syms.suite: - return node - node = node.clone() - parent, node.parent = node.parent, None - suite = Node(syms.suite, [node]) - suite.parent = parent - return suite - -def does_tree_import(package, name, node): - """ Returns true if name is imported from package at the - top level of the tree which node belongs to. - To cover the case of an import like 'import foo', use - Null for the package and 'foo' for the name. """ - # Scamper up to the top level namespace - while node.type != syms.file_input: - assert node.parent, "Tree is insane! root found before "\ - "file_input node was found." - node = node.parent - - binding = find_binding(name, node, package) - return bool(binding) - -_def_syms = set([syms.classdef, syms.funcdef]) -def find_binding(name, node, package=None): - """ Returns the node which binds variable name, otherwise None. - If optional argument package is supplied, only imports will - be returned. 
- See test cases for examples.""" - for child in node.children: - ret = None - if child.type == syms.for_stmt: - if _find(name, child.children[1]): - return child - n = find_binding(name, make_suite(child.children[-1]), package) - if n: ret = n - elif child.type in (syms.if_stmt, syms.while_stmt): - n = find_binding(name, make_suite(child.children[-1]), package) - if n: ret = n - elif child.type == syms.try_stmt: - n = find_binding(name, make_suite(child.children[2]), package) - if n: - ret = n - else: - for i, kid in enumerate(child.children[3:]): - if kid.type == token.COLON and kid.value == ":": - # i+3 is the colon, i+4 is the suite - n = find_binding(name, make_suite(child.children[i+4]), package) - if n: ret = n - elif child.type in _def_syms and child.children[1].value == name: - ret = child - elif _is_import_binding(child, name, package): - ret = child - elif child.type == syms.simple_stmt: - ret = find_binding(name, child, package) - elif child.type == syms.expr_stmt: - if _find(name, child.children[0]): - ret = child - - if ret: - if not package: - return ret - if ret.type in (syms.import_name, syms.import_from): - return ret - return None - -_block_syms = set([syms.funcdef, syms.classdef, syms.trailer]) -def _find(name, node): - nodes = [node] - while nodes: - node = nodes.pop() - if node.type > 256 and node.type not in _block_syms: - nodes.extend(node.children) - elif node.type == token.NAME and node.value == name: - return node - return None - -def _is_import_binding(node, name, package=None): - """ Will reuturn node if node will import name, or node - will import * from package. None is returned otherwise. - See test cases for examples. """ - - if node.type == syms.import_name and not package: - imp = node.children[1] - if imp.type == syms.dotted_as_names: - for child in imp.children: - if child.type == syms.dotted_as_name: - if child.children[2].value == name: - return node - elif child.type == token.NAME and child.value == name: - return node - elif imp.type == syms.dotted_as_name: - last = imp.children[-1] - if last.type == token.NAME and last.value == name: - return node - elif imp.type == token.NAME and imp.value == name: - return node - elif node.type == syms.import_from: - # unicode(...) is used to make life easier here, because - # from a.b import parses to ['import', ['a', '.', 'b'], ...] - if package and str(node.children[1]).strip() != package: - return None - n = node.children[3] - if package and _find('as', n): - # See test_from_import_as for explanation - return None - elif n.type == syms.import_as_names and _find(name, n): - return node - elif n.type == syms.import_as_name: - child = n.children[2] - if child.type == token.NAME and child.value == name: - return node - elif n.type == token.NAME and n.value == name: - return node - elif package and n.type == token.STAR: - return node - return None diff --git a/Lib/lib2to3/refactor.py b/Lib/lib2to3/refactor.py index ae9d7d0..1b8a811 100755 --- a/Lib/lib2to3/refactor.py +++ b/Lib/lib2to3/refactor.py @@ -30,11 +30,13 @@ from . import patcomp from . import fixes from . import pygram -def main(args=None): +def main(fixer_dir, args=None): """Main program. - Call without arguments to use sys.argv[1:] as the arguments; or - call with a list of arguments (excluding sys.argv[0]). + Args: + fixer_dir: directory where fixer modules are located. + args: optional; a list of command line arguments. If omitted, + sys.argv[1:] is used. Returns a suggested exit status (0, 1, 2). 
""" @@ -57,7 +59,7 @@ def main(args=None): options, args = parser.parse_args(args) if options.list_fixes: print("Available transformations for the -f/--fix option:") - for fixname in get_all_fix_names(): + for fixname in get_all_fix_names(fixer_dir): print(fixname) if not args: return 0 @@ -76,7 +78,7 @@ def main(args=None): logging.basicConfig(format='%(name)s: %(message)s', level=logging.INFO) # Initialize the refactoring tool - rt = RefactoringTool(options) + rt = RefactoringTool(fixer_dir, options) # Refactor all files and directories passed as arguments if not rt.errors: @@ -87,10 +89,10 @@ def main(args=None): return int(bool(rt.errors)) -def get_all_fix_names(): +def get_all_fix_names(fixer_dir): """Return a sorted list of all available fix names.""" fix_names = [] - names = os.listdir(os.path.dirname(fixes.__file__)) + names = os.listdir(fixer_dir) names.sort() for name in names: if name.startswith("fix_") and name.endswith(".py"): @@ -138,11 +140,14 @@ def get_headnode_dict(fixer_list): class RefactoringTool(object): - def __init__(self, options): + def __init__(self, fixer_dir, options): """Initializer. - The argument is an optparse.Values instance. + Args: + fixer_dir: directory in which to find fixer modules. + options: an optparse.Values instance. """ + self.fixer_dir = fixer_dir self.options = options self.errors = [] self.logger = logging.getLogger("RefactoringTool") @@ -167,14 +172,15 @@ class RefactoringTool(object): want a pre-order AST traversal, and post_order is the list that want post-order traversal. """ + fixer_pkg = ".".join(self.fixer_dir.split(os.path.sep)) pre_order_fixers = [] post_order_fixers = [] fix_names = self.options.fix if not fix_names or "all" in fix_names: - fix_names = get_all_fix_names() + fix_names = get_all_fix_names(self.fixer_dir) for fix_name in fix_names: try: - mod = __import__("lib2to3.fixes.fix_" + fix_name, {}, {}, ["*"]) + mod = __import__(fixer_pkg + ".fix_" + fix_name, {}, {}, ["*"]) except ImportError: self.log_error("Can't find transformation %s", fix_name) continue diff --git a/Lib/lib2to3/tests/test_all_fixers.py b/Lib/lib2to3/tests/test_all_fixers.py index 5f4a9fc..c36f61d 100644 --- a/Lib/lib2to3/tests/test_all_fixers.py +++ b/Lib/lib2to3/tests/test_all_fixers.py @@ -29,7 +29,7 @@ class Test_all(support.TestCase): def setUp(self): options = Options(fix=["all", "idioms", "ws_comma", "buffer"], print_function=False) - self.refactor = refactor.RefactoringTool(options) + self.refactor = refactor.RefactoringTool("lib2to3/fixes", options) def test_all_project_files(self): for filepath in support.all_project_files(): diff --git a/Lib/lib2to3/tests/test_fixers.py b/Lib/lib2to3/tests/test_fixers.py index d1a231d..d1d2509 100755 --- a/Lib/lib2to3/tests/test_fixers.py +++ b/Lib/lib2to3/tests/test_fixers.py @@ -10,13 +10,14 @@ except ImportError: # Python imports import unittest +from itertools import chain from os.path import dirname, pathsep # Local imports from .. import pygram from .. import pytree from .. import refactor -from ..fixes import util +from .. 
import fixer_util class Options: @@ -29,11 +30,10 @@ class Options: class FixerTestCase(support.TestCase): def setUp(self): options = Options(fix=[self.fixer], print_function=False) - self.refactor = refactor.RefactoringTool(options) + self.refactor = refactor.RefactoringTool("lib2to3/fixes", options) self.fixer_log = [] self.filename = "" - from itertools import chain for order in (self.refactor.pre_order.values(),\ self.refactor.post_order.values()): for fixer in chain(*order): @@ -70,7 +70,7 @@ class FixerTestCase(support.TestCase): fix = [self.fixer] fix.extend(names) options = Options(fix=fix, print_function=False) - r = refactor.RefactoringTool(options) + r = refactor.RefactoringTool("lib2to3/fixes", options) (pre, post) = r.get_fixers() n = "fix_" + self.fixer if post and post[-1].__class__.__module__.endswith(n): @@ -1109,7 +1109,7 @@ class Test_dict(FixerTestCase): self.check(b, a) def test_unchanged(self): - for wrapper in util.consuming_calls: + for wrapper in fixer_util.consuming_calls: s = "s = %s(d.keys())" % wrapper self.unchanged(s) @@ -1302,7 +1302,7 @@ class Test_xrange(FixerTestCase): self.unchanged("x in range(10, 3, 9)") def test_in_consuming_context(self): - for call in util.consuming_calls: + for call in fixer_util.consuming_calls: self.unchanged("a = %s(range(10))" % call) class Test_raw_input(FixerTestCase): diff --git a/Lib/lib2to3/tests/test_util.py b/Lib/lib2to3/tests/test_util.py index 2c22378..5d02150 100644 --- a/Lib/lib2to3/tests/test_util.py +++ b/Lib/lib2to3/tests/test_util.py @@ -10,7 +10,8 @@ import os.path # Local imports from .. import pytree -from ..fixes import util +from .. import fixer_util +from ..fixer_util import Attr, Name def parse(code, strip_levels=0): @@ -25,13 +26,13 @@ def parse(code, strip_levels=0): class MacroTestCase(support.TestCase): def assertStr(self, node, string): if isinstance(node, (tuple, list)): - node = pytree.Node(util.syms.simple_stmt, node) + node = pytree.Node(fixer_util.syms.simple_stmt, node) self.assertEqual(str(node), string) class Test_is_tuple(support.TestCase): def is_tuple(self, string): - return util.is_tuple(parse(string, strip_levels=2)) + return fixer_util.is_tuple(parse(string, strip_levels=2)) def test_valid(self): self.failUnless(self.is_tuple("(a, b)")) @@ -47,7 +48,7 @@ class Test_is_tuple(support.TestCase): class Test_is_list(support.TestCase): def is_list(self, string): - return util.is_list(parse(string, strip_levels=2)) + return fixer_util.is_list(parse(string, strip_levels=2)) def test_valid(self): self.failUnless(self.is_list("[]")) @@ -62,23 +63,18 @@ class Test_is_list(support.TestCase): class Test_Attr(MacroTestCase): def test(self): - from ..fixes.util import Attr, Name call = parse("foo()", strip_levels=2) self.assertStr(Attr(Name("a"), Name("b")), "a.b") self.assertStr(Attr(call, Name("b")), "foo().b") def test_returns(self): - from ..fixes.util import Attr, Name - attr = Attr(Name("a"), Name("b")) self.assertEqual(type(attr), list) class Test_Name(MacroTestCase): def test(self): - from ..fixes.util import Name - self.assertStr(Name("a"), "a") self.assertStr(Name("foo.foo().bar"), "foo.foo().bar") self.assertStr(Name("a", prefix="b"), "ba") @@ -88,7 +84,7 @@ class Test_does_tree_import(support.TestCase): def _find_bind_rec(self, name, node): # Search a tree for a binding -- used to find the starting # point for these tests. 
- c = util.find_binding(name, node) + c = fixer_util.find_binding(name, node) if c: return c for child in node.children: c = self._find_bind_rec(name, child) @@ -98,7 +94,7 @@ class Test_does_tree_import(support.TestCase): node = parse(string) # Find the binding of start -- that's what we'll go from node = self._find_bind_rec('start', node) - return util.does_tree_import(package, name, node) + return fixer_util.does_tree_import(package, name, node) def try_with(self, string): failing_tests = (("a", "a", "from a import b"), @@ -130,7 +126,7 @@ class Test_does_tree_import(support.TestCase): class Test_find_binding(support.TestCase): def find_binding(self, name, string, package=None): - return util.find_binding(name, parse(string), package) + return fixer_util.find_binding(name, parse(string), package) def test_simple_assignment(self): self.failUnless(self.find_binding("a", "a = b")) -- cgit v0.12
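
Note (illustrative, not part of the patch): the hunks above change RefactoringTool to take the fixer directory as an explicit first argument, the way the updated tests construct it. A minimal sketch of that usage follows. "DummyOptions" is a hypothetical stand-in for the optparse.Values object the tool expects; the test hunks only show that it must carry at least `fix` and `print_function`, and a real run may read further attributes.

# Minimal sketch of the post-patch, two-argument API, modelled on the updated
# tests above.  Illustrative only; "DummyOptions" is a hypothetical stand-in
# for the optparse.Values instance RefactoringTool expects.
from lib2to3 import refactor

class DummyOptions(object):
    def __init__(self, **kwargs):
        for name, value in kwargs.items():
            setattr(self, name, value)

options = DummyOptions(fix=["all"], print_function=False)

# Step 2 of the refactoring: the directory holding fixer modules is now passed
# in explicitly instead of being hard-wired to lib2to3.fixes.  The path is
# interpreted relative to the current working directory, as in the test suite.
rt = refactor.RefactoringTool("lib2to3/fixes", options)
pre_order, post_order = rt.get_fixers()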
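A second small sketch, showing how the new fixer_dir argument is turned into an importable package name inside get_fixers() (per the refactor.py hunk above). The fixer name "xrange" is just an example; as in the patch itself, the split assumes POSIX-style path separators.

import os

fixer_dir = "lib2to3/fixes"
# Path separators become dots: "lib2to3/fixes" -> "lib2to3.fixes".
fixer_pkg = ".".join(fixer_dir.split(os.path.sep))
# Each selected fixer is then imported as <package>.fix_<name>.
mod = __import__(fixer_pkg + ".fix_" + "xrange", {}, {}, ["*"])
print(mod.FixXrange)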