path: root/Lib/lib2to3/fixes
author    Martin v. Löwis <martin@v.loewis.de>    2008-03-19 05:04:44 (GMT)
committer Martin v. Löwis <martin@v.loewis.de>    2008-03-19 05:04:44 (GMT)
commit    ef04c44e29a8276a484f58d03a75a2dec516302d (patch)
tree      6231aa6bb789345a6a86c60b0f547a7bfa19927f /Lib/lib2to3/fixes
parent    c42bcbb1f07723476cccd352eb0ae98ad2d1a809 (diff)
Merged revisions 61596-61597 via svnmerge from
svn+ssh://pythondev@svn.python.org/python/trunk

........
  r61596 | martin.v.loewis | 2008-03-18 23:43:46 -0500 (Tue, 18 Mar 2008) | 2 lines

  Import lib2to3.
........
  r61597 | martin.v.loewis | 2008-03-18 23:58:04 -0500 (Tue, 18 Mar 2008) | 3 lines

  Initialized merge tracking via "svnmerge" with revisions "1-61595" from
  svn+ssh://pythondev@svn.python.org/sandbox/trunk/2to3/lib2to3
........
Diffstat (limited to 'Lib/lib2to3/fixes')
-rw-r--r--  Lib/lib2to3/fixes/__init__.py         |    1
-rw-r--r--  Lib/lib2to3/fixes/basefix.py          |  165
-rw-r--r--  Lib/lib2to3/fixes/fix_apply.py        |   58
-rw-r--r--  Lib/lib2to3/fixes/fix_basestring.py   |   13
-rw-r--r--  Lib/lib2to3/fixes/fix_buffer.py       |   21
-rw-r--r--  Lib/lib2to3/fixes/fix_callable.py     |   31
-rw-r--r--  Lib/lib2to3/fixes/fix_dict.py         |   99
-rw-r--r--  Lib/lib2to3/fixes/fix_except.py       |   89
-rw-r--r--  Lib/lib2to3/fixes/fix_exec.py         |   39
-rw-r--r--  Lib/lib2to3/fixes/fix_execfile.py     |   37
-rw-r--r--  Lib/lib2to3/fixes/fix_filter.py       |  119
-rw-r--r--  Lib/lib2to3/fixes/fix_funcattrs.py    |   19
-rw-r--r--  Lib/lib2to3/fixes/fix_future.py       |   16
-rw-r--r--  Lib/lib2to3/fixes/fix_has_key.py      |  109
-rw-r--r--  Lib/lib2to3/fixes/fix_idioms.py       |  134
-rw-r--r--  Lib/lib2to3/fixes/fix_imports.py      |   89
-rw-r--r--  Lib/lib2to3/fixes/fix_input.py        |   26
-rw-r--r--  Lib/lib2to3/fixes/fix_intern.py       |   43
-rw-r--r--  Lib/lib2to3/fixes/fix_itertools.py    |   36
-rw-r--r--  Lib/lib2to3/fixes/fix_long.py         |   35
-rw-r--r--  Lib/lib2to3/fixes/fix_map.py          |  126
-rw-r--r--  Lib/lib2to3/fixes/fix_methodattrs.py  |   23
-rw-r--r--  Lib/lib2to3/fixes/fix_ne.py           |   22
-rw-r--r--  Lib/lib2to3/fixes/fix_next.py         |  104
-rw-r--r--  Lib/lib2to3/fixes/fix_nonzero.py      |   20
-rw-r--r--  Lib/lib2to3/fixes/fix_numliterals.py  |   27
-rw-r--r--  Lib/lib2to3/fixes/fix_print.py        |   81
-rw-r--r--  Lib/lib2to3/fixes/fix_raise.py        |   82
-rw-r--r--  Lib/lib2to3/fixes/fix_raw_input.py    |   16
-rw-r--r--  Lib/lib2to3/fixes/fix_renames.py      |   70
-rw-r--r--  Lib/lib2to3/fixes/fix_repr.py         |   22
-rw-r--r--  Lib/lib2to3/fixes/fix_standarderror.py |  18
-rw-r--r--  Lib/lib2to3/fixes/fix_throw.py        |   56
-rw-r--r--  Lib/lib2to3/fixes/fix_tuple_params.py |  169
-rw-r--r--  Lib/lib2to3/fixes/fix_types.py        |   62
-rw-r--r--  Lib/lib2to3/fixes/fix_unicode.py      |   28
-rw-r--r--  Lib/lib2to3/fixes/fix_ws_comma.py     |   39
-rw-r--r--  Lib/lib2to3/fixes/fix_xrange.py       |   18
-rw-r--r--  Lib/lib2to3/fixes/fix_xreadlines.py   |   24
-rw-r--r--  Lib/lib2to3/fixes/util.py             |  303
40 files changed, 2489 insertions, 0 deletions
diff --git a/Lib/lib2to3/fixes/__init__.py b/Lib/lib2to3/fixes/__init__.py
new file mode 100644
index 0000000..b93054b
--- /dev/null
+++ b/Lib/lib2to3/fixes/__init__.py
@@ -0,0 +1 @@
+# Dummy file to make this directory a package.
diff --git a/Lib/lib2to3/fixes/basefix.py b/Lib/lib2to3/fixes/basefix.py
new file mode 100644
index 0000000..8eb4278
--- /dev/null
+++ b/Lib/lib2to3/fixes/basefix.py
@@ -0,0 +1,165 @@
+# Copyright 2006 Google, Inc. All Rights Reserved.
+# Licensed to PSF under a Contributor Agreement.
+
+"""Base class for fixers (optional, but recommended)."""
+
+# Python imports
+import logging
+import itertools
+
+# Get a usable 'set' constructor
+try:
+ set
+except NameError:
+ from sets import Set as set
+
+# Local imports
+from ..patcomp import PatternCompiler
+from .. import pygram
+
+class BaseFix(object):
+
+ """Optional base class for fixers.
+
+ The subclass name must be FixFooBar where FooBar is the result of
+ removing underscores and capitalizing the words of the fix name.
+ For example, the class name for a fixer named 'has_key' should be
+ FixHasKey.
+ """
+
+ PATTERN = None # Most subclasses should override with a string literal
+ pattern = None # Compiled pattern, set by compile_pattern()
+ options = None # Options object passed to initializer
+ filename = None # The filename (set by set_filename)
+ logger = None # A logger (set by set_filename)
+ numbers = itertools.count(1) # For new_name()
+ used_names = set() # A set of all used NAMEs
+ order = "post" # Does the fixer prefer pre- or post-order traversal
+ explicit = False # Is this ignored by refactor.py -f all?
+
+ # Shortcut for access to Python grammar symbols
+ syms = pygram.python_symbols
+
+ def __init__(self, options, log):
+ """Initializer. Subclass may override.
+
+ Args:
+ options: an optparse.Values instance which can be used
+ to inspect the command line options.
+ log: a list to append warnings and other messages to.
+ """
+ self.options = options
+ self.log = log
+ self.compile_pattern()
+
+ def compile_pattern(self):
+ """Compiles self.PATTERN into self.pattern.
+
+ Subclass may override if it doesn't want to use
+ self.{pattern,PATTERN} in .match().
+ """
+ if self.PATTERN is not None:
+ self.pattern = PatternCompiler().compile_pattern(self.PATTERN)
+
+ def set_filename(self, filename):
+ """Set the filename, and a logger derived from it.
+
+ The main refactoring tool should call this.
+ """
+ self.filename = filename
+ self.logger = logging.getLogger(filename)
+
+ def match(self, node):
+ """Returns match for a given parse tree node.
+
+ Should return a true or false object (not necessarily a bool).
+ It may return a non-empty dict of matching sub-nodes as
+ returned by a matching pattern.
+
+ Subclass may override.
+ """
+ results = {"node": node}
+ return self.pattern.match(node, results) and results
+
+ def transform(self, node, results):
+ """Returns the transformation for a given parse tree node.
+
+ Args:
+ node: the root of the parse tree that matched the fixer.
+ results: a dict mapping symbolic names to part of the match.
+
+ Returns:
+ None, or a node that is a modified copy of the
+ argument node. The node argument may also be modified in-place to
+ effect the same change.
+
+ Subclass *must* override.
+ """
+ raise NotImplementedError()
+
+ def parenthesize(self, node):
+ """Wrapper around pygram.parenthesize()."""
+ return pygram.parenthesize(node)
+
+ def new_name(self, template="xxx_todo_changeme"):
+ """Return a string suitable for use as an identifier
+
+ The new name is guaranteed not to conflict with other identifiers.
+ """
+ name = template
+ while name in self.used_names:
+ name = template + str(self.numbers.next())
+ self.used_names.add(name)
+ return name
+
+ def log_message(self, message):
+ if self.first_log:
+ self.first_log = False
+ self.log.append("### In file %s ###" % self.filename)
+ self.log.append(message)
+
+ def cannot_convert(self, node, reason=None):
+ """Warn the user that a given chunk of code is not valid Python 3,
+ but that it cannot be converted automatically.
+
+ First argument is the top-level node for the code in question.
+ Optional second argument is why it can't be converted.
+ """
+ lineno = node.get_lineno()
+ for_output = node.clone()
+ for_output.set_prefix("")
+ msg = "Line %d: could not convert: %s"
+ self.log_message(msg % (lineno, for_output))
+ if reason:
+ self.log_message(reason)
+
+ def warning(self, node, reason):
+ """Used for warning the user about possible uncertainty in the
+ translation.
+
+ First argument is the top-level node for the code in question.
+ Second argument is the reason for the warning.
+ """
+ lineno = node.get_lineno()
+ self.log_message("Line %d: %s" % (lineno, reason))
+
+ def start_tree(self, tree, filename):
+ """Some fixers need to maintain tree-wide state.
+ This method is called once, at the start of tree fix-up.
+
+ tree - the root node of the tree to be processed.
+ filename - the name of the file the tree came from.
+ """
+ self.used_names = tree.used_names
+ self.set_filename(filename)
+ self.numbers = itertools.count(1)
+ self.first_log = True
+
+ def finish_tree(self, tree, filename):
+ """Some fixers need to maintain tree-wide state.
+ This method is called once, at the conclusion of tree fix-up.
+
+ tree - the root node of the tree to be processed.
+ filename - the name of the file the tree came from.
+ """
+ pass
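To show how the hooks above fit together, here is a minimal hypothetical fixer built on BaseFix, modeled on the simpler fixers later in this commit; the class name, pattern, and replacement are illustrative only and are not part of the diff:

    # Hypothetical example fixer: rename one bare builtin NAME.
    from . import basefix
    from .util import Name

    class FixExampleRename(basefix.BaseFix):

        # compile_pattern() turns this into self.pattern for match().
        PATTERN = "'unicode'"

        def transform(self, node, results):
            # Keep the node's whitespace prefix so the surrounding
            # formatting survives in the rewritten tree.
            return Name("str", prefix=node.get_prefix())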
diff --git a/Lib/lib2to3/fixes/fix_apply.py b/Lib/lib2to3/fixes/fix_apply.py
new file mode 100644
index 0000000..f233224
--- /dev/null
+++ b/Lib/lib2to3/fixes/fix_apply.py
@@ -0,0 +1,58 @@
+# Copyright 2006 Google, Inc. All Rights Reserved.
+# Licensed to PSF under a Contributor Agreement.
+
+"""Fixer for apply().
+
+This converts apply(func, v, k) into (func)(*v, **k)."""
+
+# Local imports
+from .. import pytree
+from ..pgen2 import token
+from . import basefix
+from .util import Call, Comma
+
+class FixApply(basefix.BaseFix):
+
+ PATTERN = """
+ power< 'apply'
+ trailer<
+ '('
+ arglist<
+ (not argument<NAME '=' any>) func=any ','
+ (not argument<NAME '=' any>) args=any [','
+ (not argument<NAME '=' any>) kwds=any] [',']
+ >
+ ')'
+ >
+ >
+ """
+
+ def transform(self, node, results):
+ syms = self.syms
+ assert results
+ func = results["func"]
+ args = results["args"]
+ kwds = results.get("kwds")
+ prefix = node.get_prefix()
+ func = func.clone()
+ if (func.type not in (token.NAME, syms.atom) and
+ (func.type != syms.power or
+ func.children[-2].type == token.DOUBLESTAR)):
+ # Need to parenthesize
+ func = self.parenthesize(func)
+ func.set_prefix("")
+ args = args.clone()
+ args.set_prefix("")
+ if kwds is not None:
+ kwds = kwds.clone()
+ kwds.set_prefix("")
+ l_newargs = [pytree.Leaf(token.STAR, "*"), args]
+ if kwds is not None:
+ l_newargs.extend([Comma(),
+ pytree.Leaf(token.DOUBLESTAR, "**"),
+ kwds])
+ l_newargs[-2].set_prefix(" ") # that's the ** token
+ # XXX Sometimes we could be cleverer, e.g. apply(f, (x, y) + t)
+ # can be translated into f(x, y, *t) instead of f(*(x, y) + t)
+ #new = pytree.Node(syms.power, (func, ArgList(l_newargs)))
+ return Call(func, l_newargs, prefix=prefix)
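In plain terms, the rewrite fix_apply.py performs above is ordinary argument unpacking; a hand-written before/after sketch (not output captured from the fixer) looks like this:

    def f(a, b, scale=1):
        return (a + b) * scale

    args = (2, 3)
    kwds = {"scale": 10}

    # Python 2 form handled by this fixer:
    #     apply(f, args, kwds)
    # Python 3 equivalent produced by the transformation:
    print(f(*args, **kwds))   # 50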
diff --git a/Lib/lib2to3/fixes/fix_basestring.py b/Lib/lib2to3/fixes/fix_basestring.py
new file mode 100644
index 0000000..6d753d8
--- /dev/null
+++ b/Lib/lib2to3/fixes/fix_basestring.py
@@ -0,0 +1,13 @@
+"""Fixer for basestring -> str."""
+# Author: Christian Heimes
+
+# Local imports
+from . import basefix
+from .util import Name
+
+class FixBasestring(basefix.BaseFix):
+
+ PATTERN = "'basestring'"
+
+ def transform(self, node, results):
+ return Name("str", prefix=node.get_prefix())
diff --git a/Lib/lib2to3/fixes/fix_buffer.py b/Lib/lib2to3/fixes/fix_buffer.py
new file mode 100644
index 0000000..13168d6
--- /dev/null
+++ b/Lib/lib2to3/fixes/fix_buffer.py
@@ -0,0 +1,21 @@
+# Copyright 2007 Google, Inc. All Rights Reserved.
+# Licensed to PSF under a Contributor Agreement.
+
+"""Fixer that changes buffer(...) into memoryview(...)."""
+
+# Local imports
+from . import basefix
+from .util import Name
+
+
+class FixBuffer(basefix.BaseFix):
+
+ explicit = True # The user must ask for this fixer
+
+ PATTERN = """
+ power< name='buffer' trailer< '(' [any] ')' > >
+ """
+
+ def transform(self, node, results):
+ name = results["name"]
+ name.replace(Name("memoryview", prefix=name.get_prefix()))
diff --git a/Lib/lib2to3/fixes/fix_callable.py b/Lib/lib2to3/fixes/fix_callable.py
new file mode 100644
index 0000000..90b3515
--- /dev/null
+++ b/Lib/lib2to3/fixes/fix_callable.py
@@ -0,0 +1,31 @@
+# Copyright 2007 Google, Inc. All Rights Reserved.
+# Licensed to PSF under a Contributor Agreement.
+
+"""Fixer for callable().
+
+This converts callable(obj) into hasattr(obj, '__call__')."""
+
+# Local imports
+from .. import pytree
+from . import basefix
+from .util import Call, Name, String
+
+class FixCallable(basefix.BaseFix):
+
+ # Ignore callable(*args) or use of keywords.
+ # Either could be a hint that the builtin callable() is not being used.
+ PATTERN = """
+ power< 'callable'
+ trailer< lpar='('
+ ( not(arglist | argument<any '=' any>) func=any
+ | func=arglist<(not argument<any '=' any>) any ','> )
+ rpar=')' >
+ after=any*
+ >
+ """
+
+ def transform(self, node, results):
+ func = results["func"]
+
+ args = [func.clone(), String(', '), String("'__call__'")]
+ return Call(Name("hasattr"), args, prefix=node.get_prefix())
diff --git a/Lib/lib2to3/fixes/fix_dict.py b/Lib/lib2to3/fixes/fix_dict.py
new file mode 100644
index 0000000..f76ceb4
--- /dev/null
+++ b/Lib/lib2to3/fixes/fix_dict.py
@@ -0,0 +1,99 @@
+# Copyright 2007 Google, Inc. All Rights Reserved.
+# Licensed to PSF under a Contributor Agreement.
+
+"""Fixer for dict methods.
+
+d.keys() -> list(d.keys())
+d.items() -> list(d.items())
+d.values() -> list(d.values())
+
+d.iterkeys() -> iter(d.keys())
+d.iteritems() -> iter(d.items())
+d.itervalues() -> iter(d.values())
+
+Except in certain very specific contexts: the iter() can be dropped
+when the context is list(), sorted(), iter() or for...in; the list()
+can be dropped when the context is list() or sorted() (but not iter()
+or for...in!). Special contexts that apply to both: list(), sorted(), tuple(),
+set(), any(), all(), sum().
+
+Note: iter(d.keys()) could be written as iter(d) but since the
+original d.iterkeys() was also redundant we don't fix this. And there
+are (rare) contexts where it makes a difference (e.g. when passing it
+as an argument to a function that introspects the argument).
+"""
+
+# Local imports
+from .. import pytree
+from .. import patcomp
+from ..pgen2 import token
+from . import basefix
+from .util import Name, Call, LParen, RParen, ArgList, Dot, set
+
+
+exempt = set(["sorted", "list", "set", "any", "all", "tuple", "sum"])
+iter_exempt = exempt | set(["iter"])
+
+
+class FixDict(basefix.BaseFix):
+ PATTERN = """
+ power< head=any+
+ trailer< '.' method=('keys'|'items'|'values'|
+ 'iterkeys'|'iteritems'|'itervalues') >
+ parens=trailer< '(' ')' >
+ tail=any*
+ >
+ """
+
+ def transform(self, node, results):
+ head = results["head"]
+ method = results["method"][0] # Extract node for method name
+ tail = results["tail"]
+ syms = self.syms
+ method_name = method.value
+ isiter = method_name.startswith("iter")
+ if isiter:
+ method_name = method_name[4:]
+ assert method_name in ("keys", "items", "values"), repr(method)
+ head = [n.clone() for n in head]
+ tail = [n.clone() for n in tail]
+ special = not tail and self.in_special_context(node, isiter)
+ args = head + [pytree.Node(syms.trailer,
+ [Dot(),
+ Name(method_name,
+ prefix=method.get_prefix())]),
+ results["parens"].clone()]
+ new = pytree.Node(syms.power, args)
+ if not special:
+ new.set_prefix("")
+ new = Call(Name(isiter and "iter" or "list"), [new])
+ if tail:
+ new = pytree.Node(syms.power, [new] + tail)
+ new.set_prefix(node.get_prefix())
+ return new
+
+ P1 = "power< func=NAME trailer< '(' node=any ')' > any* >"
+ p1 = patcomp.compile_pattern(P1)
+
+ P2 = """for_stmt< 'for' any 'in' node=any ':' any* >
+ | comp_for< 'for' any 'in' node=any any* >
+ """
+ p2 = patcomp.compile_pattern(P2)
+
+ def in_special_context(self, node, isiter):
+ if node.parent is None:
+ return False
+ results = {}
+ if (node.parent.parent is not None and
+ self.p1.match(node.parent.parent, results) and
+ results["node"] is node):
+ if isiter:
+ # iter(d.iterkeys()) -> iter(d.keys()), etc.
+ return results["func"].value in iter_exempt
+ else:
+ # list(d.keys()) -> list(d.keys()), etc.
+ return results["func"].value in exempt
+ if not isiter:
+ return False
+ # for ... in d.iterkeys() -> for ... in d.keys(), etc.
+ return self.p2.match(node.parent, results) and results["node"] is node
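A hand-written illustration of the default fix_dict.py rewrites, plus the exempt contexts described in the docstring where the wrapper is dropped:

    d = {"a": 1, "b": 2}

    # d.keys()      -> list(d.keys())
    # d.iteritems() -> iter(d.items())
    ks = list(d.keys())          # a real list, as in Python 2
    pairs = iter(d.items())      # an iterator, like the old iteritems()
    print(ks)
    print(next(pairs))

    # In an exempt context no wrapper is added:
    for k in d.keys():           # no iter(...) needed in a for loop
        print(k)
    print(sorted(d.values()))    # no list(...) needed inside sorted()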
diff --git a/Lib/lib2to3/fixes/fix_except.py b/Lib/lib2to3/fixes/fix_except.py
new file mode 100644
index 0000000..340575a
--- /dev/null
+++ b/Lib/lib2to3/fixes/fix_except.py
@@ -0,0 +1,89 @@
+"""Fixer for except statements with named exceptions.
+
+The following cases will be converted:
+
+- "except E, T:" where T is a name:
+
+ except E as T:
+
+- "except E, T:" where T is not a name, tuple or list:
+
+ except E as t:
+ T = t
+
+ This is done because the target of an "except" clause must be a
+ name.
+
+- "except E, T:" where T is a tuple or list literal:
+
+ except E as t:
+ T = t.args
+"""
+# Author: Collin Winter
+
+# Local imports
+from .. import pytree
+from ..pgen2 import token
+from . import basefix
+from .util import Assign, Attr, Name, is_tuple, is_list, reversed
+
+def find_excepts(nodes):
+ for i, n in enumerate(nodes):
+ if isinstance(n, pytree.Node):
+ if n.children[0].value == 'except':
+ yield (n, nodes[i+2])
+
+class FixExcept(basefix.BaseFix):
+
+ PATTERN = """
+ try_stmt< 'try' ':' suite
+ cleanup=((except_clause ':' suite)+ ['else' ':' suite]
+ ['finally' ':' suite]
+ | 'finally' ':' suite) >
+ """
+
+ def transform(self, node, results):
+ syms = self.syms
+
+ try_cleanup = [ch.clone() for ch in results['cleanup']]
+ for except_clause, e_suite in find_excepts(try_cleanup):
+ if len(except_clause.children) == 4:
+ (E, comma, N) = except_clause.children[1:4]
+ comma.replace(Name("as", prefix=" "))
+
+ if N.type != token.NAME:
+ # Generate a new N for the except clause
+ new_N = Name(self.new_name(), prefix=" ")
+ target = N.clone()
+ target.set_prefix("")
+ N.replace(new_N)
+ new_N = new_N.clone()
+
+ # Insert "old_N = new_N" as the first statement in
+ # the except body. This loop skips leading whitespace
+ # and indents
+ #TODO(cwinter) suite-cleanup
+ suite_stmts = e_suite.children
+ for i, stmt in enumerate(suite_stmts):
+ if isinstance(stmt, pytree.Node):
+ break
+
+ # The assignment is different if old_N is a tuple or list
+ # In that case, the assignment is old_N = new_N.args
+ if is_tuple(N) or is_list(N):
+ assign = Assign(target, Attr(new_N, Name('args')))
+ else:
+ assign = Assign(target, new_N)
+
+ #TODO(cwinter) stopgap until children becomes a smart list
+ for child in reversed(suite_stmts[:i]):
+ e_suite.insert_child(0, child)
+ e_suite.insert_child(i, assign)
+ elif N.get_prefix() == "":
+ # No space after a comma is legal; no space after "as",
+ # not so much.
+ N.set_prefix(" ")
+
+ #TODO(cwinter) fix this when children becomes a smart list
+ children = [c.clone() for c in node.children[:3]] + try_cleanup
+ return pytree.Node(node.type, children)
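A hand-written sketch of the converted except clauses; the temporary name xxx_todo_changeme is the default produced by new_name() in basefix.py when the old target was not a plain name:

    # Case 1: "except E, T:" where T is a plain name.
    try:
        {}["missing"]
    except KeyError as err:
        print("caught", err)

    # Cases 2/3: the fixer binds a generated name and unpacks it
    # on the next line when the old target was a tuple or list.
    try:
        raise ValueError("a", "b")
    except ValueError as xxx_todo_changeme:
        (first, second) = xxx_todo_changeme.args
        print(first, second)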
diff --git a/Lib/lib2to3/fixes/fix_exec.py b/Lib/lib2to3/fixes/fix_exec.py
new file mode 100644
index 0000000..2e45cb6
--- /dev/null
+++ b/Lib/lib2to3/fixes/fix_exec.py
@@ -0,0 +1,39 @@
+# Copyright 2006 Google, Inc. All Rights Reserved.
+# Licensed to PSF under a Contributor Agreement.
+
+"""Fixer for exec.
+
+This converts usages of the exec statement into calls to a built-in
+exec() function.
+
+exec code in ns1, ns2 -> exec(code, ns1, ns2)
+"""
+
+# Local imports
+from .. import pytree
+from . import basefix
+from .util import Comma, Name, Call
+
+
+class FixExec(basefix.BaseFix):
+
+ PATTERN = """
+ exec_stmt< 'exec' a=any 'in' b=any [',' c=any] >
+ |
+ exec_stmt< 'exec' (not atom<'(' [any] ')'>) a=any >
+ """
+
+ def transform(self, node, results):
+ assert results
+ syms = self.syms
+ a = results["a"]
+ b = results.get("b")
+ c = results.get("c")
+ args = [a.clone()]
+ args[0].set_prefix("")
+ if b is not None:
+ args.extend([Comma(), b.clone()])
+ if c is not None:
+ args.extend([Comma(), c.clone()])
+
+ return Call(Name("exec"), args, prefix=node.get_prefix())
diff --git a/Lib/lib2to3/fixes/fix_execfile.py b/Lib/lib2to3/fixes/fix_execfile.py
new file mode 100644
index 0000000..0e67f09
--- /dev/null
+++ b/Lib/lib2to3/fixes/fix_execfile.py
@@ -0,0 +1,37 @@
+# Copyright 2006 Google, Inc. All Rights Reserved.
+# Licensed to PSF under a Contributor Agreement.
+
+"""Fixer for execfile.
+
+This converts usages of the execfile function into calls to the built-in
+exec() function.
+"""
+
+from .. import pytree
+from . import basefix
+from .util import Comma, Name, Call, LParen, RParen, Dot
+
+
+class FixExecfile(basefix.BaseFix):
+
+ PATTERN = """
+ power< 'execfile' trailer< '(' arglist< filename=any [',' globals=any [',' locals=any ] ] > ')' > >
+ |
+ power< 'execfile' trailer< '(' filename=any ')' > >
+ """
+
+ def transform(self, node, results):
+ assert results
+ syms = self.syms
+ filename = results["filename"]
+ globals = results.get("globals")
+ locals = results.get("locals")
+ args = [Name('open'), LParen(), filename.clone(), RParen(), Dot(),
+ Name('read'), LParen(), RParen()]
+ args[0].set_prefix("")
+ if globals is not None:
+ args.extend([Comma(), globals.clone()])
+ if locals is not None:
+ args.extend([Comma(), locals.clone()])
+
+ return Call(Name("exec"), args, prefix=node.get_prefix())
diff --git a/Lib/lib2to3/fixes/fix_filter.py b/Lib/lib2to3/fixes/fix_filter.py
new file mode 100644
index 0000000..096b9c8
--- /dev/null
+++ b/Lib/lib2to3/fixes/fix_filter.py
@@ -0,0 +1,119 @@
+# Copyright 2007 Google, Inc. All Rights Reserved.
+# Licensed to PSF under a Contributor Agreement.
+
+"""Fixer that changes filter(F, X) into list(filter(F, X)).
+
+We avoid the transformation if the filter() call is directly contained
+in iter(<>), list(<>), tuple(<>), sorted(<>), ...join(<>), or
+for V in <>:.
+
+NOTE: This is still not correct if the original code was depending on
+filter(F, X) to return a string if X is a string and a tuple if X is a
+tuple. That would require type inference, which we don't do. Let
+Python 2.6 figure it out.
+"""
+
+# Local imports
+from .. import pytree
+from .. import patcomp
+from ..pgen2 import token
+from . import basefix
+from .util import Name, Call, ListComp, attr_chain, does_tree_import
+
+class FixFilter(basefix.BaseFix):
+
+ PATTERN = """
+ filter_lambda=power<
+ 'filter'
+ trailer<
+ '('
+ arglist<
+ lambdef< 'lambda'
+ (fp=NAME | vfpdef< '(' fp=NAME ')'> ) ':' xp=any
+ >
+ ','
+ it=any
+ >
+ ')'
+ >
+ >
+ |
+ power<
+ 'filter'
+ trailer< '(' arglist< none='None' ',' seq=any > ')' >
+ >
+ |
+ power<
+ 'filter'
+ args=trailer< '(' [any] ')' >
+ >
+ """
+
+ def start_tree(self, *args):
+ super(FixFilter, self).start_tree(*args)
+ self._new_filter = None
+
+ def has_new_filter(self, node):
+ if self._new_filter is not None:
+ return self._new_filter
+ self._new_filter = does_tree_import('future_builtins', 'filter', node)
+ return self._new_filter
+
+ def transform(self, node, results):
+ if self.has_new_filter(node):
+ # If filter is imported from future_builtins, we don't want to
+ # do anything here.
+ return
+
+ if "filter_lambda" in results:
+ new = ListComp(results.get("fp").clone(),
+ results.get("fp").clone(),
+ results.get("it").clone(),
+ results.get("xp").clone())
+
+ elif "none" in results:
+ new = ListComp(Name("_f"),
+ Name("_f"),
+ results["seq"].clone(),
+ Name("_f"))
+
+ else:
+ if in_special_context(node):
+ return None
+ new = node.clone()
+ new.set_prefix("")
+ new = Call(Name("list"), [new])
+ new.set_prefix(node.get_prefix())
+ return new
+
+P0 = """for_stmt< 'for' any 'in' node=any ':' any* >
+ | comp_for< 'for' any 'in' node=any any* >
+ """
+p0 = patcomp.compile_pattern(P0)
+
+P1 = """
+power<
+ ( 'iter' | 'list' | 'tuple' | 'sorted' | 'set' | 'sum' |
+ 'any' | 'all' | (any* trailer< '.' 'join' >) )
+ trailer< '(' node=any ')' >
+ any*
+>
+"""
+p1 = patcomp.compile_pattern(P1)
+
+P2 = """
+power<
+ 'sorted'
+ trailer< '(' arglist<node=any any*> ')' >
+ any*
+>
+"""
+p2 = patcomp.compile_pattern(P2)
+
+def in_special_context(node):
+ patterns = [p0, p1, p2]
+ for pattern, parent in zip(patterns, attr_chain(node, "parent")):
+ results = {}
+ if pattern.match(parent, results) and results["node"] is node:
+ return True
+ return False
diff --git a/Lib/lib2to3/fixes/fix_funcattrs.py b/Lib/lib2to3/fixes/fix_funcattrs.py
new file mode 100644
index 0000000..a9ba125
--- /dev/null
+++ b/Lib/lib2to3/fixes/fix_funcattrs.py
@@ -0,0 +1,19 @@
+"""Fix function attribute names (f.func_x -> f.__x__)."""
+# Author: Collin Winter
+
+# Local imports
+from . import basefix
+from .util import Name
+
+
+class FixFuncattrs(basefix.BaseFix):
+ PATTERN = """
+ power< any+ trailer< '.' attr=('func_closure' | 'func_doc' | 'func_globals'
+ | 'func_name' | 'func_defaults' | 'func_code'
+ | 'func_dict') > any* >
+ """
+
+ def transform(self, node, results):
+ attr = results["attr"][0]
+ attr.replace(Name(("__%s__" % attr.value[5:]),
+ prefix=attr.get_prefix()))
diff --git a/Lib/lib2to3/fixes/fix_future.py b/Lib/lib2to3/fixes/fix_future.py
new file mode 100644
index 0000000..5476df4
--- /dev/null
+++ b/Lib/lib2to3/fixes/fix_future.py
@@ -0,0 +1,16 @@
+"""Remove __future__ imports
+
+from __future__ import foo is replaced with an empty line.
+"""
+# Author: Christian Heimes
+
+# Local imports
+from . import basefix
+from .util import BlankLine
+
+class FixFuture(basefix.BaseFix):
+ PATTERN = """import_from< 'from' module_name="__future__" 'import' any >"""
+
+ def transform(self, node, results):
+ return BlankLine()
+
diff --git a/Lib/lib2to3/fixes/fix_has_key.py b/Lib/lib2to3/fixes/fix_has_key.py
new file mode 100644
index 0000000..ea7bfc5
--- /dev/null
+++ b/Lib/lib2to3/fixes/fix_has_key.py
@@ -0,0 +1,109 @@
+# Copyright 2006 Google, Inc. All Rights Reserved.
+# Licensed to PSF under a Contributor Agreement.
+
+"""Fixer for has_key().
+
+Calls to .has_key() methods are expressed in terms of the 'in'
+operator:
+
+ d.has_key(k) -> k in d
+
+CAVEATS:
+1) While the primary target of this fixer is dict.has_key(), the
+ fixer will change any has_key() method call, regardless of its
+ class.
+
+2) Cases like this will not be converted:
+
+ m = d.has_key
+ if m(k):
+ ...
+
+ Only *calls* to has_key() are converted. While it is possible to
+ convert the above to something like
+
+ m = d.__contains__
+ if m(k):
+ ...
+
+ this is currently not done.
+"""
+
+# Local imports
+from .. import pytree
+from ..pgen2 import token
+from . import basefix
+from .util import Name
+
+
+class FixHasKey(basefix.BaseFix):
+
+ PATTERN = """
+ anchor=power<
+ before=any+
+ trailer< '.' 'has_key' >
+ trailer<
+ '('
+ ( not(arglist | argument<any '=' any>) arg=any
+ | arglist<(not argument<any '=' any>) arg=any ','>
+ )
+ ')'
+ >
+ after=any*
+ >
+ |
+ negation=not_test<
+ 'not'
+ anchor=power<
+ before=any+
+ trailer< '.' 'has_key' >
+ trailer<
+ '('
+ ( not(arglist | argument<any '=' any>) arg=any
+ | arglist<(not argument<any '=' any>) arg=any ','>
+ )
+ ')'
+ >
+ >
+ >
+ """
+
+ def transform(self, node, results):
+ assert results
+ syms = self.syms
+ if (node.parent.type == syms.not_test and
+ self.pattern.match(node.parent)):
+ # Don't transform a node matching the first alternative of the
+ # pattern when its parent matches the second alternative
+ return None
+ negation = results.get("negation")
+ anchor = results["anchor"]
+ prefix = node.get_prefix()
+ before = [n.clone() for n in results["before"]]
+ arg = results["arg"].clone()
+ after = results.get("after")
+ if after:
+ after = [n.clone() for n in after]
+ if arg.type in (syms.comparison, syms.not_test, syms.and_test,
+ syms.or_test, syms.test, syms.lambdef, syms.argument):
+ arg = self.parenthesize(arg)
+ if len(before) == 1:
+ before = before[0]
+ else:
+ before = pytree.Node(syms.power, before)
+ before.set_prefix(" ")
+ n_op = Name("in", prefix=" ")
+ if negation:
+ n_not = Name("not", prefix=" ")
+ n_op = pytree.Node(syms.comp_op, (n_not, n_op))
+ new = pytree.Node(syms.comparison, (arg, n_op, before))
+ if after:
+ new = self.parenthesize(new)
+ new = pytree.Node(syms.power, (new,) + tuple(after))
+ if node.parent.type in (syms.comparison, syms.expr, syms.xor_expr,
+ syms.and_expr, syms.shift_expr,
+ syms.arith_expr, syms.term,
+ syms.factor, syms.power):
+ new = self.parenthesize(new)
+ new.set_prefix(prefix)
+ return new
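The membership test fix_has_key.py emits, hand-written, including the negated variant handled by the second pattern alternative:

    d = {"spam": 1}

    # d.has_key("spam")      ->  "spam" in d
    # not d.has_key("eggs")  ->  "eggs" not in d
    print("spam" in d)       # True
    print("eggs" not in d)   # True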
diff --git a/Lib/lib2to3/fixes/fix_idioms.py b/Lib/lib2to3/fixes/fix_idioms.py
new file mode 100644
index 0000000..e2f937b
--- /dev/null
+++ b/Lib/lib2to3/fixes/fix_idioms.py
@@ -0,0 +1,134 @@
+"""Adjust some old Python 2 idioms to their modern counterparts.
+
+* Change some type comparisons to isinstance() calls:
+ type(x) == T -> isinstance(x, T)
+ type(x) is T -> isinstance(x, T)
+ type(x) != T -> not isinstance(x, T)
+ type(x) is not T -> not isinstance(x, T)
+
+* Change "while 1:" into "while True:".
+
+* Change both
+
+ v = list(EXPR)
+ v.sort()
+ foo(v)
+
+and the more general
+
+ v = EXPR
+ v.sort()
+ foo(v)
+
+into
+
+ v = sorted(EXPR)
+ foo(v)
+"""
+# Author: Jacques Frechet, Collin Winter
+
+# Local imports
+from . import basefix
+from .util import Call, Comma, Name, Node, syms
+
+CMP = "(n='!=' | '==' | 'is' | n=comp_op< 'is' 'not' >)"
+TYPE = "power< 'type' trailer< '(' x=any ')' > >"
+
+class FixIdioms(basefix.BaseFix):
+
+ explicit = True # The user must ask for this fixer
+
+ PATTERN = r"""
+ isinstance=comparison< %s %s T=any >
+ |
+ isinstance=comparison< T=any %s %s >
+ |
+ while_stmt< 'while' while='1' ':' any+ >
+ |
+ sorted=any<
+ any*
+ simple_stmt<
+ expr_stmt< id1=any '='
+ power< list='list' trailer< '(' (not arglist<any+>) any ')' > >
+ >
+ '\n'
+ >
+ sort=
+ simple_stmt<
+ power< id2=any
+ trailer< '.' 'sort' > trailer< '(' ')' >
+ >
+ '\n'
+ >
+ next=any*
+ >
+ |
+ sorted=any<
+ any*
+ simple_stmt< expr_stmt< id1=any '=' expr=any > '\n' >
+ sort=
+ simple_stmt<
+ power< id2=any
+ trailer< '.' 'sort' > trailer< '(' ')' >
+ >
+ '\n'
+ >
+ next=any*
+ >
+ """ % (TYPE, CMP, CMP, TYPE)
+
+ def match(self, node):
+ r = super(FixIdioms, self).match(node)
+ # If we've matched one of the sort/sorted subpatterns above, we
+ # want to reject matches where the initial assignment and the
+ # subsequent .sort() call involve different identifiers.
+ if r and "sorted" in r:
+ if r["id1"] == r["id2"]:
+ return r
+ return None
+ return r
+
+ def transform(self, node, results):
+ if "isinstance" in results:
+ return self.transform_isinstance(node, results)
+ elif "while" in results:
+ return self.transform_while(node, results)
+ elif "sorted" in results:
+ return self.transform_sort(node, results)
+ else:
+ raise RuntimeError("Invalid match")
+
+ def transform_isinstance(self, node, results):
+ x = results["x"].clone() # The thing inside of type()
+ T = results["T"].clone() # The type being compared against
+ x.set_prefix("")
+ T.set_prefix(" ")
+ test = Call(Name("isinstance"), [x, Comma(), T])
+ if "n" in results:
+ test.set_prefix(" ")
+ test = Node(syms.not_test, [Name("not"), test])
+ test.set_prefix(node.get_prefix())
+ return test
+
+ def transform_while(self, node, results):
+ one = results["while"]
+ one.replace(Name("True", prefix=one.get_prefix()))
+
+ def transform_sort(self, node, results):
+ sort_stmt = results["sort"]
+ next_stmt = results["next"]
+ list_call = results.get("list")
+ simple_expr = results.get("expr")
+
+ if list_call:
+ list_call.replace(Name("sorted", prefix=list_call.get_prefix()))
+ elif simple_expr:
+ new = simple_expr.clone()
+ new.set_prefix("")
+ simple_expr.replace(Call(Name("sorted"), [new],
+ prefix=simple_expr.get_prefix()))
+ else:
+ raise RuntimeError("should not have reached here")
+ sort_stmt.remove()
+ if next_stmt:
+ next_stmt[0].set_prefix(sort_stmt.get_prefix())
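Hand-written before/after versions of the three idioms this fixer rewrites:

    x = 3.14

    # type(x) == float          ->  isinstance(x, float)
    print(isinstance(x, float))

    # while 1:                  ->  while True:
    count = 0
    while True:
        count += 1
        if count == 3:
            break

    # v = list(EXPR); v.sort()  ->  v = sorted(EXPR)
    v = sorted([3, 1, 2])
    print(v)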
diff --git a/Lib/lib2to3/fixes/fix_imports.py b/Lib/lib2to3/fixes/fix_imports.py
new file mode 100644
index 0000000..1ad672b
--- /dev/null
+++ b/Lib/lib2to3/fixes/fix_imports.py
@@ -0,0 +1,89 @@
+"""Fix incompatible imports and module references.
+
+Fixes:
+ * StringIO -> io
+ * cStringIO -> io
+ * md5 -> hashlib
+"""
+# Author: Collin Winter
+
+# Local imports
+from . import basefix
+from .util import Name, attr_chain, any, set
+import __builtin__
+builtin_names = [name for name in dir(__builtin__)
+ if name not in ("__name__", "__doc__")]
+
+MAPPING = {"StringIO": ("io", ["StringIO"]),
+ "cStringIO": ("io", ["StringIO"]),
+ "__builtin__" : ("builtins", builtin_names),
+ }
+
+
+def alternates(members):
+ return "(" + "|".join(map(repr, members)) + ")"
+
+
+def build_pattern():
+ bare = set()
+ for old_module, (new_module, members) in MAPPING.items():
+ bare.add(old_module)
+ bare.update(members)
+ members = alternates(members)
+ yield """import_name< 'import' (module=%r
+ | dotted_as_names< any* module=%r any* >) >
+ """ % (old_module, old_module)
+ yield """import_from< 'from' module_name=%r 'import'
+ ( %s | import_as_name< %s 'as' any >) >
+ """ % (old_module, members, members)
+ yield """import_from< 'from' module_name=%r 'import' star='*' >
+ """ % old_module
+ yield """import_name< 'import'
+ dotted_as_name< module_name=%r 'as' any > >
+ """ % old_module
+ yield """power< module_name=%r trailer< '.' %s > any* >
+ """ % (old_module, members)
+ yield """bare_name=%s""" % alternates(bare)
+
+
+class FixImports(basefix.BaseFix):
+ PATTERN = "|".join(build_pattern())
+
+ order = "pre" # Pre-order tree traversal
+
+ # Don't match the node if it's within another match
+ def match(self, node):
+ match = super(FixImports, self).match
+ results = match(node)
+ if results:
+ if any([match(obj) for obj in attr_chain(node, "parent")]):
+ return False
+ return results
+ return False
+
+ def start_tree(self, tree, filename):
+ super(FixImports, self).start_tree(tree, filename)
+ self.replace = {}
+
+ def transform(self, node, results):
+ import_mod = results.get("module")
+ mod_name = results.get("module_name")
+ bare_name = results.get("bare_name")
+ star = results.get("star")
+
+ if import_mod or mod_name:
+ new_name, members = MAPPING[(import_mod or mod_name).value]
+
+ if import_mod:
+ self.replace[import_mod.value] = new_name
+ import_mod.replace(Name(new_name, prefix=import_mod.get_prefix()))
+ elif mod_name:
+ if star:
+ self.cannot_convert(node, "Cannot handle star imports.")
+ else:
+ mod_name.replace(Name(new_name, prefix=mod_name.get_prefix()))
+ elif bare_name:
+ bare_name = bare_name[0]
+ new_name = self.replace.get(bare_name.value)
+ if new_name:
+ bare_name.replace(Name(new_name, prefix=bare_name.get_prefix()))
diff --git a/Lib/lib2to3/fixes/fix_input.py b/Lib/lib2to3/fixes/fix_input.py
new file mode 100644
index 0000000..5b88f3a
--- /dev/null
+++ b/Lib/lib2to3/fixes/fix_input.py
@@ -0,0 +1,26 @@
+"""Fixer that changes input(...) into eval(input(...))."""
+# Author: Andre Roberge
+
+# Local imports
+from . import basefix
+from .util import Call, Name
+from .. import patcomp
+
+
+context = patcomp.compile_pattern("power< 'eval' trailer< '(' any ')' > >")
+
+
+class FixInput(basefix.BaseFix):
+
+ PATTERN = """
+ power< 'input' args=trailer< '(' [any] ')' > >
+ """
+
+ def transform(self, node, results):
+ # If we're already wrapped in an eval() call, we're done.
+ if context.match(node.parent.parent):
+ return
+
+ new = node.clone()
+ new.set_prefix("")
+ return Call(Name("eval"), [new], prefix=node.get_prefix())
diff --git a/Lib/lib2to3/fixes/fix_intern.py b/Lib/lib2to3/fixes/fix_intern.py
new file mode 100644
index 0000000..6d33f8c
--- /dev/null
+++ b/Lib/lib2to3/fixes/fix_intern.py
@@ -0,0 +1,43 @@
+# Copyright 2006 Georg Brandl.
+# Licensed to PSF under a Contributor Agreement.
+
+"""Fixer for intern().
+
+intern(s) -> sys.intern(s)"""
+
+# Local imports
+from .. import pytree
+from . import basefix
+from .util import Name, Attr
+
+
+class FixIntern(basefix.BaseFix):
+
+ PATTERN = """
+ power< 'intern'
+ trailer< lpar='('
+ ( not(arglist | argument<any '=' any>) obj=any
+ | obj=arglist<(not argument<any '=' any>) any ','> )
+ rpar=')' >
+ after=any*
+ >
+ """
+
+ def transform(self, node, results):
+ syms = self.syms
+ obj = results["obj"].clone()
+ if obj.type == syms.arglist:
+ newarglist = obj.clone()
+ else:
+ newarglist = pytree.Node(syms.arglist, [obj.clone()])
+ after = results["after"]
+ if after:
+ after = [n.clone() for n in after]
+ new = pytree.Node(syms.power,
+ Attr(Name("sys"), Name("intern")) +
+ [pytree.Node(syms.trailer,
+ [results["lpar"].clone(),
+ newarglist,
+ results["rpar"].clone()])] + after)
+ new.set_prefix(node.get_prefix())
+ return new
diff --git a/Lib/lib2to3/fixes/fix_itertools.py b/Lib/lib2to3/fixes/fix_itertools.py
new file mode 100644
index 0000000..af49270
--- /dev/null
+++ b/Lib/lib2to3/fixes/fix_itertools.py
@@ -0,0 +1,36 @@
+""" Fixer for itertools.(imap|ifilter|izip) --> (map|filter|zip) and
+ itertools.ifilterfalse --> itertools.filterfalse (bugs 2360-2363)
+
+ If itertools is imported as something else (e.g., import itertools as it;
+ it.izip(spam, eggs)), the method calls will not be fixed.
+ """
+
+# Local imports
+from . import basefix
+from .util import Name
+
+class FixItertools(basefix.BaseFix):
+ it_funcs = "('imap'|'ifilter'|'izip'|'ifilterfalse')"
+ PATTERN = """
+ power< it='itertools'
+ trailer<
+ dot='.' func=%(it_funcs)s > trailer< '(' [any] ')' > >
+ |
+ power< func=%(it_funcs)s trailer< '(' [any] ')' > >
+ """ %(locals())
+
+ def transform(self, node, results):
+ prefix = None
+ func = results['func'][0]
+ if 'it' in results and func.value != 'ifilterfalse':
+ dot, it = (results['dot'], results['it'])
+ # Remove the 'itertools'
+ prefix = it.get_prefix()
+ it.remove()
+ # Replace the node which contains ('.', 'function') with the
+ # function (to be consistent with the second part of the pattern)
+ dot.remove()
+ func.parent.replace(func)
+
+ prefix = prefix or func.get_prefix()
+ func.replace(Name(func.value[1:], prefix=prefix))
diff --git a/Lib/lib2to3/fixes/fix_long.py b/Lib/lib2to3/fixes/fix_long.py
new file mode 100644
index 0000000..1987e96
--- /dev/null
+++ b/Lib/lib2to3/fixes/fix_long.py
@@ -0,0 +1,35 @@
+# Copyright 2006 Google, Inc. All Rights Reserved.
+# Licensed to PSF under a Contributor Agreement.
+
+"""Fixer that turns 'long' into 'int' everywhere.
+
+This also strips the trailing 'L' or 'l' from long literals.
+"""
+
+# Local imports
+from .. import pytree
+from . import basefix
+from .util import Name, Number
+
+
+class FixLong(basefix.BaseFix):
+
+ PATTERN = """
+ (long_type = 'long' | number = NUMBER)
+ """
+
+ static_long = Name("long")
+ static_int = Name("int")
+
+ def transform(self, node, results):
+ long_type = results.get("long_type")
+ number = results.get("number")
+ new = None
+ if long_type:
+ assert node == self.static_long, node
+ new = self.static_int.clone()
+ if number and node.value[-1] in ("l", "L"):
+ new = Number(node.value[:-1])
+ if new is not None:
+ new.set_prefix(node.get_prefix())
+ return new
diff --git a/Lib/lib2to3/fixes/fix_map.py b/Lib/lib2to3/fixes/fix_map.py
new file mode 100644
index 0000000..b5dcaa2
--- /dev/null
+++ b/Lib/lib2to3/fixes/fix_map.py
@@ -0,0 +1,126 @@
+# Copyright 2007 Google, Inc. All Rights Reserved.
+# Licensed to PSF under a Contributor Agreement.
+
+"""Fixer that changes map(F, ...) into list(map(F, ...)) unless there
+exists a 'from future_builtins import map' statement in the top-level
+namespace.
+
+As a special case, map(None, X) is changed into list(X). (This is
+necessary because the semantics are changed in this case -- the new
+map(None, X) is equivalent to [(x,) for x in X].)
+
+We avoid the transformation (except for the special case mentioned
+above) if the map() call is directly contained in iter(<>), list(<>),
+tuple(<>), sorted(<>), ...join(<>), or for V in <>:.
+
+NOTE: This is still not correct if the original code was depending on
+map(F, X, Y, ...) to go on until the longest argument is exhausted,
+substituting None for missing values -- like zip(), it now stops as
+soon as the shortest argument is exhausted.
+"""
+
+# Local imports
+from .. import pytree
+from .. import patcomp
+from ..pgen2 import token
+from . import basefix
+from .util import Name, Call, ListComp, attr_chain, does_tree_import
+from ..pygram import python_symbols as syms
+
+class FixMap(basefix.BaseFix):
+
+ PATTERN = """
+ map_none=power<
+ 'map'
+ trailer< '(' arglist< 'None' ',' arg=any [','] > ')' >
+ >
+ |
+ map_lambda=power<
+ 'map'
+ trailer<
+ '('
+ arglist<
+ lambdef< 'lambda'
+ (fp=NAME | vfpdef< '(' fp=NAME ')'> ) ':' xp=any
+ >
+ ','
+ it=any
+ >
+ ')'
+ >
+ >
+ |
+ power<
+ 'map'
+ args=trailer< '(' [any] ')' >
+ >
+ """
+
+ def start_tree(self, *args):
+ super(FixMap, self).start_tree(*args)
+ self._future_map_found = None
+
+ def has_future_map(self, node):
+ if self._future_map_found is not None:
+ return self._future_map_found
+ self._future_map_found = does_tree_import('future_builtins', 'map', node)
+ return self._future_map_found
+
+ def transform(self, node, results):
+ if self.has_future_map(node):
+ # If a future map has been imported for this file, we won't
+ # be making any modifications
+ return
+
+ if node.parent.type == syms.simple_stmt:
+ self.warning(node, "You should use a for loop here")
+ new = node.clone()
+ new.set_prefix("")
+ new = Call(Name("list"), [new])
+ elif "map_lambda" in results:
+ new = ListComp(results.get("xp").clone(),
+ results.get("fp").clone(),
+ results.get("it").clone())
+ else:
+ if "map_none" in results:
+ new = results["arg"].clone()
+ else:
+ if in_special_context(node):
+ return None
+ new = node.clone()
+ new.set_prefix("")
+ new = Call(Name("list"), [new])
+ new.set_prefix(node.get_prefix())
+ return new
+
+P0 = """for_stmt< 'for' any 'in' node=any ':' any* >
+ | comp_for< 'for' any 'in' node=any any* >
+ """
+p0 = patcomp.compile_pattern(P0)
+
+P1 = """
+power<
+ ( 'iter' | 'list' | 'tuple' | 'sorted' | 'set' | 'sum' |
+ 'any' | 'all' | (any* trailer< '.' 'join' >) )
+ trailer< '(' node=any ')' >
+ any*
+>
+"""
+p1 = patcomp.compile_pattern(P1)
+
+P2 = """
+power<
+ 'sorted'
+ trailer< '(' arglist<node=any any*> ')' >
+ any*
+>
+"""
+p2 = patcomp.compile_pattern(P2)
+
+def in_special_context(node):
+ patterns = [p0, p1, p2]
+ for pattern, parent in zip(patterns, attr_chain(node, "parent")):
+ results = {}
+ if pattern.match(parent, results) and results["node"] is node:
+ return True
+ return False
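Hand-written examples of the map() rewrites described in the fix_map.py docstring, including the map(None, X) special case:

    xs = [1, 2, 3]

    # map(lambda x: x * 2, xs)  ->  [x * 2 for x in xs]
    print([x * 2 for x in xs])

    # map(str, xs)              ->  list(map(str, xs))
    print(list(map(str, xs)))

    # map(None, xs)             ->  list(xs)
    print(list(xs))

    # Inside an exempt context no list() wrapper is added:
    print(sorted(map(str, xs)))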
diff --git a/Lib/lib2to3/fixes/fix_methodattrs.py b/Lib/lib2to3/fixes/fix_methodattrs.py
new file mode 100644
index 0000000..3d0d7d7
--- /dev/null
+++ b/Lib/lib2to3/fixes/fix_methodattrs.py
@@ -0,0 +1,23 @@
+"""Fix bound method attributes (method.im_? -> method.__?__).
+"""
+# Author: Christian Heimes
+
+# Local imports
+from . import basefix
+from .util import Name
+
+MAP = {
+ "im_func" : "__func__",
+ "im_self" : "__self__",
+ "im_class" : "__self__.__class__"
+ }
+
+class FixMethodattrs(basefix.BaseFix):
+ PATTERN = """
+ power< any+ trailer< '.' attr=('im_func' | 'im_self' | 'im_class') > any* >
+ """
+
+ def transform(self, node, results):
+ attr = results["attr"][0]
+ new = MAP[attr.value]
+ attr.replace(Name(new, prefix=attr.get_prefix()))
diff --git a/Lib/lib2to3/fixes/fix_ne.py b/Lib/lib2to3/fixes/fix_ne.py
new file mode 100644
index 0000000..ecd18e0
--- /dev/null
+++ b/Lib/lib2to3/fixes/fix_ne.py
@@ -0,0 +1,22 @@
+# Copyright 2006 Google, Inc. All Rights Reserved.
+# Licensed to PSF under a Contributor Agreement.
+
+"""Fixer that turns <> into !=."""
+
+# Local imports
+from .. import pytree
+from ..pgen2 import token
+from . import basefix
+
+
+class FixNe(basefix.BaseFix):
+ # This is so simple that we don't need the pattern compiler.
+
+ def match(self, node):
+ # Override
+ return node.type == token.NOTEQUAL and node.value == "<>"
+
+ def transform(self, node, results):
+ new = pytree.Leaf(token.NOTEQUAL, "!=")
+ new.set_prefix(node.get_prefix())
+ return new
diff --git a/Lib/lib2to3/fixes/fix_next.py b/Lib/lib2to3/fixes/fix_next.py
new file mode 100644
index 0000000..adbc962
--- /dev/null
+++ b/Lib/lib2to3/fixes/fix_next.py
@@ -0,0 +1,104 @@
+"""Fixer for it.next() -> next(it), per PEP 3114."""
+# Author: Collin Winter
+
+# Things that currently aren't covered:
+# - listcomp "next" names aren't warned
+# - "with" statement targets aren't checked
+
+# Local imports
+from ..pgen2 import token
+from ..pygram import python_symbols as syms
+from . import basefix
+from .util import Name, Call, find_binding, any
+
+bind_warning = "Calls to builtin next() possibly shadowed by global binding"
+
+
+class FixNext(basefix.BaseFix):
+ PATTERN = """
+ power< base=any+ trailer< '.' attr='next' > trailer< '(' ')' > >
+ |
+ power< head=any+ trailer< '.' attr='next' > not trailer< '(' ')' > >
+ |
+ classdef< 'class' any+ ':'
+ suite< any*
+ funcdef< 'def'
+ name='next'
+ parameters< '(' NAME ')' > any+ >
+ any* > >
+ |
+ global=global_stmt< 'global' any* 'next' any* >
+ |
+ mod=file_input< any+ >
+ """
+
+ order = "pre" # Pre-order tree traversal
+
+ def start_tree(self, tree, filename):
+ super(FixNext, self).start_tree(tree, filename)
+ self.shadowed_next = False
+
+ def transform(self, node, results):
+ assert results
+
+ base = results.get("base")
+ attr = results.get("attr")
+ name = results.get("name")
+ mod = results.get("mod")
+
+ if base:
+ if self.shadowed_next:
+ attr.replace(Name("__next__", prefix=attr.get_prefix()))
+ else:
+ base = [n.clone() for n in base]
+ base[0].set_prefix("")
+ node.replace(Call(Name("next", prefix=node.get_prefix()), base))
+ elif name:
+ n = Name("__next__", prefix=name.get_prefix())
+ name.replace(n)
+ elif attr:
+ # We don't do this transformation if we're assigning to "x.next".
+ # Unfortunately, it doesn't seem possible to do this in PATTERN,
+ # so it's being done here.
+ if is_assign_target(node):
+ head = results["head"]
+ if "".join([str(n) for n in head]).strip() == '__builtin__':
+ self.warning(node, bind_warning)
+ return
+ attr.replace(Name("__next__"))
+ elif "global" in results:
+ self.warning(node, bind_warning)
+ self.shadowed_next = True
+ elif mod:
+ n = find_binding('next', mod)
+ if n:
+ self.warning(n, bind_warning)
+ self.shadowed_next = True
+
+
+### The following functions help test if node is part of an assignment
+### target.
+
+def is_assign_target(node):
+ assign = find_assign(node)
+ if assign is None:
+ return False
+
+ for child in assign.children:
+ if child.type == token.EQUAL:
+ return False
+ elif is_subtree(child, node):
+ return True
+ return False
+
+def find_assign(node):
+ if node.type == syms.expr_stmt:
+ return node
+ if node.type == syms.simple_stmt or node.parent is None:
+ return None
+ return find_assign(node.parent)
+
+def is_subtree(root, node):
+ if root == node:
+ return True
+ return any([is_subtree(c, node) for c in root.children])
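The call-form rewrite, hand-written; next() is the builtin introduced by PEP 3114, and __next__ is the method name used when the fixer renames a definition:

    it = iter([10, 20, 30])

    # it.next()  ->  next(it)
    print(next(it))   # 10
    print(next(it))   # 20

    # A "def next(self): ..." method inside a class is renamed to __next__:
    class Countdown:
        def __init__(self, n):
            self.n = n
        def __iter__(self):
            return self
        def __next__(self):
            if self.n == 0:
                raise StopIteration
            self.n -= 1
            return self.n

    print(list(Countdown(3)))   # [2, 1, 0]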
diff --git a/Lib/lib2to3/fixes/fix_nonzero.py b/Lib/lib2to3/fixes/fix_nonzero.py
new file mode 100644
index 0000000..4cf6875
--- /dev/null
+++ b/Lib/lib2to3/fixes/fix_nonzero.py
@@ -0,0 +1,20 @@
+"""Fixer for __nonzero__ -> __bool__ methods."""
+# Author: Collin Winter
+
+# Local imports
+from .import basefix
+from .util import Name, syms
+
+class FixNonzero(basefix.BaseFix):
+ PATTERN = """
+ classdef< 'class' any+ ':'
+ suite< any*
+ funcdef< 'def' name='__nonzero__'
+ parameters< '(' NAME ')' > any+ >
+ any* > >
+ """
+
+ def transform(self, node, results):
+ name = results["name"]
+ new = Name("__bool__", prefix=name.get_prefix())
+ name.replace(new)
diff --git a/Lib/lib2to3/fixes/fix_numliterals.py b/Lib/lib2to3/fixes/fix_numliterals.py
new file mode 100644
index 0000000..f88be60
--- /dev/null
+++ b/Lib/lib2to3/fixes/fix_numliterals.py
@@ -0,0 +1,27 @@
+"""Fixer that turns 1L into 1, 0755 into 0o755.
+"""
+# Copyright 2007 Georg Brandl.
+# Licensed to PSF under a Contributor Agreement.
+
+# Local imports
+from ..pgen2 import token
+from .import basefix
+from .util import Number, set
+
+
+class FixNumliterals(basefix.BaseFix):
+ # This is so simple that we don't need the pattern compiler.
+
+ def match(self, node):
+ # Override
+ return (node.type == token.NUMBER and
+ (node.value.startswith("0") or node.value[-1] in "Ll"))
+
+ def transform(self, node, results):
+ val = node.value
+ if val[-1] in 'Ll':
+ val = val[:-1]
+ elif val.startswith('0') and val.isdigit() and len(set(val)) > 1:
+ val = "0o" + val[1:]
+
+ return Number(val, prefix=node.get_prefix())
diff --git a/Lib/lib2to3/fixes/fix_print.py b/Lib/lib2to3/fixes/fix_print.py
new file mode 100644
index 0000000..aa5c60a
--- /dev/null
+++ b/Lib/lib2to3/fixes/fix_print.py
@@ -0,0 +1,81 @@
+# Copyright 2006 Google, Inc. All Rights Reserved.
+# Licensed to PSF under a Contributor Agreement.
+
+"""Fixer for print.
+
+Change:
+ 'print' into 'print()'
+ 'print ...' into 'print(...)'
+ 'print ... ,' into 'print(..., end=" ")'
+ 'print >>x, ...' into 'print(..., file=x)'
+"""
+
+# Local imports
+from .. import patcomp
+from .. import pytree
+from ..pgen2 import token
+from .import basefix
+from .util import Name, Call, Comma, String, is_tuple
+
+
+parend_expr = patcomp.compile_pattern(
+ """atom< '(' [atom|STRING|NAME] ')' >"""
+ )
+
+
+class FixPrint(basefix.BaseFix):
+
+ PATTERN = """
+ simple_stmt< bare='print' any > | print_stmt
+ """
+
+ def transform(self, node, results):
+ assert results
+ bare_print = results.get("bare")
+
+ if bare_print:
+ # Special-case print all by itself
+ bare_print.replace(Call(Name("print"), [],
+ prefix=bare_print.get_prefix()))
+ return
+ assert node.children[0] == Name("print")
+ args = node.children[1:]
+ if len(args) == 1 and parend_expr.match(args[0]):
+ # We don't want to keep sticking parens around an
+ # already-parenthesised expression.
+ return
+
+ sep = end = file = None
+ if args and args[-1] == Comma():
+ args = args[:-1]
+ end = " "
+ if args and args[0] == pytree.Leaf(token.RIGHTSHIFT, ">>"):
+ assert len(args) >= 2
+ file = args[1].clone()
+ args = args[3:] # Strip a possible comma after the file expression
+ # Now synthesize a print(args, sep=..., end=..., file=...) node.
+ l_args = [arg.clone() for arg in args]
+ if l_args:
+ l_args[0].set_prefix("")
+ if sep is not None or end is not None or file is not None:
+ if sep is not None:
+ self.add_kwarg(l_args, "sep", String(repr(sep)))
+ if end is not None:
+ self.add_kwarg(l_args, "end", String(repr(end)))
+ if file is not None:
+ self.add_kwarg(l_args, "file", file)
+ n_stmt = Call(Name("print"), l_args)
+ n_stmt.set_prefix(node.get_prefix())
+ return n_stmt
+
+ def add_kwarg(self, l_nodes, s_kwd, n_expr):
+ # XXX All this prefix-setting may lose comments (though rarely)
+ n_expr.set_prefix("")
+ n_argument = pytree.Node(self.syms.argument,
+ (Name(s_kwd),
+ pytree.Leaf(token.EQUAL, "="),
+ n_expr))
+ if l_nodes:
+ l_nodes.append(Comma())
+ n_argument.set_prefix(" ")
+ l_nodes.append(n_argument)
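Hand-written equivalents of the print-statement forms listed in the fix_print.py docstring:

    import sys

    # print                       ->  print()
    print()

    # print 1, 2, 3               ->  print(1, 2, 3)
    print(1, 2, 3)

    # print 4, 5,                 ->  print(4, 5, end=" ")
    print(4, 5, end=" ")
    print()  # finish the line started above

    # print >>sys.stderr, "oops"  ->  print("oops", file=sys.stderr)
    print("oops", file=sys.stderr)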
diff --git a/Lib/lib2to3/fixes/fix_raise.py b/Lib/lib2to3/fixes/fix_raise.py
new file mode 100644
index 0000000..2e9ffbc
--- /dev/null
+++ b/Lib/lib2to3/fixes/fix_raise.py
@@ -0,0 +1,82 @@
+"""Fixer for 'raise E, V, T'
+
+raise -> raise
+raise E -> raise E
+raise E, V -> raise E(V)
+raise E, V, T -> raise E(V).with_traceback(T)
+
+raise (((E, E'), E''), E'''), V -> raise E(V)
+raise "foo", V, T -> warns about string exceptions
+
+
+CAVEATS:
+1) "raise E, V" will be incorrectly translated if V is an exception
+ instance. The correct Python 3 idiom is
+
+ raise E from V
+
+ but since we can't detect instance-hood by syntax alone and since
+ any client code would have to be changed as well, we don't automate
+ this.
+"""
+# Author: Collin Winter
+
+# Local imports
+from .. import pytree
+from ..pgen2 import token
+from .import basefix
+from .util import Name, Call, Attr, ArgList, is_tuple
+
+class FixRaise(basefix.BaseFix):
+
+ PATTERN = """
+ raise_stmt< 'raise' exc=any [',' val=any [',' tb=any]] >
+ """
+
+ def transform(self, node, results):
+ syms = self.syms
+
+ exc = results["exc"].clone()
+ if exc.type is token.STRING:
+ self.cannot_convert(node, "Python 3 does not support string exceptions")
+ return
+
+ # Python 2 supports
+ # raise ((((E1, E2), E3), E4), E5), V
+ # as a synonym for
+ # raise E1, V
+ # Since Python 3 will not support this, we recurse down any tuple
+ # literals, always taking the first element.
+ if is_tuple(exc):
+ while is_tuple(exc):
+ # exc.children[1:-1] is the unparenthesized tuple
+ # exc.children[1].children[0] is the first element of the tuple
+ exc = exc.children[1].children[0].clone()
+ exc.set_prefix(" ")
+
+ if "val" not in results:
+ # One-argument raise
+ new = pytree.Node(syms.raise_stmt, [Name("raise"), exc])
+ new.set_prefix(node.get_prefix())
+ return new
+
+ val = results["val"].clone()
+ if is_tuple(val):
+ args = [c.clone() for c in val.children[1:-1]]
+ else:
+ val.set_prefix("")
+ args = [val]
+
+ if "tb" in results:
+ tb = results["tb"].clone()
+ tb.set_prefix("")
+
+ e = Call(exc, args)
+ with_tb = Attr(e, Name('with_traceback')) + [ArgList([tb])]
+ new = pytree.Node(syms.simple_stmt, [Name("raise")] + with_tb)
+ new.set_prefix(node.get_prefix())
+ return new
+ else:
+ return pytree.Node(syms.raise_stmt,
+ [Name("raise"), Call(exc, args)],
+ prefix=node.get_prefix())
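A runnable, hand-written sketch of the three-argument form after conversion; with_traceback() is the Python 3 replacement for the old third argument to raise:

    import sys

    def fail():
        # Python 2:  raise ValueError, "bad value", tb
        # Python 3:  raise ValueError("bad value").with_traceback(tb)
        try:
            1 / 0
        except ZeroDivisionError:
            tb = sys.exc_info()[2]
            raise ValueError("bad value").with_traceback(tb)

    try:
        fail()
    except ValueError as exc:
        print("caught:", exc)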
diff --git a/Lib/lib2to3/fixes/fix_raw_input.py b/Lib/lib2to3/fixes/fix_raw_input.py
new file mode 100644
index 0000000..e746255
--- /dev/null
+++ b/Lib/lib2to3/fixes/fix_raw_input.py
@@ -0,0 +1,16 @@
+"""Fixer that changes raw_input(...) into input(...)."""
+# Author: Andre Roberge
+
+# Local imports
+from .import basefix
+from .util import Name
+
+class FixRawInput(basefix.BaseFix):
+
+ PATTERN = """
+ power< name='raw_input' trailer< '(' [any] ')' > >
+ """
+
+ def transform(self, node, results):
+ name = results["name"]
+ name.replace(Name("input", prefix=name.get_prefix()))
diff --git a/Lib/lib2to3/fixes/fix_renames.py b/Lib/lib2to3/fixes/fix_renames.py
new file mode 100644
index 0000000..336654a
--- /dev/null
+++ b/Lib/lib2to3/fixes/fix_renames.py
@@ -0,0 +1,70 @@
+"""Fix incompatible renames
+
+Fixes:
+ * sys.maxint -> sys.maxsize
+"""
+# Author: Christian Heimes
+# based on Collin Winter's fix_import
+
+# Local imports
+from .import basefix
+from .util import Name, attr_chain, any, set
+
+MAPPING = {"sys": {"maxint" : "maxsize"},
+ }
+LOOKUP = {}
+
+def alternates(members):
+ return "(" + "|".join(map(repr, members)) + ")"
+
+
+def build_pattern():
+ #bare = set()
+ for module, replace in MAPPING.items():
+ for old_attr, new_attr in replace.items():
+ LOOKUP[(module, old_attr)] = new_attr
+ #bare.add(module)
+ #bare.add(old_attr)
+ #yield """
+ # import_name< 'import' (module=%r
+ # | dotted_as_names< any* module=%r any* >) >
+ # """ % (module, module)
+ yield """
+ import_from< 'from' module_name=%r 'import'
+ ( attr_name=%r | import_as_name< attr_name=%r 'as' any >) >
+ """ % (module, old_attr, old_attr)
+ yield """
+ power< module_name=%r trailer< '.' attr_name=%r > any* >
+ """ % (module, old_attr)
+ #yield """bare_name=%s""" % alternates(bare)
+
+
+class FixRenames(basefix.BaseFix):
+ PATTERN = "|".join(build_pattern())
+
+ order = "pre" # Pre-order tree traversal
+
+ # Don't match the node if it's within another match
+ def match(self, node):
+ match = super(FixRenames, self).match
+ results = match(node)
+ if results:
+ if any([match(obj) for obj in attr_chain(node, "parent")]):
+ return False
+ return results
+ return False
+
+ #def start_tree(self, tree, filename):
+ # super(FixRenames, self).start_tree(tree, filename)
+ # self.replace = {}
+
+ def transform(self, node, results):
+ mod_name = results.get("module_name")
+ attr_name = results.get("attr_name")
+ #bare_name = results.get("bare_name")
+ #import_mod = results.get("module")
+
+ if mod_name and attr_name:
+ new_attr = LOOKUP[(mod_name.value, attr_name.value)]
+ attr_name.replace(Name(new_attr, prefix=attr_name.get_prefix()))
+
diff --git a/Lib/lib2to3/fixes/fix_repr.py b/Lib/lib2to3/fixes/fix_repr.py
new file mode 100644
index 0000000..9917ad5
--- /dev/null
+++ b/Lib/lib2to3/fixes/fix_repr.py
@@ -0,0 +1,22 @@
+# Copyright 2006 Google, Inc. All Rights Reserved.
+# Licensed to PSF under a Contributor Agreement.
+
+"""Fixer that transforms `xyzzy` into repr(xyzzy)."""
+
+# Local imports
+from .import basefix
+from .util import Call, Name
+
+
+class FixRepr(basefix.BaseFix):
+
+ PATTERN = """
+ atom < '`' expr=any '`' >
+ """
+
+ def transform(self, node, results):
+ expr = results["expr"].clone()
+
+ if expr.type == self.syms.testlist1:
+ expr = self.parenthesize(expr)
+ return Call(Name("repr"), [expr], prefix=node.get_prefix())
diff --git a/Lib/lib2to3/fixes/fix_standarderror.py b/Lib/lib2to3/fixes/fix_standarderror.py
new file mode 100644
index 0000000..61789d0
--- /dev/null
+++ b/Lib/lib2to3/fixes/fix_standarderror.py
@@ -0,0 +1,18 @@
+# Copyright 2007 Google, Inc. All Rights Reserved.
+# Licensed to PSF under a Contributor Agreement.
+
+"""Fixer for StandardError -> Exception."""
+
+# Local imports
+from .import basefix
+from .util import Name
+
+
+class FixStandarderror(basefix.BaseFix):
+
+ PATTERN = """
+ 'StandardError'
+ """
+
+ def transform(self, node, results):
+ return Name("Exception", prefix=node.get_prefix())
diff --git a/Lib/lib2to3/fixes/fix_throw.py b/Lib/lib2to3/fixes/fix_throw.py
new file mode 100644
index 0000000..ef120bd
--- /dev/null
+++ b/Lib/lib2to3/fixes/fix_throw.py
@@ -0,0 +1,56 @@
+"""Fixer for generator.throw(E, V, T).
+
+g.throw(E) -> g.throw(E)
+g.throw(E, V) -> g.throw(E(V))
+g.throw(E, V, T) -> g.throw(E(V).with_traceback(T))
+
+g.throw("foo"[, V[, T]]) will warn about string exceptions."""
+# Author: Collin Winter
+
+# Local imports
+from .. import pytree
+from ..pgen2 import token
+from .import basefix
+from .util import Name, Call, ArgList, Attr, is_tuple
+
+class FixThrow(basefix.BaseFix):
+
+ PATTERN = """
+ power< any trailer< '.' 'throw' >
+ trailer< '(' args=arglist< exc=any ',' val=any [',' tb=any] > ')' >
+ >
+ |
+ power< any trailer< '.' 'throw' > trailer< '(' exc=any ')' > >
+ """
+
+ def transform(self, node, results):
+ syms = self.syms
+
+ exc = results["exc"].clone()
+ if exc.type is token.STRING:
+ self.cannot_convert(node, "Python 3 does not support string exceptions")
+ return
+
+ # Leave "g.throw(E)" alone
+ val = results.get("val")
+ if val is None:
+ return
+
+ val = val.clone()
+ if is_tuple(val):
+ args = [c.clone() for c in val.children[1:-1]]
+ else:
+ val.set_prefix("")
+ args = [val]
+
+ throw_args = results["args"]
+
+ if "tb" in results:
+ tb = results["tb"].clone()
+ tb.set_prefix("")
+
+ e = Call(exc, args)
+ with_tb = Attr(e, Name('with_traceback')) + [ArgList([tb])]
+ throw_args.replace(pytree.Node(syms.power, with_tb))
+ else:
+ throw_args.replace(Call(exc, args))
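One case the module docstring does not spell out: when the value is a tuple literal, the
is_tuple() branch above unpacks its elements into the constructor call (a sketch):

    g.throw(E, (a, b))    ->  g.throw(E(a, b))
    g.throw(E, v, tb)     ->  g.throw(E(v).with_traceback(tb))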
diff --git a/Lib/lib2to3/fixes/fix_tuple_params.py b/Lib/lib2to3/fixes/fix_tuple_params.py
new file mode 100644
index 0000000..199f6e0
--- /dev/null
+++ b/Lib/lib2to3/fixes/fix_tuple_params.py
@@ -0,0 +1,169 @@
+"""Fixer for function definitions with tuple parameters.
+
+def func(((a, b), c), d):
+ ...
+
+ ->
+
+def func(x, d):
+ ((a, b), c) = x
+ ...
+
+It also supports lambdas:
+
+ lambda (x, y): x + y -> lambda t: t[0] + t[1]
+
+ # The parens are a syntax error in Python 3
+ lambda (x): x + y -> lambda x: x + y
+"""
+# Author: Collin Winter
+
+# Local imports
+from .. import pytree
+from ..pgen2 import token
+from .import basefix
+from .util import Assign, Name, Newline, Number, Subscript, syms
+
+def is_docstring(stmt):
+ return isinstance(stmt, pytree.Node) and \
+ stmt.children[0].type == token.STRING
+
+class FixTupleParams(basefix.BaseFix):
+ PATTERN = """
+ funcdef< 'def' any parameters< '(' args=any ')' >
+ ['->' any] ':' suite=any+ >
+ |
+ lambda=
+ lambdef< 'lambda' args=vfpdef< '(' inner=any ')' >
+ ':' body=any
+ >
+ """
+
+ def transform(self, node, results):
+ if "lambda" in results:
+ return self.transform_lambda(node, results)
+
+ new_lines = []
+ suite = results["suite"]
+ args = results["args"]
+ # This crap is so "def foo(...): x = 5; y = 7" is handled correctly.
+ # TODO(cwinter): suite-cleanup
+ if suite[0].children[1].type == token.INDENT:
+ start = 2
+ indent = suite[0].children[1].value
+ end = Newline()
+ else:
+ start = 0
+ indent = "; "
+ end = pytree.Leaf(token.INDENT, "")
+
+ # We need access to self for new_name(), and making this a method
+ # doesn't feel right. Closing over self and new_lines makes the
+ # code below cleaner.
+ def handle_tuple(tuple_arg, add_prefix=False):
+ n = Name(self.new_name())
+ arg = tuple_arg.clone()
+ arg.set_prefix("")
+ stmt = Assign(arg, n.clone())
+ if add_prefix:
+ n.set_prefix(" ")
+ tuple_arg.replace(n)
+ new_lines.append(pytree.Node(syms.simple_stmt,
+ [stmt, end.clone()]))
+
+ if args.type == syms.tfpdef:
+ handle_tuple(args)
+ elif args.type == syms.typedargslist:
+ for i, arg in enumerate(args.children):
+ if arg.type == syms.tfpdef:
+ # Without add_prefix, the emitted code is correct,
+ # just ugly.
+ handle_tuple(arg, add_prefix=(i > 0))
+
+ if not new_lines:
+ return node
+
+ # This isn't strictly necessary, but it plays nicely with other fixers.
+ # TODO(cwinter) get rid of this when children becomes a smart list
+ for line in new_lines:
+ line.parent = suite[0]
+
+ # TODO(cwinter) suite-cleanup
+ after = start
+ if start == 0:
+ new_lines[0].set_prefix(" ")
+ elif is_docstring(suite[0].children[start]):
+ new_lines[0].set_prefix(indent)
+ after = start + 1
+
+ suite[0].children[after:after] = new_lines
+ for i in range(after+1, after+len(new_lines)+1):
+ suite[0].children[i].set_prefix(indent)
+ suite[0].changed()
+
+ def transform_lambda(self, node, results):
+ args = results["args"]
+ body = results["body"]
+ inner = simplify_args(results["inner"])
+
+ # Replace lambda ((((x)))): x with lambda x: x
+ if inner.type == token.NAME:
+ inner = inner.clone()
+ inner.set_prefix(" ")
+ args.replace(inner)
+ return
+
+ params = find_params(args)
+ to_index = map_to_index(params)
+ tup_name = self.new_name(tuple_name(params))
+
+ new_param = Name(tup_name, prefix=" ")
+ args.replace(new_param.clone())
+ for n in body.post_order():
+ if n.type == token.NAME and n.value in to_index:
+ subscripts = [c.clone() for c in to_index[n.value]]
+ new = pytree.Node(syms.power,
+ [new_param.clone()] + subscripts)
+ new.set_prefix(n.get_prefix())
+ n.replace(new)
+
+
+### Helper functions for transform_lambda()
+
+def simplify_args(node):
+ if node.type in (syms.vfplist, token.NAME):
+ return node
+ elif node.type == syms.vfpdef:
+ # These look like vfpdef< '(' x ')' > where x is NAME
+ # or another vfpdef instance (leading to recursion).
+ while node.type == syms.vfpdef:
+ node = node.children[1]
+ return node
+ raise RuntimeError("Received unexpected node %s" % node)
+
+def find_params(node):
+ if node.type == syms.vfpdef:
+ return find_params(node.children[1])
+ elif node.type == token.NAME:
+ return node.value
+ return [find_params(c) for c in node.children if c.type != token.COMMA]
+
+def map_to_index(param_list, prefix=[], d=None):
+ if d is None:
+ d = {}
+ for i, obj in enumerate(param_list):
+ trailer = [Subscript(Number(i))]
+ if isinstance(obj, list):
+ map_to_index(obj, trailer, d=d)
+ else:
+ d[obj] = prefix + trailer
+ return d
+
+def tuple_name(param_list):
+ l = []
+ for obj in param_list:
+ if isinstance(obj, list):
+ l.append(tuple_name(obj))
+ else:
+ l.append(obj)
+ return "_".join(l)
diff --git a/Lib/lib2to3/fixes/fix_types.py b/Lib/lib2to3/fixes/fix_types.py
new file mode 100644
index 0000000..fe7880a
--- /dev/null
+++ b/Lib/lib2to3/fixes/fix_types.py
@@ -0,0 +1,62 @@
+# Copyright 2007 Google, Inc. All Rights Reserved.
+# Licensed to PSF under a Contributor Agreement.
+
+"""Fixer for removing uses of the types module.
+
+These work only for the known names in the types module. The forms may be
+written with or without the 'types.' prefix; i.e., it is assumed the module is imported either as:
+
+ import types
+ from types import ... # either * or specific types
+
+The import statements are not modified.
+
+There should be another fixer that handles at least the following constants:
+
+ type([]) -> list
+ type(()) -> tuple
+ type('') -> str
+
+"""
+
+# Local imports
+from ..pgen2 import token
+from .import basefix
+from .util import Name
+
+_TYPE_MAPPING = {
+ 'BooleanType' : 'bool',
+ 'BufferType' : 'memoryview',
+ 'ClassType' : 'type',
+ 'ComplexType' : 'complex',
+ 'DictType': 'dict',
+ 'DictionaryType' : 'dict',
+ 'EllipsisType' : 'type(Ellipsis)',
+ #'FileType' : 'io.IOBase',
+ 'FloatType': 'float',
+ 'IntType': 'int',
+ 'ListType': 'list',
+ 'LongType': 'int',
+ 'ObjectType' : 'object',
+ 'NoneType': 'type(None)',
+ 'NotImplementedType' : 'type(NotImplemented)',
+ 'SliceType' : 'slice',
+ 'StringType': 'bytes', # XXX ?
+ 'StringTypes' : 'str', # XXX ?
+ 'TupleType': 'tuple',
+ 'TypeType' : 'type',
+ 'UnicodeType': 'str',
+ 'XRangeType' : 'range',
+ }
+
+_pats = ["power< 'types' trailer< '.' name='%s' > >" % t for t in _TYPE_MAPPING]
+
+class FixTypes(basefix.BaseFix):
+
+ PATTERN = '|'.join(_pats)
+
+ def transform(self, node, results):
+ new_value = _TYPE_MAPPING.get(results["name"].value)
+ if new_value:
+ return Name(new_value, prefix=node.get_prefix())
+ return None
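For example, with the mapping above (note that a few entries expand to expressions such as
type(None) rather than plain names):

    isinstance(x, types.IntType)   ->  isinstance(x, int)
    t is types.NoneType            ->  t is type(None)
    types.StringType               ->  bytes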
diff --git a/Lib/lib2to3/fixes/fix_unicode.py b/Lib/lib2to3/fixes/fix_unicode.py
new file mode 100644
index 0000000..380f241
--- /dev/null
+++ b/Lib/lib2to3/fixes/fix_unicode.py
@@ -0,0 +1,28 @@
+"""Fixer that changes unicode to str, unichr to chr, and u"..." into "...".
+
+"""
+
+import re
+from ..pgen2 import token
+from .import basefix
+
+class FixUnicode(basefix.BaseFix):
+
+ PATTERN = "STRING | NAME<'unicode' | 'unichr'>"
+
+ def transform(self, node, results):
+ if node.type == token.NAME:
+ if node.value == "unicode":
+ new = node.clone()
+ new.value = "str"
+ return new
+ if node.value == "unichr":
+ new = node.clone()
+ new.value = "chr"
+ return new
+ # XXX Warn when __unicode__ found?
+ elif node.type == token.STRING:
+ if re.match(r"[uU][rR]?[\'\"]", node.value):
+ new = node.clone()
+ new.value = new.value[1:]
+ return new
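Roughly, the rewrites side by side; the STRING branch only strips the leading 'u'/'U', so a
raw-string prefix survives:

    unicode(x)    ->  str(x)
    unichr(65)    ->  chr(65)
    u"text"       ->  "text"
    ur"\d+"       ->  r"\d+"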
diff --git a/Lib/lib2to3/fixes/fix_ws_comma.py b/Lib/lib2to3/fixes/fix_ws_comma.py
new file mode 100644
index 0000000..2ba1b49
--- /dev/null
+++ b/Lib/lib2to3/fixes/fix_ws_comma.py
@@ -0,0 +1,39 @@
+"""Fixer that changes 'a ,b' into 'a, b'.
+
+This also changes '{a :b}' into '{a: b}', but does not touch other
+uses of colons. It does not touch other uses of whitespace.
+
+"""
+
+from .. import pytree
+from ..pgen2 import token
+from .import basefix
+
+class FixWsComma(basefix.BaseFix):
+
+ explicit = True # The user must ask for this fixer
+
+ PATTERN = """
+ any<(not(',') any)+ ',' ((not(',') any)+ ',')* [not(',') any]>
+ """
+
+ COMMA = pytree.Leaf(token.COMMA, ",")
+ COLON = pytree.Leaf(token.COLON, ":")
+ SEPS = (COMMA, COLON)
+
+ def transform(self, node, results):
+ new = node.clone()
+ comma = False
+ for child in new.children:
+ if child in self.SEPS:
+ prefix = child.get_prefix()
+ if prefix.isspace() and "\n" not in prefix:
+ child.set_prefix("")
+ comma = True
+ else:
+ if comma:
+ prefix = child.get_prefix()
+ if not prefix:
+ child.set_prefix(" ")
+ comma = False
+ return new
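Because the fixer is marked explicit, it only runs when requested by name. Its effect, roughly:

    f(a ,b)    ->  f(a, b)
    {1 :2}     ->  {1: 2}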
diff --git a/Lib/lib2to3/fixes/fix_xrange.py b/Lib/lib2to3/fixes/fix_xrange.py
new file mode 100644
index 0000000..410e601
--- /dev/null
+++ b/Lib/lib2to3/fixes/fix_xrange.py
@@ -0,0 +1,18 @@
+# Copyright 2007 Google, Inc. All Rights Reserved.
+# Licensed to PSF under a Contributor Agreement.
+
+"""Fixer that changes xrange(...) into range(...)."""
+
+# Local imports
+from .import basefix
+from .util import Name
+
+class FixXrange(basefix.BaseFix):
+
+ PATTERN = """
+ power< name='xrange' trailer< '(' [any] ')' > >
+ """
+
+ def transform(self, node, results):
+ name = results["name"]
+ name.replace(Name("range", prefix=name.get_prefix()))
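Only the name leaf is replaced; the argument list and surrounding whitespace are left untouched:

    for i in xrange(10):    ->  for i in range(10):
    xrange(1, n, 2)         ->  range(1, n, 2)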
diff --git a/Lib/lib2to3/fixes/fix_xreadlines.py b/Lib/lib2to3/fixes/fix_xreadlines.py
new file mode 100644
index 0000000..8857759
--- /dev/null
+++ b/Lib/lib2to3/fixes/fix_xreadlines.py
@@ -0,0 +1,24 @@
+"""Fix "for x in f.xreadlines()" -> "for x in f".
+
+This fixer will also convert g(f.xreadlines) into g(f.__iter__)."""
+# Author: Collin Winter
+
+# Local imports
+from .import basefix
+from .util import Name
+
+
+class FixXreadlines(basefix.BaseFix):
+ PATTERN = """
+ power< call=any+ trailer< '.' 'xreadlines' > trailer< '(' ')' > >
+ |
+ power< any+ trailer< '.' no_call='xreadlines' > >
+ """
+
+ def transform(self, node, results):
+ no_call = results.get("no_call")
+
+ if no_call:
+ no_call.replace(Name("__iter__", prefix=no_call.get_prefix()))
+ else:
+ node.replace([x.clone() for x in results["call"]])
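The two alternatives in the pattern correspond to these rewrites (the second keeps the object
and swaps in the equivalent bound-method name):

    for line in f.xreadlines():    ->  for line in f:
    g(f.xreadlines)                ->  g(f.__iter__)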
diff --git a/Lib/lib2to3/fixes/util.py b/Lib/lib2to3/fixes/util.py
new file mode 100644
index 0000000..8b7ad9f
--- /dev/null
+++ b/Lib/lib2to3/fixes/util.py
@@ -0,0 +1,303 @@
+"""Utility functions, node construction macros, etc."""
+# Author: Collin Winter
+
+# Local imports
+from ..pgen2 import token
+from ..pytree import Leaf, Node
+from ..pygram import python_symbols as syms
+
+
+###########################################################
+### Common node-construction "macros"
+###########################################################
+
+def KeywordArg(keyword, value):
+ return Node(syms.argument,
+ [keyword, Leaf(token.EQUAL, '='), value])
+
+def LParen():
+ return Leaf(token.LPAR, "(")
+
+def RParen():
+ return Leaf(token.RPAR, ")")
+
+def Assign(target, source):
+ """Build an assignment statement"""
+ if not isinstance(target, list):
+ target = [target]
+ if not isinstance(source, list):
+ source.set_prefix(" ")
+ source = [source]
+
+ return Node(syms.atom,
+ target + [Leaf(token.EQUAL, "=", prefix=" ")] + source)
+
+def Name(name, prefix=None):
+ """Return a NAME leaf"""
+ return Leaf(token.NAME, name, prefix=prefix)
+
+def Attr(obj, attr):
+ """A node tuple for obj.attr"""
+ return [obj, Node(syms.trailer, [Dot(), attr])]
+
+def Comma():
+ """A comma leaf"""
+ return Leaf(token.COMMA, ",")
+
+def Dot():
+ """A period (.) leaf"""
+ return Leaf(token.DOT, ".")
+
+def ArgList(args, lparen=LParen(), rparen=RParen()):
+ """A parenthesised argument list, used by Call()"""
+ return Node(syms.trailer,
+ [lparen.clone(),
+ Node(syms.arglist, args),
+ rparen.clone()])
+
+def Call(func_name, args, prefix=None):
+ """A function call"""
+ node = Node(syms.power, [func_name, ArgList(args)])
+ if prefix is not None:
+ node.set_prefix(prefix)
+ return node
+
+def Newline():
+ """A newline literal"""
+ return Leaf(token.NEWLINE, "\n")
+
+def BlankLine():
+ """A blank line"""
+ return Leaf(token.NEWLINE, "")
+
+def Number(n, prefix=None):
+ return Leaf(token.NUMBER, n, prefix=prefix)
+
+def Subscript(index_node):
+ """A numeric or string subscript"""
+ return Node(syms.trailer, [Leaf(token.LBRACE, '['),
+ index_node,
+ Leaf(token.RBRACE, ']')])
+
+def String(string, prefix=None):
+ """A string leaf"""
+ return Leaf(token.STRING, string, prefix=prefix)
+
+def ListComp(xp, fp, it, test=None):
+ """A list comprehension of the form [xp for fp in it if test].
+
+ If test is None, the "if test" part is omitted.
+ """
+ xp.set_prefix("")
+ fp.set_prefix(" ")
+ it.set_prefix(" ")
+ for_leaf = Leaf(token.NAME, "for")
+ for_leaf.set_prefix(" ")
+ in_leaf = Leaf(token.NAME, "in")
+ in_leaf.set_prefix(" ")
+ inner_args = [for_leaf, fp, in_leaf, it]
+ if test:
+ test.set_prefix(" ")
+ if_leaf = Leaf(token.NAME, "if")
+ if_leaf.set_prefix(" ")
+ inner_args.append(Node(syms.comp_if, [if_leaf, test]))
+ inner = Node(syms.listmaker, [xp, Node(syms.comp_for, inner_args)])
+ return Node(syms.atom,
+ [Leaf(token.LBRACE, "["),
+ inner,
+ Leaf(token.RBRACE, "]")])
+
+###########################################################
+### Determine whether a node represents a given literal
+###########################################################
+
+def is_tuple(node):
+ """Does the node represent a tuple literal?"""
+ if isinstance(node, Node) and node.children == [LParen(), RParen()]:
+ return True
+ return (isinstance(node, Node)
+ and len(node.children) == 3
+ and isinstance(node.children[0], Leaf)
+ and isinstance(node.children[1], Node)
+ and isinstance(node.children[2], Leaf)
+ and node.children[0].value == "("
+ and node.children[2].value == ")")
+
+def is_list(node):
+ """Does the node represent a list literal?"""
+ return (isinstance(node, Node)
+ and len(node.children) > 1
+ and isinstance(node.children[0], Leaf)
+ and isinstance(node.children[-1], Leaf)
+ and node.children[0].value == "["
+ and node.children[-1].value == "]")
+
+###########################################################
+### Common portability code. This allows fixers to do, eg,
+### "from .util import set" and forget about it.
+###########################################################
+
+try:
+ any = any
+except NameError:
+ def any(l):
+ for o in l:
+ if o:
+ return True
+ return False
+
+try:
+ set = set
+except NameError:
+ from sets import Set as set
+
+try:
+ reversed = reversed
+except NameError:
+ def reversed(l):
+ return l[::-1]
+
+###########################################################
+### Misc
+###########################################################
+
+def attr_chain(obj, attr):
+ """Follow an attribute chain.
+
+ If you have a chain of objects where a.foo -> b, b.foo -> c, etc.,
+ use this to iterate over all objects in the chain. Iteration is
+ terminated when getattr(x, attr) is None.
+
+ Args:
+ obj: the starting object
+ attr: the name of the chaining attribute
+
+ Yields:
+ Each successive object in the chain.
+ """
+ next = getattr(obj, attr)
+ while next:
+ yield next
+ next = getattr(next, attr)
+
+###########################################################
+### The following functions are to find bindings in a suite
+###########################################################
+
+def make_suite(node):
+ if node.type == syms.suite:
+ return node
+ node = node.clone()
+ parent, node.parent = node.parent, None
+ suite = Node(syms.suite, [node])
+ suite.parent = parent
+ return suite
+
+def does_tree_import(package, name, node):
+ """ Returns true if name is imported from package at the
+ top level of the tree which node belongs to.
+ To cover the case of an import like 'import foo', use
+ Null for the package and 'foo' for the name. """
+ # Scamper up to the top level namespace
+ while node.type != syms.file_input:
+ assert node.parent, "Tree is insane! root found before "\
+ "file_input node was found."
+ node = node.parent
+
+ binding = find_binding(name, node, package)
+ return bool(binding)
+
+_def_syms = set([syms.classdef, syms.funcdef])
+def find_binding(name, node, package=None):
+ """ Returns the node which binds variable name, otherwise None.
+ If optional argument package is supplied, only imports will
+ be returned.
+ See test cases for examples."""
+ for child in node.children:
+ ret = None
+ if child.type == syms.for_stmt:
+ if _find(name, child.children[1]):
+ return child
+ n = find_binding(name, make_suite(child.children[-1]), package)
+ if n: ret = n
+ elif child.type in (syms.if_stmt, syms.while_stmt):
+ n = find_binding(name, make_suite(child.children[-1]), package)
+ if n: ret = n
+ elif child.type == syms.try_stmt:
+ n = find_binding(name, make_suite(child.children[2]), package)
+ if n:
+ ret = n
+ else:
+ for i, kid in enumerate(child.children[3:]):
+ if kid.type == token.COLON and kid.value == ":":
+ # i+3 is the colon, i+4 is the suite
+ n = find_binding(name, make_suite(child.children[i+4]), package)
+ if n: ret = n
+ elif child.type in _def_syms and child.children[1].value == name:
+ ret = child
+ elif _is_import_binding(child, name, package):
+ ret = child
+ elif child.type == syms.simple_stmt:
+ ret = find_binding(name, child, package)
+ elif child.type == syms.expr_stmt:
+ if _find(name, child.children[0]):
+ ret = child
+
+ if ret:
+ if not package:
+ return ret
+ if ret.type in (syms.import_name, syms.import_from):
+ return ret
+ return None
+
+_block_syms = set([syms.funcdef, syms.classdef, syms.trailer])
+def _find(name, node):
+ nodes = [node]
+ while nodes:
+ node = nodes.pop()
+ if node.type > 256 and node.type not in _block_syms:
+ nodes.extend(node.children)
+ elif node.type == token.NAME and node.value == name:
+ return node
+ return None
+
+def _is_import_binding(node, name, package=None):
+ """ Will reuturn node if node will import name, or node
+ will import * from package. None is returned otherwise.
+ See test cases for examples. """
+
+ if node.type == syms.import_name and not package:
+ imp = node.children[1]
+ if imp.type == syms.dotted_as_names:
+ for child in imp.children:
+ if child.type == syms.dotted_as_name:
+ if child.children[2].value == name:
+ return node
+ elif child.type == token.NAME and child.value == name:
+ return node
+ elif imp.type == syms.dotted_as_name:
+ last = imp.children[-1]
+ if last.type == token.NAME and last.value == name:
+ return node
+ elif imp.type == token.NAME and imp.value == name:
+ return node
+ elif node.type == syms.import_from:
+ # unicode(...) is used to make life easier here, because
+ # from a.b import parses to ['import', ['a', '.', 'b'], ...]
+ if package and unicode(node.children[1]).strip() != package:
+ return None
+ n = node.children[3]
+ if package and _find('as', n):
+ # See test_from_import_as for explanation
+ return None
+ elif n.type == syms.import_as_names and _find(name, n):
+ return node
+ elif n.type == syms.import_as_name:
+ child = n.children[2]
+ if child.type == token.NAME and child.value == name:
+ return node
+ elif n.type == token.NAME and n.value == name:
+ return node
+ elif package and n.type == token.STAR:
+ return node
+ return None
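As a quick sanity check of the construction macros, the sketch below (assuming this package is
importable as lib2to3.fixes.util and run under Python 2) builds two small trees and relies on
pytree rendering them back to source text when converted to a string, just as the unicode(...)
call in _is_import_binding does:

    from lib2to3.fixes.util import Call, Name, Assign, Number

    # Tree for the call repr(x)
    print str(Call(Name("repr"), [Name("x")]))   # -> repr(x)

    # Tree for the assignment y = 2
    print str(Assign(Name("y"), Number("2")))    # -> y = 2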