author     Guido van Rossum <guido@python.org>   2007-05-14 22:03:55 (GMT)
committer  Guido van Rossum <guido@python.org>   2007-05-14 22:03:55 (GMT)
commit     a8add0ec5ef05c26e1641b8310b65ddd75c0fec3 (patch)
tree       1626110463ca617ab105990ee1923f6ee65c7476
parent     827b055ffe8060ac229cda8d75eb24176cc697c0 (diff)
Merged revisions 55270-55324 via svnmerge from
svn+ssh://pythondev@svn.python.org/python/branches/p3yk

........
  r55271 | fred.drake | 2007-05-11 10:14:47 -0700 (Fri, 11 May 2007) | 3 lines

  remove jpeg, panel libraries for SGI; there is more IRIX stuff left over,
  I guess that should be removed too, but will leave for someone who is sure
........
  r55280 | fred.drake | 2007-05-11 19:11:37 -0700 (Fri, 11 May 2007) | 1 line

  remove mention of file that has been removed
........
  r55301 | brett.cannon | 2007-05-13 17:38:05 -0700 (Sun, 13 May 2007) | 4 lines

  Remove rexec and Bastion from the stdlib.  This also eliminates the need
  for f_restricted on frames.  This in turn negates the need for
  PyEval_GetRestricted() and PyFrame_IsRestricted().
........
  r55303 | brett.cannon | 2007-05-13 19:22:22 -0700 (Sun, 13 May 2007) | 2 lines

  Remove the md5 and sha modules.
........
  r55305 | george.yoshida | 2007-05-13 19:45:55 -0700 (Sun, 13 May 2007) | 2 lines

  fix markup
........
  r55306 | neal.norwitz | 2007-05-13 19:47:57 -0700 (Sun, 13 May 2007) | 1 line

  Get the doc building again after some removals.
........
  r55307 | neal.norwitz | 2007-05-13 19:50:45 -0700 (Sun, 13 May 2007) | 1 line

  Get test_pyclbr passing again after getstatus was removed from commands.
  This "test case" was weird since it was just importing a seemingly random
  module.  Remove the import
........
  r55322 | brett.cannon | 2007-05-14 14:09:20 -0700 (Mon, 14 May 2007) | 3 lines

  Remove the compiler package.  Will eventually need a mechanism to byte
  compile an AST.
........
-rw-r--r--  Doc/Makefile.deps  7
-rw-r--r--  Doc/lib/compiler.tex  353
-rw-r--r--  Doc/lib/lib.tex  9
-rw-r--r--  Doc/lib/libbastion.tex  57
-rw-r--r--  Doc/lib/libjpeg.tex  80
-rw-r--r--  Doc/lib/libmd5.tex  92
-rw-r--r--  Doc/lib/libpanel.tex  74
-rw-r--r--  Doc/lib/librestricted.tex  66
-rw-r--r--  Doc/lib/librexec.tex  275
-rw-r--r--  Doc/lib/libsha.tex  83
-rw-r--r--  Doc/ref/ref3.tex  9
-rw-r--r--  Include/ceval.h  1
-rw-r--r--  Include/frameobject.h  2
-rw-r--r--  Lib/Bastion.py  177
-rw-r--r--  Lib/compiler/__init__.py  26
-rw-r--r--  Lib/compiler/ast.py  1370
-rw-r--r--  Lib/compiler/consts.py  21
-rw-r--r--  Lib/compiler/future.py  73
-rw-r--r--  Lib/compiler/misc.py  73
-rw-r--r--  Lib/compiler/pyassem.py  847
-rw-r--r--  Lib/compiler/pycodegen.py  1580
-rw-r--r--  Lib/compiler/symbols.py  470
-rw-r--r--  Lib/compiler/syntax.py  46
-rw-r--r--  Lib/compiler/transformer.py  1534
-rw-r--r--  Lib/compiler/visitor.py  113
-rw-r--r--  Lib/ihooks.py  2
-rw-r--r--  Lib/imputil.py  3
-rw-r--r--  Lib/inspect.py  23
-rw-r--r--  Lib/md5.py  10
-rw-r--r--  Lib/new.py  7
-rw-r--r--  Lib/pickle.py  26
-rw-r--r--  Lib/pickletools.py  13
-rw-r--r--  Lib/plat-irix6/jpeg.py  111
-rw-r--r--  Lib/plat-irix6/panel.py  281
-rw-r--r--  Lib/plat-irix6/panelparser.py  128
-rw-r--r--  Lib/rexec.py  585
-rw-r--r--  Lib/test/test___all__.py  2
-rw-r--r--  Lib/test/test_bastion.py  3
-rw-r--r--  Lib/test/test_compiler.py  265
-rw-r--r--  Lib/test/test_descr.py  44
-rw-r--r--  Lib/test/test_importhooks.py  2
-rw-r--r--  Lib/test/test_md5.py  58
-rw-r--r--  Lib/test/test_pep247.py  4
-rw-r--r--  Lib/test/test_pyclbr.py  4
-rw-r--r--  Lib/test/test_sha.py  52
-rw-r--r--  Lib/test/test_sundry.py  1
-rw-r--r--  Lib/test/test_tarfile.py  4
-rw-r--r--  Lib/test/test_transformer.py  35
-rw-r--r--  Lib/types.py  17
-rw-r--r--  Lib/uuid.py  4
-rw-r--r--  Misc/NEWS  15
-rw-r--r--  Modules/cPickle.c  17
-rw-r--r--  Objects/fileobject.c  9
-rw-r--r--  Objects/frameobject.c  6
-rw-r--r--  Objects/funcobject.c  29
-rw-r--r--  Objects/methodobject.c  6
-rw-r--r--  Python/ceval.c  7
-rw-r--r--  Python/marshal.c  12
-rw-r--r--  Python/structmember.c  10
-rw-r--r--  Tools/compiler/ACKS  8
-rw-r--r--  Tools/compiler/README  18
-rw-r--r--  Tools/compiler/ast.txt  105
-rw-r--r--  Tools/compiler/astgen.py  292
-rw-r--r--  Tools/compiler/compile.py  51
-rwxr-xr-x  Tools/compiler/demo.py  38
-rwxr-xr-x  Tools/compiler/dumppyc.py  47
-rw-r--r--  Tools/compiler/regrtest.py  83
-rw-r--r--  Tools/compiler/stacktest.py  43
68 files changed, 52 insertions, 9866 deletions
diff --git a/Doc/Makefile.deps b/Doc/Makefile.deps
index 426c7ea1..0246653 100644
--- a/Doc/Makefile.deps
+++ b/Doc/Makefile.deps
@@ -88,7 +88,6 @@ LIBFILES= $(MANSTYLES) $(INDEXSTYLES) $(COMMONTEX) \
commontex/reportingbugs.tex \
lib/lib.tex \
lib/asttable.tex \
- lib/compiler.tex \
lib/distutils.tex \
lib/email.tex \
lib/emailencoders.tex \
@@ -200,21 +199,15 @@ LIBFILES= $(MANSTYLES) $(INDEXSTYLES) $(COMMONTEX) \
lib/libaudioop.tex \
lib/libimageop.tex \
lib/libaifc.tex \
- lib/libjpeg.tex \
lib/librgbimg.tex \
lib/libossaudiodev.tex \
lib/libcrypto.tex \
lib/libhashlib.tex \
- lib/libmd5.tex \
- lib/libsha.tex \
lib/libhmac.tex \
lib/libstdwin.tex \
lib/libsun.tex \
lib/libxdrlib.tex \
lib/libimghdr.tex \
- lib/librestricted.tex \
- lib/librexec.tex \
- lib/libbastion.tex \
lib/libformatter.tex \
lib/liboperator.tex \
lib/libresource.tex \
diff --git a/Doc/lib/compiler.tex b/Doc/lib/compiler.tex
deleted file mode 100644
index d4f4124..0000000
--- a/Doc/lib/compiler.tex
+++ /dev/null
@@ -1,353 +0,0 @@
-\chapter{Python compiler package \label{compiler}}
-
-\sectionauthor{Jeremy Hylton}{jeremy@zope.com}
-
-
-The Python compiler package is a tool for analyzing Python source code
-and generating Python bytecode. The compiler contains libraries to
-generate an abstract syntax tree from Python source code and to
-generate Python bytecode from the tree.
-
-The \refmodule{compiler} package is a Python source to bytecode
-translator written in Python. It uses the built-in parser and
-standard \refmodule{parser} module to generate a concrete syntax
-tree. This tree is used to generate an abstract syntax tree (AST) and
-then Python bytecode.
-
-The full functionality of the package duplicates the builtin compiler
-provided with the Python interpreter. It is intended to match its
-behavior almost exactly. Why implement another compiler that does the
-same thing? The package is useful for a variety of purposes. It can
-be modified more easily than the builtin compiler. The AST it
-generates is useful for analyzing Python source code.
-
-This chapter explains how the various components of the
-\refmodule{compiler} package work. It blends reference material with
-a tutorial.
-
-The following modules are part of the \refmodule{compiler} package:
-
-\localmoduletable
-
-
-\section{The basic interface}
-
-\declaremodule{}{compiler}
-
-The top-level of the package defines four functions. If you import
-\module{compiler}, you will get these functions and a collection of
-modules contained in the package.
-
-\begin{funcdesc}{parse}{buf}
-Returns an abstract syntax tree for the Python source code in \var{buf}.
-The function raises \exception{SyntaxError} if there is an error in the
-source code. The return value is a \class{compiler.ast.Module} instance
-that contains the tree.
-\end{funcdesc}
-
-\begin{funcdesc}{parseFile}{path}
-Return an abstract syntax tree for the Python source code in the file
-specified by \var{path}. It is equivalent to
-\code{parse(open(\var{path}).read())}.
-\end{funcdesc}
-
-\begin{funcdesc}{walk}{ast, visitor\optional{, verbose}}
-Do a pre-order walk over the abstract syntax tree \var{ast}. Call the
-appropriate method on the \var{visitor} instance for each node
-encountered.
-\end{funcdesc}
-
-\begin{funcdesc}{compile}{source, filename, mode, flags=None,
- dont_inherit=None}
-Compile the string \var{source}, a Python module, statement or
-expression, into a code object that can be executed by the exec
-statement or \function{eval()}. This function is a replacement for the
-built-in \function{compile()} function.
-
-The \var{filename} will be used for run-time error messages.
-
-The \var{mode} must be 'exec' to compile a module, 'single' to compile a
-single (interactive) statement, or 'eval' to compile an expression.
-
-The \var{flags} and \var{dont_inherit} arguments affect future-related
-statements, but are not supported yet.
-\end{funcdesc}
-
-\begin{funcdesc}{compileFile}{source}
-Compiles the file \var{source} and generates a .pyc file.
-\end{funcdesc}
-
-The \module{compiler} package contains the following modules:
-\refmodule[compiler.ast]{ast}, \module{consts}, \module{future},
-\module{misc}, \module{pyassem}, \module{pycodegen}, \module{symbols},
-\module{transformer}, and \refmodule[compiler.visitor]{visitor}.
-
-\section{Limitations}
-
-There are some problems with the error checking of the compiler
-package. The interpreter detects syntax errors in two distinct
-phases. One set of errors is detected by the interpreter's parser,
-the other set by the compiler. The compiler package relies on the
-interpreter's parser, so it gets the first phase of error checking for
-free. It implements the second phase itself, and that implementation is
-incomplete. For example, the compiler package does not raise an error
-if a name appears more than once in an argument list:
-\code{def f(x, x): ...}
-
-A future version of the compiler should fix these problems.
-
-\section{Python Abstract Syntax}
-
-The \module{compiler.ast} module defines an abstract syntax for
-Python. In the abstract syntax tree, each node represents a syntactic
-construct. The root of the tree is a \class{Module} object.
-
-The abstract syntax offers a higher level interface to parsed Python
-source code. The \refmodule{parser}
-module and the compiler written in C for the Python interpreter use a
-concrete syntax tree. The concrete syntax is tied closely to the
-grammar description used for the Python parser. Instead of a single
-node for a construct, there are often several levels of nested nodes
-that are introduced by Python's precedence rules.
-
-The abstract syntax tree is created by the
-\module{compiler.transformer} module. The transformer relies on the
-builtin Python parser to generate a concrete syntax tree. It
-generates an abstract syntax tree from the concrete tree.
-
-The \module{transformer} module was created by Greg
-Stein\index{Stein, Greg} and Bill Tutt\index{Tutt, Bill} for an
-experimental Python-to-C compiler. The current version contains a
-number of modifications and improvements, but the basic form of the
-abstract syntax and of the transformer are due to Stein and Tutt.
-
-\subsection{AST Nodes}
-
-\declaremodule{}{compiler.ast}
-
-The \module{compiler.ast} module is generated from a text file that
-describes each node type and its elements. Each node type is
-represented as a class that inherits from the abstract base class
-\class{compiler.ast.Node} and defines a set of named attributes for
-child nodes.
-
-\begin{classdesc}{Node}{}
-
- The \class{Node} instances are created automatically by the parser
- generator. The recommended interface for specific \class{Node}
- instances is to use the public attributes to access child nodes. A
- public attribute may be bound to a single node or to a sequence of
- nodes, depending on the \class{Node} type. For example, the
- \member{bases} attribute of the \class{Class} node, is bound to a
- list of base class nodes, and the \member{doc} attribute is bound to
- a single node.
-
- Each \class{Node} instance has a \member{lineno} attribute which may
- be \code{None}. XXX Not sure what the rules are for which nodes
- will have a useful lineno.
-\end{classdesc}
-
-All \class{Node} objects offer the following methods:
-
-\begin{methoddesc}{getChildren}{}
- Returns a flattened list of the child nodes and objects in the
- order they occur. Specifically, the order of the nodes is the
- order in which they appear in the Python grammar. Not all of the
- children are \class{Node} instances. The names of functions and
- classes, for example, are plain strings.
-\end{methoddesc}
-
-\begin{methoddesc}{getChildNodes}{}
- Returns a flattened list of the child nodes in the order they
- occur. This method is like \method{getChildren()}, except that it
- only returns those children that are \class{Node} instances.
-\end{methoddesc}
-
-Two examples illustrate the general structure of \class{Node}
-classes. The \keyword{while} statement is defined by the following
-grammar production:
-
-\begin{verbatim}
-while_stmt: "while" expression ":" suite
- ["else" ":" suite]
-\end{verbatim}
-
-The \class{While} node has three attributes: \member{test},
-\member{body}, and \member{else_}. (If the natural name for an
-attribute is also a Python reserved word, it can't be used as an
-attribute name. An underscore is appended to the word to make it a
-legal identifier, hence \member{else_} instead of \keyword{else}.)
-
-The \keyword{if} statement is more complicated because it can include
-several tests.
-
-\begin{verbatim}
-if_stmt: 'if' test ':' suite ('elif' test ':' suite)* ['else' ':' suite]
-\end{verbatim}
-
-The \class{If} node only defines two attributes: \member{tests} and
-\member{else_}. The \member{tests} attribute is a sequence of (test
-expression, consequent body) pairs. There is one pair for each
-\keyword{if}/\keyword{elif} clause. The first element of the pair is
-the test expression. The second element is a \class{Stmt} node that
-contains the code to execute if the test is true.
-
-The \method{getChildren()} method of \class{If} returns a flat list of
-child nodes. If there are three \keyword{if}/\keyword{elif} clauses
-and no \keyword{else} clause, then \method{getChildren()} will return
-a list of six elements: the first test expression, the first
-\class{Stmt}, the second test expression, etc.
-
-The following table lists each of the \class{Node} subclasses defined
-in \module{compiler.ast} and each of the public attributes available
-on their instances. The values of most of the attributes are
-themselves \class{Node} instances or sequences of instances. When the
-value is something other than an instance, the type is noted in the
-comment. The attributes are listed in the order in which they are
-returned by \method{getChildren()} and \method{getChildNodes()}.
-
-\input{asttable}
-
-
-\subsection{Assignment nodes}
-
-There is a collection of nodes used to represent assignments. Each
-assignment statement in the source code becomes a single
-\class{Assign} node in the AST. The \member{nodes} attribute is a
-list that contains a node for each assignment target. This is
-necessary because assignment can be chained, e.g. \code{a = b = 2}.
-Each \class{Node} in the list will be one of the following classes:
-\class{AssAttr}, \class{AssList}, \class{AssName}, or
-\class{AssTuple}.
-
-Each target assignment node will describe the kind of object being
-assigned to: \class{AssName} for a simple name, e.g. \code{a = 1};
-\class{AssAttr} for an attribute assignment, e.g. \code{a.x = 1};
-\class{AssList} and \class{AssTuple} for list and tuple expansion
-respectively, e.g. \code{a, b, c = a_tuple}.
-
-The target assignment nodes also have a \member{flags} attribute that
-indicates whether the node is being used for assignment or in a delete
-statement. The \class{AssName} is also used to represent a delete
-statement, e.g. \code{del x}.
-
-When an expression contains several attribute references, an
-assignment or delete statement will contain only one \class{AssAttr}
-node -- for the final attribute reference. The other attribute
-references will be represented as \class{Getattr} nodes in the
-\member{expr} attribute of the \class{AssAttr} instance.
-
-\subsection{Examples}
-
-This section shows several simple examples of ASTs for Python source
-code. The examples demonstrate how to use the \function{parse()}
-function, what the repr of an AST looks like, and how to access
-attributes of an AST node.
-
-The first module defines a single function. Assume it is stored in
-\file{/tmp/doublelib.py}.
-
-\begin{verbatim}
-"""This is an example module.
-
-This is the docstring.
-"""
-
-def double(x):
- "Return twice the argument"
- return x * 2
-\end{verbatim}
-
-In the interactive interpreter session below, I have reformatted the
-long AST reprs for readability. The AST reprs use unqualified class
-names. If you want to create an instance from a repr, you must import
-the class names from the \module{compiler.ast} module.
-
-\begin{verbatim}
->>> import compiler
->>> mod = compiler.parseFile("/tmp/doublelib.py")
->>> mod
-Module('This is an example module.\n\nThis is the docstring.\n',
- Stmt([Function(None, 'double', ['x'], [], 0,
- 'Return twice the argument',
- Stmt([Return(Mul((Name('x'), Const(2))))]))]))
->>> from compiler.ast import *
->>> Module('This is an example module.\n\nThis is the docstring.\n',
-... Stmt([Function(None, 'double', ['x'], [], 0,
-... 'Return twice the argument',
-... Stmt([Return(Mul((Name('x'), Const(2))))]))]))
-Module('This is an example module.\n\nThis is the docstring.\n',
- Stmt([Function(None, 'double', ['x'], [], 0,
- 'Return twice the argument',
- Stmt([Return(Mul((Name('x'), Const(2))))]))]))
->>> mod.doc
-'This is an example module.\n\nThis is the docstring.\n'
->>> for node in mod.node.nodes:
-... print node
-...
-Function(None, 'double', ['x'], [], 0, 'Return twice the argument',
- Stmt([Return(Mul((Name('x'), Const(2))))]))
->>> func = mod.node.nodes[0]
->>> func.code
-Stmt([Return(Mul((Name('x'), Const(2))))])
-\end{verbatim}
-
-\section{Using Visitors to Walk ASTs}
-
-\declaremodule{}{compiler.visitor}
-
-The visitor pattern is ... The \refmodule{compiler} package uses a
-variant on the visitor pattern that takes advantage of Python's
-introspection features to eliminate the need for much of the visitor's
-infrastructure.
-
-The classes being visited do not need to be programmed to accept
-visitors. The visitor need only define visit methods for classes it
-is specifically interested in; a default visit method can handle the
-rest.
-
-XXX The magic \method{visit()} method for visitors.
-
-\begin{funcdesc}{walk}{tree, visitor\optional{, verbose}}
-\end{funcdesc}
-
-\begin{classdesc}{ASTVisitor}{}
-
-The \class{ASTVisitor} is responsible for walking over the tree in the
-correct order. A walk begins with a call to \method{preorder()}. For
-each node, it checks the \var{visitor} argument to \method{preorder()}
-for a method named `visitNodeType,' where NodeType is the name of the
-node's class, e.g. for a \class{While} node a \method{visitWhile()}
-would be called. If the method exists, it is called with the node as
-its first argument.
-
-The visitor method for a particular node type can control how child
-nodes are visited during the walk. The \class{ASTVisitor} modifies
-the visitor argument by adding a visit method to the visitor; this
-method can be used to visit a particular child node. If no visitor is
-found for a particular node type, the \method{default()} method is
-called.
-\end{classdesc}
-
-\class{ASTVisitor} objects have the following methods:
-
-XXX describe extra arguments
-
-\begin{methoddesc}{default}{node\optional{, \moreargs}}
-\end{methoddesc}
-
-\begin{methoddesc}{dispatch}{node\optional{, \moreargs}}
-\end{methoddesc}
-
-\begin{methoddesc}{preorder}{tree, visitor}
-\end{methoddesc}
-
-
-\section{Bytecode Generation}
-
-The code generator is a visitor that emits bytecodes. Each visit method
-can call the \method{emit()} method to emit a new bytecode. The basic
-code generator is specialized for modules, classes, and functions. An
-assembler converts the emitted instructions to the low-level bytecode
-format. It handles things like generation of constant lists of code
-objects and calculation of jump offsets.
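The chapter removed above documents compiler.parse() and the name-based visitor dispatch of walk(). Below is a minimal sketch of that documented interface, runnable only on a Python 2.x release where the compiler package still exists; the FunctionLister class is an invented illustration, not part of the package.

    # Minimal sketch of the documented compiler-package API (Python 2.x only;
    # this is the package the commit above removes).
    import compiler

    class FunctionLister:
        """Collect the names of all function definitions in a module."""
        def __init__(self):
            self.names = []
        # walk() dispatches by method name: visit<NodeClassName>.
        def visitFunction(self, node):
            self.names.append(node.name)   # the name is a plain string, not a Node
            self.visit(node.code)          # visit() is injected by the walker

    tree = compiler.parse("def double(x):\n    return x * 2\n")
    lister = FunctionLister()
    compiler.walk(tree, lister)
    assert lister.names == ['double']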
diff --git a/Doc/lib/lib.tex b/Doc/lib/lib.tex
index aa4d3e8..d87cd5e 100644
--- a/Doc/lib/lib.tex
+++ b/Doc/lib/lib.tex
@@ -182,8 +182,6 @@ and how to embed it in other applications.
\input{libcrypto} % Cryptographic Services
\input{libhashlib}
\input{libhmac}
-\input{libmd5}
-\input{libsha}
% =============
% FILE & DATABASE STORAGE
@@ -388,9 +386,6 @@ and how to embed it in other applications.
\input{custominterp} % Custom interpreter
\input{libcode}
\input{libcodeop}
-\input{librestricted} % Restricted Execution
-\input{librexec}
-\input{libbastion}
\input{modules} % Importing Modules
@@ -419,7 +414,6 @@ and how to embed it in other applications.
\input{libpickletools}
\input{distutils}
-\input{compiler} % compiler package
\input{libast}
\input{libmisc} % Miscellaneous Services
@@ -434,9 +428,6 @@ and how to embed it in other applications.
%\input{libstdwin} % STDWIN ONLY
-\input{libjpeg}
-%\input{libpanel}
-
\input{libsun} % SUNOS ONLY
\input{libsunaudio}
% XXX(nnorwitz): the modules below this comment should be kept.
diff --git a/Doc/lib/libbastion.tex b/Doc/lib/libbastion.tex
deleted file mode 100644
index 9f45c47..0000000
--- a/Doc/lib/libbastion.tex
+++ /dev/null
@@ -1,57 +0,0 @@
-\section{\module{Bastion} ---
- Restricting access to objects}
-
-\declaremodule{standard}{Bastion}
-\modulesynopsis{Providing restricted access to objects.}
-\moduleauthor{Barry Warsaw}{bwarsaw@python.org}
-\versionchanged[Disabled module]{2.3}
-
-\begin{notice}[warning]
- The documentation has been left in place to help in reading old code
- that uses the module.
-\end{notice}
-
-% I'm concerned that the word 'bastion' won't be understood by people
-% for whom English is a second language, making the module name
-% somewhat mysterious. Thus, the brief definition... --amk
-
-According to the dictionary, a bastion is ``a fortified area or
-position'', or ``something that is considered a stronghold.'' It's a
-suitable name for this module, which provides a way to forbid access
-to certain attributes of an object. It must always be used with the
-\refmodule{rexec} module, in order to allow restricted-mode programs
-access to certain safe attributes of an object, while denying access
-to other, unsafe attributes.
-
-% I've punted on the issue of documenting keyword arguments for now.
-
-\begin{funcdesc}{Bastion}{object\optional{, filter\optional{,
- name\optional{, class}}}}
-Protect the object \var{object}, returning a bastion for the
-object. Any attempt to access one of the object's attributes will
-have to be approved by the \var{filter} function; if the access is
-denied an \exception{AttributeError} exception will be raised.
-
-If present, \var{filter} must be a function that accepts a string
-containing an attribute name, and returns true if access to that
-attribute will be permitted; if \var{filter} returns false, the access
-is denied. The default filter denies access to any function beginning
-with an underscore (\character{_}). The bastion's string representation
-will be \samp{<Bastion for \var{name}>} if a value for
-\var{name} is provided; otherwise, \samp{repr(\var{object})} will be
-used.
-
-\var{class}, if present, should be a subclass of \class{BastionClass};
-see the code in \file{bastion.py} for the details. Overriding the
-default \class{BastionClass} will rarely be required.
-\end{funcdesc}
-
-
-\begin{classdesc}{BastionClass}{getfunc, name}
-Class which actually implements bastion objects. This is the default
-class used by \function{Bastion()}. The \var{getfunc} parameter is a
-function which returns the value of an attribute which should be
-exposed to the restricted execution environment when called with the
-name of the attribute as the only parameter. \var{name} is used to
-construct the \function{repr()} of the \class{BastionClass} instance.
-\end{classdesc}
diff --git a/Doc/lib/libjpeg.tex b/Doc/lib/libjpeg.tex
deleted file mode 100644
index a10e06c..0000000
--- a/Doc/lib/libjpeg.tex
+++ /dev/null
@@ -1,80 +0,0 @@
-\section{\module{jpeg} ---
- Read and write JPEG files}
-
-\declaremodule{builtin}{jpeg}
- \platform{IRIX}
-\modulesynopsis{Read and write image files in compressed JPEG format.}
-
-
-The module \module{jpeg} provides access to the jpeg compressor and
-decompressor written by the Independent JPEG Group
-\index{Independent JPEG Group}(IJG). JPEG is a standard for
-compressing pictures; it is defined in ISO 10918. For details on JPEG
-or the Independent JPEG Group software refer to the JPEG standard or
-the documentation provided with the software.
-
-A portable interface to JPEG image files is available with the Python
-Imaging Library (PIL) by Fredrik Lundh. Information on PIL is
-available at \url{http://www.pythonware.com/products/pil/}.
-\index{Python Imaging Library}
-\index{PIL (the Python Imaging Library)}
-\index{Lundh, Fredrik}
-
-The \module{jpeg} module defines an exception and some functions.
-
-\begin{excdesc}{error}
-Exception raised by \function{compress()} and \function{decompress()}
-in case of errors.
-\end{excdesc}
-
-\begin{funcdesc}{compress}{data, w, h, b}
-Treat data as a pixmap of width \var{w} and height \var{h}, with
-\var{b} bytes per pixel. The data is in SGI GL order, so the first
-pixel is in the lower-left corner. This means that \function{gl.lrectread()}
-return data can immediately be passed to \function{compress()}.
-Currently only 1 byte and 4 byte pixels are allowed, the former being
-treated as greyscale and the latter as RGB color.
-\function{compress()} returns a string that contains the compressed
-picture, in JFIF\index{JFIF} format.
-\end{funcdesc}
-
-\begin{funcdesc}{decompress}{data}
-Data is a string containing a picture in JFIF\index{JFIF} format. It
-returns a tuple \code{(\var{data}, \var{width}, \var{height},
-\var{bytesperpixel})}. Again, the data is suitable to pass to
-\function{gl.lrectwrite()}.
-\end{funcdesc}
-
-\begin{funcdesc}{setoption}{name, value}
-Set various options. Subsequent \function{compress()} and
-\function{decompress()} calls will use these options. The following
-options are available:
-
-\begin{tableii}{l|p{3in}}{code}{Option}{Effect}
- \lineii{'forcegray'}{%
- Force output to be grayscale, even if input is RGB.}
- \lineii{'quality'}{%
- Set the quality of the compressed image to a value between
- \code{0} and \code{100} (default is \code{75}). This only affects
- compression.}
- \lineii{'optimize'}{%
- Perform Huffman table optimization. Takes longer, but results in
- smaller compressed image. This only affects compression.}
- \lineii{'smooth'}{%
- Perform inter-block smoothing on uncompressed image. Only useful
- for low-quality images. This only affects decompression.}
-\end{tableii}
-\end{funcdesc}
-
-
-\begin{seealso}
- \seetitle{JPEG Still Image Data Compression Standard}{The
- canonical reference for the JPEG image format, by
- Pennebaker and Mitchell.}
-
- \seetitle[http://www.w3.org/Graphics/JPEG/itu-t81.pdf]{Information
- Technology - Digital Compression and Coding of
- Continuous-tone Still Images - Requirements and
- Guidelines}{The ISO standard for JPEG is also published as
- ITU T.81. This is available online in PDF form.}
-\end{seealso}
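The removed jpeg section already points at the Python Imaging Library as the portable alternative. A rough equivalent of compress() with the quality option, sketched with PIL/Pillow, follows; Pillow is assumed to be installed and the file names are placeholders.

    # Roughly what jpeg.compress() plus setoption('quality', 75) did, using
    # PIL/Pillow; 'picture.png' and 'picture.jpg' are placeholder paths.
    from PIL import Image

    im = Image.open("picture.png").convert("RGB")   # JPEG stores RGB or greyscale only
    im.save("picture.jpg", "JPEG", quality=75)      # 75 matched the jpeg module default

    # And the decompress() direction: back to raw pixel data.
    back = Image.open("picture.jpg")
    width, height = back.size
    data = back.tobytes()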
diff --git a/Doc/lib/libmd5.tex b/Doc/lib/libmd5.tex
deleted file mode 100644
index 38105ae..0000000
--- a/Doc/lib/libmd5.tex
+++ /dev/null
@@ -1,92 +0,0 @@
-\section{\module{md5} ---
- MD5 message digest algorithm}
-
-\declaremodule{builtin}{md5}
-\modulesynopsis{RSA's MD5 message digest algorithm.}
-
-\deprecated{2.5}{Use the \refmodule{hashlib} module instead.}
-
-This module implements the interface to RSA's MD5 message digest
-\index{message digest, MD5}
-algorithm (see also Internet \rfc{1321}). Its use is quite
-straightforward:\ use \function{new()} to create an md5 object.
-You can now feed this object with arbitrary strings using the
-\method{update()} method, and at any point you can ask it for the
-\dfn{digest} (a strong kind of 128-bit checksum,
-a.k.a. ``fingerprint'') of the concatenation of the strings fed to it
-so far using the \method{digest()} method.
-\index{checksum!MD5}
-
-For example, to obtain the digest of the string \code{'Nobody inspects
-the spammish repetition'}:
-
-\begin{verbatim}
->>> import md5
->>> m = md5.new()
->>> m.update("Nobody inspects")
->>> m.update(" the spammish repetition")
->>> m.digest()
-'\xbbd\x9c\x83\xdd\x1e\xa5\xc9\xd9\xde\xc9\xa1\x8d\xf0\xff\xe9'
-\end{verbatim}
-
-More condensed:
-
-\begin{verbatim}
->>> md5.new("Nobody inspects the spammish repetition").digest()
-'\xbbd\x9c\x83\xdd\x1e\xa5\xc9\xd9\xde\xc9\xa1\x8d\xf0\xff\xe9'
-\end{verbatim}
-
-The following values are provided as constants in the module and as
-attributes of the md5 objects returned by \function{new()}:
-
-\begin{datadesc}{digest_size}
- The size of the resulting digest in bytes. This is always
- \code{16}.
-\end{datadesc}
-
-The md5 module provides the following functions:
-
-\begin{funcdesc}{new}{\optional{arg}}
-Return a new md5 object. If \var{arg} is present, the method call
-\code{update(\var{arg})} is made.
-\end{funcdesc}
-
-\begin{funcdesc}{md5}{\optional{arg}}
-For backward compatibility reasons, this is an alternative name for the
-\function{new()} function.
-\end{funcdesc}
-
-An md5 object has the following methods:
-
-\begin{methoddesc}[md5]{update}{arg}
-Update the md5 object with the string \var{arg}. Repeated calls are
-equivalent to a single call with the concatenation of all the
-arguments: \code{m.update(a); m.update(b)} is equivalent to
-\code{m.update(a+b)}.
-\end{methoddesc}
-
-\begin{methoddesc}[md5]{digest}{}
-Return the digest of the strings passed to the \method{update()}
-method so far. This is a 16-byte string which may contain
-non-\ASCII{} characters, including null bytes.
-\end{methoddesc}
-
-\begin{methoddesc}[md5]{hexdigest}{}
-Like \method{digest()} except the digest is returned as a string of
-length 32, containing only hexadecimal digits. This may
-be used to exchange the value safely in email or other non-binary
-environments.
-\end{methoddesc}
-
-\begin{methoddesc}[md5]{copy}{}
-Return a copy (``clone'') of the md5 object. This can be used to
-efficiently compute the digests of strings that share a common initial
-substring.
-\end{methoddesc}
-
-
-\begin{seealso}
- \seemodule{sha}{Similar module implementing the Secure Hash
- Algorithm (SHA). The SHA algorithm is considered a
- more secure hash.}
-\end{seealso}
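The deprecation notice in the removed md5 section points to hashlib; the same example expressed with the replacement module looks like this (bytes literals are used so the snippet also runs on the py3k branch this merge targets).

    # hashlib equivalent of the removed md5 example.
    import hashlib

    m = hashlib.md5()
    m.update(b"Nobody inspects")
    m.update(b" the spammish repetition")
    assert m.digest_size == 16
    assert m.hexdigest() == 'bb649c83dd1ea5c9d9dec9a18df0ffe9'

    # The condensed form from the removed docs, via the replacement constructor:
    digest = hashlib.md5(b"Nobody inspects the spammish repetition").hexdigest()
    assert digest == 'bb649c83dd1ea5c9d9dec9a18df0ffe9'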
diff --git a/Doc/lib/libpanel.tex b/Doc/lib/libpanel.tex
deleted file mode 100644
index f2db0b0..0000000
--- a/Doc/lib/libpanel.tex
+++ /dev/null
@@ -1,74 +0,0 @@
-\section{\module{panel} ---
- None}
-\declaremodule{standard}{panel}
-
-\modulesynopsis{None}
-
-
-\strong{Please note:} The FORMS library, to which the
-\code{fl}\refbimodindex{fl} module described above interfaces, is a
-simpler and more accessible user interface library for use with GL
-than the \code{panel} module (besides also being by a Dutch author).
-
-This module should be used instead of the built-in module
-\code{pnl}\refbimodindex{pnl}
-to interface with the
-\emph{Panel Library}.
-
-The module is too large to document here in its entirety.
-One interesting function:
-
-\begin{funcdesc}{defpanellist}{filename}
-Parses a panel description file containing S-expressions written by the
-\emph{Panel Editor}
-that accompanies the Panel Library and creates the described panels.
-It returns a list of panel objects.
-\end{funcdesc}
-
-\warning{The Python interpreter will dump core if you don't create a
-GL window before calling
-\code{panel.mkpanel()}
-or
-\code{panel.defpanellist()}.}
-
-\section{\module{panelparser} ---
- None}
-\declaremodule{standard}{panelparser}
-
-\modulesynopsis{None}
-
-
-This module defines a self-contained parser for S-expressions as output
-by the Panel Editor (which is written in Scheme so it can't help writing
-S-expressions).
-The relevant function is
-\code{panelparser.parse_file(\var{file})}
-which has a file object (not a filename!) as argument and returns a list
-of parsed S-expressions.
-Each S-expression is converted into a Python list, with atoms converted
-to Python strings and sub-expressions (recursively) to Python lists.
-For more details, read the module file.
-% XXXXJH should be funcdesc, I think
-
-\section{\module{pnl} ---
- None}
-\declaremodule{builtin}{pnl}
-
-\modulesynopsis{None}
-
-
-This module provides access to the
-\emph{Panel Library}
-built by NASA Ames\index{NASA} (to get it, send email to
-\code{panel-request@nas.nasa.gov}).
-All access to it should be done through the standard module
-\code{panel}\refstmodindex{panel},
-which transparently exports most functions from
-\code{pnl}
-but redefines
-\code{pnl.dopanel()}.
-
-\warning{The Python interpreter will dump core if you don't create a
-GL window before calling \code{pnl.mkpanel()}.}
-
-The module is too large to document here in its entirety.
diff --git a/Doc/lib/librestricted.tex b/Doc/lib/librestricted.tex
deleted file mode 100644
index 5d4b157..0000000
--- a/Doc/lib/librestricted.tex
+++ /dev/null
@@ -1,66 +0,0 @@
-\chapter{Restricted Execution \label{restricted}}
-
-\begin{notice}[warning]
- In Python 2.3 these modules have been disabled due to various known
- and not readily fixable security holes. The modules are still
- documented here to help in reading old code that uses the
- \module{rexec} and \module{Bastion} modules.
-\end{notice}
-
-\emph{Restricted execution} is the basic framework in Python that allows
-for the segregation of trusted and untrusted code. The framework is based on the
-notion that trusted Python code (a \emph{supervisor}) can create a
-``padded cell'' (or environment) with limited permissions, and run the
-untrusted code within this cell. The untrusted code cannot break out
-of its cell, and can only interact with sensitive system resources
-through interfaces defined and managed by the trusted code. The term
-``restricted execution'' is favored over ``safe-Python''
-since true safety is hard to define, and is determined by the way the
-restricted environment is created. Note that the restricted
-environments can be nested, with inner cells creating subcells of
-lesser, but never greater, privilege.
-
-An interesting aspect of Python's restricted execution model is that
-the interfaces presented to untrusted code usually have the same names
-as those presented to trusted code. Therefore no special interfaces
-need to be learned to write code designed to run in a restricted
-environment. And because the exact nature of the padded cell is
-determined by the supervisor, different restrictions can be imposed,
-depending on the application. For example, it might be deemed
-``safe'' for untrusted code to read any file within a specified
-directory, but never to write a file. In this case, the supervisor
-may redefine the built-in \function{open()} function so that it raises
-an exception whenever the \var{mode} parameter is \code{'w'}. It
-might also perform a \cfunction{chroot()}-like operation on the
-\var{filename} parameter, such that root is always relative to some
-safe ``sandbox'' area of the filesystem. In this case, the untrusted
-code would still see a built-in \function{open()} function in its
-environment, with the same calling interface. The semantics would be
-identical too, with \exception{IOError}s being raised when the
-supervisor determines that an unallowable parameter is being used.
-
-The Python run-time determines whether a particular code block is
-executing in restricted execution mode based on the identity of the
-\code{__builtins__} object in its global variables: if this is (the
-dictionary of) the standard \refmodule[builtin]{__builtin__} module,
-the code is deemed to be unrestricted, else it is deemed to be
-restricted.
-
-Python code executing in restricted mode faces a number of limitations
-that are designed to prevent it from escaping from the padded cell.
-For instance, the function object attribute \member{func_globals} and
-the class and instance object attribute \member{__dict__} are
-unavailable.
-
-Two modules provide the framework for setting up restricted execution
-environments:
-
-\localmoduletable
-
-\begin{seealso}
- \seetitle[http://grail.sourceforge.net/]{Grail Home Page}
- {Grail, an Internet browser written in Python, uses these
- modules to support Python applets. More
- information on the use of Python's restricted execution
- mode in Grail is available on the Web site.}
-\end{seealso}
diff --git a/Doc/lib/librexec.tex b/Doc/lib/librexec.tex
deleted file mode 100644
index 3104004..0000000
--- a/Doc/lib/librexec.tex
+++ /dev/null
@@ -1,275 +0,0 @@
-\section{\module{rexec} ---
- Restricted execution framework}
-
-\declaremodule{standard}{rexec}
-\modulesynopsis{Basic restricted execution framework.}
-\versionchanged[Disabled module]{2.3}
-
-\begin{notice}[warning]
- The documentation has been left in place to help in reading old code
- that uses the module.
-\end{notice}
-
-This module contains the \class{RExec} class, which supports
-\method{r_exec()}, \method{r_eval()}, \method{r_execfile()}, and
-\method{r_import()} methods, which are restricted versions of the standard
-Python functions \method{exec()}, \method{eval()}, \method{execfile()} and
-the \keyword{import} statement.
-Code executed in this restricted environment will
-only have access to modules and functions that are deemed safe; you
-can subclass \class{RExec} to add or remove capabilities as desired.
-
-\begin{notice}[warning]
- While the \module{rexec} module is designed to perform as described
- below, it does have a few known vulnerabilities which could be
- exploited by carefully written code. Thus it should not be relied
- upon in situations requiring ``production ready'' security. In such
- situations, execution via sub-processes or very careful
- ``cleansing'' of both code and data to be processed may be
- necessary. Alternatively, help in patching known \module{rexec}
- vulnerabilities would be welcomed.
-\end{notice}
-
-\begin{notice}
- The \class{RExec} class can prevent code from performing unsafe
- operations like reading or writing disk files, or using TCP/IP
- sockets. However, it does not protect against code using extremely
- large amounts of memory or processor time.
-\end{notice}
-
-\begin{classdesc}{RExec}{\optional{hooks\optional{, verbose}}}
-Returns an instance of the \class{RExec} class.
-
-\var{hooks} is an instance of the \class{RHooks} class or a subclass of it.
-If it is omitted or \code{None}, the default \class{RHooks} class is
-instantiated.
-Whenever the \module{rexec} module searches for a module (even a
-built-in one) or reads a module's code, it doesn't actually go out to
-the file system itself. Rather, it calls methods of an \class{RHooks}
-instance that was passed to or created by its constructor. (Actually,
-the \class{RExec} object doesn't make these calls --- they are made by
-a module loader object that's part of the \class{RExec} object. This
-allows another level of flexibility, which can be useful when changing
-the mechanics of \keyword{import} within the restricted environment.)
-
-By providing an alternate \class{RHooks} object, we can control the
-file system accesses made to import a module, without changing the
-actual algorithm that controls the order in which those accesses are
-made. For instance, we could substitute an \class{RHooks} object that
-passes all filesystem requests to a file server elsewhere, via some
-RPC mechanism such as ILU. Grail's applet loader uses this to support
-importing applets from a URL for a directory.
-
-If \var{verbose} is true, additional debugging output may be sent to
-standard output.
-\end{classdesc}
-
-It is important to be aware that code running in a restricted
-environment can still call the \function{sys.exit()} function. To
-disallow restricted code from exiting the interpreter, always protect
-calls that cause restricted code to run with a
-\keyword{try}/\keyword{except} statement that catches the
-\exception{SystemExit} exception. Removing the \function{sys.exit()}
-function from the restricted environment is not sufficient --- the
-restricted code could still use \code{raise SystemExit}. Removing
-\exception{SystemExit} is not a reasonable option; some library code
-makes use of this and would break were it not available.
-
-
-\begin{seealso}
- \seetitle[http://grail.sourceforge.net/]{Grail Home Page}{Grail is a
- Web browser written entirely in Python. It uses the
- \module{rexec} module as a foundation for supporting
- Python applets, and can be used as an example usage of
- this module.}
-\end{seealso}
-
-
-\subsection{RExec Objects \label{rexec-objects}}
-
-\class{RExec} instances support the following methods:
-
-\begin{methoddesc}[RExec]{r_eval}{code}
-\var{code} must either be a string containing a Python expression, or
-a compiled code object, which will be evaluated in the restricted
-environment's \module{__main__} module. The value of the expression or
-code object will be returned.
-\end{methoddesc}
-
-\begin{methoddesc}[RExec]{r_exec}{code}
-\var{code} must either be a string containing one or more lines of
-Python code, or a compiled code object, which will be executed in the
-restricted environment's \module{__main__} module.
-\end{methoddesc}
-
-\begin{methoddesc}[RExec]{r_execfile}{filename}
-Execute the Python code contained in the file \var{filename} in the
-restricted environment's \module{__main__} module.
-\end{methoddesc}
-
-Methods whose names begin with \samp{s_} are similar to the functions
-beginning with \samp{r_}, but the code will be granted access to
-restricted versions of the standard I/O streams \code{sys.stdin},
-\code{sys.stderr}, and \code{sys.stdout}.
-
-\begin{methoddesc}[RExec]{s_eval}{code}
-\var{code} must be a string containing a Python expression, which will
-be evaluated in the restricted environment.
-\end{methoddesc}
-
-\begin{methoddesc}[RExec]{s_exec}{code}
-\var{code} must be a string containing one or more lines of Python code,
-which will be executed in the restricted environment.
-\end{methoddesc}
-
-\begin{methoddesc}[RExec]{s_execfile}{filename}
-Execute the Python code contained in the file \var{filename} in the
-restricted environment.
-\end{methoddesc}
-
-\class{RExec} objects must also support various methods which will be
-implicitly called by code executing in the restricted environment.
-Overriding these methods in a subclass is used to change the policies
-enforced by a restricted environment.
-
-\begin{methoddesc}[RExec]{r_import}{modulename\optional{, globals\optional{,
- locals\optional{, fromlist}}}}
-Import the module \var{modulename}, raising an \exception{ImportError}
-exception if the module is considered unsafe.
-\end{methoddesc}
-
-\begin{methoddesc}[RExec]{r_open}{filename\optional{, mode\optional{, bufsize}}}
-Method called when \function{open()} is called in the restricted
-environment. The arguments are identical to those of \function{open()},
-and a file object (or a class instance compatible with file objects)
-should be returned. \class{RExec}'s default behaviour is to allow opening
-any file for reading, but to forbid any attempt to write a file. See
-the example below for an implementation of a less restrictive
-\method{r_open()}.
-\end{methoddesc}
-
-\begin{methoddesc}[RExec]{r_reload}{module}
-Reload the module object \var{module}, re-parsing and re-initializing it.
-\end{methoddesc}
-
-\begin{methoddesc}[RExec]{r_unload}{module}
-Unload the module object \var{module} (remove it from the
-restricted environment's \code{sys.modules} dictionary).
-\end{methoddesc}
-
-And their equivalents with access to restricted standard I/O streams:
-
-\begin{methoddesc}[RExec]{s_import}{modulename\optional{, globals\optional{,
- locals\optional{, fromlist}}}}
-Import the module \var{modulename}, raising an \exception{ImportError}
-exception if the module is considered unsafe.
-\end{methoddesc}
-
-\begin{methoddesc}[RExec]{s_reload}{module}
-Reload the module object \var{module}, re-parsing and re-initializing it.
-\end{methoddesc}
-
-\begin{methoddesc}[RExec]{s_unload}{module}
-Unload the module object \var{module}.
-% XXX what are the semantics of this?
-\end{methoddesc}
-
-
-\subsection{Defining restricted environments \label{rexec-extension}}
-
-The \class{RExec} class has the following class attributes, which are
-used by the \method{__init__()} method. Changing them on an existing
-instance won't have any effect; instead, create a subclass of
-\class{RExec} and assign them new values in the class definition.
-Instances of the new class will then use those new values. All these
-attributes are tuples of strings.
-
-\begin{memberdesc}[RExec]{nok_builtin_names}
-Contains the names of built-in functions which will \emph{not} be
-available to programs running in the restricted environment. The
-value for \class{RExec} is \code{('open', 'reload', '__import__')}.
-(This gives the exceptions, because by far the majority of built-in
-functions are harmless. A subclass that wants to override this
-variable should probably start with the value from the base class and
-concatenate additional forbidden functions --- when new dangerous
-built-in functions are added to Python, they will also be added to
-this module.)
-\end{memberdesc}
-
-\begin{memberdesc}[RExec]{ok_builtin_modules}
-Contains the names of built-in modules which can be safely imported.
-The value for \class{RExec} is \code{('audioop', 'array', 'binascii',
-'cmath', 'errno', 'imageop', 'marshal', 'math', 'md5', 'operator',
-'parser', 'regex', 'select', 'sha', '_sre', 'strop',
-'struct', 'time')}. A similar remark about overriding this variable
-applies --- use the value from the base class as a starting point.
-\end{memberdesc}
-
-\begin{memberdesc}[RExec]{ok_path}
-Contains the directories which will be searched when an \keyword{import}
-is performed in the restricted environment.
-The value for \class{RExec} is the same as \code{sys.path} (at the time
-the module is loaded) for unrestricted code.
-\end{memberdesc}
-
-\begin{memberdesc}[RExec]{ok_posix_names}
-% Should this be called ok_os_names?
-Contains the names of the functions in the \refmodule{os} module which will be
-available to programs running in the restricted environment. The
-value for \class{RExec} is \code{('error', 'fstat', 'listdir',
-'lstat', 'readlink', 'stat', 'times', 'uname', 'getpid', 'getppid',
-'getcwd', 'getuid', 'getgid', 'geteuid', 'getegid')}.
-\end{memberdesc}
-
-\begin{memberdesc}[RExec]{ok_sys_names}
-Contains the names of the functions and variables in the \refmodule{sys}
-module which will be available to programs running in the restricted
-environment. The value for \class{RExec} is \code{('ps1', 'ps2',
-'copyright', 'version', 'platform', 'exit', 'maxint')}.
-\end{memberdesc}
-
-\begin{memberdesc}[RExec]{ok_file_types}
-Contains the file types from which modules are allowed to be loaded.
-Each file type is an integer constant defined in the \refmodule{imp} module.
-The meaningful values are \constant{PY_SOURCE}, \constant{PY_COMPILED}, and
-\constant{C_EXTENSION}. The value for \class{RExec} is \code{(C_EXTENSION,
-PY_SOURCE)}. Adding \constant{PY_COMPILED} in subclasses is not recommended;
-an attacker could exit the restricted execution mode by putting a forged
-byte-compiled file (\file{.pyc}) anywhere in your file system, for example
-by writing it to \file{/tmp} or uploading it to the \file{/incoming}
-directory of your public FTP server.
-\end{memberdesc}
-
-
-\subsection{An example}
-
-Let us say that we want a slightly more relaxed policy than the
-standard \class{RExec} class. For example, if we're willing to allow
-files in \file{/tmp} to be written, we can subclass the \class{RExec}
-class:
-
-\begin{verbatim}
-class TmpWriterRExec(rexec.RExec):
- def r_open(self, file, mode='r', buf=-1):
- if mode in ('r', 'rb'):
- pass
- elif mode in ('w', 'wb', 'a', 'ab'):
- # check filename : must begin with /tmp/
- if file[:5]!='/tmp/':
- raise IOError, "can't write outside /tmp"
- elif (string.find(file, '/../') >= 0 or
- file[:3] == '../' or file[-3:] == '/..'):
- raise IOError, "'..' in filename forbidden"
- else: raise IOError, "Illegal open() mode"
- return open(file, mode, buf)
-\end{verbatim}
-%
-Notice that the above code will occasionally forbid a perfectly valid
-filename; for example, code in the restricted environment won't be
-able to open a file called \file{/tmp/foo/../bar}. To fix this, the
-\method{r_open()} method would have to simplify the filename to
-\file{/tmp/bar}, which would require splitting apart the filename and
-performing various operations on it. In cases where security is at
-stake, it may be preferable to write simple code which is sometimes
-overly restrictive, instead of more general code that is also more
-complex and may harbor a subtle security hole.
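The removed rexec section insists that callers guard any call that runs restricted code with a try/except around SystemExit, since deleting sys.exit() still leaves raise SystemExit available. The guard itself is sketched below; a plain exec() stands in for RExec.r_exec() here because the module is disabled on modern interpreters, so this shows the pattern rather than the removed API.

    # The SystemExit guard the removed text recommends, shown with plain exec()
    # as a stand-in for RExec.r_exec().
    untrusted_source = "raise SystemExit('bye')"
    try:
        exec(untrusted_source, {})
    except SystemExit:
        pass   # the untrusted code tried to stop the interpreter; swallow it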
diff --git a/Doc/lib/libsha.tex b/Doc/lib/libsha.tex
deleted file mode 100644
index 6d1da68..0000000
--- a/Doc/lib/libsha.tex
+++ /dev/null
@@ -1,83 +0,0 @@
-\section{\module{sha} ---
- SHA-1 message digest algorithm}
-
-\declaremodule{builtin}{sha}
-\modulesynopsis{NIST's secure hash algorithm, SHA.}
-\sectionauthor{Fred L. Drake, Jr.}{fdrake@acm.org}
-
-\deprecated{2.5}{Use the \refmodule{hashlib} module instead.}
-
-
-This module implements the interface to NIST's\index{NIST} secure hash
-algorithm,\index{Secure Hash Algorithm} known as SHA-1. SHA-1 is an
-improved version of the original SHA hash algorithm. It is used in
-the same way as the \refmodule{md5} module:\ use \function{new()}
-to create an sha object, then feed this object with arbitrary strings
-using the \method{update()} method, and at any point you can ask it
-for the \dfn{digest} of the concatenation of the strings fed to it
-so far.\index{checksum!SHA} SHA-1 digests are 160 bits instead of
-MD5's 128 bits.
-
-
-\begin{funcdesc}{new}{\optional{string}}
- Return a new sha object. If \var{string} is present, the method
- call \code{update(\var{string})} is made.
-\end{funcdesc}
-
-
-The following values are provided as constants in the module and as
-attributes of the sha objects returned by \function{new()}:
-
-\begin{datadesc}{blocksize}
- Size of the blocks fed into the hash function; this is always
- \code{1}. This size is used to allow an arbitrary string to be
- hashed.
-\end{datadesc}
-
-\begin{datadesc}{digest_size}
- The size of the resulting digest in bytes. This is always
- \code{20}.
-\end{datadesc}
-
-
-An sha object has the same methods as md5 objects:
-
-\begin{methoddesc}[sha]{update}{arg}
-Update the sha object with the string \var{arg}. Repeated calls are
-equivalent to a single call with the concatenation of all the
-arguments: \code{m.update(a); m.update(b)} is equivalent to
-\code{m.update(a+b)}.
-\end{methoddesc}
-
-\begin{methoddesc}[sha]{digest}{}
-Return the digest of the strings passed to the \method{update()}
-method so far. This is a 20-byte string which may contain
-non-\ASCII{} characters, including null bytes.
-\end{methoddesc}
-
-\begin{methoddesc}[sha]{hexdigest}{}
-Like \method{digest()} except the digest is returned as a string of
-length 40, containing only hexadecimal digits. This may
-be used to exchange the value safely in email or other non-binary
-environments.
-\end{methoddesc}
-
-\begin{methoddesc}[sha]{copy}{}
-Return a copy (``clone'') of the sha object. This can be used to
-efficiently compute the digests of strings that share a common initial
-substring.
-\end{methoddesc}
-
-\begin{seealso}
- \seetitle[http://csrc.nist.gov/publications/fips/fips180-2/fips180-2withchangenotice.pdf]
- {Secure Hash Standard}
- {The Secure Hash Algorithm is defined by NIST document FIPS
- PUB 180-2:
- \citetitle[http://csrc.nist.gov/publications/fips/fips180-2/fips180-2withchangenotice.pdf]
- {Secure Hash Standard}, published in August 2002.}
-
- \seetitle[http://csrc.nist.gov/encryption/tkhash.html]
- {Cryptographic Toolkit (Secure Hashing)}
- {Links from NIST to various information on secure hashing.}
-\end{seealso}
-
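The removed sha section carries the same hashlib deprecation notice; a short equivalent that also exercises digest_size and copy() as documented:

    # hashlib replacement for the removed sha module.
    import hashlib

    s = hashlib.sha1(b"Nobody inspects the spammish repetition")
    assert s.digest_size == 20          # SHA-1 digests are 160 bits
    clone = s.copy()                    # reuse the state for a shared prefix
    clone.update(b" -- and again")
    print(s.hexdigest())
    print(clone.hexdigest())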
diff --git a/Doc/ref/ref3.tex b/Doc/ref/ref3.tex
index 8340e17..3f82a8c 100644
--- a/Doc/ref/ref3.tex
+++ b/Doc/ref/ref3.tex
@@ -941,18 +941,15 @@ stack frame; \member{f_code} is the code object being executed in this
frame; \member{f_locals} is the dictionary used to look up local
variables; \member{f_globals} is used for global variables;
\member{f_builtins} is used for built-in (intrinsic) names;
-\member{f_restricted} is a flag indicating whether the function is
-executing in restricted execution mode; \member{f_lasti} gives the
-precise instruction (this is an index into the bytecode string of
-the code object).
+ \member{f_lasti} gives the precise instruction (this is an index into
+ the bytecode string of the code object).
\withsubitem{(frame attribute)}{
\ttindex{f_back}
\ttindex{f_code}
\ttindex{f_globals}
\ttindex{f_locals}
\ttindex{f_lasti}
- \ttindex{f_builtins}
- \ttindex{f_restricted}}
+ \ttindex{f_builtins}}
Special writable attributes: \member{f_trace}, if not \code{None}, is
a function called at the start of each source code line (this is used
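With f_restricted gone from the reference text, the frame attributes that remain can still be inspected from Python; a small check using sys._getframe(), chosen here only for brevity, is sketched below.

    # Inspect the frame attributes the updated reference manual still lists.
    import sys

    frame = sys._getframe()
    print(frame.f_code.co_name)         # the code object being executed
    print(frame.f_lasti)                # index into that code object's bytecode
    print(sorted(frame.f_locals))       # local-variable dictionary
    print(frame.f_back)                 # None at the top level of a script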
diff --git a/Include/ceval.h b/Include/ceval.h
index 9c77a85..15b29c6 100644
--- a/Include/ceval.h
+++ b/Include/ceval.h
@@ -33,7 +33,6 @@ PyAPI_FUNC(PyObject *) PyEval_GetBuiltins(void);
PyAPI_FUNC(PyObject *) PyEval_GetGlobals(void);
PyAPI_FUNC(PyObject *) PyEval_GetLocals(void);
PyAPI_FUNC(struct _frame *) PyEval_GetFrame(void);
-PyAPI_FUNC(int) PyEval_GetRestricted(void);
/* Look at the current frame's (if any) code's co_flags, and turn on
the corresponding compiler flags in cf->cf_flags. Return 1 if any
diff --git a/Include/frameobject.h b/Include/frameobject.h
index 794f651..392db46 100644
--- a/Include/frameobject.h
+++ b/Include/frameobject.h
@@ -52,8 +52,6 @@ typedef struct _frame {
PyAPI_DATA(PyTypeObject) PyFrame_Type;
#define PyFrame_Check(op) ((op)->ob_type == &PyFrame_Type)
-#define PyFrame_IsRestricted(f) \
- ((f)->f_builtins != (f)->f_tstate->interp->builtins)
PyAPI_FUNC(PyFrameObject *) PyFrame_New(PyThreadState *, PyCodeObject *,
PyObject *, PyObject *);
diff --git a/Lib/Bastion.py b/Lib/Bastion.py
deleted file mode 100644
index 5331ba9..0000000
--- a/Lib/Bastion.py
+++ /dev/null
@@ -1,177 +0,0 @@
-"""Bastionification utility.
-
-A bastion (for another object -- the 'original') is an object that has
-the same methods as the original but does not give access to its
-instance variables. Bastions have a number of uses, but the most
-obvious one is to provide code executing in restricted mode with a
-safe interface to an object implemented in unrestricted mode.
-
-The bastionification routine has an optional second argument which is
-a filter function. Only those methods for which the filter method
-(called with the method name as argument) returns true are accessible.
-The default filter method returns true unless the method name begins
-with an underscore.
-
-There are a number of possible implementations of bastions. We use a
-'lazy' approach where the bastion's __getattr__() discipline does all
-the work for a particular method the first time it is used. This is
-usually fastest, especially if the user doesn't call all available
-methods. The retrieved methods are stored as instance variables of
-the bastion, so the overhead is only incurred on the first use of each
-method.
-
-Detail: the bastion class has a __repr__() discipline which includes
-the repr() of the original object. This is precomputed when the
-bastion is created.
-
-"""
-
-__all__ = ["BastionClass", "Bastion"]
-
-from types import MethodType
-
-
-class BastionClass:
-
- """Helper class used by the Bastion() function.
-
- You could subclass this and pass the subclass as the bastionclass
- argument to the Bastion() function, as long as the constructor has
- the same signature (a get() function and a name for the object).
-
- """
-
- def __init__(self, get, name):
- """Constructor.
-
- Arguments:
-
- get - a function that gets the attribute value (by name)
- name - a human-readable name for the original object
- (suggestion: use repr(object))
-
- """
- self._get_ = get
- self._name_ = name
-
- def __repr__(self):
- """Return a representation string.
-
- This includes the name passed in to the constructor, so that
- if you print the bastion during debugging, at least you have
- some idea of what it is.
-
- """
- return "<Bastion for %s>" % self._name_
-
- def __getattr__(self, name):
- """Get an as-yet undefined attribute value.
-
- This calls the get() function that was passed to the
- constructor. The result is stored as an instance variable so
- that the next time the same attribute is requested,
- __getattr__() won't be invoked.
-
- If the get() function raises an exception, this is simply
- passed on -- exceptions are not cached.
-
- """
- attribute = self._get_(name)
- self.__dict__[name] = attribute
- return attribute
-
-
-def Bastion(object, filter = lambda name: name[:1] != '_',
- name=None, bastionclass=BastionClass):
- """Create a bastion for an object, using an optional filter.
-
- See the Bastion module's documentation for background.
-
- Arguments:
-
- object - the original object
- filter - a predicate that decides whether a function name is OK;
- by default all names are OK that don't start with '_'
- name - the name of the object; default repr(object)
- bastionclass - class used to create the bastion; default BastionClass
-
- """
-
- raise RuntimeError, "This code is not secure in Python 2.2 and later"
-
- # Note: we define *two* ad-hoc functions here, get1 and get2.
- # Both are intended to be called in the same way: get(name).
- # It is clear that the real work (getting the attribute
- # from the object and calling the filter) is done in get1.
- # Why can't we pass get1 to the bastion? Because the user
- # would be able to override the filter argument! With get2,
- # overriding the default argument is no security loophole:
- # all it does is call it.
- # Also notice that we can't place the object and filter as
- # instance variables on the bastion object itself, since
- # the user has full access to all instance variables!
-
- def get1(name, object=object, filter=filter):
- """Internal function for Bastion(). See source comments."""
- if filter(name):
- attribute = getattr(object, name)
- if type(attribute) == MethodType:
- return attribute
- raise AttributeError, name
-
- def get2(name, get1=get1):
- """Internal function for Bastion(). See source comments."""
- return get1(name)
-
- if name is None:
- name = repr(object)
- return bastionclass(get2, name)
-
-
-def _test():
- """Test the Bastion() function."""
- class Original:
- def __init__(self):
- self.sum = 0
- def add(self, n):
- self._add(n)
- def _add(self, n):
- self.sum = self.sum + n
- def total(self):
- return self.sum
- o = Original()
- b = Bastion(o)
- testcode = """if 1:
- b.add(81)
- b.add(18)
- print "b.total() =", b.total()
- try:
- print "b.sum =", b.sum,
- except:
- print "inaccessible"
- else:
- print "accessible"
- try:
- print "b._add =", b._add,
- except:
- print "inaccessible"
- else:
- print "accessible"
- try:
- print "b._get_.__defaults__ =", map(type, b._get_.__defaults__),
- except:
- print "inaccessible"
- else:
- print "accessible"
- \n"""
- exec(testcode)
- print('='*20, "Using rexec:", '='*20)
- import rexec
- r = rexec.RExec()
- m = r.add_module('__main__')
- m.b = b
- r.r_exec(testcode)
-
-
-if __name__ == '__main__':
- _test()
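A bastion wrapped another object so that only its filtered public methods remained reachable. The following is a minimal sketch of the same pattern in current Python, assuming only what the module docstring above describes (and sharing the weakness noted by the RuntimeError: bound methods still expose the wrapped object through __self__, so this is illustration, not a security boundary):

    def make_bastion(obj, allow=lambda name: not name.startswith('_')):
        """Illustrative re-creation of the Bastion pattern; not a security boundary."""
        class _Bastion:
            def __getattr__(self, name):
                if allow(name):
                    attr = getattr(obj, name)
                    if callable(attr):
                        self.__dict__[name] = attr   # cache, mirroring BastionClass.__getattr__
                        return attr
                raise AttributeError(name)
            def __repr__(self):
                return "<Bastion for %r>" % (obj,)
        return _Bastion()

Wrapping an object with b = make_bastion(obj) then lets b.add(1) through while b._internal raises AttributeError, which is the behaviour _test() above exercises.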
diff --git a/Lib/compiler/__init__.py b/Lib/compiler/__init__.py
deleted file mode 100644
index ce89144..0000000
--- a/Lib/compiler/__init__.py
+++ /dev/null
@@ -1,26 +0,0 @@
-"""Package for parsing and compiling Python source code
-
-There are several functions defined at the top level that are imported
-from modules contained in the package.
-
-parse(buf, mode="exec") -> AST
- Converts a string containing Python source code to an abstract
- syntax tree (AST). The AST is defined in compiler.ast.
-
-parseFile(path) -> AST
- The same as parse(open(path))
-
-walk(ast, visitor, verbose=None)
- Does a pre-order walk over the ast using the visitor instance.
- See compiler.visitor for details.
-
-compile(source, filename, mode, flags=None, dont_inherit=None)
- Returns a code object. A replacement for the builtin compile() function.
-
-compileFile(filename)
- Generates a .pyc file by compiling filename.
-"""
-
-from compiler.transformer import parse, parseFile
-from compiler.visitor import walk
-from compiler.pycodegen import compile, compileFile
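Before this removal, the documented entry points above were typically used roughly as follows (illustrative only; the package no longer exists after this commit, and parse() returned a compiler.ast.Module while compile() mirrored the built-in of the same name):

    from compiler import parse, compile as compile_ast

    tree = parse("x = 1 + 2", mode="exec")               # compiler.ast.Module node
    code = compile_ast("x = 1 + 2", "<string>", "exec")  # a code object, like builtin compile()
    exec(code)                                           # binds x to 3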
diff --git a/Lib/compiler/ast.py b/Lib/compiler/ast.py
deleted file mode 100644
index fb9be2a..0000000
--- a/Lib/compiler/ast.py
+++ /dev/null
@@ -1,1370 +0,0 @@
-"""Python abstract syntax node definitions
-
-This file is automatically generated by Tools/compiler/astgen.py
-"""
-from compiler.consts import CO_VARARGS, CO_VARKEYWORDS
-
-def flatten(seq):
- l = []
- for elt in seq:
- t = type(elt)
- if t is tuple or t is list:
- for elt2 in flatten(elt):
- l.append(elt2)
- else:
- l.append(elt)
- return l
-
-def flatten_nodes(seq):
- return [n for n in flatten(seq) if isinstance(n, Node)]
-
-nodes = {}
-
-class Node:
- """Abstract base class for ast nodes."""
- def getChildren(self):
- pass # implemented by subclasses
- def __iter__(self):
- for n in self.getChildren():
- yield n
- def asList(self): # for backwards compatibility
- return self.getChildren()
- def getChildNodes(self):
- pass # implemented by subclasses
-
-class EmptyNode(Node):
- def getChildNodes(self):
- return ()
- def getChildren(self):
- return ()
-
-class Expression(Node):
- # Expression is an artificial node class to support "eval"
- nodes["expression"] = "Expression"
- def __init__(self, node):
- self.node = node
-
- def getChildren(self):
- return self.node,
-
- def getChildNodes(self):
- return self.node,
-
- def __repr__(self):
- return "Expression(%s)" % (repr(self.node))
-
-class Add(Node):
- def __init__(self, (left, right), lineno=None):
- self.left = left
- self.right = right
- self.lineno = lineno
-
- def getChildren(self):
- return self.left, self.right
-
- def getChildNodes(self):
- return self.left, self.right
-
- def __repr__(self):
- return "Add((%s, %s))" % (repr(self.left), repr(self.right))
-
-class And(Node):
- def __init__(self, nodes, lineno=None):
- self.nodes = nodes
- self.lineno = lineno
-
- def getChildren(self):
- return tuple(flatten(self.nodes))
-
- def getChildNodes(self):
- nodelist = []
- nodelist.extend(flatten_nodes(self.nodes))
- return tuple(nodelist)
-
- def __repr__(self):
- return "And(%s)" % (repr(self.nodes),)
-
-class AssAttr(Node):
- def __init__(self, expr, attrname, flags, lineno=None):
- self.expr = expr
- self.attrname = attrname
- self.flags = flags
- self.lineno = lineno
-
- def getChildren(self):
- return self.expr, self.attrname, self.flags
-
- def getChildNodes(self):
- return self.expr,
-
- def __repr__(self):
- return "AssAttr(%s, %s, %s)" % (repr(self.expr), repr(self.attrname), repr(self.flags))
-
-class AssList(Node):
- def __init__(self, nodes, lineno=None):
- self.nodes = nodes
- self.lineno = lineno
-
- def getChildren(self):
- return tuple(flatten(self.nodes))
-
- def getChildNodes(self):
- nodelist = []
- nodelist.extend(flatten_nodes(self.nodes))
- return tuple(nodelist)
-
- def __repr__(self):
- return "AssList(%s)" % (repr(self.nodes),)
-
-class AssName(Node):
- def __init__(self, name, flags, lineno=None):
- self.name = name
- self.flags = flags
- self.lineno = lineno
-
- def getChildren(self):
- return self.name, self.flags
-
- def getChildNodes(self):
- return ()
-
- def __repr__(self):
- return "AssName(%s, %s)" % (repr(self.name), repr(self.flags))
-
-class AssTuple(Node):
- def __init__(self, nodes, lineno=None):
- self.nodes = nodes
- self.lineno = lineno
-
- def getChildren(self):
- return tuple(flatten(self.nodes))
-
- def getChildNodes(self):
- nodelist = []
- nodelist.extend(flatten_nodes(self.nodes))
- return tuple(nodelist)
-
- def __repr__(self):
- return "AssTuple(%s)" % (repr(self.nodes),)
-
-class Assert(Node):
- def __init__(self, test, fail, lineno=None):
- self.test = test
- self.fail = fail
- self.lineno = lineno
-
- def getChildren(self):
- children = []
- children.append(self.test)
- children.append(self.fail)
- return tuple(children)
-
- def getChildNodes(self):
- nodelist = []
- nodelist.append(self.test)
- if self.fail is not None:
- nodelist.append(self.fail)
- return tuple(nodelist)
-
- def __repr__(self):
- return "Assert(%s, %s)" % (repr(self.test), repr(self.fail))
-
-class Assign(Node):
- def __init__(self, nodes, expr, lineno=None):
- self.nodes = nodes
- self.expr = expr
- self.lineno = lineno
-
- def getChildren(self):
- children = []
- children.extend(flatten(self.nodes))
- children.append(self.expr)
- return tuple(children)
-
- def getChildNodes(self):
- nodelist = []
- nodelist.extend(flatten_nodes(self.nodes))
- nodelist.append(self.expr)
- return tuple(nodelist)
-
- def __repr__(self):
- return "Assign(%s, %s)" % (repr(self.nodes), repr(self.expr))
-
-class AugAssign(Node):
- def __init__(self, node, op, expr, lineno=None):
- self.node = node
- self.op = op
- self.expr = expr
- self.lineno = lineno
-
- def getChildren(self):
- return self.node, self.op, self.expr
-
- def getChildNodes(self):
- return self.node, self.expr
-
- def __repr__(self):
- return "AugAssign(%s, %s, %s)" % (repr(self.node), repr(self.op), repr(self.expr))
-
-class Bitand(Node):
- def __init__(self, nodes, lineno=None):
- self.nodes = nodes
- self.lineno = lineno
-
- def getChildren(self):
- return tuple(flatten(self.nodes))
-
- def getChildNodes(self):
- nodelist = []
- nodelist.extend(flatten_nodes(self.nodes))
- return tuple(nodelist)
-
- def __repr__(self):
- return "Bitand(%s)" % (repr(self.nodes),)
-
-class Bitor(Node):
- def __init__(self, nodes, lineno=None):
- self.nodes = nodes
- self.lineno = lineno
-
- def getChildren(self):
- return tuple(flatten(self.nodes))
-
- def getChildNodes(self):
- nodelist = []
- nodelist.extend(flatten_nodes(self.nodes))
- return tuple(nodelist)
-
- def __repr__(self):
- return "Bitor(%s)" % (repr(self.nodes),)
-
-class Bitxor(Node):
- def __init__(self, nodes, lineno=None):
- self.nodes = nodes
- self.lineno = lineno
-
- def getChildren(self):
- return tuple(flatten(self.nodes))
-
- def getChildNodes(self):
- nodelist = []
- nodelist.extend(flatten_nodes(self.nodes))
- return tuple(nodelist)
-
- def __repr__(self):
- return "Bitxor(%s)" % (repr(self.nodes),)
-
-class Break(Node):
- def __init__(self, lineno=None):
- self.lineno = lineno
-
- def getChildren(self):
- return ()
-
- def getChildNodes(self):
- return ()
-
- def __repr__(self):
- return "Break()"
-
-class Bytes(Node):
- def __init__(self, value, lineno=None):
- self.value = value
- self.lineno = lineno
-
- def getChildren(self):
- return self.value,
-
- def getChildNodes(self):
- return ()
-
- def __repr__(self):
- return "Bytes(%s)" % (repr(self.value),)
-
-class CallFunc(Node):
- def __init__(self, node, args, star_args = None, dstar_args = None, lineno=None):
- self.node = node
- self.args = args
- self.star_args = star_args
- self.dstar_args = dstar_args
- self.lineno = lineno
-
- def getChildren(self):
- children = []
- children.append(self.node)
- children.extend(flatten(self.args))
- children.append(self.star_args)
- children.append(self.dstar_args)
- return tuple(children)
-
- def getChildNodes(self):
- nodelist = []
- nodelist.append(self.node)
- nodelist.extend(flatten_nodes(self.args))
- if self.star_args is not None:
- nodelist.append(self.star_args)
- if self.dstar_args is not None:
- nodelist.append(self.dstar_args)
- return tuple(nodelist)
-
- def __repr__(self):
- return "CallFunc(%s, %s, %s, %s)" % (repr(self.node), repr(self.args), repr(self.star_args), repr(self.dstar_args))
-
-class Class(Node):
- def __init__(self, name, args, star_args, dstar_args,
- doc, code, lineno=None):
- self.name = name
- self.args = args
- self.star_args = star_args
- self.dstar_args = dstar_args
- self.doc = doc
- self.code = code
- self.lineno = lineno
-
- def getChildren(self):
- children = []
- children.append(self.name)
- children.extend(flatten(self.args))
- children.extend(self.star_args)
- children.extend(self.dstar_args)
- children.append(self.doc)
- children.append(self.code)
- return tuple(children)
-
- def getChildNodes(self):
- nodelist = []
- nodelist.extend(flatten_nodes(self.args))
- if self.star_args is not None:
- nodelist.append(self.star_args)
- if self.dstar_args is not None:
- nodelist.append(self.dstar_args)
- nodelist.append(self.code)
- return tuple(nodelist)
-
- def __repr__(self):
- return "Class(%r, %r, %r, %r, %r, %r)" % (self.name,
- self.args,
- self.star_args,
- self.dstar_args,
- self.doc,
- self.code)
-
-class Compare(Node):
- def __init__(self, expr, ops, lineno=None):
- self.expr = expr
- self.ops = ops
- self.lineno = lineno
-
- def getChildren(self):
- children = []
- children.append(self.expr)
- children.extend(flatten(self.ops))
- return tuple(children)
-
- def getChildNodes(self):
- nodelist = []
- nodelist.append(self.expr)
- nodelist.extend(flatten_nodes(self.ops))
- return tuple(nodelist)
-
- def __repr__(self):
- return "Compare(%s, %s)" % (repr(self.expr), repr(self.ops))
-
-class Const(Node):
- def __init__(self, value, lineno=None):
- self.value = value
- self.lineno = lineno
-
- def getChildren(self):
- return self.value,
-
- def getChildNodes(self):
- return ()
-
- def __repr__(self):
- return "Const(%s)" % (repr(self.value),)
-
-class Continue(Node):
- def __init__(self, lineno=None):
- self.lineno = lineno
-
- def getChildren(self):
- return ()
-
- def getChildNodes(self):
- return ()
-
- def __repr__(self):
- return "Continue()"
-
-class Decorators(Node):
- def __init__(self, nodes, lineno=None):
- self.nodes = nodes
- self.lineno = lineno
-
- def getChildren(self):
- return tuple(flatten(self.nodes))
-
- def getChildNodes(self):
- nodelist = []
- nodelist.extend(flatten_nodes(self.nodes))
- return tuple(nodelist)
-
- def __repr__(self):
- return "Decorators(%s)" % (repr(self.nodes),)
-
-class Dict(Node):
- def __init__(self, items, lineno=None):
- self.items = items
- self.lineno = lineno
-
- def getChildren(self):
- return tuple(flatten(self.items))
-
- def getChildNodes(self):
- nodelist = []
- nodelist.extend(flatten_nodes(self.items))
- return tuple(nodelist)
-
- def __repr__(self):
- return "Dict(%s)" % (repr(self.items),)
-
-class Discard(Node):
- def __init__(self, expr, lineno=None):
- self.expr = expr
- self.lineno = lineno
-
- def getChildren(self):
- return self.expr,
-
- def getChildNodes(self):
- return self.expr,
-
- def __repr__(self):
- return "Discard(%s)" % (repr(self.expr),)
-
-class Div(Node):
- def __init__(self, (left, right), lineno=None):
- self.left = left
- self.right = right
- self.lineno = lineno
-
- def getChildren(self):
- return self.left, self.right
-
- def getChildNodes(self):
- return self.left, self.right
-
- def __repr__(self):
- return "Div((%s, %s))" % (repr(self.left), repr(self.right))
-
-class FloorDiv(Node):
- def __init__(self, (left, right), lineno=None):
- self.left = left
- self.right = right
- self.lineno = lineno
-
- def getChildren(self):
- return self.left, self.right
-
- def getChildNodes(self):
- return self.left, self.right
-
- def __repr__(self):
- return "FloorDiv((%s, %s))" % (repr(self.left), repr(self.right))
-
-class For(Node):
- def __init__(self, assign, list, body, else_, lineno=None):
- self.assign = assign
- self.list = list
- self.body = body
- self.else_ = else_
- self.lineno = lineno
-
- def getChildren(self):
- children = []
- children.append(self.assign)
- children.append(self.list)
- children.append(self.body)
- children.append(self.else_)
- return tuple(children)
-
- def getChildNodes(self):
- nodelist = []
- nodelist.append(self.assign)
- nodelist.append(self.list)
- nodelist.append(self.body)
- if self.else_ is not None:
- nodelist.append(self.else_)
- return tuple(nodelist)
-
- def __repr__(self):
- return "For(%s, %s, %s, %s)" % (repr(self.assign), repr(self.list), repr(self.body), repr(self.else_))
-
-class From(Node):
- def __init__(self, modname, names, level, lineno=None):
- self.modname = modname
- self.names = names
- self.level = level
- self.lineno = lineno
-
- def getChildren(self):
- return self.modname, self.names, self.level
-
- def getChildNodes(self):
- return ()
-
- def __repr__(self):
- return "From(%s, %s, %s)" % (repr(self.modname), repr(self.names), repr(self.level))
-
-class Function(Node):
- def __init__(self, decorators, name, arguments, defaults, kwonlyargs, returns, flags, doc, code, lineno=None):
- self.decorators = decorators
- self.name = name
- self.arguments = arguments
- self.defaults = defaults
- self.kwonlyargs = kwonlyargs
- self.returns = returns
- self.flags = flags
- self.doc = doc
- self.code = code
- self.lineno = lineno
- self.varargs = self.kwargs = None
- if flags & CO_VARARGS:
- self.varargs = 1
- if flags & CO_VARKEYWORDS:
- self.kwargs = 1
-
-
- def getChildren(self):
- children = []
- children.append(self.decorators)
- children.append(self.name)
- children.extend(flatten(self.arguments))
- children.extend(flatten(self.defaults))
- children.extend(flatten(self.kwonlyargs))
- children.append(self.returns)
- children.append(self.flags)
- children.append(self.doc)
- children.append(self.code)
- return tuple(children)
-
- def getChildNodes(self):
- nodelist = []
- if self.decorators is not None:
- nodelist.append(self.decorators)
- nodelist.extend(flatten_nodes(self.arguments))
- nodelist.extend(flatten_nodes(self.defaults))
- nodelist.extend(flatten_nodes(self.kwonlyargs))
- if self.returns is not None:
- nodelist.append(self.returns)
- nodelist.append(self.code)
- return tuple(nodelist)
-
- def __repr__(self):
- return "Function(%s, %s, %s, %s, %s, %s, %s, %s, %s)" % (repr(self.decorators), repr(self.name), repr(self.arguments), repr(self.defaults), repr(self.kwonlyargs), repr(self.returns), repr(self.flags), repr(self.doc), repr(self.code))
-
-class GenExpr(Node):
- def __init__(self, code, lineno=None):
- self.code = code
- self.lineno = lineno
- self.arguments = [SimpleArg('.0', None)]
- self.varargs = self.kwargs = None
- self.kwonlyargs = ()
-
-
- def getChildren(self):
- return self.code,
-
- def getChildNodes(self):
- return self.code,
-
- def __repr__(self):
- return "GenExpr(%s)" % (repr(self.code),)
-
-class GenExprFor(Node):
- def __init__(self, assign, iter, ifs, lineno=None):
- self.assign = assign
- self.iter = iter
- self.ifs = ifs
- self.lineno = lineno
- self.is_outmost = False
-
- def getChildren(self):
- children = []
- children.append(self.assign)
- children.append(self.iter)
- children.extend(flatten(self.ifs))
- return tuple(children)
-
- def getChildNodes(self):
- nodelist = []
- nodelist.append(self.assign)
- nodelist.append(self.iter)
- nodelist.extend(flatten_nodes(self.ifs))
- return tuple(nodelist)
-
- def __repr__(self):
- return "GenExprFor(%s, %s, %s)" % (repr(self.assign), repr(self.iter), repr(self.ifs))
-
-class GenExprIf(Node):
- def __init__(self, test, lineno=None):
- self.test = test
- self.lineno = lineno
-
- def getChildren(self):
- return self.test,
-
- def getChildNodes(self):
- return self.test,
-
- def __repr__(self):
- return "GenExprIf(%s)" % (repr(self.test),)
-
-class GenExprInner(Node):
- def __init__(self, expr, quals, lineno=None):
- self.expr = expr
- self.quals = quals
- self.lineno = lineno
-
- def getChildren(self):
- children = []
- children.append(self.expr)
- children.extend(flatten(self.quals))
- return tuple(children)
-
- def getChildNodes(self):
- nodelist = []
- nodelist.append(self.expr)
- nodelist.extend(flatten_nodes(self.quals))
- return tuple(nodelist)
-
- def __repr__(self):
- return "GenExprInner(%s, %s)" % (repr(self.expr), repr(self.quals))
-
-class Getattr(Node):
- def __init__(self, expr, attrname, lineno=None):
- self.expr = expr
- self.attrname = attrname
- self.lineno = lineno
-
- def getChildren(self):
- return self.expr, self.attrname
-
- def getChildNodes(self):
- return self.expr,
-
- def __repr__(self):
- return "Getattr(%s, %s)" % (repr(self.expr), repr(self.attrname))
-
-class Global(Node):
- def __init__(self, names, lineno=None):
- self.names = names
- self.lineno = lineno
-
- def getChildren(self):
- return self.names,
-
- def getChildNodes(self):
- return ()
-
- def __repr__(self):
- return "Global(%s)" % (repr(self.names),)
-
-class If(Node):
- def __init__(self, tests, else_, lineno=None):
- self.tests = tests
- self.else_ = else_
- self.lineno = lineno
-
- def getChildren(self):
- children = []
- children.extend(flatten(self.tests))
- children.append(self.else_)
- return tuple(children)
-
- def getChildNodes(self):
- nodelist = []
- nodelist.extend(flatten_nodes(self.tests))
- if self.else_ is not None:
- nodelist.append(self.else_)
- return tuple(nodelist)
-
- def __repr__(self):
- return "If(%s, %s)" % (repr(self.tests), repr(self.else_))
-
-class IfExp(Node):
- def __init__(self, test, then, else_, lineno=None):
- self.test = test
- self.then = then
- self.else_ = else_
- self.lineno = lineno
-
- def getChildren(self):
- return self.test, self.then, self.else_
-
- def getChildNodes(self):
- return self.test, self.then, self.else_
-
- def __repr__(self):
- return "IfExp(%s, %s, %s)" % (repr(self.test), repr(self.then), repr(self.else_))
-
-class Import(Node):
- def __init__(self, names, lineno=None):
- self.names = names
- self.lineno = lineno
-
- def getChildren(self):
- return self.names,
-
- def getChildNodes(self):
- return ()
-
- def __repr__(self):
- return "Import(%s)" % (repr(self.names),)
-
-class Invert(Node):
- def __init__(self, expr, lineno=None):
- self.expr = expr
- self.lineno = lineno
-
- def getChildren(self):
- return self.expr,
-
- def getChildNodes(self):
- return self.expr,
-
- def __repr__(self):
- return "Invert(%s)" % (repr(self.expr),)
-
-class Keyword(Node):
- def __init__(self, name, expr, lineno=None):
- self.name = name
- self.expr = expr
- self.lineno = lineno
-
- def getChildren(self):
- return self.name, self.expr
-
- def getChildNodes(self):
- return self.expr,
-
- def __repr__(self):
- return "Keyword(%s, %s)" % (repr(self.name), repr(self.expr))
-
-class Kwarg(Node):
- def __init__(self, arg, expr, lineno=None):
- self.arg = arg
- self.expr = expr
- self.lineno = lineno
-
- def getChildren(self):
- return self.arg, self.expr
-
- def getChildNodes(self):
- return self.arg, self.expr
-
- def __repr__(self):
- return "Kwarg(%s, %s)" % (repr(self.arg), repr(self.expr))
-
-class Lambda(Node):
- def __init__(self, arguments, defaults, kwonlyargs, flags, code, lineno=None):
- self.arguments = arguments
- self.defaults = defaults
- self.kwonlyargs = kwonlyargs
- self.flags = flags
- self.code = code
- self.lineno = lineno
- self.varargs = self.kwargs = None
- if flags & CO_VARARGS:
- self.varargs = 1
- if flags & CO_VARKEYWORDS:
- self.kwargs = 1
- self.returns = None
-
-
- def getChildren(self):
- children = []
- children.extend(flatten(self.arguments))
- children.extend(flatten(self.defaults))
- children.extend(flatten(self.kwonlyargs))
- children.append(self.flags)
- children.append(self.code)
- return tuple(children)
-
- def getChildNodes(self):
- nodelist = []
- nodelist.extend(flatten_nodes(self.arguments))
- nodelist.extend(flatten_nodes(self.defaults))
- nodelist.extend(flatten_nodes(self.kwonlyargs))
- nodelist.append(self.code)
- return tuple(nodelist)
-
- def __repr__(self):
- return "Lambda(%s, %s, %s, %s, %s)" % (repr(self.arguments), repr(self.defaults), repr(self.kwonlyargs), repr(self.flags), repr(self.code))
-
-class LeftShift(Node):
- def __init__(self, (left, right), lineno=None):
- self.left = left
- self.right = right
- self.lineno = lineno
-
- def getChildren(self):
- return self.left, self.right
-
- def getChildNodes(self):
- return self.left, self.right
-
- def __repr__(self):
- return "LeftShift((%s, %s))" % (repr(self.left), repr(self.right))
-
-class List(Node):
- def __init__(self, nodes, lineno=None):
- self.nodes = nodes
- self.lineno = lineno
-
- def getChildren(self):
- return tuple(flatten(self.nodes))
-
- def getChildNodes(self):
- nodelist = []
- nodelist.extend(flatten_nodes(self.nodes))
- return tuple(nodelist)
-
- def __repr__(self):
- return "List(%s)" % (repr(self.nodes),)
-
-class ListComp(Node):
- def __init__(self, expr, quals, lineno=None):
- self.expr = expr
- self.quals = quals
- self.lineno = lineno
-
- def getChildren(self):
- children = []
- children.append(self.expr)
- children.extend(flatten(self.quals))
- return tuple(children)
-
- def getChildNodes(self):
- nodelist = []
- nodelist.append(self.expr)
- nodelist.extend(flatten_nodes(self.quals))
- return tuple(nodelist)
-
- def __repr__(self):
- return "ListComp(%s, %s)" % (repr(self.expr), repr(self.quals))
-
-class ListCompFor(Node):
- def __init__(self, assign, list, ifs, lineno=None):
- self.assign = assign
- self.list = list
- self.ifs = ifs
- self.lineno = lineno
-
- def getChildren(self):
- children = []
- children.append(self.assign)
- children.append(self.list)
- children.extend(flatten(self.ifs))
- return tuple(children)
-
- def getChildNodes(self):
- nodelist = []
- nodelist.append(self.assign)
- nodelist.append(self.list)
- nodelist.extend(flatten_nodes(self.ifs))
- return tuple(nodelist)
-
- def __repr__(self):
- return "ListCompFor(%s, %s, %s)" % (repr(self.assign), repr(self.list), repr(self.ifs))
-
-class ListCompIf(Node):
- def __init__(self, test, lineno=None):
- self.test = test
- self.lineno = lineno
-
- def getChildren(self):
- return self.test,
-
- def getChildNodes(self):
- return self.test,
-
- def __repr__(self):
- return "ListCompIf(%s)" % (repr(self.test),)
-
-class Mod(Node):
- def __init__(self, (left, right), lineno=None):
- self.left = left
- self.right = right
- self.lineno = lineno
-
- def getChildren(self):
- return self.left, self.right
-
- def getChildNodes(self):
- return self.left, self.right
-
- def __repr__(self):
- return "Mod((%s, %s))" % (repr(self.left), repr(self.right))
-
-class Module(Node):
- def __init__(self, doc, node, lineno=None):
- self.doc = doc
- self.node = node
- self.lineno = lineno
-
- def getChildren(self):
- return self.doc, self.node
-
- def getChildNodes(self):
- return self.node,
-
- def __repr__(self):
- return "Module(%s, %s)" % (repr(self.doc), repr(self.node))
-
-class Mul(Node):
- def __init__(self, (left, right), lineno=None):
- self.left = left
- self.right = right
- self.lineno = lineno
-
- def getChildren(self):
- return self.left, self.right
-
- def getChildNodes(self):
- return self.left, self.right
-
- def __repr__(self):
- return "Mul((%s, %s))" % (repr(self.left), repr(self.right))
-
-class Name(Node):
- def __init__(self, name, lineno=None):
- self.name = name
- self.lineno = lineno
-
- def getChildren(self):
- return self.name,
-
- def getChildNodes(self):
- return ()
-
- def __repr__(self):
- return "Name(%s)" % (repr(self.name),)
-
-class NestedArgs(Node):
- def __init__(self, args, lineno=None):
- self.args = args
- self.lineno = lineno
-
- def getChildren(self):
- return tuple(flatten(self.args))
-
- def getChildNodes(self):
- nodelist = []
- nodelist.extend(flatten_nodes(self.args))
- return tuple(nodelist)
-
- def __repr__(self):
- return "NestedArgs(%s)" % (repr(self.args),)
-
-class Not(Node):
- def __init__(self, expr, lineno=None):
- self.expr = expr
- self.lineno = lineno
-
- def getChildren(self):
- return self.expr,
-
- def getChildNodes(self):
- return self.expr,
-
- def __repr__(self):
- return "Not(%s)" % (repr(self.expr),)
-
-class Or(Node):
- def __init__(self, nodes, lineno=None):
- self.nodes = nodes
- self.lineno = lineno
-
- def getChildren(self):
- return tuple(flatten(self.nodes))
-
- def getChildNodes(self):
- nodelist = []
- nodelist.extend(flatten_nodes(self.nodes))
- return tuple(nodelist)
-
- def __repr__(self):
- return "Or(%s)" % (repr(self.nodes),)
-
-class Pass(Node):
- def __init__(self, lineno=None):
- self.lineno = lineno
-
- def getChildren(self):
- return ()
-
- def getChildNodes(self):
- return ()
-
- def __repr__(self):
- return "Pass()"
-
-class Power(Node):
- def __init__(self, (left, right), lineno=None):
- self.left = left
- self.right = right
- self.lineno = lineno
-
- def getChildren(self):
- return self.left, self.right
-
- def getChildNodes(self):
- return self.left, self.right
-
- def __repr__(self):
- return "Power((%s, %s))" % (repr(self.left), repr(self.right))
-
-class Raise(Node):
- def __init__(self, expr1, expr2, expr3, lineno=None):
- self.expr1 = expr1
- self.expr2 = expr2
- self.expr3 = expr3
- self.lineno = lineno
-
- def getChildren(self):
- children = []
- children.append(self.expr1)
- children.append(self.expr2)
- children.append(self.expr3)
- return tuple(children)
-
- def getChildNodes(self):
- nodelist = []
- if self.expr1 is not None:
- nodelist.append(self.expr1)
- if self.expr2 is not None:
- nodelist.append(self.expr2)
- if self.expr3 is not None:
- nodelist.append(self.expr3)
- return tuple(nodelist)
-
- def __repr__(self):
- return "Raise(%s, %s, %s)" % (repr(self.expr1), repr(self.expr2), repr(self.expr3))
-
-class Return(Node):
- def __init__(self, value, lineno=None):
- self.value = value
- self.lineno = lineno
-
- def getChildren(self):
- return self.value,
-
- def getChildNodes(self):
- return self.value,
-
- def __repr__(self):
- return "Return(%s)" % (repr(self.value),)
-
-class RightShift(Node):
- def __init__(self, (left, right), lineno=None):
- self.left = left
- self.right = right
- self.lineno = lineno
-
- def getChildren(self):
- return self.left, self.right
-
- def getChildNodes(self):
- return self.left, self.right
-
- def __repr__(self):
- return "RightShift((%s, %s))" % (repr(self.left), repr(self.right))
-
-class Set(Node):
- def __init__(self, items, lineno=None):
- self.items = items
- self.lineno = lineno
-
- def getChildren(self):
- return tuple(flatten(self.items))
-
- def getChildNodes(self):
- nodelist = []
- nodelist.extend(flatten_nodes(self.items))
- return tuple(nodelist)
-
- def __repr__(self):
- return "Set(%s)" % (repr(self.items),)
-
-class SimpleArg(Node):
- def __init__(self, name, annotation, lineno=None):
- self.name = name
- self.annotation = annotation
- self.lineno = lineno
-
- def getChildren(self):
- children = []
- children.append(self.name)
- children.append(self.annotation)
- return tuple(children)
-
- def getChildNodes(self):
- nodelist = []
- if self.annotation is not None:
- nodelist.append(self.annotation)
- return tuple(nodelist)
-
- def __repr__(self):
- return "SimpleArg(%s, %s)" % (repr(self.name), repr(self.annotation))
-
-class Slice(Node):
- def __init__(self, expr, flags, lower, upper, lineno=None):
- self.expr = expr
- self.flags = flags
- self.lower = lower
- self.upper = upper
- self.lineno = lineno
-
- def getChildren(self):
- children = []
- children.append(self.expr)
- children.append(self.flags)
- children.append(self.lower)
- children.append(self.upper)
- return tuple(children)
-
- def getChildNodes(self):
- nodelist = []
- nodelist.append(self.expr)
- if self.lower is not None:
- nodelist.append(self.lower)
- if self.upper is not None:
- nodelist.append(self.upper)
- return tuple(nodelist)
-
- def __repr__(self):
- return "Slice(%s, %s, %s, %s)" % (repr(self.expr), repr(self.flags), repr(self.lower), repr(self.upper))
-
-class Sliceobj(Node):
- def __init__(self, nodes, lineno=None):
- self.nodes = nodes
- self.lineno = lineno
-
- def getChildren(self):
- return tuple(flatten(self.nodes))
-
- def getChildNodes(self):
- nodelist = []
- nodelist.extend(flatten_nodes(self.nodes))
- return tuple(nodelist)
-
- def __repr__(self):
- return "Sliceobj(%s)" % (repr(self.nodes),)
-
-class Stmt(Node):
- def __init__(self, nodes, lineno=None):
- self.nodes = nodes
- self.lineno = lineno
-
- def getChildren(self):
- return tuple(flatten(self.nodes))
-
- def getChildNodes(self):
- nodelist = []
- nodelist.extend(flatten_nodes(self.nodes))
- return tuple(nodelist)
-
- def __repr__(self):
- return "Stmt(%s)" % (repr(self.nodes),)
-
-class Sub(Node):
- def __init__(self, (left, right), lineno=None):
- self.left = left
- self.right = right
- self.lineno = lineno
-
- def getChildren(self):
- return self.left, self.right
-
- def getChildNodes(self):
- return self.left, self.right
-
- def __repr__(self):
- return "Sub((%s, %s))" % (repr(self.left), repr(self.right))
-
-class Subscript(Node):
- def __init__(self, expr, flags, subs, lineno=None):
- self.expr = expr
- self.flags = flags
- self.subs = subs
- self.lineno = lineno
-
- def getChildren(self):
- children = []
- children.append(self.expr)
- children.append(self.flags)
- children.extend(flatten(self.subs))
- return tuple(children)
-
- def getChildNodes(self):
- nodelist = []
- nodelist.append(self.expr)
- nodelist.extend(flatten_nodes(self.subs))
- return tuple(nodelist)
-
- def __repr__(self):
- return "Subscript(%s, %s, %s)" % (repr(self.expr), repr(self.flags), repr(self.subs))
-
-class TryExcept(Node):
- def __init__(self, body, handlers, else_, lineno=None):
- self.body = body
- self.handlers = handlers
- self.else_ = else_
- self.lineno = lineno
-
- def getChildren(self):
- children = []
- children.append(self.body)
- children.extend(flatten(self.handlers))
- children.append(self.else_)
- return tuple(children)
-
- def getChildNodes(self):
- nodelist = []
- nodelist.append(self.body)
- nodelist.extend(flatten_nodes(self.handlers))
- if self.else_ is not None:
- nodelist.append(self.else_)
- return tuple(nodelist)
-
- def __repr__(self):
- return "TryExcept(%s, %s, %s)" % (repr(self.body), repr(self.handlers), repr(self.else_))
-
-class TryFinally(Node):
- def __init__(self, body, final, lineno=None):
- self.body = body
- self.final = final
- self.lineno = lineno
-
- def getChildren(self):
- return self.body, self.final
-
- def getChildNodes(self):
- return self.body, self.final
-
- def __repr__(self):
- return "TryFinally(%s, %s)" % (repr(self.body), repr(self.final))
-
-class Tuple(Node):
- def __init__(self, nodes, lineno=None):
- self.nodes = nodes
- self.lineno = lineno
-
- def getChildren(self):
- return tuple(flatten(self.nodes))
-
- def getChildNodes(self):
- nodelist = []
- nodelist.extend(flatten_nodes(self.nodes))
- return tuple(nodelist)
-
- def __repr__(self):
- return "Tuple(%s)" % (repr(self.nodes),)
-
-class UnaryAdd(Node):
- def __init__(self, expr, lineno=None):
- self.expr = expr
- self.lineno = lineno
-
- def getChildren(self):
- return self.expr,
-
- def getChildNodes(self):
- return self.expr,
-
- def __repr__(self):
- return "UnaryAdd(%s)" % (repr(self.expr),)
-
-class UnarySub(Node):
- def __init__(self, expr, lineno=None):
- self.expr = expr
- self.lineno = lineno
-
- def getChildren(self):
- return self.expr,
-
- def getChildNodes(self):
- return self.expr,
-
- def __repr__(self):
- return "UnarySub(%s)" % (repr(self.expr),)
-
-class While(Node):
- def __init__(self, test, body, else_, lineno=None):
- self.test = test
- self.body = body
- self.else_ = else_
- self.lineno = lineno
-
- def getChildren(self):
- children = []
- children.append(self.test)
- children.append(self.body)
- children.append(self.else_)
- return tuple(children)
-
- def getChildNodes(self):
- nodelist = []
- nodelist.append(self.test)
- nodelist.append(self.body)
- if self.else_ is not None:
- nodelist.append(self.else_)
- return tuple(nodelist)
-
- def __repr__(self):
- return "While(%s, %s, %s)" % (repr(self.test), repr(self.body), repr(self.else_))
-
-class With(Node):
- def __init__(self, expr, vars, body, lineno=None):
- self.expr = expr
- self.vars = vars
- self.body = body
- self.lineno = lineno
-
- def getChildren(self):
- children = []
- children.append(self.expr)
- children.append(self.vars)
- children.append(self.body)
- return tuple(children)
-
- def getChildNodes(self):
- nodelist = []
- nodelist.append(self.expr)
- if self.vars is not None:
- nodelist.append(self.vars)
- nodelist.append(self.body)
- return tuple(nodelist)
-
- def __repr__(self):
- return "With(%s, %s, %s)" % (repr(self.expr), repr(self.vars), repr(self.body))
-
-class Yield(Node):
- def __init__(self, value, lineno=None):
- self.value = value
- self.lineno = lineno
-
- def getChildren(self):
- return self.value,
-
- def getChildNodes(self):
- return self.value,
-
- def __repr__(self):
- return "Yield(%s)" % (repr(self.value),)
-
-for name, obj in list(globals().items()):
- if isinstance(obj, type) and issubclass(obj, Node):
- nodes[name.lower()] = obj
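Every class above follows one protocol: getChildren() returns all constructor fields, while getChildNodes() returns only the child Node instances, which is what makes a generic traversal possible. A minimal pre-order walk over that protocol (a sketch of the idea compiler.visitor.walk built on) could look like:

    def preorder(node, visit):
        # visit() sees each Node; getChildNodes() already filters out
        # plain values such as names, flags and line numbers.
        visit(node)
        for child in node.getChildNodes():
            preorder(child, visit)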
diff --git a/Lib/compiler/consts.py b/Lib/compiler/consts.py
deleted file mode 100644
index c79e814..0000000
--- a/Lib/compiler/consts.py
+++ /dev/null
@@ -1,21 +0,0 @@
-# operation flags
-OP_ASSIGN = 'OP_ASSIGN'
-OP_DELETE = 'OP_DELETE'
-OP_APPLY = 'OP_APPLY'
-
-SC_LOCAL = 1
-SC_GLOBAL = 2
-SC_FREE = 3
-SC_CELL = 4
-SC_UNKNOWN = 5
-
-CO_OPTIMIZED = 0x0001
-CO_NEWLOCALS = 0x0002
-CO_VARARGS = 0x0004
-CO_VARKEYWORDS = 0x0008
-CO_NESTED = 0x0010
-CO_GENERATOR = 0x0020
-CO_GENERATOR_ALLOWED = 0
-CO_FUTURE_DIVISION = 0x2000
-CO_FUTURE_ABSIMPORT = 0x4000
-CO_FUTURE_WITH_STATEMENT = 0x8000
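These constants mirror the co_flags bits on real code objects; Function and Lambda above use CO_VARARGS and CO_VARKEYWORDS to decide whether a definition takes *args or **kwargs. A small check against the interpreter itself, assuming (as was the case here) that the numeric values match the C-level flags:

    CO_VARARGS, CO_VARKEYWORDS = 0x0004, 0x0008

    def f(*args, **kwargs):
        pass

    assert f.__code__.co_flags & CO_VARARGS        # defined with *args
    assert f.__code__.co_flags & CO_VARKEYWORDS    # defined with **kwargs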
diff --git a/Lib/compiler/future.py b/Lib/compiler/future.py
deleted file mode 100644
index 6e72490..0000000
--- a/Lib/compiler/future.py
+++ /dev/null
@@ -1,73 +0,0 @@
-"""Parser for future statements
-
-"""
-
-from compiler import ast, walk
-
-def is_future(stmt):
- """Return true if statement is a well-formed future statement"""
- if not isinstance(stmt, ast.From):
- return 0
- if stmt.modname == "__future__":
- return 1
- else:
- return 0
-
-class FutureParser:
-
- features = ("nested_scopes", "generators", "division",
- "absolute_import", "with_statement")
-
- def __init__(self):
- self.found = {} # set
-
- def visitModule(self, node):
- stmt = node.node
- for s in stmt.nodes:
- if not self.check_stmt(s):
- break
-
- def check_stmt(self, stmt):
- if is_future(stmt):
- for name, asname in stmt.names:
- if name in self.features:
- self.found[name] = 1
- else:
- raise SyntaxError, \
- "future feature %s is not defined" % name
- stmt.valid_future = 1
- return 1
- return 0
-
- def get_features(self):
- """Return list of features enabled by future statements"""
- return self.found.keys()
-
-class BadFutureParser:
- """Check for invalid future statements"""
-
- def visitFrom(self, node):
- if hasattr(node, 'valid_future'):
- return
- if node.modname != "__future__":
- return
- raise SyntaxError, "invalid future statement " + repr(node)
-
-def find_futures(node):
- p1 = FutureParser()
- p2 = BadFutureParser()
- walk(node, p1)
- walk(node, p2)
- return p1.get_features()
-
-if __name__ == "__main__":
- import sys
- from compiler import parseFile, walk
-
- for file in sys.argv[1:]:
- print(file)
- tree = parseFile(file)
- v = FutureParser()
- walk(tree, v)
- print(v.found)
- print()
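find_futures() above collects the feature names given in leading "from __future__ import ..." statements and lets BadFutureParser reject any that FutureParser did not accept. A hedged usage sketch, assuming the package is still importable and using a hypothetical file name:

    from compiler import parseFile
    from compiler.future import find_futures

    tree = parseFile("mymodule.py")         # hypothetical path
    print(find_futures(tree))               # e.g. ['division', 'with_statement']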
diff --git a/Lib/compiler/misc.py b/Lib/compiler/misc.py
deleted file mode 100644
index b32d0dc..0000000
--- a/Lib/compiler/misc.py
+++ /dev/null
@@ -1,73 +0,0 @@
-
-def flatten(tup):
- elts = []
- for elt in tup:
- if isinstance(elt, tuple):
- elts = elts + flatten(elt)
- else:
- elts.append(elt)
- return elts
-
-class Set:
- def __init__(self):
- self.elts = {}
- def __len__(self):
- return len(self.elts)
- def __contains__(self, elt):
- return elt in self.elts
- def add(self, elt):
- self.elts[elt] = elt
- def elements(self):
- return list(self.elts.keys())
- def has_elt(self, elt):
- return elt in self.elts
- def remove(self, elt):
- del self.elts[elt]
- def copy(self):
- c = Set()
- c.elts.update(self.elts)
- return c
-
-class Stack:
- def __init__(self):
- self.stack = []
- self.pop = self.stack.pop
- def __len__(self):
- return len(self.stack)
- def push(self, elt):
- self.stack.append(elt)
- def top(self):
- return self.stack[-1]
- def __getitem__(self, index): # needed by visitContinue()
- return self.stack[index]
-
-MANGLE_LEN = 256 # magic constant from compile.c
-
-def mangle(name, klass):
- if not name.startswith('__'):
- return name
- if len(name) + 2 >= MANGLE_LEN:
- return name
- if name.endswith('__'):
- return name
- try:
- i = 0
- while klass[i] == '_':
- i = i + 1
- except IndexError:
- return name
- klass = klass[i:]
-
- tlen = len(klass) + len(name)
- if tlen > MANGLE_LEN:
- klass = klass[:MANGLE_LEN-tlen]
-
- return "_%s%s" % (klass, name)
-
-def set_filename(filename, tree):
- """Set the filename attribute to filename on every node in tree"""
- worklist = [tree]
- while worklist:
- node = worklist.pop(0)
- node.filename = filename
- worklist.extend(node.getChildNodes())
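mangle() reproduces the interpreter's private-name rule: a __name attribute used inside class Klass becomes _Klass__name, leading underscores are first stripped from the class name, and dunder names are left alone. For example:

    mangle('__secret', 'MyClass')    # -> '_MyClass__secret'
    mangle('__secret', '_Private')   # -> '_Private__secret'
    mangle('__init__', 'MyClass')    # -> '__init__'  (names ending in '__' are not mangled)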
diff --git a/Lib/compiler/pyassem.py b/Lib/compiler/pyassem.py
deleted file mode 100644
index 2dcc8db..0000000
--- a/Lib/compiler/pyassem.py
+++ /dev/null
@@ -1,847 +0,0 @@
-"""A flow graph representation for Python bytecode"""
-
-import dis
-import new
-import sys
-
-from compiler import misc
-from compiler.consts \
- import CO_OPTIMIZED, CO_NEWLOCALS, CO_VARARGS, CO_VARKEYWORDS
-
-class FlowGraph:
- def __init__(self):
- self.current = self.entry = Block()
- self.exit = Block("exit")
- self.blocks = misc.Set()
- self.blocks.add(self.entry)
- self.blocks.add(self.exit)
-
- def startBlock(self, block):
- if self._debug:
- if self.current:
- print("end", repr(self.current))
- print(" next", self.current.next)
- print(" ", self.current.get_children())
- print(repr(block))
- self.current = block
-
- def nextBlock(self, block=None):
- # XXX think we need to specify when there is implicit transfer
- # from one block to the next. might be better to represent this
- # with explicit JUMP_ABSOLUTE instructions that are optimized
- # out when they are unnecessary.
- #
- # I think this strategy works: each block has a child
- # designated as "next" which is returned as the last of the
- # children. because the nodes in a graph are emitted in
- # reverse post order, the "next" block will always be emitted
- # immediately after its parent.
- # Worry: maintaining this invariant could be tricky
- if block is None:
- block = self.newBlock()
-
- # Note: If the current block ends with an unconditional
- # control transfer, then it is incorrect to add an implicit
- # transfer to the block graph. The current code requires
- # these edges to get the blocks emitted in the right order,
- # however. :-( If a client needs to remove these edges, call
- # pruneEdges().
-
- self.current.addNext(block)
- self.startBlock(block)
-
- def newBlock(self):
- b = Block()
- self.blocks.add(b)
- return b
-
- def startExitBlock(self):
- self.startBlock(self.exit)
-
- _debug = 0
-
- def _enable_debug(self):
- self._debug = 1
-
- def _disable_debug(self):
- self._debug = 0
-
- def emit(self, *inst):
- if self._debug:
- print("\t", inst)
- if inst[0] in ['RETURN_VALUE', 'YIELD_VALUE']:
- self.current.addOutEdge(self.exit)
- if len(inst) == 2 and isinstance(inst[1], Block):
- self.current.addOutEdge(inst[1])
- self.current.emit(inst)
-
- def getBlocksInOrder(self):
- """Return the blocks in reverse postorder
-
- i.e. each node appears before all of its successors
- """
- # XXX make sure every node that doesn't have an explicit next
- # is set so that next points to exit
- for b in self.blocks.elements():
- if b is self.exit:
- continue
- if not b.next:
- b.addNext(self.exit)
- order = dfs_postorder(self.entry, {})
- order.reverse()
- self.fixupOrder(order, self.exit)
- # hack alert
- if not self.exit in order:
- order.append(self.exit)
-
- return order
-
- def fixupOrder(self, blocks, default_next):
- """Fixup bad order introduced by DFS."""
-
- # XXX This is a total mess. There must be a better way to get
- # the code blocks in the right order.
-
- self.fixupOrderHonorNext(blocks, default_next)
- self.fixupOrderForward(blocks, default_next)
-
- def fixupOrderHonorNext(self, blocks, default_next):
- """Fix one problem with DFS.
-
- The DFS uses child block, but doesn't know about the special
- "next" block. As a result, the DFS can order blocks so that a
- block isn't next to the right block for implicit control
- transfers.
- """
- index = {}
- for i in range(len(blocks)):
- index[blocks[i]] = i
-
- for i in range(0, len(blocks) - 1):
- b = blocks[i]
- n = blocks[i + 1]
- if not b.next or b.next[0] == default_next or b.next[0] == n:
- continue
- # The blocks are in the wrong order. Find the chain of
- # blocks to insert where they belong.
- cur = b
- chain = []
- elt = cur
- while elt.next and elt.next[0] != default_next:
- chain.append(elt.next[0])
- elt = elt.next[0]
- # Now remove the blocks in the chain from the current
- # block list, so that they can be re-inserted.
- l = []
- for b in chain:
- assert index[b] > i
- l.append((index[b], b))
- l.sort()
- l.reverse()
- for j, b in l:
- del blocks[index[b]]
- # Insert the chain in the proper location
- blocks[i:i + 1] = [cur] + chain
- # Finally, re-compute the block indexes
- for i in range(len(blocks)):
- index[blocks[i]] = i
-
- def fixupOrderForward(self, blocks, default_next):
- """Make sure all JUMP_FORWARDs jump forward"""
- index = {}
- chains = []
- cur = []
- for b in blocks:
- index[b] = len(chains)
- cur.append(b)
- if b.next and b.next[0] == default_next:
- chains.append(cur)
- cur = []
- chains.append(cur)
-
- while 1:
- constraints = []
-
- for i in range(len(chains)):
- l = chains[i]
- for b in l:
- for c in b.get_children():
- if index[c] < i:
- forward_p = 0
- for inst in b.insts:
- if inst[0] == 'JUMP_FORWARD':
- if inst[1] == c:
- forward_p = 1
- if not forward_p:
- continue
- constraints.append((index[c], i))
-
- if not constraints:
- break
-
- # XXX just do one for now
- # do swaps to get things in the right order
- goes_before, a_chain = constraints[0]
- assert a_chain > goes_before
- c = chains[a_chain]
- chains.remove(c)
- chains.insert(goes_before, c)
-
- del blocks[:]
- for c in chains:
- for b in c:
- blocks.append(b)
-
- def getBlocks(self):
- return self.blocks.elements()
-
- def getRoot(self):
- """Return nodes appropriate for use with dominator"""
- return self.entry
-
- def getContainedGraphs(self):
- l = []
- for b in self.getBlocks():
- l.extend(b.getContainedGraphs())
- return l
-
-def dfs_postorder(b, seen):
- """Depth-first search of tree rooted at b, return in postorder"""
- order = []
- seen[b] = b
- for c in b.get_children():
- if c in seen:
- continue
- order = order + dfs_postorder(c, seen)
- order.append(b)
- return order
-
-class Block:
- _count = 0
-
- def __init__(self, label=''):
- self.insts = []
- self.inEdges = misc.Set()
- self.outEdges = misc.Set()
- self.label = label
- self.bid = Block._count
- self.next = []
- Block._count = Block._count + 1
-
- def __repr__(self):
- if self.label:
- return "<block %s id=%d>" % (self.label, self.bid)
- else:
- return "<block id=%d>" % (self.bid)
-
- def __str__(self):
- insts = map(str, self.insts)
- return "<block %s %d:\n%s>" % (self.label, self.bid,
- '\n'.join(insts))
-
- def emit(self, inst):
- op = inst[0]
- if op[:4] == 'JUMP':
- self.outEdges.add(inst[1])
- self.insts.append(inst)
-
- def getInstructions(self):
- return self.insts
-
- def addInEdge(self, block):
- self.inEdges.add(block)
-
- def addOutEdge(self, block):
- self.outEdges.add(block)
-
- def addNext(self, block):
- self.next.append(block)
- assert len(self.next) == 1, map(str, self.next)
-
- _uncond_transfer = ('RETURN_VALUE', 'RAISE_VARARGS', 'YIELD_VALUE',
- 'JUMP_ABSOLUTE', 'JUMP_FORWARD', 'CONTINUE_LOOP')
-
- def pruneNext(self):
- """Remove bogus edge for unconditional transfers
-
- Each block has a next edge that accounts for implicit control
- transfers, e.g. from a JUMP_IF_FALSE to the block that will be
- executed if the test is true.
-
- These edges must remain for the current assembler code to
- work. If they are removed, the dfs_postorder gets things in
- weird orders. However, they shouldn't be there for other
- purposes, e.g. conversion to SSA form. This method will
- remove the next edge when it follows an unconditional control
- transfer.
- """
- try:
- op, arg = self.insts[-1]
- except (IndexError, ValueError):
- return
- if op in self._uncond_transfer:
- self.next = []
-
- def get_children(self):
- if self.next and self.next[0] in self.outEdges:
- self.outEdges.remove(self.next[0])
- return self.outEdges.elements() + self.next
-
- def getContainedGraphs(self):
- """Return all graphs contained within this block.
-
- For example, a MAKE_FUNCTION block will contain a reference to
- the graph for the function body.
- """
- contained = []
- for inst in self.insts:
- if len(inst) == 1:
- continue
- op = inst[1]
- if hasattr(op, 'graph'):
- contained.append(op.graph)
- return contained
-
-# flags for code objects
-
-# the FlowGraph is transformed in place; it exists in one of these states
-RAW = "RAW"
-FLAT = "FLAT"
-CONV = "CONV"
-DONE = "DONE"
-
-class PyFlowGraph(FlowGraph):
- super_init = FlowGraph.__init__
-
- def __init__(self, name, filename,
- args=(), kwonlyargs=(), optimized=0, klass=None):
- self.super_init()
- self.name = name
- self.filename = filename
- self.docstring = None
- self.args = args # XXX
- self.argcount = getArgCount(args)
- self.kwonlyargs = kwonlyargs
- self.klass = klass
- if optimized:
- self.flags = CO_OPTIMIZED | CO_NEWLOCALS
- else:
- self.flags = 0
- self.consts = []
- self.names = []
- # Free variables found by the symbol table scan, including
- # variables used only in nested scopes, are included here.
- self.freevars = []
- self.cellvars = []
- # The closure list is used to track the order of cell
- # variables and free variables in the resulting code object.
- # The offsets used by LOAD_CLOSURE/LOAD_DEREF refer to both
- # kinds of variables.
- self.closure = []
- # The varnames list needs to be computed after flags have been set
- self.varnames = []
- self.stage = RAW
-
- def computeVarnames(self):
- # self.args is positional, vararg, kwarg, kwonly, unpacked. This
- # order is due to the visit order in symbol module and could change.
-        # argcount is len(self.args) - len(unpacked). We want
- # self.varnames to be positional, kwonly, vararg, kwarg, unpacked
- # and argcount to be len(positional).
-
- # determine starting index of unpacked, kwonly, vararg
- u = self.argcount # starting index of unpacked
- k = u - len(self.kwonlyargs) # starting index of kwonly
- v = k - self.checkFlag(CO_VARARGS) - self.checkFlag(CO_VARKEYWORDS)
-
- vars = list(self.args)
- self.varnames = vars[:v] + vars[k:u] + vars[v:k] + vars[u:]
- self.argcount = v
-
- # replace TupleArgs with calculated var name
- for i in range(self.argcount):
- var = self.varnames[i]
- if isinstance(var, TupleArg):
- self.varnames[i] = var.getName()
-
- def setDocstring(self, doc):
- self.docstring = doc
-
- def setFlag(self, flag):
- self.flags = self.flags | flag
-
- def checkFlag(self, flag):
- return (self.flags & flag) == flag
-
- def setFreeVars(self, names):
- self.freevars = list(names)
-
- def setCellVars(self, names):
- self.cellvars = names
-
- def getCode(self):
- """Get a Python code object"""
- assert self.stage == RAW
- self.computeVarnames()
- self.computeStackDepth()
- self.flattenGraph()
- assert self.stage == FLAT
- self.convertArgs()
- assert self.stage == CONV
- self.makeByteCode()
- assert self.stage == DONE
- return self.newCodeObject()
-
- def dump(self, io=None):
- if io:
- save = sys.stdout
- sys.stdout = io
- pc = 0
- for t in self.insts:
- opname = t[0]
- if opname == "SET_LINENO":
- print()
- if len(t) == 1:
- print("\t", "%3d" % pc, opname)
- pc = pc + 1
- else:
- print("\t", "%3d" % pc, opname, t[1])
- pc = pc + 3
- if io:
- sys.stdout = save
-
- def computeStackDepth(self):
- """Compute the max stack depth.
-
- Approach is to compute the stack effect of each basic block.
- Then find the path through the code with the largest total
- effect.
- """
- depth = {}
- exit = None
- for b in self.getBlocks():
- depth[b] = findDepth(b.getInstructions())
-
- seen = {}
-
- def max_depth(b, d):
- if b in seen:
- return d
- seen[b] = 1
- d = d + depth[b]
- children = b.get_children()
- if children:
- return max([max_depth(c, d) for c in children])
- else:
- if not b.label == "exit":
- return max_depth(self.exit, d)
- else:
- return d
-
- self.stacksize = max_depth(self.entry, 0)
-
- def flattenGraph(self):
- """Arrange the blocks in order and resolve jumps"""
- assert self.stage == RAW
- self.insts = insts = []
- pc = 0
- begin = {}
- end = {}
- for b in self.getBlocksInOrder():
- begin[b] = pc
- for inst in b.getInstructions():
- insts.append(inst)
- if len(inst) == 1:
- pc = pc + 1
- elif inst[0] != "SET_LINENO":
- # arg takes 2 bytes
- pc = pc + 3
- end[b] = pc
- pc = 0
- for i in range(len(insts)):
- inst = insts[i]
- if len(inst) == 1:
- pc = pc + 1
- elif inst[0] != "SET_LINENO":
- pc = pc + 3
- opname = inst[0]
- if self.hasjrel.has_elt(opname):
- oparg = inst[1]
- offset = begin[oparg] - pc
- insts[i] = opname, offset
- elif self.hasjabs.has_elt(opname):
- insts[i] = opname, begin[inst[1]]
- self.stage = FLAT
-
- hasjrel = misc.Set()
- for i in dis.hasjrel:
- hasjrel.add(dis.opname[i])
- hasjabs = misc.Set()
- for i in dis.hasjabs:
- hasjabs.add(dis.opname[i])
-
- def convertArgs(self):
- """Convert arguments from symbolic to concrete form"""
- assert self.stage == FLAT
- self.consts.insert(0, self.docstring)
- self.sort_cellvars()
- for i in range(len(self.insts)):
- t = self.insts[i]
- if len(t) == 2:
- opname, oparg = t
- conv = self._converters.get(opname, None)
- if conv:
- self.insts[i] = opname, conv(self, oparg)
- self.stage = CONV
-
- def sort_cellvars(self):
- """Sort cellvars in the order of varnames and prune from freevars.
- """
- cells = {}
- for name in self.cellvars:
- cells[name] = 1
- self.cellvars = [name for name in self.varnames
- if name in cells]
- for name in self.cellvars:
- del cells[name]
- self.cellvars = self.cellvars + list(cells.keys())
- self.closure = self.cellvars + self.freevars
-
- def _lookupName(self, name, list):
- """Return index of name in list, appending if necessary
-
- This routine uses a list instead of a dictionary, because a
- dictionary can't store two different keys if the keys have the
- same value but different types, e.g. 2 and 2L. The compiler
- must treat these two separately, so it does an explicit type
- comparison before comparing the values.
- """
- t = type(name)
- for i in range(len(list)):
- if t == type(list[i]) and list[i] == name:
- return i
- end = len(list)
- list.append(name)
- return end
-
- _converters = {}
- def _convert_LOAD_CONST(self, arg):
- if hasattr(arg, 'getCode'):
- arg = arg.getCode()
- return self._lookupName(arg, self.consts)
-
- def _convert_LOAD_FAST(self, arg):
- self._lookupName(arg, self.names)
- return self._lookupName(arg, self.varnames)
- _convert_STORE_FAST = _convert_LOAD_FAST
- _convert_DELETE_FAST = _convert_LOAD_FAST
-
- def _convert_LOAD_NAME(self, arg):
- if self.klass is None:
- self._lookupName(arg, self.varnames)
- return self._lookupName(arg, self.names)
-
- def _convert_NAME(self, arg):
- if self.klass is None:
- self._lookupName(arg, self.varnames)
- return self._lookupName(arg, self.names)
- _convert_STORE_NAME = _convert_NAME
- _convert_DELETE_NAME = _convert_NAME
- _convert_IMPORT_NAME = _convert_NAME
- _convert_IMPORT_FROM = _convert_NAME
- _convert_STORE_ATTR = _convert_NAME
- _convert_LOAD_ATTR = _convert_NAME
- _convert_DELETE_ATTR = _convert_NAME
- _convert_LOAD_GLOBAL = _convert_NAME
- _convert_STORE_GLOBAL = _convert_NAME
- _convert_DELETE_GLOBAL = _convert_NAME
-
- def _convert_DEREF(self, arg):
- self._lookupName(arg, self.names)
- self._lookupName(arg, self.varnames)
- return self._lookupName(arg, self.closure)
- _convert_LOAD_DEREF = _convert_DEREF
- _convert_STORE_DEREF = _convert_DEREF
-
- def _convert_LOAD_CLOSURE(self, arg):
- self._lookupName(arg, self.varnames)
- return self._lookupName(arg, self.closure)
-
- _cmp = list(dis.cmp_op)
- def _convert_COMPARE_OP(self, arg):
- return self._cmp.index(arg)
-
- # similarly for other opcodes...
-
- for name, obj in list(locals().items()):
- if name[:9] == "_convert_":
- opname = name[9:]
- _converters[opname] = obj
- del name, obj, opname
-
- def makeByteCode(self):
- assert self.stage == CONV
- self.lnotab = lnotab = LineAddrTable()
- for t in self.insts:
- opname = t[0]
- if len(t) == 1:
- lnotab.addCode(self.opnum[opname])
- else:
- oparg = t[1]
- if opname == "SET_LINENO":
- lnotab.nextLine(oparg)
- continue
- hi, lo = twobyte(oparg)
-
- extended, hi = twobyte(hi)
- if extended:
- ehi, elo = twobyte(extended)
- lnotab.addCode(self.opnum['EXTENDED_ARG'], elo, ehi)
-
- try:
- lnotab.addCode(self.opnum[opname], lo, hi)
- except ValueError:
- print(opname, oparg)
- print(self.opnum[opname], lo, hi)
- raise
- self.stage = DONE
-
- opnum = {}
- for num in range(len(dis.opname)):
- opnum[dis.opname[num]] = num
- del num
-
- def newCodeObject(self):
- assert self.stage == DONE
- if (self.flags & CO_NEWLOCALS) == 0:
- nlocals = 0
- else:
- nlocals = len(self.varnames)
- argcount = self.argcount
- kwonlyargcount = len(self.kwonlyargs)
- return new.code(argcount, kwonlyargcount,
- nlocals, self.stacksize, self.flags,
- self.lnotab.getCode(), self.getConsts(),
- tuple(self.names), tuple(self.varnames),
- self.filename, self.name, self.lnotab.firstline,
- self.lnotab.getTable(), tuple(self.freevars),
- tuple(self.cellvars))
-
- def getConsts(self):
- """Return a tuple for the const slot of the code object
-
- Must convert references to code (MAKE_FUNCTION) to code
- objects recursively.
- """
- l = []
- for elt in self.consts:
- if isinstance(elt, PyFlowGraph):
- elt = elt.getCode()
- l.append(elt)
- return tuple(l)
-
-def isJump(opname):
- if opname[:4] == 'JUMP':
- return 1
-
-class TupleArg:
- """Helper for marking func defs with nested tuples in arglist"""
- def __init__(self, count, names):
- self.count = count
- self.names = names
- def __repr__(self):
- return "TupleArg(%s, %s)" % (self.count, self.names)
- def getName(self):
- return ".%d" % self.count
-
-def getArgCount(args):
- argcount = len(args)
- if args:
- for arg in args:
- if isinstance(arg, TupleArg):
- numNames = len(misc.flatten(arg.names))
- argcount = argcount - numNames
- return argcount
-
-def twobyte(val):
- """Convert an int argument into high and low bytes"""
- assert isinstance(val, int)
- return divmod(val, 256)
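# Illustrative sketch (not part of the removed file): how twobyte() splits an
# oparg, and how makeByteCode() above decides whether an EXTENDED_ARG prefix
# is needed for arguments that do not fit in two bytes.
def twobyte_demo(val):
    hi, lo = divmod(val, 256)          # same split as twobyte() above
    extended, hi = divmod(hi, 256)     # any remainder becomes EXTENDED_ARG
    return extended, hi, lo

assert twobyte_demo(300) == (0, 1, 44)           # two bytes are enough
assert twobyte_demo(0x12345) == (1, 0x23, 0x45)  # needs EXTENDED_ARG of 1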
-
-class LineAddrTable:
- """lnotab
-
- This class builds the lnotab, which is documented in compile.c.
- Here's a brief recap:
-
- For each SET_LINENO instruction after the first one, two bytes are
- added to lnotab. (In some cases, multiple two-byte entries are
- added.) The first byte is the distance in bytes between the
- instruction for the last SET_LINENO and the current SET_LINENO.
- The second byte is the offset in line numbers. If either offset is
- greater than 255, multiple two-byte entries are added -- see
- compile.c for the delicate details.
- """
-
- def __init__(self):
- self.code = []
- self.codeOffset = 0
- self.firstline = 0
- self.lastline = 0
- self.lastoff = 0
- self.lnotab = []
-
- def addCode(self, *args):
- for arg in args:
- self.code.append(chr(arg))
- self.codeOffset = self.codeOffset + len(args)
-
- def nextLine(self, lineno):
- if self.firstline == 0:
- self.firstline = lineno
- self.lastline = lineno
- else:
- # compute deltas
- addr = self.codeOffset - self.lastoff
- line = lineno - self.lastline
- # Python assumes that lineno always increases with
- # increasing bytecode address (lnotab is unsigned char).
- # Depending on when SET_LINENO instructions are emitted
- # this is not always true. Consider the code:
- # a = (1,
- # b)
- # In the bytecode stream, the assignment to "a" occurs
- # after the loading of "b". This works with the C Python
- # compiler because it only generates a SET_LINENO instruction
- # for the assignment.
- if line >= 0:
- push = self.lnotab.append
- while addr > 255:
- push(255); push(0)
- addr -= 255
- while line > 255:
- push(addr); push(255)
- line -= 255
- addr = 0
- if addr > 0 or line > 0:
- push(addr); push(line)
- self.lastline = lineno
- self.lastoff = self.codeOffset
-
- def getCode(self):
- return ''.join(self.code)
-
- def getTable(self):
- return ''.join(map(chr, self.lnotab))
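# Illustrative sketch (not part of the removed file): the (address delta,
# line delta) pairs that nextLine() appends, with 255-valued filler pairs
# whenever a delta does not fit in a single unsigned byte.
def lnotab_pairs(offsets_and_lines):
    pairs, last_off, last_line = [], 0, None
    for off, line in offsets_and_lines:
        if last_line is None:
            last_off, last_line = off, line
            continue
        addr, delta = off - last_off, line - last_line
        while addr > 255:
            pairs.append((255, 0)); addr -= 255
        while delta > 255:
            pairs.append((addr, 255)); delta, addr = delta - 255, 0
        if addr > 0 or delta > 0:
            pairs.append((addr, delta))
        last_off, last_line = off, line
    return pairs

assert lnotab_pairs([(0, 1), (6, 2), (50, 7)]) == [(6, 1), (44, 5)]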
-
-class StackDepthTracker:
- # XXX 1. need to keep track of stack depth on jumps
- # XXX 2. at least partly as a result, this code is broken
-
- def findDepth(self, insts, debug=0):
- depth = 0
- maxDepth = 0
- for i in insts:
- opname = i[0]
- if debug:
- print(i, end=' ')
- delta = self.effect.get(opname, None)
- if delta is not None:
- depth = depth + delta
- else:
- # now check patterns
- for pat, pat_delta in self.patterns:
- if opname[:len(pat)] == pat:
- delta = pat_delta
- depth = depth + delta
- break
- # if we still haven't found a match
- if delta is None:
- meth = getattr(self, opname, None)
- if meth is not None:
- depth = depth + meth(i[1])
- if depth > maxDepth:
- maxDepth = depth
- if debug:
- print(depth, maxDepth)
- return maxDepth
-
- effect = {
- 'POP_TOP': -1,
- 'DUP_TOP': 1,
- 'LIST_APPEND': -2,
- 'SLICE+1': -1,
- 'SLICE+2': -1,
- 'SLICE+3': -2,
- 'STORE_SLICE+0': -1,
- 'STORE_SLICE+1': -2,
- 'STORE_SLICE+2': -2,
- 'STORE_SLICE+3': -3,
- 'DELETE_SLICE+0': -1,
- 'DELETE_SLICE+1': -2,
- 'DELETE_SLICE+2': -2,
- 'DELETE_SLICE+3': -3,
- 'STORE_SUBSCR': -3,
- 'DELETE_SUBSCR': -2,
- 'PRINT_EXPR': -1,
- 'RETURN_VALUE': -1,
- 'YIELD_VALUE': -1,
- 'STORE_NAME': -1,
- 'STORE_ATTR': -2,
- 'DELETE_ATTR': -1,
- 'STORE_GLOBAL': -1,
- 'BUILD_MAP': 1,
- 'MAKE_BYTES': 0,
- 'COMPARE_OP': -1,
- 'STORE_FAST': -1,
- 'IMPORT_STAR': -1,
- 'IMPORT_NAME': -1,
- 'IMPORT_FROM': 1,
- 'LOAD_ATTR': 0, # unlike other loads
- # close enough...
- 'SETUP_EXCEPT': 3,
- 'SETUP_FINALLY': 3,
- 'FOR_ITER': 1,
- 'WITH_CLEANUP': -1,
- 'LOAD_BUILD_CLASS': 1,
- 'STORE_LOCALS': -1,
- }
- # use pattern match
- patterns = [
- ('BINARY_', -1),
- ('LOAD_', 1),
- ]
-
- def UNPACK_SEQUENCE(self, count):
- return count-1
- def BUILD_TUPLE(self, count):
- return -count+1
- def BUILD_LIST(self, count):
- return -count+1
- def BUILD_SET(self, count):
- return -count+1
- def CALL_FUNCTION(self, argc):
- hi, lo = divmod(argc, 256)
- return -(lo + hi * 2)
- def CALL_FUNCTION_VAR(self, argc):
- return self.CALL_FUNCTION(argc)-1
- def CALL_FUNCTION_KW(self, argc):
- return self.CALL_FUNCTION(argc)-1
- def CALL_FUNCTION_VAR_KW(self, argc):
- return self.CALL_FUNCTION(argc)-2
- def MAKE_FUNCTION(self, argc):
- hi, lo = divmod(argc, 256)
- ehi, hi = divmod(hi, 256)
- return -(lo + hi * 2 + ehi)
- def MAKE_CLOSURE(self, argc):
- # XXX need to account for free variables too!
- return -argc
- def BUILD_SLICE(self, argc):
- if argc == 2:
- return -1
- elif argc == 3:
- return -2
- def DUP_TOPX(self, argc):
- return argc
-
-findDepth = StackDepthTracker().findDepth
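# Illustrative sketch (not part of the removed file): findDepth() walks an
# instruction list, applying the per-opcode effects and prefix patterns above
# to estimate the maximum stack depth (the XXX notes say jumps are ignored).
_insts = [('LOAD_CONST', 1), ('LOAD_CONST', 2), ('BINARY_ADD',),
          ('STORE_NAME', 'x')]
assert findDepth(_insts) == 2   # two constants sit on the stack before the add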
diff --git a/Lib/compiler/pycodegen.py b/Lib/compiler/pycodegen.py
deleted file mode 100644
index cc24650..0000000
--- a/Lib/compiler/pycodegen.py
+++ /dev/null
@@ -1,1580 +0,0 @@
-import imp
-import os
-import marshal
-import struct
-import sys
-from cStringIO import StringIO
-
-from compiler import ast, parse, walk, syntax
-from compiler import pyassem, misc, future, symbols
-from compiler.consts import SC_LOCAL, SC_GLOBAL, SC_FREE, SC_CELL
-from compiler.consts import (CO_VARARGS, CO_VARKEYWORDS, CO_NEWLOCALS,
- CO_NESTED, CO_GENERATOR, CO_FUTURE_DIVISION,
- CO_FUTURE_ABSIMPORT, CO_FUTURE_WITH_STATEMENT)
-from compiler.pyassem import TupleArg
-
-# XXX The version-specific code can go, since this code only works with 2.x.
-# Do we have Python 1.x or Python 2.x?
-try:
- VERSION = sys.version_info[0]
-except AttributeError:
- VERSION = 1
-
-callfunc_opcode_info = {
- # (Have *args, Have **args) : opcode
- (0,0) : "CALL_FUNCTION",
- (1,0) : "CALL_FUNCTION_VAR",
- (0,1) : "CALL_FUNCTION_KW",
- (1,1) : "CALL_FUNCTION_VAR_KW",
-}
-
-LOOP = 1
-EXCEPT = 2
-TRY_FINALLY = 3
-END_FINALLY = 4
-
-def compileFile(filename, display=0):
- f = open(filename, 'U')
- buf = f.read()
- f.close()
- mod = Module(buf, filename)
- try:
- mod.compile(display)
- except SyntaxError:
- raise
- else:
- f = open(filename + "c", "wb")
- mod.dump(f)
- f.close()
-
-def compile(source, filename, mode, flags=None, dont_inherit=None):
- """Replacement for builtin compile() function"""
- if flags is not None or dont_inherit is not None:
- raise RuntimeError, "not implemented yet"
-
- if mode == "single":
- gen = Interactive(source, filename)
- elif mode == "exec":
- gen = Module(source, filename)
- elif mode == "eval":
- gen = Expression(source, filename)
- else:
- raise ValueError("compile() 3rd arg must be 'exec' or "
- "'eval' or 'single'")
- gen.compile()
- return gen.code
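# Illustrative sketch (not part of the removed file, and assuming the compiler
# package is still installed): two of the three modes accepted by the
# replacement compile() above.
from compiler import pycodegen

ns = {}
exec(pycodegen.compile("result = 6 * 7", "<demo>", "exec"), ns)
assert ns["result"] == 42

assert eval(pycodegen.compile("x * 10", "<demo>", "eval"), {"x": 3}) == 30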
-
-class AbstractCompileMode:
-
- mode = None # defined by subclass
-
- def __init__(self, source, filename):
- self.source = source
- self.filename = filename
- self.code = None
-
- def _get_tree(self):
- tree = parse(self.source, self.mode)
- misc.set_filename(self.filename, tree)
- syntax.check(tree)
- return tree
-
- def compile(self):
- pass # implemented by subclass
-
- def getCode(self):
- return self.code
-
-class Expression(AbstractCompileMode):
-
- mode = "eval"
-
- def compile(self):
- tree = self._get_tree()
- gen = ExpressionCodeGenerator(tree)
- self.code = gen.getCode()
-
-class Interactive(AbstractCompileMode):
-
- mode = "single"
-
- def compile(self):
- tree = self._get_tree()
- gen = InteractiveCodeGenerator(tree)
- self.code = gen.getCode()
-
-class Module(AbstractCompileMode):
-
- mode = "exec"
-
- def compile(self, display=0):
- tree = self._get_tree()
- gen = ModuleCodeGenerator(tree)
- if display:
- import pprint
- print(pprint.pprint(tree))
- self.code = gen.getCode()
-
- def dump(self, f):
- f.write(self.getPycHeader())
- marshal.dump(self.code, f)
-
- MAGIC = imp.get_magic()
-
- def getPycHeader(self):
- # compile.c uses marshal to write a long directly, without
- # calling the interface that would also generate a 1-byte code
- # to indicate the type of the value. The simplest way to get the
- # same effect is to call marshal and then skip the code.
- mtime = os.path.getmtime(self.filename)
- mtime = struct.pack('<i', mtime)
- return self.MAGIC + mtime
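# Illustrative sketch (not part of the removed file): the 8-byte .pyc header
# produced by getPycHeader() is the import magic followed by the source mtime
# packed as a little-endian 32-bit int; dump() then marshals the code object.
import imp, marshal, struct

def read_pyc_header(path):
    with open(path, "rb") as f:
        magic = f.read(4)
        if magic != imp.get_magic():
            raise ValueError("compiled by a different Python version")
        (mtime,) = struct.unpack('<i', f.read(4))
        return mtime, marshal.load(f)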
-
-class LocalNameFinder:
- """Find local names in scope"""
- def __init__(self, names=()):
- self.names = misc.Set()
- self.globals = misc.Set()
- for name in names:
- self.names.add(name)
-
- # XXX list comprehensions and for loops
-
- def getLocals(self):
- for elt in self.globals.elements():
- if self.names.has_elt(elt):
- self.names.remove(elt)
- return self.names
-
- def visitDict(self, node):
- pass
-
- def visitGlobal(self, node):
- for name in node.names:
- self.globals.add(name)
-
- def visitFunction(self, node):
- self.names.add(node.name)
-
- def visitLambda(self, node):
- pass
-
- def visitImport(self, node):
- for name, alias in node.names:
- self.names.add(alias or name)
-
- def visitFrom(self, node):
- for name, alias in node.names:
- self.names.add(alias or name)
-
- def visitClass(self, node):
- self.names.add(node.name)
-
- def visitAssName(self, node):
- self.names.add(node.name)
-
-def is_constant_false(node):
- if isinstance(node, ast.Const):
- if not node.value:
- return 1
- return 0
-
-class CodeGenerator:
- """Defines basic code generator for Python bytecode
-
- This class is an abstract base class. Concrete subclasses must
- define an __init__() that defines self.graph and then calls the
- __init__() defined in this class.
-
- The concrete class must also define the class attributes
- NameFinder, FunctionGen, and ClassGen. These attributes can be
- defined in the initClass() method, which is a hook for
- initializing these attributes after all the classes have been
- defined.
- """
-
- optimized = 0 # is namespace access optimized?
- __initialized = None
- class_name = None # provide default for instance variable
-
- def __init__(self):
- if self.__initialized is None:
- self.initClass()
- self.__class__.__initialized = 1
- self.checkClass()
- self.locals = misc.Stack()
- self.setups = misc.Stack()
- self.last_lineno = None
- self._setupGraphDelegation()
-
- # XXX set flags based on future features
- futures = self.get_module().futures
- for feature in futures:
- if feature == "division":
- self.graph.setFlag(CO_FUTURE_DIVISION)
- elif feature == "absolute_import":
- self.graph.setFlag(CO_FUTURE_ABSIMPORT)
- elif feature == "with_statement":
- self.graph.setFlag(CO_FUTURE_WITH_STATEMENT)
-
- def initClass(self):
- """This method is called once for each class"""
-
- def checkClass(self):
- """Verify that class is constructed correctly"""
- try:
- assert hasattr(self, 'graph')
- assert getattr(self, 'NameFinder')
- assert getattr(self, 'FunctionGen')
- assert getattr(self, 'ClassGen')
- except AssertionError as msg:
- intro = "Bad class construction for %s" % self.__class__.__name__
- raise AssertionError, intro
-
- def _setupGraphDelegation(self):
- self.emit = self.graph.emit
- self.newBlock = self.graph.newBlock
- self.startBlock = self.graph.startBlock
- self.nextBlock = self.graph.nextBlock
- self.setDocstring = self.graph.setDocstring
-
- def getCode(self):
- """Return a code object"""
- return self.graph.getCode()
-
- def mangle(self, name):
- if self.class_name is not None:
- return misc.mangle(name, self.class_name)
- else:
- return name
-
- def parseSymbols(self, tree):
- s = symbols.SymbolVisitor()
- walk(tree, s)
- return s.scopes
-
- def get_module(self):
- raise RuntimeError, "should be implemented by subclasses"
-
- # Next five methods handle name access
-
- def isLocalName(self, name):
- return self.locals.top().has_elt(name)
-
- def storeName(self, name):
- self._nameOp('STORE', name)
-
- def loadName(self, name):
- self._nameOp('LOAD', name)
-
- def delName(self, name):
- self._nameOp('DELETE', name)
-
- def _nameOp(self, prefix, name):
- name = self.mangle(name)
- scope = self.scope.check_name(name)
- if scope == SC_LOCAL:
- if not self.optimized:
- self.emit(prefix + '_NAME', name)
- else:
- self.emit(prefix + '_FAST', name)
- elif scope == SC_GLOBAL:
- if not self.optimized:
- self.emit(prefix + '_NAME', name)
- else:
- self.emit(prefix + '_GLOBAL', name)
- elif scope == SC_FREE or scope == SC_CELL:
- self.emit(prefix + '_DEREF', name)
- else:
- raise RuntimeError, "unsupported scope for var %s: %d" % \
- (name, scope)
-
- def _implicitNameOp(self, prefix, name):
- """Emit name ops for names generated implicitly by for loops
-
- The interpreter generates names that start with a period or
- dollar sign. The symbol table ignores these names because
- they aren't present in the program text.
- """
- if self.optimized:
- self.emit(prefix + '_FAST', name)
- else:
- self.emit(prefix + '_NAME', name)
-
- # The set_lineno() function and the explicit emit() calls for
- # SET_LINENO below are only used to generate the line number table.
- # As of Python 2.3, the interpreter does not have a SET_LINENO
- # instruction. pyassem treats SET_LINENO opcodes as a special case.
-
- def set_lineno(self, node, force=False):
- """Emit SET_LINENO if necessary.
-
- The instruction is considered necessary if the node has a
- lineno attribute and it is different than the last lineno
- emitted.
-
- Returns true if SET_LINENO was emitted.
-
- There are no rules for when an AST node should have a lineno
- attribute. The transformer and AST code need to be reviewed
- and a consistent policy implemented and documented. Until
- then, this method works around missing line numbers.
- """
- lineno = getattr(node, 'lineno', None)
- if lineno is not None and (lineno != self.last_lineno
- or force):
- self.emit('SET_LINENO', lineno)
- self.last_lineno = lineno
- return True
- return False
-
- # The first few visitor methods handle nodes that generate new
- # code objects. They use class attributes to determine what
- # specialized code generators to use.
-
- NameFinder = LocalNameFinder
- FunctionGen = None
- ClassGen = None
-
- def visitModule(self, node):
- self.scopes = self.parseSymbols(node)
- self.scope = self.scopes[node]
- self.emit('SET_LINENO', 0)
- if node.doc:
- self.emit('LOAD_CONST', node.doc)
- self.storeName('__doc__')
- lnf = walk(node.node, self.NameFinder(), verbose=0)
- self.locals.push(lnf.getLocals())
- self.visit(node.node)
- self.emit('LOAD_CONST', None)
- self.emit('RETURN_VALUE')
-
- def visitExpression(self, node):
- self.set_lineno(node)
- self.scopes = self.parseSymbols(node)
- self.scope = self.scopes[node]
- self.visit(node.node)
- self.emit('RETURN_VALUE')
-
- def visitFunction(self, node):
- self._visitFuncOrLambda(node, isLambda=0)
- if node.doc:
- self.setDocstring(node.doc)
- self.storeName(node.name)
-
- def visitLambda(self, node):
- self._visitFuncOrLambda(node, isLambda=1)
-
- def _visitFuncOrLambda(self, node, isLambda=0):
- if not isLambda and node.decorators:
- for decorator in node.decorators.nodes:
- self.visit(decorator)
- ndecorators = len(node.decorators.nodes)
- else:
- ndecorators = 0
-
- gen = self.FunctionGen(node, self.scopes, isLambda,
- self.class_name, self.get_module())
- walk(node.code, gen)
- gen.finish()
- self.set_lineno(node)
- num_kwargs = 0
- for keyword in node.kwonlyargs:
- default = keyword.expr
- if isinstance(default, ast.EmptyNode):
- continue
- self.emit('LOAD_CONST', keyword.arg.name)
- self.visit(default)
- num_kwargs += 1
- for default in node.defaults:
- self.visit(default)
-
- num_annotations = self._visit_annotations(node)
-
- oparg = len(node.defaults)
- oparg |= num_kwargs << 8
- oparg |= num_annotations << 16
-
- self._makeClosure(gen, oparg)
- for i in range(ndecorators):
- self.emit('CALL_FUNCTION', 1)
-
- def _visit_annotations(self, node):
- # emit code, return num_annotations
- annotations = []
- annotations.extend(self._visit_argument_annotations(node.arguments))
- annotations.extend(self._visit_kwarg_annotations(node.kwonlyargs))
- if node.returns:
- self.visit(node.returns)
- annotations.append('return')
- if not annotations:
- return 0
- self.emit('LOAD_CONST', tuple(annotations))
- return len(annotations) + 1
-
- def _visit_argument_annotations(self, arguments):
- for arg in arguments:
- if isinstance(arg, ast.SimpleArg):
- if arg.annotation:
- self.visit(arg.annotation)
- yield arg.name
- else:
- for name in self._visit_argument_annotations(arg.args):
- yield name
-
- def _visit_kwarg_annotations(self, kwargs):
- for kwarg in kwargs:
- arg = kwarg.arg
- if arg.annotation:
- self.visit(arg.annotation)
- yield arg.name
-
- def visitClass(self, node):
- gen = self.ClassGen(node, self.scopes,
- self.get_module())
- walk(node.code, gen)
- gen.finish()
- self.set_lineno(node)
- self.emit('LOAD_BUILD_CLASS')
- self._makeClosure(gen, 0)
- self.emit('LOAD_CONST', node.name)
- self.finish_visit_call(node, 2)
- self.storeName(node.name)
-
- # The rest are standard visitor methods
-
- # The next few implement control-flow statements
-
- def visitIf(self, node):
- end = self.newBlock()
- numtests = len(node.tests)
- for i in range(numtests):
- test, suite = node.tests[i]
- if is_constant_false(test):
- # XXX will need to check generator stuff here
- continue
- self.set_lineno(test)
- self.visit(test)
- nextTest = self.newBlock()
- self.emit('JUMP_IF_FALSE', nextTest)
- self.nextBlock()
- self.emit('POP_TOP')
- self.visit(suite)
- self.emit('JUMP_FORWARD', end)
- self.startBlock(nextTest)
- self.emit('POP_TOP')
- if node.else_:
- self.visit(node.else_)
- self.nextBlock(end)
-
- def visitWhile(self, node):
- self.set_lineno(node)
-
- loop = self.newBlock()
- else_ = self.newBlock()
-
- after = self.newBlock()
- self.emit('SETUP_LOOP', after)
-
- self.nextBlock(loop)
- self.setups.push((LOOP, loop))
-
- self.set_lineno(node, force=True)
- self.visit(node.test)
- self.emit('JUMP_IF_FALSE', else_ or after)
-
- self.nextBlock()
- self.emit('POP_TOP')
- self.visit(node.body)
- self.emit('JUMP_ABSOLUTE', loop)
-
- self.startBlock(else_) # or just the POPs if there is no else clause
- self.emit('POP_TOP')
- self.emit('POP_BLOCK')
- self.setups.pop()
- if node.else_:
- self.visit(node.else_)
- self.nextBlock(after)
-
- def visitFor(self, node):
- start = self.newBlock()
- anchor = self.newBlock()
- after = self.newBlock()
- self.setups.push((LOOP, start))
-
- self.set_lineno(node)
- self.emit('SETUP_LOOP', after)
- self.visit(node.list)
- self.emit('GET_ITER')
-
- self.nextBlock(start)
- self.set_lineno(node, force=1)
- self.emit('FOR_ITER', anchor)
- self.visit(node.assign)
- self.visit(node.body)
- self.emit('JUMP_ABSOLUTE', start)
- self.nextBlock(anchor)
- self.emit('POP_BLOCK')
- self.setups.pop()
- if node.else_:
- self.visit(node.else_)
- self.nextBlock(after)
-
- def visitBreak(self, node):
- if not self.setups:
- raise SyntaxError, "'break' outside loop (%s, %d)" % \
- (node.filename, node.lineno)
- self.set_lineno(node)
- self.emit('BREAK_LOOP')
-
- def visitContinue(self, node):
- if not self.setups:
- raise SyntaxError, "'continue' outside loop (%s, %d)" % \
- (node.filename, node.lineno)
- kind, block = self.setups.top()
- if kind == LOOP:
- self.set_lineno(node)
- self.emit('JUMP_ABSOLUTE', block)
- self.nextBlock()
- elif kind == EXCEPT or kind == TRY_FINALLY:
- self.set_lineno(node)
- # find the block that starts the loop
- top = len(self.setups)
- while top > 0:
- top = top - 1
- kind, loop_block = self.setups[top]
- if kind == LOOP:
- break
- if kind != LOOP:
- raise SyntaxError, "'continue' outside loop (%s, %d)" % \
- (node.filename, node.lineno)
- self.emit('CONTINUE_LOOP', loop_block)
- self.nextBlock()
- elif kind == END_FINALLY:
- msg = "'continue' not allowed inside 'finally' clause (%s, %d)"
- raise SyntaxError, msg % (node.filename, node.lineno)
-
- def visitTest(self, node, jump):
- end = self.newBlock()
- for child in node.nodes[:-1]:
- self.visit(child)
- self.emit(jump, end)
- self.nextBlock()
- self.emit('POP_TOP')
- self.visit(node.nodes[-1])
- self.nextBlock(end)
-
- def visitAnd(self, node):
- self.visitTest(node, 'JUMP_IF_FALSE')
-
- def visitOr(self, node):
- self.visitTest(node, 'JUMP_IF_TRUE')
-
- def visitIfExp(self, node):
- endblock = self.newBlock()
- elseblock = self.newBlock()
- self.visit(node.test)
- self.emit('JUMP_IF_FALSE', elseblock)
- self.emit('POP_TOP')
- self.visit(node.then)
- self.emit('JUMP_FORWARD', endblock)
- self.nextBlock(elseblock)
- self.emit('POP_TOP')
- self.visit(node.else_)
- self.nextBlock(endblock)
-
- def visitCompare(self, node):
- self.visit(node.expr)
- cleanup = self.newBlock()
- for op, code in node.ops[:-1]:
- self.visit(code)
- self.emit('DUP_TOP')
- self.emit('ROT_THREE')
- self.emit('COMPARE_OP', op)
- self.emit('JUMP_IF_FALSE', cleanup)
- self.nextBlock()
- self.emit('POP_TOP')
- # now do the last comparison
- if node.ops:
- op, code = node.ops[-1]
- self.visit(code)
- self.emit('COMPARE_OP', op)
- if len(node.ops) > 1:
- end = self.newBlock()
- self.emit('JUMP_FORWARD', end)
- self.startBlock(cleanup)
- self.emit('ROT_TWO')
- self.emit('POP_TOP')
- self.nextBlock(end)
-
- # list comprehensions
- __list_count = 0
-
- def visitListComp(self, node):
- self.set_lineno(node)
- # setup list
- tmpname = "$list%d" % self.__list_count
- self.__list_count = self.__list_count + 1
- self.emit('BUILD_LIST', 0)
- self.emit('DUP_TOP')
- self._implicitNameOp('STORE', tmpname)
-
- stack = []
- for i, for_ in zip(range(len(node.quals)), node.quals):
- start, anchor = self.visit(for_)
- cont = None
- for if_ in for_.ifs:
- if cont is None:
- cont = self.newBlock()
- self.visit(if_, cont)
- stack.insert(0, (start, cont, anchor))
-
- self._implicitNameOp('LOAD', tmpname)
- self.visit(node.expr)
- self.emit('LIST_APPEND')
-
- for start, cont, anchor in stack:
- if cont:
- skip_one = self.newBlock()
- self.emit('JUMP_FORWARD', skip_one)
- self.startBlock(cont)
- self.emit('POP_TOP')
- self.nextBlock(skip_one)
- self.emit('JUMP_ABSOLUTE', start)
- self.startBlock(anchor)
- self._implicitNameOp('DELETE', tmpname)
-
- self.__list_count = self.__list_count - 1
-
- def visitListCompFor(self, node):
- start = self.newBlock()
- anchor = self.newBlock()
-
- self.visit(node.list)
- self.emit('GET_ITER')
- self.nextBlock(start)
- self.set_lineno(node, force=True)
- self.emit('FOR_ITER', anchor)
- self.nextBlock()
- self.visit(node.assign)
- return start, anchor
-
- def visitListCompIf(self, node, branch):
- self.set_lineno(node, force=True)
- self.visit(node.test)
- self.emit('JUMP_IF_FALSE', branch)
- self.newBlock()
- self.emit('POP_TOP')
-
- def _makeClosure(self, gen, args):
- frees = gen.scope.get_free_vars()
- if frees:
- for name in frees:
- self.emit('LOAD_CLOSURE', name)
- self.emit('BUILD_TUPLE', len(frees))
- self.emit('LOAD_CONST', gen)
- self.emit('MAKE_CLOSURE', args)
- else:
- self.emit('LOAD_CONST', gen)
- self.emit('MAKE_FUNCTION', args)
-
- def visitGenExpr(self, node):
- gen = GenExprCodeGenerator(node, self.scopes, self.class_name,
- self.get_module())
- walk(node.code, gen)
- gen.finish()
- self.set_lineno(node)
- self._makeClosure(gen, 0)
- # precomputation of the outermost iterable
- self.visit(node.code.quals[0].iter)
- self.emit('GET_ITER')
- self.emit('CALL_FUNCTION', 1)
-
- def visitGenExprInner(self, node):
- self.set_lineno(node)
- # setup list
-
- stack = []
- for i, for_ in zip(range(len(node.quals)), node.quals):
- start, anchor, end = self.visit(for_)
- cont = None
- for if_ in for_.ifs:
- if cont is None:
- cont = self.newBlock()
- self.visit(if_, cont)
- stack.insert(0, (start, cont, anchor, end))
-
- self.visit(node.expr)
- self.emit('YIELD_VALUE')
- self.emit('POP_TOP')
-
- for start, cont, anchor, end in stack:
- if cont:
- skip_one = self.newBlock()
- self.emit('JUMP_FORWARD', skip_one)
- self.startBlock(cont)
- self.emit('POP_TOP')
- self.nextBlock(skip_one)
- self.emit('JUMP_ABSOLUTE', start)
- self.startBlock(anchor)
- self.emit('POP_BLOCK')
- self.setups.pop()
- self.startBlock(end)
-
- self.emit('LOAD_CONST', None)
-
- def visitGenExprFor(self, node):
- start = self.newBlock()
- anchor = self.newBlock()
- end = self.newBlock()
-
- self.setups.push((LOOP, start))
- self.emit('SETUP_LOOP', end)
-
- if node.is_outmost:
- self.loadName('.0')
- else:
- self.visit(node.iter)
- self.emit('GET_ITER')
-
- self.nextBlock(start)
- self.set_lineno(node, force=True)
- self.emit('FOR_ITER', anchor)
- self.nextBlock()
- self.visit(node.assign)
- return start, anchor, end
-
- def visitGenExprIf(self, node, branch):
- self.set_lineno(node, force=True)
- self.visit(node.test)
- self.emit('JUMP_IF_FALSE', branch)
- self.newBlock()
- self.emit('POP_TOP')
-
- # exception related
-
- def visitAssert(self, node):
- # XXX would be interesting to implement this via a
- # transformation of the AST before this stage
- if __debug__:
- end = self.newBlock()
- self.set_lineno(node)
- # XXX AssertionError appears to be a special case -- it is always
- # loaded as a global even if there is a local name. I guess this
- # is a sort of renaming op.
- self.nextBlock()
- self.visit(node.test)
- self.emit('JUMP_IF_TRUE', end)
- self.nextBlock()
- self.emit('POP_TOP')
- self.emit('LOAD_GLOBAL', 'AssertionError')
- if node.fail:
- self.visit(node.fail)
- self.emit('RAISE_VARARGS', 2)
- else:
- self.emit('RAISE_VARARGS', 1)
- self.nextBlock(end)
- self.emit('POP_TOP')
-
- def visitRaise(self, node):
- self.set_lineno(node)
- n = 0
- if node.expr1:
- self.visit(node.expr1)
- n = n + 1
- if node.expr2:
- self.visit(node.expr2)
- n = n + 1
- if node.expr3:
- self.visit(node.expr3)
- n = n + 1
- self.emit('RAISE_VARARGS', n)
-
- def visitTryExcept(self, node):
- body = self.newBlock()
- handlers = self.newBlock()
- end = self.newBlock()
- if node.else_:
- lElse = self.newBlock()
- else:
- lElse = end
- self.set_lineno(node)
- self.emit('SETUP_EXCEPT', handlers)
- self.nextBlock(body)
- self.setups.push((EXCEPT, body))
- self.visit(node.body)
- self.emit('POP_BLOCK')
- self.setups.pop()
- self.emit('JUMP_FORWARD', lElse)
- self.startBlock(handlers)
-
- last = len(node.handlers) - 1
- for i in range(len(node.handlers)):
- expr, target, body = node.handlers[i]
- self.set_lineno(expr)
- if expr:
- self.emit('DUP_TOP')
- self.visit(expr)
- self.emit('COMPARE_OP', 'exception match')
- next = self.newBlock()
- self.emit('JUMP_IF_FALSE', next)
- self.nextBlock()
- self.emit('POP_TOP')
- self.emit('POP_TOP')
- if target:
- cleanup_body = self.newBlock()
- cleanup_final = self.newBlock()
- target_name = target[1]
-
- self.storeName(target_name)
- self.emit('POP_TOP')
- self.emit('SETUP_FINALLY', cleanup_final)
- self.nextBlock(cleanup_body)
- self.setups.push((TRY_FINALLY, cleanup_body))
- self.visit(body)
- self.emit('POP_BLOCK')
- self.setups.pop()
- self.emit('LOAD_CONST', None)
- self.nextBlock(cleanup_final)
- self.setups.push((END_FINALLY, cleanup_final))
-
-
- self.emit('LOAD_CONST', None)
- self.storeName(target_name)
- self._implicitNameOp('DELETE', target_name)
-
- self.emit('END_FINALLY')
- self.setups.pop()
- else:
- self.emit('POP_TOP')
- self.emit('POP_TOP')
- self.visit(body)
- self.emit('JUMP_FORWARD', end)
- if expr:
- self.nextBlock(next)
- else:
- self.nextBlock()
- if expr: # XXX
- self.emit('POP_TOP')
- self.emit('END_FINALLY')
- if node.else_:
- self.nextBlock(lElse)
- self.visit(node.else_)
- self.nextBlock(end)
-
- def visitTryFinally(self, node):
- body = self.newBlock()
- final = self.newBlock()
- self.set_lineno(node)
- self.emit('SETUP_FINALLY', final)
- self.nextBlock(body)
- self.setups.push((TRY_FINALLY, body))
- self.visit(node.body)
- self.emit('POP_BLOCK')
- self.setups.pop()
- self.emit('LOAD_CONST', None)
- self.nextBlock(final)
- self.setups.push((END_FINALLY, final))
- self.visit(node.final)
- self.emit('END_FINALLY')
- self.setups.pop()
-
- __with_count = 0
-
- def visitWith(self, node):
- body = self.newBlock()
- final = self.newBlock()
- exitvar = "$exit%d" % self.__with_count
- valuevar = "$value%d" % self.__with_count
- self.__with_count += 1
- self.set_lineno(node)
- self.visit(node.expr)
- self.emit('DUP_TOP')
- self.emit('LOAD_ATTR', '__exit__')
- self._implicitNameOp('STORE', exitvar)
- self.emit('LOAD_ATTR', '__enter__')
- self.emit('CALL_FUNCTION', 0)
- if node.vars is None:
- self.emit('POP_TOP')
- else:
- self._implicitNameOp('STORE', valuevar)
- self.emit('SETUP_FINALLY', final)
- self.nextBlock(body)
- self.setups.push((TRY_FINALLY, body))
- if node.vars is not None:
- self._implicitNameOp('LOAD', valuevar)
- self._implicitNameOp('DELETE', valuevar)
- self.visit(node.vars)
- self.visit(node.body)
- self.emit('POP_BLOCK')
- self.setups.pop()
- self.emit('LOAD_CONST', None)
- self.nextBlock(final)
- self.setups.push((END_FINALLY, final))
- self._implicitNameOp('LOAD', exitvar)
- self._implicitNameOp('DELETE', exitvar)
- self.emit('WITH_CLEANUP')
- self.emit('END_FINALLY')
- self.setups.pop()
- self.__with_count -= 1
-
- # misc
-
- def visitDiscard(self, node):
- self.set_lineno(node)
- self.visit(node.expr)
- self.emit('POP_TOP')
-
- def visitConst(self, node):
- self.emit('LOAD_CONST', node.value)
-
- def visitBytes(self, node):
- self.emit('LOAD_CONST', node.value)
- self.emit('MAKE_BYTES')
-
- def visitKeyword(self, node):
- self.emit('LOAD_CONST', node.name)
- self.visit(node.expr)
-
- def visitGlobal(self, node):
- # no code to generate
- pass
-
- def visitName(self, node):
- self.set_lineno(node)
- self.loadName(node.name)
-
- def visitPass(self, node):
- self.set_lineno(node)
-
- def visitImport(self, node):
- self.set_lineno(node)
- level = 0 if self.graph.checkFlag(CO_FUTURE_ABSIMPORT) else -1
- for name, alias in node.names:
- if VERSION > 1:
- self.emit('LOAD_CONST', level)
- self.emit('LOAD_CONST', None)
- self.emit('IMPORT_NAME', name)
- mod = name.split(".")[0]
- if alias:
- self._resolveDots(name)
- self.storeName(alias)
- else:
- self.storeName(mod)
-
- def visitFrom(self, node):
- self.set_lineno(node)
- level = node.level
- if level == 0 and not self.graph.checkFlag(CO_FUTURE_ABSIMPORT):
- level = -1
- fromlist = map(lambda (name, alias): name, node.names)
- if VERSION > 1:
- self.emit('LOAD_CONST', level)
- self.emit('LOAD_CONST', tuple(fromlist))
- self.emit('IMPORT_NAME', node.modname)
- for name, alias in node.names:
- if VERSION > 1:
- if name == '*':
- self.namespace = 0
- self.emit('IMPORT_STAR')
- # There can only be one name w/ from ... import *
- assert len(node.names) == 1
- return
- else:
- self.emit('IMPORT_FROM', name)
- self._resolveDots(name)
- self.storeName(alias or name)
- else:
- self.emit('IMPORT_FROM', name)
- self.emit('POP_TOP')
-
- def _resolveDots(self, name):
- elts = name.split(".")
- if len(elts) == 1:
- return
- for elt in elts[1:]:
- self.emit('LOAD_ATTR', elt)
-
- def visitGetattr(self, node):
- self.visit(node.expr)
- self.emit('LOAD_ATTR', self.mangle(node.attrname))
-
- # next five implement assignments
-
- def visitAssign(self, node):
- self.set_lineno(node)
- self.visit(node.expr)
- dups = len(node.nodes) - 1
- for i in range(len(node.nodes)):
- elt = node.nodes[i]
- if i < dups:
- self.emit('DUP_TOP')
- if isinstance(elt, ast.Node):
- self.visit(elt)
-
- def visitAssName(self, node):
- if node.flags == 'OP_ASSIGN':
- self.storeName(node.name)
- elif node.flags == 'OP_DELETE':
- self.set_lineno(node)
- self.delName(node.name)
- else:
- print("oops", node.flags)
-
- def visitAssAttr(self, node):
- self.visit(node.expr)
- if node.flags == 'OP_ASSIGN':
- self.emit('STORE_ATTR', self.mangle(node.attrname))
- elif node.flags == 'OP_DELETE':
- self.emit('DELETE_ATTR', self.mangle(node.attrname))
- else:
- print("warning: unexpected flags:", node.flags)
- print(node)
-
- def _visitAssSequence(self, node, op='UNPACK_SEQUENCE'):
- if findOp(node) != 'OP_DELETE':
- self.emit(op, len(node.nodes))
- for child in node.nodes:
- self.visit(child)
-
- if VERSION > 1:
- visitAssTuple = _visitAssSequence
- visitAssList = _visitAssSequence
- else:
- def visitAssTuple(self, node):
- self._visitAssSequence(node, 'UNPACK_TUPLE')
-
- def visitAssList(self, node):
- self._visitAssSequence(node, 'UNPACK_LIST')
-
- # augmented assignment
-
- def visitAugAssign(self, node):
- self.set_lineno(node)
- aug_node = wrap_aug(node.node)
- self.visit(aug_node, "load")
- self.visit(node.expr)
- self.emit(self._augmented_opcode[node.op])
- self.visit(aug_node, "store")
-
- _augmented_opcode = {
- '+=' : 'INPLACE_ADD',
- '-=' : 'INPLACE_SUBTRACT',
- '*=' : 'INPLACE_MULTIPLY',
- '/=' : 'INPLACE_TRUE_DIVIDE',
- '//=': 'INPLACE_FLOOR_DIVIDE',
- '%=' : 'INPLACE_MODULO',
- '**=': 'INPLACE_POWER',
- '>>=': 'INPLACE_RSHIFT',
- '<<=': 'INPLACE_LSHIFT',
- '&=' : 'INPLACE_AND',
- '^=' : 'INPLACE_XOR',
- '|=' : 'INPLACE_OR',
- }
-
- def visitAugName(self, node, mode):
- if mode == "load":
- self.loadName(node.name)
- elif mode == "store":
- self.storeName(node.name)
-
- def visitAugGetattr(self, node, mode):
- if mode == "load":
- self.visit(node.expr)
- self.emit('DUP_TOP')
- self.emit('LOAD_ATTR', self.mangle(node.attrname))
- elif mode == "store":
- self.emit('ROT_TWO')
- self.emit('STORE_ATTR', self.mangle(node.attrname))
-
- def visitAugSlice(self, node, mode):
- if mode == "load":
- self.visitSlice(node, 1)
- elif mode == "store":
- slice = 0
- if node.lower:
- slice = slice | 1
- if node.upper:
- slice = slice | 2
- if slice == 0:
- self.emit('ROT_TWO')
- elif slice == 3:
- self.emit('ROT_FOUR')
- else:
- self.emit('ROT_THREE')
- self.emit('STORE_SLICE+%d' % slice)
-
- def visitAugSubscript(self, node, mode):
- if mode == "load":
- self.visitSubscript(node, 1)
- elif mode == "store":
- self.emit('ROT_THREE')
- self.emit('STORE_SUBSCR')
-
- def visitCallFunc(self, node):
- self.set_lineno(node)
- self.visit(node.node)
- self.finish_visit_call(node)
-
- def finish_visit_call(self, node, pos=0, kw=0):
- for arg in node.args:
- self.visit(arg)
- if isinstance(arg, ast.Keyword):
- kw = kw + 1
- else:
- pos = pos + 1
- if node.star_args is not None:
- self.visit(node.star_args)
- if node.dstar_args is not None:
- self.visit(node.dstar_args)
- have_star = node.star_args is not None
- have_dstar = node.dstar_args is not None
- opcode = callfunc_opcode_info[have_star, have_dstar]
- self.emit(opcode, kw << 8 | pos)
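# Illustrative sketch (not part of the removed file): the CALL_FUNCTION oparg
# packs the keyword-argument count into the high byte and the positional
# count into the low byte, e.g. f(1, 2, a=3) gives pos=2, kw=1.
pos, kw = 2, 1
oparg = kw << 8 | pos
assert oparg == 0x0102
assert (oparg >> 8, oparg & 0xff) == (kw, pos)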
-
- def visitReturn(self, node):
- self.set_lineno(node)
- self.visit(node.value)
- self.emit('RETURN_VALUE')
-
- def visitYield(self, node):
- self.set_lineno(node)
- self.visit(node.value)
- self.emit('YIELD_VALUE')
-
- # slice and subscript stuff
-
- def visitSlice(self, node, aug_flag=None):
- # aug_flag is used by visitAugSlice
- self.visit(node.expr)
- slice = 0
- if node.lower:
- self.visit(node.lower)
- slice = slice | 1
- if node.upper:
- self.visit(node.upper)
- slice = slice | 2
- if aug_flag:
- if slice == 0:
- self.emit('DUP_TOP')
- elif slice == 3:
- self.emit('DUP_TOPX', 3)
- else:
- self.emit('DUP_TOPX', 2)
- if node.flags == 'OP_APPLY':
- self.emit('SLICE+%d' % slice)
- elif node.flags == 'OP_ASSIGN':
- self.emit('STORE_SLICE+%d' % slice)
- elif node.flags == 'OP_DELETE':
- self.emit('DELETE_SLICE+%d' % slice)
- else:
- print("weird slice", node.flags)
- raise
-
- def visitSubscript(self, node, aug_flag=None):
- self.visit(node.expr)
- for sub in node.subs:
- self.visit(sub)
- if len(node.subs) > 1:
- self.emit('BUILD_TUPLE', len(node.subs))
- if aug_flag:
- self.emit('DUP_TOPX', 2)
- if node.flags == 'OP_APPLY':
- self.emit('BINARY_SUBSCR')
- elif node.flags == 'OP_ASSIGN':
- self.emit('STORE_SUBSCR')
- elif node.flags == 'OP_DELETE':
- self.emit('DELETE_SUBSCR')
-
- # binary ops
-
- def binaryOp(self, node, op):
- self.visit(node.left)
- self.visit(node.right)
- self.emit(op)
-
- def visitAdd(self, node):
- return self.binaryOp(node, 'BINARY_ADD')
-
- def visitSub(self, node):
- return self.binaryOp(node, 'BINARY_SUBTRACT')
-
- def visitMul(self, node):
- return self.binaryOp(node, 'BINARY_MULTIPLY')
-
- def visitDiv(self, node):
- return self.binaryOp(node, 'BINARY_TRUE_DIVIDE')
-
- def visitFloorDiv(self, node):
- return self.binaryOp(node, 'BINARY_FLOOR_DIVIDE')
-
- def visitMod(self, node):
- return self.binaryOp(node, 'BINARY_MODULO')
-
- def visitPower(self, node):
- return self.binaryOp(node, 'BINARY_POWER')
-
- def visitLeftShift(self, node):
- return self.binaryOp(node, 'BINARY_LSHIFT')
-
- def visitRightShift(self, node):
- return self.binaryOp(node, 'BINARY_RSHIFT')
-
- # unary ops
-
- def unaryOp(self, node, op):
- self.visit(node.expr)
- self.emit(op)
-
- def visitInvert(self, node):
- return self.unaryOp(node, 'UNARY_INVERT')
-
- def visitUnarySub(self, node):
- return self.unaryOp(node, 'UNARY_NEGATIVE')
-
- def visitUnaryAdd(self, node):
- return self.unaryOp(node, 'UNARY_POSITIVE')
-
- def visitUnaryInvert(self, node):
- return self.unaryOp(node, 'UNARY_INVERT')
-
- def visitNot(self, node):
- return self.unaryOp(node, 'UNARY_NOT')
-
- # bit ops
-
- def bitOp(self, nodes, op):
- self.visit(nodes[0])
- for node in nodes[1:]:
- self.visit(node)
- self.emit(op)
-
- def visitBitand(self, node):
- return self.bitOp(node.nodes, 'BINARY_AND')
-
- def visitBitor(self, node):
- return self.bitOp(node.nodes, 'BINARY_OR')
-
- def visitBitxor(self, node):
- return self.bitOp(node.nodes, 'BINARY_XOR')
-
- # object constructors
-
- def visitTuple(self, node):
- self.set_lineno(node)
- for elt in node.nodes:
- self.visit(elt)
- self.emit('BUILD_TUPLE', len(node.nodes))
-
- def visitList(self, node):
- self.set_lineno(node)
- for elt in node.nodes:
- self.visit(elt)
- self.emit('BUILD_LIST', len(node.nodes))
-
- def visitSet(self, node):
- self.set_lineno(node)
- for elt in node.items:
- self.visit(elt)
- self.emit('BUILD_SET', len(node.items))
-
- def visitSliceobj(self, node):
- for child in node.nodes:
- self.visit(child)
- self.emit('BUILD_SLICE', len(node.nodes))
-
- def visitDict(self, node):
- self.set_lineno(node)
- self.emit('BUILD_MAP', 0)
- for k, v in node.items:
- self.emit('DUP_TOP')
- self.visit(k)
- self.visit(v)
- self.emit('ROT_THREE')
- self.emit('STORE_SUBSCR')
-
-class NestedScopeMixin:
- """Defines initClass() for nested scoping (Python 2.2-compatible)"""
- def initClass(self):
- self.__class__.NameFinder = LocalNameFinder
- self.__class__.FunctionGen = FunctionCodeGenerator
- self.__class__.ClassGen = ClassCodeGenerator
-
-class ModuleCodeGenerator(NestedScopeMixin, CodeGenerator):
- __super_init = CodeGenerator.__init__
-
- scopes = None
-
- def __init__(self, tree):
- self.graph = pyassem.PyFlowGraph("<module>", tree.filename)
- self.futures = future.find_futures(tree)
- self.__super_init()
- walk(tree, self)
-
- def get_module(self):
- return self
-
-class ExpressionCodeGenerator(NestedScopeMixin, CodeGenerator):
- __super_init = CodeGenerator.__init__
-
- scopes = None
- futures = ()
-
- def __init__(self, tree):
- self.graph = pyassem.PyFlowGraph("<expression>", tree.filename)
- self.__super_init()
- walk(tree, self)
-
- def get_module(self):
- return self
-
-class InteractiveCodeGenerator(NestedScopeMixin, CodeGenerator):
-
- __super_init = CodeGenerator.__init__
-
- scopes = None
- futures = ()
-
- def __init__(self, tree):
- self.graph = pyassem.PyFlowGraph("<interactive>", tree.filename)
- self.__super_init()
- self.set_lineno(tree)
- walk(tree, self)
- self.emit('RETURN_VALUE')
-
- def get_module(self):
- return self
-
- def visitDiscard(self, node):
- # XXX Discard means it's an expression. Perhaps this is a bad
- # name.
- self.visit(node.expr)
- self.emit('PRINT_EXPR')
-
-class AbstractFunctionCode:
- optimized = 1
- lambdaCount = 0
-
- def __init__(self, func, scopes, isLambda, class_name, mod):
- self.class_name = class_name
- self.module = mod
- if isLambda:
- klass = FunctionCodeGenerator
- name = "<lambda.%d>" % klass.lambdaCount
- klass.lambdaCount = klass.lambdaCount + 1
- else:
- name = func.name
-
- args, hasTupleArg = generateArgList(func.arguments)
- kwonlyargs = generateKwonlyArgList(func.kwonlyargs)
- self.graph = pyassem.PyFlowGraph(name, func.filename, args,
- kwonlyargs=kwonlyargs,
- optimized=1)
- self.isLambda = isLambda
- self.super_init()
-
- if not isLambda and func.doc:
- self.setDocstring(func.doc)
-
- lnf = walk(func.code, self.NameFinder(args+kwonlyargs), verbose=0)
- self.locals.push(lnf.getLocals())
- if func.varargs:
- self.graph.setFlag(CO_VARARGS)
- if func.kwargs:
- self.graph.setFlag(CO_VARKEYWORDS)
- self.set_lineno(func)
- if hasTupleArg:
- self.generateArgUnpack(func.arguments)
-
- def get_module(self):
- return self.module
-
- def finish(self):
- self.graph.startExitBlock()
- if not self.isLambda:
- self.emit('LOAD_CONST', None)
- self.emit('RETURN_VALUE')
-
- def generateArgUnpack(self, args):
- for i in range(len(args)):
- arg = args[i]
- if isinstance(arg, ast.NestedArgs):
- self.emit('LOAD_FAST', '.%d' % (i * 2))
- self.unpackSequence(tuple(_nested_names(arg)))
-
- def unpackSequence(self, tup):
- if VERSION > 1:
- self.emit('UNPACK_SEQUENCE', len(tup))
- else:
- self.emit('UNPACK_TUPLE', len(tup))
- for elt in tup:
- if isinstance(elt, tuple):
- self.unpackSequence(elt)
- else:
- self._nameOp('STORE', elt)
-
- unpackTuple = unpackSequence
-
-class FunctionCodeGenerator(NestedScopeMixin, AbstractFunctionCode,
- CodeGenerator):
- super_init = CodeGenerator.__init__ # called by the other __init__
- scopes = None
-
- __super_init = AbstractFunctionCode.__init__
-
- def __init__(self, func, scopes, isLambda, class_name, mod):
- self.scopes = scopes
- self.scope = scopes[func]
- self.__super_init(func, scopes, isLambda, class_name, mod)
- self.graph.setFreeVars(self.scope.get_free_vars())
- self.graph.setCellVars(self.scope.get_cell_vars())
- if self.scope.generator is not None:
- self.graph.setFlag(CO_GENERATOR)
-
-class GenExprCodeGenerator(NestedScopeMixin, AbstractFunctionCode,
- CodeGenerator):
- super_init = CodeGenerator.__init__ # called by the other __init__
- scopes = None
-
- __super_init = AbstractFunctionCode.__init__
-
- def __init__(self, gexp, scopes, class_name, mod):
- self.scopes = scopes
- self.scope = scopes[gexp]
- self.__super_init(gexp, scopes, 1, class_name, mod)
- self.graph.setFreeVars(self.scope.get_free_vars())
- self.graph.setCellVars(self.scope.get_cell_vars())
- self.graph.setFlag(CO_GENERATOR)
-
-class AbstractClassCode:
-
- def __init__(self, klass, scopes, module):
- self.class_name = klass.name
- self.module = module
- self.graph = pyassem.PyFlowGraph(klass.name, klass.filename,
- optimized=0, klass=1)
- self.super_init()
- lnf = walk(klass.code, self.NameFinder(), verbose=0)
- self.locals.push(lnf.getLocals())
- self.graph.setFlag(CO_NEWLOCALS)
- if klass.doc:
- self.setDocstring(klass.doc)
-
- def get_module(self):
- return self.module
-
- def finish(self):
- self.graph.startExitBlock()
- self.emit('LOAD_CONST', None)
- self.emit('RETURN_VALUE')
-
-class ClassCodeGenerator(NestedScopeMixin, AbstractClassCode, CodeGenerator):
- super_init = CodeGenerator.__init__
- scopes = None
-
- __super_init = AbstractClassCode.__init__
-
- def __init__(self, klass, scopes, module):
- self.scopes = scopes
- self.scope = scopes[klass]
- self.__super_init(klass, scopes, module)
- self.graph.setFreeVars(self.scope.get_free_vars())
- self.graph.setCellVars(self.scope.get_cell_vars())
- self.set_lineno(klass)
- self.emit("LOAD_GLOBAL", "__name__")
- self.storeName("__module__")
- if klass.doc:
- self.emit("LOAD_CONST", klass.doc)
- self.storeName('__doc__')
-
-def generateArgList(arglist):
- """Generate an arg list marking TupleArgs"""
- args = []
- extra = []
- count = 0
- for i in range(len(arglist)):
- elt = arglist[i]
- if isinstance(elt, ast.SimpleArg):
- args.append(elt.name)
- elif isinstance(elt, ast.NestedArgs):
- t = tuple(_nested_names(elt))
- args.append(TupleArg(i * 2, t))
- extra.extend(misc.flatten(t))
- count = count + 1
- else:
- raise ValueError, "unexpect argument type:", elt
- return args + extra, count
-
-def _nested_names(elt):
- for arg in elt.args:
- if isinstance(arg, ast.SimpleArg):
- yield arg.name
- elif isinstance(arg, ast.NestedArgs):
- yield tuple(_nested_names(arg))
-
-def generateKwonlyArgList(keywordOnlyArgs):
- kwonlyargs = []
- for elt in keywordOnlyArgs:
- assert isinstance(elt, ast.Kwarg)
- kwonlyargs.append(elt.arg.name)
- return kwonlyargs
-
-def findOp(node):
- """Find the op (DELETE, LOAD, STORE) in an AssTuple tree"""
- v = OpFinder()
- walk(node, v, verbose=0)
- return v.op
-
-class OpFinder:
- def __init__(self):
- self.op = None
- def visitAssName(self, node):
- if self.op is None:
- self.op = node.flags
- elif self.op != node.flags:
- raise ValueError, "mixed ops in stmt"
- visitAssAttr = visitAssName
- visitSubscript = visitAssName
-
-class Delegator:
- """Base class to support delegation for augmented assignment nodes
-
- To generate code for augmented assignments, we use the following
- wrapper classes. In visitAugAssign, the left-hand expression node
- is visited twice. The first time the visit uses the normal method
- for that node. The second time the visit uses a different method
- that generates the appropriate code to perform the assignment.
- These delegator classes wrap the original AST nodes in order to
- support the variant visit methods.
- """
- def __init__(self, obj):
- self.obj = obj
-
- def __getattr__(self, attr):
- return getattr(self.obj, attr)
-
-class AugGetattr(Delegator):
- pass
-
-class AugName(Delegator):
- pass
-
-class AugSlice(Delegator):
- pass
-
-class AugSubscript(Delegator):
- pass
-
-wrapper = {
- ast.Getattr: AugGetattr,
- ast.Name: AugName,
- ast.Slice: AugSlice,
- ast.Subscript: AugSubscript,
- }
-
-def wrap_aug(node):
- return wrapper[node.__class__](node)
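# Illustrative sketch (not part of the removed file): the Delegator wrappers
# forward attribute access to the wrapped node while changing the class that
# the visitor dispatches on, so visitAugName runs instead of visitName.
class _FakeName:                 # hypothetical stand-in for ast.Name
    def __init__(self, name):
        self.name = name

wrapped = AugName(_FakeName("counter"))
assert wrapped.name == "counter"                  # delegated via __getattr__
assert wrapped.__class__.__name__ == "AugName"    # drives visitor dispatch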
-
-if __name__ == "__main__":
- for file in sys.argv[1:]:
- compileFile(file)
diff --git a/Lib/compiler/symbols.py b/Lib/compiler/symbols.py
deleted file mode 100644
index e22294e..0000000
--- a/Lib/compiler/symbols.py
+++ /dev/null
@@ -1,470 +0,0 @@
-"""Module symbol-table generator"""
-
-from compiler import ast
-from compiler.consts import SC_LOCAL, SC_GLOBAL, SC_FREE, SC_CELL, SC_UNKNOWN
-from compiler.misc import mangle
-import types
-
-
-import sys
-
-MANGLE_LEN = 256
-
-class Scope:
- # XXX how much information do I need about each name?
- def __init__(self, name, module, klass=None):
- self.name = name
- self.module = module
- self.defs = {}
- self.uses = {}
- self.globals = {}
- self.params = {}
- self.frees = {}
- self.cells = {}
- self.children = []
- # nested is true if the class could contain free variables,
- # i.e. if it is nested within another function.
- self.nested = None
- self.generator = None
- self.klass = None
- if klass is not None:
- for i in range(len(klass)):
- if klass[i] != '_':
- self.klass = klass[i:]
- break
-
- def __repr__(self):
- return "<%s: %s>" % (self.__class__.__name__, self.name)
-
- def mangle(self, name):
- if self.klass is None:
- return name
- return mangle(name, self.klass)
-
- def add_def(self, name):
- self.defs[self.mangle(name)] = 1
-
- def add_use(self, name):
- self.uses[self.mangle(name)] = 1
-
- def add_global(self, name):
- name = self.mangle(name)
- if name in self.uses or name in self.defs:
- pass # XXX warn about global following def/use
- if name in self.params:
- raise SyntaxError, "%s in %s is global and parameter" % \
- (name, self.name)
- self.globals[name] = 1
- self.module.add_def(name)
-
- def add_param(self, name):
- name = self.mangle(name)
- self.defs[name] = 1
- self.params[name] = 1
-
- def get_names(self):
- d = {}
- d.update(self.defs)
- d.update(self.uses)
- d.update(self.globals)
- return d.keys()
-
- def add_child(self, child):
- self.children.append(child)
-
- def get_children(self):
- return self.children
-
- def DEBUG(self):
- print(self.name, self.nested and "nested" or "", file=sys.stderr)
- print("\tglobals: ", self.globals, file=sys.stderr)
- print("\tcells: ", self.cells, file=sys.stderr)
- print("\tdefs: ", self.defs, file=sys.stderr)
- print("\tuses: ", self.uses, file=sys.stderr)
- print("\tfrees:", self.frees, file=sys.stderr)
-
- def check_name(self, name):
- """Return scope of name.
-
- The scope of a name could be LOCAL, GLOBAL, FREE, or CELL.
- """
- if name in self.globals:
- return SC_GLOBAL
- if name in self.cells:
- return SC_CELL
- if name in self.defs:
- return SC_LOCAL
- if self.nested and (name in self.frees or
- name in self.uses):
- return SC_FREE
- if self.nested:
- return SC_UNKNOWN
- else:
- return SC_GLOBAL
-
- def get_free_vars(self):
- if not self.nested:
- return ()
- free = {}
- free.update(self.frees)
- for name in self.uses.keys():
- if not (name in self.defs or
- name in self.globals):
- free[name] = 1
- return free.keys()
-
- def handle_children(self):
- for child in self.children:
- frees = child.get_free_vars()
- globals = self.add_frees(frees)
- for name in globals:
- child.force_global(name)
-
- def force_global(self, name):
- """Force name to be global in scope.
-
- Some child of the current node had a free reference to name.
- When the child was processed, it was labelled a free
- variable. Now that all its enclosing scope have been
- processed, the name is known to be a global or builtin. So
- walk back down the child chain and set the name to be global
- rather than free.
-
- Be careful to stop if a child does not think the name is
- free.
- """
- self.globals[name] = 1
- if name in self.frees:
- del self.frees[name]
- for child in self.children:
- if child.check_name(name) == SC_FREE:
- child.force_global(name)
-
- def add_frees(self, names):
- """Process list of free vars from nested scope.
-
- Returns a list of names that are either 1) declared global in the
- parent or 2) undefined in a top-level parent. In either case,
- the nested scope should treat them as globals.
- """
- child_globals = []
- for name in names:
- sc = self.check_name(name)
- if self.nested:
- if sc == SC_UNKNOWN or sc == SC_FREE \
- or isinstance(self, ClassScope):
- self.frees[name] = 1
- elif sc == SC_GLOBAL:
- child_globals.append(name)
- elif isinstance(self, FunctionScope) and sc == SC_LOCAL:
- self.cells[name] = 1
- elif sc != SC_CELL:
- child_globals.append(name)
- else:
- if sc == SC_LOCAL:
- self.cells[name] = 1
- elif sc != SC_CELL:
- child_globals.append(name)
- return child_globals
-
- def get_cell_vars(self):
- return self.cells.keys()
-
-class ModuleScope(Scope):
- __super_init = Scope.__init__
-
- def __init__(self):
- self.__super_init("global", self)
-
-class FunctionScope(Scope):
- pass
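# Illustrative sketch (not part of the removed file): how check_name()
# classifies names for a single nested function scope, before the free/cell
# resolution in handle_children()/add_frees() has run.
mod = ModuleScope()
func = FunctionScope("f", mod)
func.nested = 1
func.add_param("a")        # parameters are defs, hence SC_LOCAL
func.add_use("len")        # used but never defined in a nested scope: SC_FREE
func.add_global("flag")    # declared global: SC_GLOBAL

assert func.check_name("a") == SC_LOCAL
assert func.check_name("len") == SC_FREE
assert func.check_name("flag") == SC_GLOBAL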
-
-class GenExprScope(Scope):
- __super_init = Scope.__init__
-
- __counter = 1
-
- def __init__(self, module, klass=None):
- i = self.__counter
- self.__counter += 1
- self.__super_init("generator expression<%d>"%i, module, klass)
- self.add_param('.0')
-
- def get_names(self):
- keys = Scope.get_names(self)
- return keys
-
-class LambdaScope(FunctionScope):
- __super_init = Scope.__init__
-
- __counter = 1
-
- def __init__(self, module, klass=None):
- i = self.__counter
- self.__counter += 1
- self.__super_init("lambda.%d" % i, module, klass)
-
-class ClassScope(Scope):
- __super_init = Scope.__init__
-
- def __init__(self, name, module):
- self.__super_init(name, module, name)
-
-class SymbolVisitor:
- def __init__(self):
- self.scopes = {}
- self.klass = None
-
- # nodes that define new scopes
-
- def visitModule(self, node):
- scope = self.module = self.scopes[node] = ModuleScope()
- self.visit(node.node, scope)
-
- visitExpression = visitModule
-
- def visitFunction(self, node, parent):
- if node.decorators:
- self.visit(node.decorators, parent)
- parent.add_def(node.name)
- for n in node.defaults:
- self.visit(n, parent)
- scope = FunctionScope(node.name, self.module, self.klass)
- if parent.nested or isinstance(parent, FunctionScope):
- scope.nested = 1
- self.scopes[node] = scope
-
- args = node.arguments
- for kwonly in node.kwonlyargs:
- args.append(kwonly.arg)
- self._do_arguments(scope, args)
-
- self.visit(node.code, scope)
- self.handle_free_vars(scope, parent)
-
- def visitGenExpr(self, node, parent):
- scope = GenExprScope(self.module, self.klass);
- if parent.nested or isinstance(parent, FunctionScope) \
- or isinstance(parent, GenExprScope):
- scope.nested = 1
-
- self.scopes[node] = scope
- self.visit(node.code, scope)
-
- self.handle_free_vars(scope, parent)
-
- def visitGenExprInner(self, node, scope):
- for genfor in node.quals:
- self.visit(genfor, scope)
-
- self.visit(node.expr, scope)
-
- def visitGenExprFor(self, node, scope):
- self.visit(node.assign, scope, 1)
- self.visit(node.iter, scope)
- for if_ in node.ifs:
- self.visit(if_, scope)
-
- def visitGenExprIf(self, node, scope):
- self.visit(node.test, scope)
-
- def visitLambda(self, node, parent, assign=0):
- # Lambda is an expression, so it could appear in an expression
- # context where assign is passed. The transformer should catch
- # any code that has a lambda on the left-hand side.
- assert not assign
-
- for n in node.defaults:
- self.visit(n, parent)
- scope = LambdaScope(self.module, self.klass)
- if parent.nested or isinstance(parent, FunctionScope):
- scope.nested = 1
- self.scopes[node] = scope
- self._do_arguments(scope, node.arguments)
- self.visit(node.code, scope)
- self.handle_free_vars(scope, parent)
-
- def _do_arguments(self, scope, arguments):
- for node in arguments:
- if isinstance(node, ast.SimpleArg):
- scope.add_param(node.name)
- if node.annotation:
- self.visit(node.annotation, scope)
- else:
- self._do_arguments(scope, node.args)
-
- def handle_free_vars(self, scope, parent):
- parent.add_child(scope)
- scope.handle_children()
-
- def visitClass(self, node, parent):
- parent.add_def(node.name)
- for n in node.args:
- self.visit(n, parent)
- scope = ClassScope(node.name, self.module)
- if parent.nested or isinstance(parent, FunctionScope):
- scope.nested = 1
- if node.doc is not None:
- scope.add_def('__doc__')
- scope.add_def('__module__')
- self.scopes[node] = scope
- prev = self.klass
- self.klass = node.name
- self.visit(node.code, scope)
- self.klass = prev
- self.handle_free_vars(scope, parent)
-
- # name can be a def or a use
-
- # XXX a few calls and nodes expect a third "assign" arg that is
- # true if the name is being used as an assignment. only
- # expressions contained within statements may have the assign arg.
-
- def visitName(self, node, scope, assign=0):
- if assign:
- scope.add_def(node.name)
- else:
- scope.add_use(node.name)
-
- # operations that bind new names
-
- def visitFor(self, node, scope):
- self.visit(node.assign, scope, 1)
- self.visit(node.list, scope)
- self.visit(node.body, scope)
- if node.else_:
- self.visit(node.else_, scope)
-
- def visitFrom(self, node, scope):
- for name, asname in node.names:
- if name == "*":
- continue
- scope.add_def(asname or name)
-
- def visitImport(self, node, scope):
- for name, asname in node.names:
- i = name.find(".")
- if i > -1:
- name = name[:i]
- scope.add_def(asname or name)
-
- def visitGlobal(self, node, scope):
- for name in node.names:
- scope.add_global(name)
-
- def visitAssign(self, node, scope):
- """Propagate assignment flag down to child nodes.
-
- The Assign node doesn't itself contain the variables being
- assigned to. Instead, the children in node.nodes are visited
- with the assign flag set to true. When the names occur in
- those nodes, they are marked as defs.
-
- Some names that occur in an assignment target are not bound by
- the assignment, e.g. a name occurring inside a slice. The
- visitor handles these nodes specially; they do not propagate
- the assign flag to their children.
- """
- for n in node.nodes:
- self.visit(n, scope, 1)
- self.visit(node.expr, scope)
-
- def visitAssName(self, node, scope, assign=1):
- scope.add_def(node.name)
-
- def visitAssAttr(self, node, scope, assign=0):
- self.visit(node.expr, scope, 0)
-
- def visitSubscript(self, node, scope, assign=0):
- self.visit(node.expr, scope, 0)
- for n in node.subs:
- self.visit(n, scope, 0)
-
- def visitSlice(self, node, scope, assign=0):
- self.visit(node.expr, scope, 0)
- if node.lower:
- self.visit(node.lower, scope, 0)
- if node.upper:
- self.visit(node.upper, scope, 0)
-
- def visitAugAssign(self, node, scope):
- # If the LHS is a name, then this counts as assignment.
- # Otherwise, it's just use.
- self.visit(node.node, scope)
- if isinstance(node.node, ast.Name):
- self.visit(node.node, scope, 1) # XXX worry about this
- self.visit(node.expr, scope)
-
- # prune if statements if tests are false
-
- _const_types = types.StringType, types.IntType, types.FloatType
-
- def visitIf(self, node, scope):
- for test, body in node.tests:
- if isinstance(test, ast.Const):
- if type(test.value) in self._const_types:
- if not test.value:
- continue
- self.visit(test, scope)
- self.visit(body, scope)
- if node.else_:
- self.visit(node.else_, scope)
-
- # a yield statement signals a generator
-
- def visitYield(self, node, scope):
- scope.generator = 1
- self.visit(node.value, scope)
-
-def list_eq(l1, l2):
- return sorted(l1) == sorted(l2)
-
-if __name__ == "__main__":
- import sys
- from compiler import parseFile, walk
- import symtable
-
- def get_names(syms):
- return [s for s in [s.get_name() for s in syms.get_symbols()]
- if not (s.startswith('_[') or s.startswith('.'))]
-
- for file in sys.argv[1:]:
- print(file)
- f = open(file)
- buf = f.read()
- f.close()
- syms = symtable.symtable(buf, file, "exec")
- mod_names = get_names(syms)
- tree = parseFile(file)
- s = SymbolVisitor()
- walk(tree, s)
-
- # compare module-level symbols
- names2 = s.scopes[tree].get_names()
-
- if not list_eq(mod_names, names2):
- print()
- print("oops", file)
- print(sorted(mod_names))
- print(sorted(names2))
- sys.exit(-1)
-
- d = {}
- d.update(s.scopes)
- del d[tree]
- scopes = d.values()
- del d
-
- for s in syms.get_symbols():
- if s.is_namespace():
- l = [sc for sc in scopes
- if sc.name == s.get_name()]
- if len(l) > 1:
- print("skipping", s.get_name())
- else:
- if not list_eq(get_names(s.get_namespace()),
- l[0].get_names()):
- print(s.get_name())
- print(sorted(get_names(s.get_namespace())))
- print(sorted(l[0].get_names()))
- sys.exit(-1)
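
The removed __main__ block above cross-checks SymbolVisitor against the standard symtable module, which stays in the stdlib. A minimal sketch of the symtable calls it relies on (symtable.symtable, get_symbols, get_name, plus get_children for nested scopes), using an inline source string rather than a file:

    import symtable

    src = "x = 1\ndef f(a):\n    return a + x\n"
    table = symtable.symtable(src, "<example>", "exec")
    # Module-level symbols, the same data the removed script compared against.
    print([s.get_name() for s in table.get_symbols()])
    # Nested namespaces (here the function f) are reachable via get_children().
    for child in table.get_children():
        print(child.get_name(), [s.get_name() for s in child.get_symbols()])
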
diff --git a/Lib/compiler/syntax.py b/Lib/compiler/syntax.py
deleted file mode 100644
index 6187b47..0000000
--- a/Lib/compiler/syntax.py
+++ /dev/null
@@ -1,46 +0,0 @@
-"""Check for errs in the AST.
-
-The Python parser does not catch all syntax errors. Others, like
-assignments with invalid targets, are caught in the code generation
-phase.
-
-The compiler package catches some errors in the transformer module.
-But it seems clearer to write checkers that use the AST to detect
-errors.
-"""
-
-from compiler import ast, walk
-
-def check(tree, multi=None):
- v = SyntaxErrorChecker(multi)
- walk(tree, v)
- return v.errors
-
-class SyntaxErrorChecker:
- """A visitor to find syntax errors in the AST."""
-
- def __init__(self, multi=None):
- """Create new visitor object.
-
- If optional argument multi is not None, then print messages
- for each error rather than raising a SyntaxError for the
- first.
- """
- self.multi = multi
- self.errors = 0
-
- def error(self, node, msg):
- self.errors = self.errors + 1
- if self.multi is not None:
- print("%s:%s: %s" % (node.filename, node.lineno, msg))
- else:
- raise SyntaxError, "%s (%s:%s)" % (msg, node.filename, node.lineno)
-
- def visitAssign(self, node):
- # the transformer module handles many of these
- pass
-## for target in node.nodes:
-## if isinstance(target, ast.AssList):
-## if target.lineno is None:
-## target.lineno = node.lineno
-## self.error(target, "can't assign to list comprehension")
diff --git a/Lib/compiler/transformer.py b/Lib/compiler/transformer.py
deleted file mode 100644
index 3127b02..0000000
--- a/Lib/compiler/transformer.py
+++ /dev/null
@@ -1,1534 +0,0 @@
-"""Parse tree transformation module.
-
-Transforms Python source code into an abstract syntax tree (AST)
-defined in the ast module.
-
-The simplest ways to invoke this module are via parse and parseFile.
-parse(buf) -> AST
-parseFile(path) -> AST
-"""
-
-# Original version written by Greg Stein (gstein@lyra.org)
-# and Bill Tutt (rassilon@lima.mudlib.org)
-# February 1997.
-#
-# Modifications and improvements for Python 2.0 by Jeremy Hylton and
-# Mark Hammond
-#
-# Some fixes to try to have correct line number on almost all nodes
-# (except Module, Discard and Stmt) added by Sylvain Thenault
-#
-# Portions of this file are:
-# Copyright (C) 1997-1998 Greg Stein. All Rights Reserved.
-#
-# This module is provided under a BSD-ish license. See
-# http://www.opensource.org/licenses/bsd-license.html
-# and replace OWNER, ORGANIZATION, and YEAR as appropriate.
-
-from compiler.ast import *
-import parser
-import symbol
-import token
-import sys
-
-class WalkerError(StandardError):
- pass
-
-from compiler.consts import CO_VARARGS, CO_VARKEYWORDS
-from compiler.consts import OP_ASSIGN, OP_DELETE, OP_APPLY
-
-def parseFile(path):
- f = open(path, "U")
- # XXX The parser API tolerates files without a trailing newline,
- # but not strings without a trailing newline. Always add an extra
- # newline to the file contents, since we're going through the string
- # version of the API.
- src = f.read() + "\n"
- f.close()
- return parse(src)
-
-def parse(buf, mode="exec"):
- if mode == "exec" or mode == "single":
- return Transformer().parsesuite(buf)
- elif mode == "eval":
- return Transformer().parseexpr(buf)
- else:
- raise ValueError("compile() arg 3 must be"
- " 'exec' or 'eval' or 'single'")
-
-def asList(nodes):
- l = []
- for item in nodes:
- if hasattr(item, "asList"):
- l.append(item.asList())
- else:
- if type(item) is type( (None, None) ):
- l.append(tuple(asList(item)))
- elif type(item) is type( [] ):
- l.append(asList(item))
- else:
- l.append(item)
- return l
-
-def extractLineNo(ast):
- if not isinstance(ast[1], tuple):
- # get a terminal node
- return ast[2]
- for child in ast[1:]:
- if isinstance(child, tuple):
- lineno = extractLineNo(child)
- if lineno is not None:
- return lineno
-
-def Node(*args):
- kind = args[0]
- if kind in nodes:
- try:
- return nodes[kind](*args[1:])
- except TypeError:
- print(nodes[kind], len(args), args)
- raise
- else:
- raise WalkerError, "Can't find appropriate Node type: %s" % str(args)
- #return ast.Node(*args)
-
-class Transformer:
- """Utility object for transforming Python parse trees.
-
- Exposes the following methods:
- tree = transform(ast_tree)
- tree = parsesuite(text)
- tree = parseexpr(text)
- tree = parsefile(fileob | filename)
- """
-
- def __init__(self):
- self._dispatch = {}
- for value, name in symbol.sym_name.items():
- if hasattr(self, name):
- self._dispatch[value] = getattr(self, name)
- self._dispatch[token.NEWLINE] = self.com_NEWLINE
- self._atom_dispatch = {token.LPAR: self.atom_lpar,
- token.LSQB: self.atom_lsqb,
- token.LBRACE: self.atom_lbrace,
- token.NUMBER: self.atom_number,
- token.STRING: self.atom_string,
- token.ELLIPSIS: self.atom_ellipsis,
- token.NAME: self.atom_name,
- }
- self.encoding = None
-
- def transform(self, tree):
- """Transform an AST into a modified parse tree."""
- if not (isinstance(tree, tuple) or isinstance(tree, list)):
- tree = parser.ast2tuple(tree, line_info=1)
- return self.compile_node(tree)
-
- def parsesuite(self, text):
- """Return a modified parse tree for the given suite text."""
- return self.transform(parser.suite(text))
-
- def parseexpr(self, text):
- """Return a modified parse tree for the given expression text."""
- return self.transform(parser.expr(text))
-
- def parsefile(self, file):
- """Return a modified parse tree for the contents of the given file."""
- if type(file) == type(''):
- file = open(file)
- return self.parsesuite(file.read())
-
- # --------------------------------------------------------------
- #
- # PRIVATE METHODS
- #
-
- def compile_node(self, node):
- ### emit a line-number node?
- n = node[0]
-
- if n == symbol.encoding_decl:
- self.encoding = node[2]
- node = node[1]
- n = node[0]
-
- if n == symbol.single_input:
- return self.single_input(node[1:])
- if n == symbol.file_input:
- return self.file_input(node[1:])
- if n == symbol.eval_input:
- return self.eval_input(node[1:])
- if n == symbol.lambdef:
- return self.lambdef(node[1:])
- if n == symbol.funcdef:
- return self.funcdef(node[1:])
- if n == symbol.classdef:
- return self.classdef(node[1:])
-
- raise WalkerError, ('unexpected node type', n)
-
- def single_input(self, node):
- ### do we want to do anything about being "interactive" ?
-
- # NEWLINE | simple_stmt | compound_stmt NEWLINE
- n = node[0][0]
- if n != token.NEWLINE:
- return self.com_stmt(node[0])
-
- return Pass()
-
- def file_input(self, nodelist):
- doc = self.get_docstring(nodelist, symbol.file_input)
- if doc is not None:
- i = 1
- else:
- i = 0
- stmts = []
- for node in nodelist[i:]:
- if node[0] != token.ENDMARKER and node[0] != token.NEWLINE:
- self.com_append_stmt(stmts, node)
- return Module(doc, Stmt(stmts))
-
- def eval_input(self, nodelist):
- # from the built-in function input()
- ### is this sufficient?
- return Expression(self.com_node(nodelist[0]))
-
- def decorator_name(self, nodelist):
- listlen = len(nodelist)
- assert listlen >= 1 and listlen % 2 == 1
-
- item = self.atom_name(nodelist)
- i = 1
- while i < listlen:
- assert nodelist[i][0] == token.DOT
- assert nodelist[i + 1][0] == token.NAME
- item = Getattr(item, nodelist[i + 1][1])
- i += 2
-
- return item
-
- def decorator(self, nodelist):
- # '@' dotted_name [ '(' [arglist] ')' ]
- assert len(nodelist) in (3, 5, 6)
- assert nodelist[0][0] == token.AT
- assert nodelist[-1][0] == token.NEWLINE
-
- assert nodelist[1][0] == symbol.dotted_name
- funcname = self.decorator_name(nodelist[1][1:])
-
- if len(nodelist) > 3:
- assert nodelist[2][0] == token.LPAR
- expr = self.com_call_function(funcname, nodelist[3])
- else:
- expr = funcname
-
- return expr
-
- def decorators(self, nodelist):
- # decorators: decorator ([NEWLINE] decorator)* NEWLINE
- items = []
- for dec_nodelist in nodelist:
- assert dec_nodelist[0] == symbol.decorator
- items.append(self.decorator(dec_nodelist[1:]))
- return Decorators(items)
-
- def funcdef(self, nodelist):
- # 0 1 2 4 -1
- # funcdef: [decorators] 'def' NAME parameters ['->' test] ':' suite
- # parameters: '(' [typedargslist] ')'
- if nodelist[0][0] == symbol.decorators:
- decorators = self.decorators(nodelist[0][1:])
- nodelist = nodelist[1:]
- else:
- decorators = None
- assert len(nodelist) in (5, 7)
-
- lineno = nodelist[0][2]
- name = nodelist[1][1]
- args = nodelist[2][2]
-
- if args[0] == symbol.varargslist or args[0] == symbol.typedargslist:
- arguments, defaults, kwonly, flags = self.com_arglist(args[1:])
- else:
- arguments = defaults = kwonly = ()
- flags = 0
- doc = self.get_docstring(nodelist[-1])
-
- # code for function
- code = self.com_node(nodelist[-1])
-
- if doc is not None:
- assert isinstance(code, Stmt)
- assert isinstance(code.nodes[0], Discard)
- del code.nodes[0]
-
- if len(nodelist) == 7:
- returns = self.com_node(nodelist[4])
- else:
- returns = None
-
- return Function(decorators, name, arguments, defaults,
- kwonly, returns, flags, doc, code, lineno=lineno)
-
- def lambdef(self, nodelist):
- # lambdef: 'lambda' [varargslist] ':' test
- if nodelist[2][0] == symbol.varargslist:
- arguments, defaults, kwonlyargs, flags = \
- self.com_arglist(nodelist[2][1:])
- else:
- arguments = defaults = kwonlyargs = ()
- flags = 0
-
- # code for lambda
- code = self.com_node(nodelist[-1])
-
- return Lambda(arguments, defaults, kwonlyargs,
- flags, code, lineno=nodelist[1][2])
- old_lambdef = lambdef
-
- def classdef(self, nodelist):
- # classdef: 'class' NAME ['(' [arglist] ')'] ':' suite
-
- name = nodelist[1][1]
- doc = self.get_docstring(nodelist[-1])
- if nodelist[2][0] == token.COLON:
- arglist = CallFunc(None, [])
- elif nodelist[3][0] == token.RPAR:
- arglist = CallFunc(None, [])
- else:
- arglist = self.com_call_function(None, nodelist[3])
-
- # code for class
- code = self.com_node(nodelist[-1])
-
- if doc is not None:
- assert isinstance(code, Stmt)
- assert isinstance(code.nodes[0], Discard)
- del code.nodes[0]
-
- return Class(name, arglist.args, arglist.star_args, arglist.dstar_args,
- doc, code, lineno=nodelist[1][2])
-
- def stmt(self, nodelist):
- return self.com_stmt(nodelist[0])
-
- small_stmt = stmt
- flow_stmt = stmt
- compound_stmt = stmt
-
- def simple_stmt(self, nodelist):
- # small_stmt (';' small_stmt)* [';'] NEWLINE
- stmts = []
- for i in range(0, len(nodelist), 2):
- self.com_append_stmt(stmts, nodelist[i])
- return Stmt(stmts)
-
- def parameters(self, nodelist):
- raise WalkerError
-
- def varargslist(self, nodelist):
- raise WalkerError
-
- def vfpdef(self, nodelist):
- raise WalkerError
-
- def vfplist(self, nodelist):
- raise WalkerError
-
- def vname(self, nodelist):
- raise WalkerError
-
- def typedargslist(self, nodelist):
- raise WalkerError
-
- def tfpdef(self, nodelist):
- raise WalkerError
-
- def tfplist(self, nodelist):
- raise WalkerError
-
- def tname(self, nodelist):
- raise WalkerError
-
- def dotted_name(self, nodelist):
- raise WalkerError
-
- def comp_op(self, nodelist):
- raise WalkerError
-
- def trailer(self, nodelist):
- raise WalkerError
-
- def sliceop(self, nodelist):
- raise WalkerError
-
- def argument(self, nodelist):
- raise WalkerError
-
- # --------------------------------------------------------------
- #
- # STATEMENT NODES (invoked by com_node())
- #
-
- def expr_stmt(self, nodelist):
- # augassign testlist | testlist ('=' testlist)*
- en = nodelist[-1]
- exprNode = self.lookup_node(en)(en[1:])
- if len(nodelist) == 1:
- return Discard(exprNode, lineno=exprNode.lineno)
- if nodelist[1][0] == token.EQUAL:
- nodesl = []
- for i in range(0, len(nodelist) - 2, 2):
- nodesl.append(self.com_assign(nodelist[i], OP_ASSIGN))
- return Assign(nodesl, exprNode, lineno=nodelist[1][2])
- else:
- lval = self.com_augassign(nodelist[0])
- op = self.com_augassign_op(nodelist[1])
- return AugAssign(lval, op[1], exprNode, lineno=op[2])
- raise WalkerError, "can't get here"
-
- def del_stmt(self, nodelist):
- return self.com_assign(nodelist[1], OP_DELETE)
-
- def pass_stmt(self, nodelist):
- return Pass(lineno=nodelist[0][2])
-
- def break_stmt(self, nodelist):
- return Break(lineno=nodelist[0][2])
-
- def continue_stmt(self, nodelist):
- return Continue(lineno=nodelist[0][2])
-
- def return_stmt(self, nodelist):
- # return: [testlist]
- if len(nodelist) < 2:
- return Return(Const(None), lineno=nodelist[0][2])
- return Return(self.com_node(nodelist[1]), lineno=nodelist[0][2])
-
- def yield_stmt(self, nodelist):
- expr = self.com_node(nodelist[0])
- return Discard(expr, lineno=expr.lineno)
-
- def yield_expr(self, nodelist):
- if len(nodelist) > 1:
- value = self.com_node(nodelist[1])
- else:
- value = Const(None)
- return Yield(value, lineno=nodelist[0][2])
-
- def raise_stmt(self, nodelist):
- # raise: [test [',' test [',' test]]]
- if len(nodelist) > 5:
- expr3 = self.com_node(nodelist[5])
- else:
- expr3 = None
- if len(nodelist) > 3:
- expr2 = self.com_node(nodelist[3])
- else:
- expr2 = None
- if len(nodelist) > 1:
- expr1 = self.com_node(nodelist[1])
- else:
- expr1 = None
- return Raise(expr1, expr2, expr3, lineno=nodelist[0][2])
-
- def import_stmt(self, nodelist):
- # import_stmt: import_name | import_from
- assert len(nodelist) == 1
- return self.com_node(nodelist[0])
-
- def import_name(self, nodelist):
- # import_name: 'import' dotted_as_names
- return Import(self.com_dotted_as_names(nodelist[1]),
- lineno=nodelist[0][2])
-
- def import_from(self, nodelist):
- # import_from: 'from' ('.'* dotted_name | '.') 'import' ('*' |
- # '(' import_as_names ')' | import_as_names)
- assert nodelist[0][1] == 'from'
- idx = 1
- while nodelist[idx][1] == '.':
- idx += 1
- level = idx - 1
- if nodelist[idx][0] == symbol.dotted_name:
- fromname = self.com_dotted_name(nodelist[idx])
- idx += 1
- else:
- fromname = ""
- assert nodelist[idx][1] == 'import'
- if nodelist[idx + 1][0] == token.STAR:
- return From(fromname, [('*', None)], level,
- lineno=nodelist[0][2])
- else:
- node = nodelist[idx + 1 + (nodelist[idx + 1][0] == token.LPAR)]
- return From(fromname, self.com_import_as_names(node), level,
- lineno=nodelist[0][2])
-
- def global_stmt(self, nodelist):
- # global: NAME (',' NAME)*
- names = []
- for i in range(1, len(nodelist), 2):
- names.append(nodelist[i][1])
- return Global(names, lineno=nodelist[0][2])
-
- def assert_stmt(self, nodelist):
- # 'assert': test, [',' test]
- expr1 = self.com_node(nodelist[1])
- if (len(nodelist) == 4):
- expr2 = self.com_node(nodelist[3])
- else:
- expr2 = None
- return Assert(expr1, expr2, lineno=nodelist[0][2])
-
- def if_stmt(self, nodelist):
- # if: test ':' suite ('elif' test ':' suite)* ['else' ':' suite]
- tests = []
- for i in range(0, len(nodelist) - 3, 4):
- testNode = self.com_node(nodelist[i + 1])
- suiteNode = self.com_node(nodelist[i + 3])
- tests.append((testNode, suiteNode))
-
- if len(nodelist) % 4 == 3:
- elseNode = self.com_node(nodelist[-1])
-## elseNode.lineno = nodelist[-1][1][2]
- else:
- elseNode = None
- return If(tests, elseNode, lineno=nodelist[0][2])
-
- def while_stmt(self, nodelist):
- # 'while' test ':' suite ['else' ':' suite]
-
- testNode = self.com_node(nodelist[1])
- bodyNode = self.com_node(nodelist[3])
-
- if len(nodelist) > 4:
- elseNode = self.com_node(nodelist[6])
- else:
- elseNode = None
-
- return While(testNode, bodyNode, elseNode, lineno=nodelist[0][2])
-
- def for_stmt(self, nodelist):
- # 'for' exprlist 'in' exprlist ':' suite ['else' ':' suite]
-
- assignNode = self.com_assign(nodelist[1], OP_ASSIGN)
- listNode = self.com_node(nodelist[3])
- bodyNode = self.com_node(nodelist[5])
-
- if len(nodelist) > 8:
- elseNode = self.com_node(nodelist[8])
- else:
- elseNode = None
-
- return For(assignNode, listNode, bodyNode, elseNode,
- lineno=nodelist[0][2])
-
- def try_stmt(self, nodelist):
- return self.com_try_except_finally(nodelist)
-
- def with_stmt(self, nodelist):
- return self.com_with(nodelist)
-
- def with_var(self, nodelist):
- return self.com_with_var(nodelist)
-
- def suite(self, nodelist):
- # simple_stmt | NEWLINE INDENT NEWLINE* (stmt NEWLINE*)+ DEDENT
- if len(nodelist) == 1:
- return self.com_stmt(nodelist[0])
-
- stmts = []
- for node in nodelist:
- if node[0] == symbol.stmt:
- self.com_append_stmt(stmts, node)
- return Stmt(stmts)
-
- # --------------------------------------------------------------
- #
- # EXPRESSION NODES (invoked by com_node())
- #
-
- def testlist(self, nodelist):
- # testlist: expr (',' expr)* [',']
- # testlist_safe: test [(',' test)+ [',']]
- # exprlist: expr (',' expr)* [',']
- return self.com_binary(Tuple, nodelist)
-
- testlist_safe = testlist # XXX
- testlist1 = testlist
- exprlist = testlist
-
- def testlist_comp(self, nodelist):
- if len(nodelist) == 2 and nodelist[1][0] == symbol.gen_for:
- test = self.com_node(nodelist[0])
- return self.com_generator_expression(test, nodelist[1])
- return self.testlist(nodelist)
-
- def test(self, nodelist):
- # or_test ['if' or_test 'else' test] | lambdef
- if len(nodelist) == 1 and nodelist[0][0] == symbol.lambdef:
- return self.lambdef(nodelist[0])
- then = self.com_node(nodelist[0])
- if len(nodelist) > 1:
- assert len(nodelist) == 5
- assert nodelist[1][1] == 'if'
- assert nodelist[3][1] == 'else'
- test = self.com_node(nodelist[2])
- else_ = self.com_node(nodelist[4])
- return IfExp(test, then, else_, lineno=nodelist[1][2])
- return then
-
- def or_test(self, nodelist):
- # and_test ('or' and_test)* | lambdef
- if len(nodelist) == 1 and nodelist[0][0] == symbol.lambdef:
- return self.lambdef(nodelist[0])
- return self.com_binary(Or, nodelist)
- old_test = or_test
-
- def and_test(self, nodelist):
- # not_test ('and' not_test)*
- return self.com_binary(And, nodelist)
-
- def not_test(self, nodelist):
- # 'not' not_test | comparison
- result = self.com_node(nodelist[-1])
- if len(nodelist) == 2:
- return Not(result, lineno=nodelist[0][2])
- return result
-
- def comparison(self, nodelist):
- # comparison: expr (comp_op expr)*
- node = self.com_node(nodelist[0])
- if len(nodelist) == 1:
- return node
-
- results = []
- for i in range(2, len(nodelist), 2):
- nl = nodelist[i-1]
-
- # comp_op: '<' | '>' | '=' | '>=' | '<=' | '!=' | '=='
- # | 'in' | 'not' 'in' | 'is' | 'is' 'not'
- n = nl[1]
- if n[0] == token.NAME:
- type = n[1]
- if len(nl) == 3:
- if type == 'not':
- type = 'not in'
- else:
- type = 'is not'
- else:
- type = _cmp_types[n[0]]
-
- lineno = nl[1][2]
- results.append((type, self.com_node(nodelist[i])))
-
- # we need a special "compare" node so that we can distinguish
- # 3 < x < 5 from (3 < x) < 5
- # the two have very different semantics and results (note that the
- # latter form is always true)
-
- return Compare(node, results, lineno=lineno)
-
- def expr(self, nodelist):
- # xor_expr ('|' xor_expr)*
- return self.com_binary(Bitor, nodelist)
-
- def xor_expr(self, nodelist):
- # and_expr ('^' and_expr)*
- return self.com_binary(Bitxor, nodelist)
-
- def and_expr(self, nodelist):
- # shift_expr ('&' shift_expr)*
- return self.com_binary(Bitand, nodelist)
-
- def shift_expr(self, nodelist):
- # shift_expr ('<<'|'>>' shift_expr)*
- node = self.com_node(nodelist[0])
- for i in range(2, len(nodelist), 2):
- right = self.com_node(nodelist[i])
- if nodelist[i-1][0] == token.LEFTSHIFT:
- node = LeftShift([node, right], lineno=nodelist[1][2])
- elif nodelist[i-1][0] == token.RIGHTSHIFT:
- node = RightShift([node, right], lineno=nodelist[1][2])
- else:
- raise ValueError, "unexpected token: %s" % nodelist[i-1][0]
- return node
-
- def arith_expr(self, nodelist):
- node = self.com_node(nodelist[0])
- for i in range(2, len(nodelist), 2):
- right = self.com_node(nodelist[i])
- if nodelist[i-1][0] == token.PLUS:
- node = Add([node, right], lineno=nodelist[1][2])
- elif nodelist[i-1][0] == token.MINUS:
- node = Sub([node, right], lineno=nodelist[1][2])
- else:
- raise ValueError, "unexpected token: %s" % nodelist[i-1][0]
- return node
-
- def term(self, nodelist):
- node = self.com_node(nodelist[0])
- for i in range(2, len(nodelist), 2):
- right = self.com_node(nodelist[i])
- t = nodelist[i-1][0]
- if t == token.STAR:
- node = Mul([node, right])
- elif t == token.SLASH:
- node = Div([node, right])
- elif t == token.PERCENT:
- node = Mod([node, right])
- elif t == token.DOUBLESLASH:
- node = FloorDiv([node, right])
- else:
- raise ValueError, "unexpected token: %s" % t
- node.lineno = nodelist[1][2]
- return node
-
- def factor(self, nodelist):
- elt = nodelist[0]
- t = elt[0]
- node = self.lookup_node(nodelist[-1])(nodelist[-1][1:])
- # need to handle (unary op)constant here...
- if t == token.PLUS:
- return UnaryAdd(node, lineno=elt[2])
- elif t == token.MINUS:
- return UnarySub(node, lineno=elt[2])
- elif t == token.TILDE:
- node = Invert(node, lineno=elt[2])
- return node
-
- def power(self, nodelist):
- # power: atom trailer* ('**' factor)*
- node = self.com_node(nodelist[0])
- for i in range(1, len(nodelist)):
- elt = nodelist[i]
- if elt[0] == token.DOUBLESTAR:
- return Power([node, self.com_node(nodelist[i+1])],
- lineno=elt[2])
-
- node = self.com_apply_trailer(node, elt)
-
- return node
-
- def atom(self, nodelist):
- return self._atom_dispatch[nodelist[0][0]](nodelist)
-
- def atom_lpar(self, nodelist):
- if nodelist[1][0] == token.RPAR:
- return Tuple((), lineno=nodelist[0][2])
- return self.com_node(nodelist[1])
-
- def atom_lsqb(self, nodelist):
- if nodelist[1][0] == token.RSQB:
- return List((), lineno=nodelist[0][2])
- return self.com_list_constructor(nodelist[1])
-
- def atom_lbrace(self, nodelist):
- if nodelist[1][0] == token.RBRACE:
- return Dict((), lineno=nodelist[0][2])
- return self.com_dictsetmaker(nodelist[1])
-
- def atom_number(self, nodelist):
- ### need to verify this matches compile.c
- k = eval(nodelist[0][1])
- return Const(k, lineno=nodelist[0][2])
-
- def decode_literal(self, lit):
- if self.encoding:
- # this is particularly fragile & a bit of a
- # hack... changes in compile.c:parsestr and
- # tokenizer.c must be reflected here.
- if self.encoding not in ['utf-8', 'iso-8859-1']:
- lit = str(lit, 'utf-8').encode(self.encoding)
- return eval("# coding: %s\n%s" % (self.encoding, lit))
- else:
- return eval(lit)
-
- def atom_string(self, nodelist):
- k = self.decode_literal(nodelist[0][1])
- for node in nodelist[1:]:
- k += self.decode_literal(node[1])
- if isinstance(k, bytes):
- return Bytes(str(k), lineno=nodelist[0][2])
- return Const(k, lineno=nodelist[0][2])
-
- def atom_ellipsis(self, nodelist):
- return Const(Ellipsis, lineno=nodelist[0][2])
-
- def atom_name(self, nodelist):
- return Name(nodelist[0][1], lineno=nodelist[0][2])
-
- # --------------------------------------------------------------
- #
- # INTERNAL PARSING UTILITIES
- #
-
- # The use of com_node() introduces a lot of extra stack frames,
- # enough to cause a stack overflow compiling test.test_parser with
- # the standard interpreter recursionlimit. The com_node() is a
- # convenience function that hides the dispatch details, but comes
- # at a very high cost. It is more efficient to dispatch directly
- # in the callers. In these cases, use lookup_node() and call the
- # dispatched node directly.
-
- def lookup_node(self, node):
- return self._dispatch[node[0]]
-
- def com_node(self, node):
- # Note: compile.c has handling in com_node for del_stmt, pass_stmt,
- # break_stmt, stmt, small_stmt, flow_stmt, simple_stmt,
- # and compound_stmt.
- # We'll just dispatch them.
- return self._dispatch[node[0]](node[1:])
-
- def com_NEWLINE(self, *args):
- # A ';' at the end of a line can make a NEWLINE token appear
- # here; render it harmless. (genc discards ('discard',
- # ('const', xxxx)) Nodes)
- return Discard(Const(None))
-
- def keywordonlyargs(self, nodelist):
- # (',' tname ['=' test])*
- # ^^^
- # ------+
- # tname and vname are handled.
- kwonlyargs = []
- i = 0
- while i < len(nodelist):
- default = EmptyNode()
- node = nodelist[i]
- #assert node[0] == token.COMMA
- #node = nodelist[i+1]
- if i+1 < len(nodelist) and nodelist[i+1][0] == token.EQUAL:
- assert i+2 < len(nodelist)
- default = self.com_node(nodelist[i+2])
- i += 2
- if node[0] == token.DOUBLESTAR:
- return kwonlyargs, i
- elif node[0] in (symbol.vname, symbol.tname):
- lineno = extractLineNo(node)
- kwarg = Kwarg(self._simplearg(node), default, lineno=lineno)
- kwonlyargs.append(kwarg)
- i += 2
- return kwonlyargs, i
-
- def _simplearg(self, node):
- # tname: NAME [':' test]
- # vname: NAME
- assert node[0] == symbol.vname or node[0] == symbol.tname
- name = node[1][1]
- lineno = node[1][2]
- assert isinstance(name, str)
- if len(node) > 2:
- annotation = self.com_node(node[3])
- else:
- annotation = None
- return SimpleArg(name, annotation, lineno)
-
- def com_arglist(self, nodelist):
- # varargslist:
- # (fpdef ['=' test] ',')*
- # ('*' [NAME] (',' NAME '=' test)* [',' '**' NAME] | '**' NAME)
- # | fpdef ['=' test] (',' fpdef ['=' test])* [',']
- # fpdef: NAME | '(' fplist ')'
- # fplist: fpdef (',' fpdef)* [',']
- arguments = []
- kwonlyargs = []
- defaults = []
- flags = 0
-
- i = 0
- while i < len(nodelist):
- node = nodelist[i]
- if node[0] == token.STAR or node[0] == token.DOUBLESTAR:
- if node[0] == token.STAR:
- node = nodelist[i+1]
- if node[0] in (symbol.tname, symbol.vname): # vararg
- arguments.append(self._simplearg(node))
- flags = flags | CO_VARARGS
- i = i + 3
- else: # no vararg
- assert node[0] == token.COMMA
- i += 2
- if i < len(nodelist) and \
- nodelist[i][0] in (symbol.tname, symbol.vname):
- kwonlyargs, skip = self.keywordonlyargs(nodelist[i:])
- i += skip
-
- if i < len(nodelist):
- # should be DOUBLESTAR
- t = nodelist[i][0]
- if t == token.DOUBLESTAR:
- node = nodelist[i+1]
- else:
- raise ValueError, "unexpected token: %s" % t
- arguments.append(self._simplearg(node))
- flags = flags | CO_VARKEYWORDS
-
- break
-
- # tfpdef: tname | '(' tfplist ')'
- arguments.append(self.com_tfpdef(node))
-
- i = i + 1
- if i < len(nodelist) and nodelist[i][0] == token.EQUAL:
- defaults.append(self.com_node(nodelist[i + 1]))
- i = i + 2
- elif len(defaults):
- # we have already seen an argument with default, but here
- # came one without
- raise SyntaxError, "non-default argument follows default argument"
-
- # skip the comma
- i = i + 1
-
- return arguments, defaults, kwonlyargs, flags
-
- def com_tfpdef(self, node):
- # tfpdef: tname | '(' tfplist ')'
- # def f((x)): -- x is not nested
- while node[1][0] == token.LPAR and len(node[2]) == 2:
- node = node[2][1]
- if node[1][0] == token.LPAR:
- return NestedArgs(self.com_tfplist(node[2]))
- return self._simplearg(node[1])
-
- def com_tfplist(self, node):
- # tfplist: tfpdef (',' tfpdef)* [',']
- if len(node) == 2:
- return self.com_tfpdef(node[1]),
- list = []
- for i in range(1, len(node), 2):
- list.append(self.com_tfpdef(node[i]))
- return tuple(list)
-
- def com_dotted_name(self, node):
- # String together the dotted names and return the string
- name = ""
- for n in node:
- if type(n) == type(()) and n[0] == 1:
- name = name + n[1] + '.'
- return name[:-1]
-
- def com_dotted_as_name(self, node):
- assert node[0] == symbol.dotted_as_name
- node = node[1:]
- dot = self.com_dotted_name(node[0][1:])
- if len(node) == 1:
- return dot, None
- assert node[1][1] == 'as'
- assert node[2][0] == token.NAME
- return dot, node[2][1]
-
- def com_dotted_as_names(self, node):
- assert node[0] == symbol.dotted_as_names
- node = node[1:]
- names = [self.com_dotted_as_name(node[0])]
- for i in range(2, len(node), 2):
- names.append(self.com_dotted_as_name(node[i]))
- return names
-
- def com_import_as_name(self, node):
- assert node[0] == symbol.import_as_name
- node = node[1:]
- assert node[0][0] == token.NAME
- if len(node) == 1:
- return node[0][1], None
- assert node[1][1] == 'as', node
- assert node[2][0] == token.NAME
- return node[0][1], node[2][1]
-
- def com_import_as_names(self, node):
- assert node[0] == symbol.import_as_names
- node = node[1:]
- names = [self.com_import_as_name(node[0])]
- for i in range(2, len(node), 2):
- names.append(self.com_import_as_name(node[i]))
- return names
-
- def com_bases(self, node):
- bases = []
- for i in range(1, len(node), 2):
- bases.append(self.com_node(node[i]))
- return bases
-
- def com_try_except_finally(self, nodelist):
- # ('try' ':' suite
- # ((except_clause ':' suite)+ ['else' ':' suite] ['finally' ':' suite]
- # | 'finally' ':' suite))
-
- if nodelist[3][0] == token.NAME:
- # first clause is a finally clause: only try-finally
- return TryFinally(self.com_node(nodelist[2]),
- self.com_node(nodelist[5]),
- lineno=nodelist[0][2])
-
- #tryexcept: [TryNode, [except_clauses], elseNode)]
- clauses = []
- elseNode = None
- finallyNode = None
- for i in range(3, len(nodelist), 3):
- node = nodelist[i]
- if node[0] == symbol.except_clause:
- # except_clause: 'except' [expr ['as' NAME]] */
- if len(node) > 2:
- expr = self.com_node(node[2])
- if len(node) > 4:
- expr_name = node[4]
- else:
- expr_name = None
- else:
- expr = expr_name = None
- clauses.append((expr, expr_name, self.com_node(nodelist[i+2])))
-
- if node[0] == token.NAME:
- if node[1] == 'else':
- elseNode = self.com_node(nodelist[i+2])
- elif node[1] == 'finally':
- finallyNode = self.com_node(nodelist[i+2])
- try_except = TryExcept(self.com_node(nodelist[2]), clauses, elseNode,
- lineno=nodelist[0][2])
- if finallyNode:
- return TryFinally(try_except, finallyNode, lineno=nodelist[0][2])
- else:
- return try_except
-
- def com_with(self, nodelist):
- # with_stmt: 'with' expr [with_var] ':' suite
- expr = self.com_node(nodelist[1])
- body = self.com_node(nodelist[-1])
- if nodelist[2][0] == token.COLON:
- var = None
- else:
- var = self.com_assign(nodelist[2][2], OP_ASSIGN)
- return With(expr, var, body, lineno=nodelist[0][2])
-
- def com_with_var(self, nodelist):
- # with_var: 'as' expr
- return self.com_node(nodelist[1])
-
- def com_augassign_op(self, node):
- assert node[0] == symbol.augassign
- return node[1]
-
- def com_augassign(self, node):
- """Return node suitable for lvalue of augmented assignment
-
- Names, slices, and attributes are the only allowable nodes.
- """
- l = self.com_node(node)
- if l.__class__ in (Name, Slice, Subscript, Getattr):
- return l
- raise SyntaxError, "can't assign to %s" % l.__class__.__name__
-
- def com_assign(self, node, assigning):
- # return a node suitable for use as an "lvalue"
- # loop to avoid trivial recursion
- while 1:
- t = node[0]
- if t in (symbol.exprlist, symbol.testlist, symbol.testlist_comp):
- if len(node) > 2:
- return self.com_assign_tuple(node, assigning)
- node = node[1]
- elif t in _assign_types:
- if len(node) > 2:
- raise SyntaxError, "can't assign to operator"
- node = node[1]
- elif t == symbol.power:
- if node[1][0] != symbol.atom:
- raise SyntaxError, "can't assign to operator"
- if len(node) > 2:
- primary = self.com_node(node[1])
- for i in range(2, len(node)-1):
- ch = node[i]
- if ch[0] == token.DOUBLESTAR:
- raise SyntaxError, "can't assign to operator"
- primary = self.com_apply_trailer(primary, ch)
- return self.com_assign_trailer(primary, node[-1],
- assigning)
- node = node[1]
- elif t == symbol.atom:
- t = node[1][0]
- if t == token.LPAR:
- node = node[2]
- if node[0] == token.RPAR:
- raise SyntaxError, "can't assign to ()"
- elif t == token.LSQB:
- node = node[2]
- if node[0] == token.RSQB:
- raise SyntaxError, "can't assign to []"
- return self.com_assign_list(node, assigning)
- elif t == token.NAME:
- return self.com_assign_name(node[1], assigning)
- else:
- raise SyntaxError, "can't assign to literal"
- else:
- raise SyntaxError, "bad assignment (%s)" % t
-
- def com_assign_tuple(self, node, assigning):
- assigns = []
- for i in range(1, len(node), 2):
- assigns.append(self.com_assign(node[i], assigning))
- return AssTuple(assigns, lineno=extractLineNo(node))
-
- def com_assign_list(self, node, assigning):
- assigns = []
- for i in range(1, len(node), 2):
- if i + 1 < len(node):
- if node[i + 1][0] == symbol.list_for:
- raise SyntaxError, "can't assign to list comprehension"
- assert node[i + 1][0] == token.COMMA, node[i + 1]
- assigns.append(self.com_assign(node[i], assigning))
- return AssList(assigns, lineno=extractLineNo(node))
-
- def com_assign_name(self, node, assigning):
- return AssName(node[1], assigning, lineno=node[2])
-
- def com_assign_trailer(self, primary, node, assigning):
- t = node[1][0]
- if t == token.DOT:
- return self.com_assign_attr(primary, node[2], assigning)
- if t == token.LSQB:
- return self.com_subscriptlist(primary, node[2], assigning)
- if t == token.LPAR:
- raise SyntaxError, "can't assign to function call"
- raise SyntaxError, "unknown trailer type: %s" % t
-
- def com_assign_attr(self, primary, node, assigning):
- return AssAttr(primary, node[1], assigning, lineno=node[-1])
-
- def com_binary(self, constructor, nodelist):
- "Compile 'NODE (OP NODE)*' into (type, [ node1, ..., nodeN ])."
- l = len(nodelist)
- if l == 1:
- n = nodelist[0]
- return self.lookup_node(n)(n[1:])
- items = []
- for i in range(0, l, 2):
- n = nodelist[i]
- items.append(self.lookup_node(n)(n[1:]))
- return constructor(items, lineno=extractLineNo(nodelist))
-
- def com_stmt(self, node):
- result = self.lookup_node(node)(node[1:])
- assert result is not None
- if isinstance(result, Stmt):
- return result
- return Stmt([result])
-
- def com_append_stmt(self, stmts, node):
- result = self.lookup_node(node)(node[1:])
- assert result is not None
- if isinstance(result, Stmt):
- stmts.extend(result.nodes)
- else:
- stmts.append(result)
-
- if hasattr(symbol, 'list_for'):
- def com_list_constructor(self, nodelist):
- # listmaker: test ( list_for | (',' test)* [','] )
- values = []
- for i in range(1, len(nodelist)):
- if nodelist[i][0] == symbol.list_for:
- assert len(nodelist[i:]) == 1
- return self.com_list_comprehension(values[0],
- nodelist[i])
- elif nodelist[i][0] == token.COMMA:
- continue
- values.append(self.com_node(nodelist[i]))
- return List(values, lineno=values[0].lineno)
-
- def com_list_comprehension(self, expr, node):
- # list_iter: list_for | list_if
- # list_for: 'for' exprlist 'in' testlist [list_iter]
- # list_if: 'if' test [list_iter]
-
- # XXX should raise SyntaxError for assignment
-
- lineno = node[1][2]
- fors = []
- while node:
- t = node[1][1]
- if t == 'for':
- assignNode = self.com_assign(node[2], OP_ASSIGN)
- listNode = self.com_node(node[4])
- newfor = ListCompFor(assignNode, listNode, [])
- newfor.lineno = node[1][2]
- fors.append(newfor)
- if len(node) == 5:
- node = None
- else:
- node = self.com_list_iter(node[5])
- elif t == 'if':
- test = self.com_node(node[2])
- newif = ListCompIf(test, lineno=node[1][2])
- newfor.ifs.append(newif)
- if len(node) == 3:
- node = None
- else:
- node = self.com_list_iter(node[3])
- else:
- raise SyntaxError, \
- ("unexpected list comprehension element: %s %d"
- % (node, lineno))
- return ListComp(expr, fors, lineno=lineno)
-
- def com_list_iter(self, node):
- assert node[0] == symbol.list_iter
- return node[1]
- else:
- def com_list_constructor(self, nodelist):
- values = []
- for i in range(1, len(nodelist), 2):
- values.append(self.com_node(nodelist[i]))
- return List(values, lineno=values[0].lineno)
-
- if hasattr(symbol, 'gen_for'):
- def com_generator_expression(self, expr, node):
- # gen_iter: gen_for | gen_if
- # gen_for: 'for' exprlist 'in' test [gen_iter]
- # gen_if: 'if' test [gen_iter]
-
- lineno = node[1][2]
- fors = []
- while node:
- t = node[1][1]
- if t == 'for':
- assignNode = self.com_assign(node[2], OP_ASSIGN)
- genNode = self.com_node(node[4])
- newfor = GenExprFor(assignNode, genNode, [],
- lineno=node[1][2])
- fors.append(newfor)
- if (len(node)) == 5:
- node = None
- else:
- node = self.com_gen_iter(node[5])
- elif t == 'if':
- test = self.com_node(node[2])
- newif = GenExprIf(test, lineno=node[1][2])
- newfor.ifs.append(newif)
- if len(node) == 3:
- node = None
- else:
- node = self.com_gen_iter(node[3])
- else:
- raise SyntaxError, \
- ("unexpected generator expression element: %s %d"
- % (node, lineno))
- fors[0].is_outmost = True
- return GenExpr(GenExprInner(expr, fors), lineno=lineno)
-
- def com_gen_iter(self, node):
- assert node[0] == symbol.gen_iter
- return node[1]
-
- def com_dictsetmaker(self, nodelist):
- # dictsetmaker: (test ':' test (',' test ':' value)* [',']) | (test (',' test)* [','])
- items = []
- if len(nodelist) == 1 or nodelist[1] != ':':
- # it's a set
- for i in range(1, len(nodelist), 2):
- items.append(self.com_node(nodelist[i]))
- return Set(items, lineno=items[0].lineno)
- else:
- # it's a dict
- for i in range(1, len(nodelist), 4):
- items.append((self.com_node(nodelist[i]),
- self.com_node(nodelist[i+2])))
- return Dict(items, lineno=items[0][0].lineno)
-
- def com_apply_trailer(self, primaryNode, nodelist):
- t = nodelist[1][0]
- if t == token.LPAR:
- return self.com_call_function(primaryNode, nodelist[2])
- if t == token.DOT:
- return self.com_select_member(primaryNode, nodelist[2])
- if t == token.LSQB:
- return self.com_subscriptlist(primaryNode, nodelist[2], OP_APPLY)
-
- raise SyntaxError, 'unknown node type: %s' % t
-
- def com_select_member(self, primaryNode, nodelist):
- if nodelist[0] != token.NAME:
- raise SyntaxError, "member must be a name"
- return Getattr(primaryNode, nodelist[1], lineno=nodelist[2])
-
- def com_call_function(self, primaryNode, nodelist):
- if nodelist[0] == token.RPAR:
- return CallFunc(primaryNode, [], lineno=extractLineNo(nodelist))
- args = []
- kw = 0
- len_nodelist = len(nodelist)
- for i in range(1, len_nodelist, 2):
- node = nodelist[i]
- if node[0] == token.STAR or node[0] == token.DOUBLESTAR:
- break
- kw, result = self.com_argument(node, kw)
-
- if len_nodelist != 2 and isinstance(result, GenExpr) \
- and len(node) == 3 and node[2][0] == symbol.gen_for:
- # allow f(x for x in y), but reject f(x for x in y, 1)
- # should use f((x for x in y), 1) instead of f(x for x in y, 1)
- raise SyntaxError, 'generator expression needs parenthesis'
-
- args.append(result)
- else:
- # Not broken by a star arg, so skip the last one we processed.
- i = i + 1
- if i < len_nodelist and nodelist[i][0] == token.COMMA:
- # need to accept an application that looks like "f(a, b,)"
- i = i + 1
- star_node = dstar_node = None
- while i < len_nodelist:
- tok = nodelist[i]
- ch = nodelist[i+1]
- i = i + 3
- if tok[0]==token.STAR:
- if star_node is not None:
- raise SyntaxError, 'already have the varargs identifier'
- star_node = self.com_node(ch)
- elif tok[0]==token.DOUBLESTAR:
- if dstar_node is not None:
- raise SyntaxError, 'already have the kwargs identifier'
- dstar_node = self.com_node(ch)
- else:
- raise SyntaxError, 'unknown node type: %s' % tok
- return CallFunc(primaryNode, args, star_node, dstar_node,
- lineno=extractLineNo(nodelist))
-
- def com_argument(self, nodelist, kw):
- if len(nodelist) == 3 and nodelist[2][0] == symbol.gen_for:
- test = self.com_node(nodelist[1])
- return 0, self.com_generator_expression(test, nodelist[2])
- if len(nodelist) == 2:
- if kw:
- raise SyntaxError, "non-keyword arg after keyword arg"
- return 0, self.com_node(nodelist[1])
- result = self.com_node(nodelist[3])
- n = nodelist[1]
- while len(n) == 2 and n[0] != token.NAME:
- n = n[1]
- if n[0] != token.NAME:
- raise SyntaxError, "keyword can't be an expression (%s)"%n[0]
- node = Keyword(n[1], result, lineno=n[2])
- return 1, node
-
- def com_subscriptlist(self, primary, nodelist, assigning):
- # slicing: simple_slicing | extended_slicing
- # simple_slicing: primary "[" short_slice "]"
- # extended_slicing: primary "[" slice_list "]"
- # slice_list: slice_item ("," slice_item)* [","]
-
- # backwards compat slice for '[i:j]'
- if len(nodelist) == 2:
- sub = nodelist[1]
- if (sub[1][0] == token.COLON or \
- (len(sub) > 2 and sub[2][0] == token.COLON)) and \
- sub[-1][0] != symbol.sliceop:
- return self.com_slice(primary, sub, assigning)
-
- subscripts = []
- for i in range(1, len(nodelist), 2):
- subscripts.append(self.com_subscript(nodelist[i]))
- return Subscript(primary, assigning, subscripts,
- lineno=extractLineNo(nodelist))
-
- def com_subscript(self, node):
- # slice_item: expression | proper_slice
- ch = node[1]
- t = ch[0]
- if t == token.COLON or len(node) > 2:
- return self.com_sliceobj(node)
- return self.com_node(ch)
-
- def com_sliceobj(self, node):
- # proper_slice: short_slice | long_slice
- # short_slice: [lower_bound] ":" [upper_bound]
- # long_slice: short_slice ":" [stride]
- # lower_bound: expression
- # upper_bound: expression
- # stride: expression
- #
- # Note: a stride may be further slicing...
-
- items = []
-
- if node[1][0] == token.COLON:
- items.append(Const(None))
- i = 2
- else:
- items.append(self.com_node(node[1]))
- # i == 2 is a COLON
- i = 3
-
- if i < len(node) and node[i][0] == symbol.test:
- items.append(self.com_node(node[i]))
- i = i + 1
- else:
- items.append(Const(None))
-
- # a short_slice has been built. look for long_slice now by looking
- # for strides...
- for j in range(i, len(node)):
- ch = node[j]
- if len(ch) == 2:
- items.append(Const(None))
- else:
- items.append(self.com_node(ch[2]))
- return Sliceobj(items, lineno=extractLineNo(node))
-
- def com_slice(self, primary, node, assigning):
- # short_slice: [lower_bound] ":" [upper_bound]
- lower = upper = None
- if len(node) == 3:
- if node[1][0] == token.COLON:
- upper = self.com_node(node[2])
- else:
- lower = self.com_node(node[1])
- elif len(node) == 4:
- lower = self.com_node(node[1])
- upper = self.com_node(node[3])
- return Slice(primary, assigning, lower, upper,
- lineno=extractLineNo(node))
-
- def get_docstring(self, node, n=None):
- if n is None:
- n = node[0]
- node = node[1:]
- if n == symbol.suite:
- if len(node) == 1:
- return self.get_docstring(node[0])
- for sub in node:
- if sub[0] == symbol.stmt:
- return self.get_docstring(sub)
- return None
- if n == symbol.file_input:
- for sub in node:
- if sub[0] == symbol.stmt:
- return self.get_docstring(sub)
- return None
- if n == symbol.atom:
- if node[0][0] == token.STRING:
- s = ''
- for t in node:
- s = s + eval(t[1])
- return s
- return None
- if n == symbol.stmt or n == symbol.simple_stmt \
- or n == symbol.small_stmt:
- return self.get_docstring(node[0])
- if n in _doc_nodes and len(node) == 1:
- return self.get_docstring(node[0])
- return None
-
-
-_doc_nodes = [
- symbol.expr_stmt,
- symbol.testlist,
- symbol.testlist_safe,
- symbol.test,
- symbol.or_test,
- symbol.and_test,
- symbol.not_test,
- symbol.comparison,
- symbol.expr,
- symbol.xor_expr,
- symbol.and_expr,
- symbol.shift_expr,
- symbol.arith_expr,
- symbol.term,
- symbol.factor,
- symbol.power,
- ]
-
-# comp_op: '<' | '>' | '=' | '>=' | '<=' | '!=' | '=='
-# | 'in' | 'not' 'in' | 'is' | 'is' 'not'
-_cmp_types = {
- token.LESS : '<',
- token.GREATER : '>',
- token.EQEQUAL : '==',
- token.EQUAL : '==',
- token.LESSEQUAL : '<=',
- token.GREATEREQUAL : '>=',
- token.NOTEQUAL : '!=',
- }
-
-_legal_node_types = [
- symbol.funcdef,
- symbol.classdef,
- symbol.stmt,
- symbol.small_stmt,
- symbol.flow_stmt,
- symbol.simple_stmt,
- symbol.compound_stmt,
- symbol.expr_stmt,
- symbol.del_stmt,
- symbol.pass_stmt,
- symbol.break_stmt,
- symbol.continue_stmt,
- symbol.return_stmt,
- symbol.raise_stmt,
- symbol.import_stmt,
- symbol.global_stmt,
- symbol.assert_stmt,
- symbol.if_stmt,
- symbol.while_stmt,
- symbol.for_stmt,
- symbol.try_stmt,
- symbol.with_stmt,
- symbol.suite,
- symbol.testlist,
- symbol.testlist_safe,
- symbol.test,
- symbol.and_test,
- symbol.not_test,
- symbol.comparison,
- symbol.exprlist,
- symbol.expr,
- symbol.xor_expr,
- symbol.and_expr,
- symbol.shift_expr,
- symbol.arith_expr,
- symbol.term,
- symbol.factor,
- symbol.power,
- symbol.atom,
- ]
-
-if hasattr(symbol, 'yield_stmt'):
- _legal_node_types.append(symbol.yield_stmt)
-if hasattr(symbol, 'yield_expr'):
- _legal_node_types.append(symbol.yield_expr)
-
-_assign_types = [
- symbol.test,
- symbol.or_test,
- symbol.and_test,
- symbol.not_test,
- symbol.comparison,
- symbol.expr,
- symbol.xor_expr,
- symbol.and_expr,
- symbol.shift_expr,
- symbol.arith_expr,
- symbol.term,
- symbol.factor,
- ]
-
-_names = {}
-for k, v in symbol.sym_name.items():
- _names[k] = v
-for k, v in token.tok_name.items():
- _names[k] = v
-
-def debug_tree(tree):
- l = []
- for elt in tree:
- if isinstance(elt, int):
- l.append(_names.get(elt, elt))
- elif isinstance(elt, str):
- l.append(elt)
- else:
- l.append(debug_tree(elt))
- return l
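
The module docstring above advertises parse(buf) and parseFile(path) as the entry points for building a compiler.ast tree. With the package gone, the closest stdlib equivalent is the ast module; the sketch below assumes current ast/compile behaviour and produces ast nodes, not compiler.ast nodes:

    import ast

    # ast.parse covers the role of the removed parse()/parseFile().
    tree = ast.parse("y = [i * i for i in range(3)]", filename="<example>", mode="exec")
    print(ast.dump(tree))
    # In present-day CPython the tree can also be byte-compiled directly.
    code = compile(tree, "<example>", "exec")
    exec(code)
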
diff --git a/Lib/compiler/visitor.py b/Lib/compiler/visitor.py
deleted file mode 100644
index 99c6716..0000000
--- a/Lib/compiler/visitor.py
+++ /dev/null
@@ -1,113 +0,0 @@
-from compiler import ast
-
-# XXX should probably rename ASTVisitor to ASTWalker
-# XXX can it be made even more generic?
-
-class ASTVisitor:
- """Performs a depth-first walk of the AST
-
- The ASTVisitor will walk the AST, performing either a preorder or
- postorder traversal depending on which method is called.
-
- methods:
- preorder(tree, visitor)
- postorder(tree, visitor)
- tree: an instance of ast.Node
- visitor: an instance with visitXXX methods
-
- The ASTVisitor is responsible for walking over the tree in the
- correct order. For each node, it checks the visitor argument for
- a method named 'visitNodeType' where NodeType is the name of the
- node's class, e.g. Class. If the method exists, it is called
- with the node as its sole argument.
-
- The visitor method for a particular node type can control how
- child nodes are visited during a preorder walk. (It can't control
- the order during a postorder walk, because it is called _after_
- the walk has occurred.) The ASTVisitor modifies the visitor
- argument by adding a visit method to the visitor; this method can
- be used to visit a child node of arbitrary type.
- """
-
- VERBOSE = 0
-
- def __init__(self):
- self.node = None
- self._cache = {}
-
- def default(self, node, *args):
- for child in node.getChildNodes():
- self.dispatch(child, *args)
-
- def dispatch(self, node, *args):
- self.node = node
- klass = node.__class__
- meth = self._cache.get(klass, None)
- if meth is None:
- className = klass.__name__
- meth = getattr(self.visitor, 'visit' + className, self.default)
- self._cache[klass] = meth
-## if self.VERBOSE > 0:
-## className = klass.__name__
-## if self.VERBOSE == 1:
-## if meth == 0:
-## print "dispatch", className
-## else:
-## print "dispatch", className, (meth and meth.__name__ or '')
- return meth(node, *args)
-
- def preorder(self, tree, visitor, *args):
- """Do preorder walk of tree using visitor"""
- self.visitor = visitor
- visitor.visit = self.dispatch
- self.dispatch(tree, *args) # XXX *args make sense?
-
-class ExampleASTVisitor(ASTVisitor):
- """Prints examples of the nodes that aren't visited
-
- This visitor-driver is only useful for development, when it's
- helpful to develop a visitor incrementally, and get feedback on what
- you still have to do.
- """
- examples = {}
-
- def dispatch(self, node, *args):
- self.node = node
- meth = self._cache.get(node.__class__, None)
- className = node.__class__.__name__
- if meth is None:
- meth = getattr(self.visitor, 'visit' + className, 0)
- self._cache[node.__class__] = meth
- if self.VERBOSE > 1:
- print("dispatch", className, (meth and meth.__name__ or ''))
- if meth:
- meth(node, *args)
- elif self.VERBOSE > 0:
- klass = node.__class__
- if klass not in self.examples:
- self.examples[klass] = klass
- print()
- print(self.visitor)
- print(klass)
- for attr in dir(node):
- if attr[0] != '_':
- print("\t", "%-12.12s" % attr, getattr(node, attr))
- print()
- return self.default(node, *args)
-
-# XXX this is an API change
-
-_walker = ASTVisitor
-def walk(tree, visitor, walker=None, verbose=None):
- if walker is None:
- walker = _walker()
- if verbose is not None:
- walker.VERBOSE = verbose
- walker.preorder(tree, visitor)
- return walker.visitor
-
-def dumpNode(node):
- print(node.__class__)
- for attr in dir(node):
- if attr[0] != '_':
- print("\t", "%-10.10s" % attr, getattr(node, attr))
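
The deleted walker dispatches to visitNodeType methods on a compiler.ast tree. For ast trees the stdlib offers the analogous ast.NodeVisitor; a rough sketch (the class and attribute names below are illustrative, not from this commit):

    import ast

    class NameCollector(ast.NodeVisitor):
        # Counterpart of a visitXXX-style walker, but for ast trees.
        def __init__(self):
            self.names = []

        def visit_Name(self, node):
            self.names.append(node.id)
            self.generic_visit(node)  # keep walking the children

    collector = NameCollector()
    collector.visit(ast.parse("a = b + c"))
    print(collector.names)  # ['a', 'b', 'c'], following the tree walk order
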
diff --git a/Lib/ihooks.py b/Lib/ihooks.py
index 95691b6..733f2db 100644
--- a/Lib/ihooks.py
+++ b/Lib/ihooks.py
@@ -8,7 +8,7 @@ module searching and loading algorithm, and it is possible to replace
the built-in function __import__ in order to change the semantics of
the import statement, until now it has been difficult to combine the
effect of different __import__ hacks, like loading modules from URLs
-by rimport.py, or restricted execution by rexec.py.
+by rimport.py.
This module defines three new concepts:
diff --git a/Lib/imputil.py b/Lib/imputil.py
index 87c31fa..88d288f 100644
--- a/Lib/imputil.py
+++ b/Lib/imputil.py
@@ -674,7 +674,6 @@ def _test_revamp():
# push MAL's mapper into sys.path[0] as a cache (hard-coded for apps)
#
# from Guido:
-# need to change sys.* references for rexec environs
# need hook for MAL's walk-me-up import strategy, or Tim's absolute strategy
# watch out for sys.modules[...] is None
# flag to force absolute imports? (speeds _determine_import_context and
@@ -714,7 +713,7 @@ def _test_revamp():
# > However, we still have a tension occurring here:
# >
# > 1) implementing policy in ImportManager assists in single-point policy
-# > changes for app/rexec situations
+# > changes for app situations
# > 2) implementing policy in Importer assists in package-private policy
# > changes for normal, operating conditions
# >
diff --git a/Lib/inspect.py b/Lib/inspect.py
index 0be0419..ff25be7 100644
--- a/Lib/inspect.py
+++ b/Lib/inspect.py
@@ -161,7 +161,6 @@ def isframe(object):
f_lasti index of last attempted instruction in bytecode
f_lineno current line number in Python source code
f_locals local namespace seen by this frame
- f_restricted 0 or 1 if frame is in restricted execution mode
f_trace tracing function for this frame, or None"""
return isinstance(object, types.FrameType)
@@ -674,7 +673,7 @@ def getargs(co):
"""Get information about the arguments accepted by a code object.
Three things are returned: (args, varargs, varkw), where
- 'args' is the list of argument names, possibly containing nested
+ 'args' is the list of argument names, possibly containing nested
lists. Keyword-only arguments are appended. 'varargs' and 'varkw'
are the names of the * and ** arguments or None."""
args, varargs, kwonlyargs, varkw = _getfullargs(co)
@@ -751,7 +750,7 @@ def getargspec(func):
'args' will include keyword-only argument names.
'varargs' and 'varkw' are the names of the * and ** arguments or None.
'defaults' is an n-tuple of the default values of the last n arguments.
-
+
Use the getfullargspec() API for Python-3000 code, as annotations
and keyword arguments are supported. getargspec() will raise ValueError
if the func has either annotations or keyword arguments.
@@ -767,7 +766,7 @@ def getargspec(func):
def getfullargspec(func):
"""Get the names and default values of a function's arguments.
- A tuple of seven things is returned: (args, varargs, kwonlyargs,
+ A tuple of seven things is returned: (args, varargs, kwonlyargs,
kwonlydefaults, varkw, defaults, annotations).
'args' is a list of the argument names (it may contain nested lists).
'varargs' and 'varkw' are the names of the * and ** arguments or None.
@@ -775,7 +774,7 @@ def getfullargspec(func):
'kwonlyargs' is a list of keyword-only argument names.
'kwonlydefaults' is a dictionary mapping names from kwonlyargs to defaults.
'annotations' is a dictionary mapping argument names to annotations.
-
+
The first four items in the tuple correspond to getargspec().
"""
@@ -784,7 +783,7 @@ def getfullargspec(func):
if not isfunction(func):
raise TypeError('arg is not a Python function')
args, varargs, kwonlyargs, varkw = _getfullargs(func.__code__)
- return (args, varargs, varkw, func.__defaults__,
+ return (args, varargs, varkw, func.__defaults__,
kwonlyargs, func.__kwdefaults__, func.__annotations__)
def getargvalues(frame):
@@ -816,12 +815,12 @@ def formatannotation(annotation, base_module=None):
return annotation.__name__
return annotation.__module__+'.'+annotation.__name__
return repr(annotation)
-
+
def formatannotationrelativeto(object):
- module = getattr(object, '__module__', None)
- def _formatannotation(annotation):
- return formatannotation(annotation, module)
- return _formatannotation
+ module = getattr(object, '__module__', None)
+ def _formatannotation(annotation):
+ return formatannotation(annotation, module)
+ return _formatannotation
def formatargspec(args, varargs=None, varkw=None, defaults=None,
kwonlyargs=(), kwonlydefaults={}, annotations={},
@@ -832,7 +831,7 @@ def formatargspec(args, varargs=None, varkw=None, defaults=None,
formatreturns=lambda text: ' -> ' + text,
formatannotation=formatannotation,
join=joinseq):
- """Format an argument spec from the values returned by getargspec
+ """Format an argument spec from the values returned by getargspec
or getfullargspec.
The first seven arguments are (args, varargs, varkw, defaults,
diff --git a/Lib/md5.py b/Lib/md5.py
deleted file mode 100644
index bbe1984..0000000
--- a/Lib/md5.py
+++ /dev/null
@@ -1,10 +0,0 @@
-# $Id$
-#
-# Copyright (C) 2005 Gregory P. Smith (greg@electricrain.com)
-# Licensed to PSF under a Contributor Agreement.
-
-from hashlib import md5
-new = md5
-
-blocksize = 1 # legacy value (wrong in any useful sense)
-digest_size = 16
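
As the deleted wrapper shows, Lib/md5.py had already become a thin alias for hashlib, so callers can switch to hashlib directly; a small sketch of the equivalent calls:

    import hashlib

    h = hashlib.md5()              # what md5.new() used to return
    h.update(b"some bytes")
    print(h.hexdigest())
    print(h.digest_size)           # 16, matching the constant above
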
diff --git a/Lib/new.py b/Lib/new.py
index bee11ed..5559f6e 100644
--- a/Lib/new.py
+++ b/Lib/new.py
@@ -8,9 +8,4 @@ from types import ClassType as classobj
from types import FunctionType as function
from types import MethodType as instancemethod
from types import ModuleType as module
-
-# CodeType is not accessible in restricted execution mode
-try:
- from types import CodeType as code
-except ImportError:
- pass
+from types import CodeType as code
diff --git a/Lib/pickle.py b/Lib/pickle.py
index 62658cb..c93352f 100644
--- a/Lib/pickle.py
+++ b/Lib/pickle.py
@@ -1009,14 +1009,9 @@ class Unpickler:
if (not args and
type(klass) is ClassType and
not hasattr(klass, "__getinitargs__")):
- try:
- value = _EmptyClass()
- value.__class__ = klass
- instantiated = 1
- except RuntimeError:
- # In restricted execution, assignment to inst.__class__ is
- # prohibited
- pass
+ value = _EmptyClass()
+ value.__class__ = klass
+ instantiated = 1
if not instantiated:
try:
value = klass(*args)
@@ -1184,20 +1179,7 @@ class Unpickler:
if isinstance(state, tuple) and len(state) == 2:
state, slotstate = state
if state:
- try:
- inst.__dict__.update(state)
- except RuntimeError:
- # XXX In restricted execution, the instance's __dict__
- # is not accessible. Use the old way of unpickling
- # the instance variables. This is a semantic
- # difference when unpickling in restricted
- # vs. unrestricted modes.
- # Note, however, that cPickle has never tried to do the
- # .update() business, and always uses
- # PyObject_SetItem(inst.__dict__, key, value) in a
- # loop over state.items().
- for k, v in state.items():
- setattr(inst, k, v)
+ inst.__dict__.update(state)
if slotstate:
for k, v in slotstate.items():
setattr(inst, k, v)
diff --git a/Lib/pickletools.py b/Lib/pickletools.py
index c5c45eb..b2c9599 100644
--- a/Lib/pickletools.py
+++ b/Lib/pickletools.py
@@ -1562,13 +1562,6 @@ opcodes = [
the object is updated via
anyobject.__dict__.update(argument)
-
- This may raise RuntimeError in restricted execution mode (which
- disallows access to __dict__ directly); in that case, the object
- is updated instead via
-
- for k, v in argument.items():
- anyobject[k] = v
"""),
I(name='INST',
@@ -1604,11 +1597,7 @@ opcodes = [
calling __init__() is current wisdom). In this case, an instance of
an old-style dummy class is created, and then we try to rebind its
__class__ attribute to the desired class object. If this succeeds,
- the new instance object is pushed on the stack, and we're done. In
- restricted execution mode it can fail (assignment to __class__ is
- disallowed), and I'm not really sure what happens then -- it looks
- like the code ends up calling the class object's __init__ anyway,
- via falling into the next case.
+ the new instance object is pushed on the stack, and we're done.
Else (the argtuple is not empty, it's not an old-style class object,
or the class object does have a __getinitargs__ attribute), the code
diff --git a/Lib/plat-irix6/jpeg.py b/Lib/plat-irix6/jpeg.py
deleted file mode 100644
index 0b52031..0000000
--- a/Lib/plat-irix6/jpeg.py
+++ /dev/null
@@ -1,111 +0,0 @@
-# Implement 'jpeg' interface using SGI's compression library
-
-# XXX Options 'smooth' and 'optimize' are ignored.
-
-# XXX It appears that compressing grayscale images doesn't work right;
-# XXX the resulting file causes weirdness.
-
-class error(Exception):
- pass
-
-options = {'quality': 75, 'optimize': 0, 'smooth': 0, 'forcegray': 0}
-
-comp = None
-decomp = None
-
-def compress(imgdata, width, height, bytesperpixel):
- global comp
- import cl
- if comp is None: comp = cl.OpenCompressor(cl.JPEG)
- if bytesperpixel == 1:
- format = cl.GRAYSCALE
- elif bytesperpixel == 4:
- format = cl.RGBX
- if options['forcegray']:
- iformat = cl.GRAYSCALE
- else:
- iformat = cl.YUV
- # XXX How to support 'optimize'?
- params = [cl.IMAGE_WIDTH, width, cl.IMAGE_HEIGHT, height,
- cl.ORIGINAL_FORMAT, format,
- cl.ORIENTATION, cl.BOTTOM_UP,
- cl.QUALITY_FACTOR, options['quality'],
- cl.INTERNAL_FORMAT, iformat,
- ]
- comp.SetParams(params)
- jpegdata = comp.Compress(1, imgdata)
- return jpegdata
-
-def decompress(jpegdata):
- global decomp
- import cl
- if decomp is None: decomp = cl.OpenDecompressor(cl.JPEG)
- headersize = decomp.ReadHeader(jpegdata)
- params = [cl.IMAGE_WIDTH, 0, cl.IMAGE_HEIGHT, 0, cl.INTERNAL_FORMAT, 0]
- decomp.GetParams(params)
- width, height, format = params[1], params[3], params[5]
- if format == cl.GRAYSCALE or options['forcegray']:
- format = cl.GRAYSCALE
- bytesperpixel = 1
- else:
- format = cl.RGBX
- bytesperpixel = 4
- # XXX How to support 'smooth'?
- params = [cl.ORIGINAL_FORMAT, format,
- cl.ORIENTATION, cl.BOTTOM_UP,
- cl.FRAME_BUFFER_SIZE, width*height*bytesperpixel]
- decomp.SetParams(params)
- imgdata = decomp.Decompress(1, jpegdata)
- return imgdata, width, height, bytesperpixel
-
-def setoption(name, value):
- if type(value) is not type(0):
- raise TypeError, 'jpeg.setoption: numeric options only'
- if name == 'forcegrey':
- name = 'forcegray'
- if not options.has_key(name):
- raise KeyError, 'jpeg.setoption: unknown option name'
- options[name] = int(value)
-
-def test():
- import sys
- if sys.argv[1:2] == ['-g']:
- del sys.argv[1]
- setoption('forcegray', 1)
- if not sys.argv[1:]:
- sys.argv.append('/usr/local/images/data/jpg/asterix.jpg')
- for file in sys.argv[1:]:
- show(file)
-
-def show(file):
- import gl, GL, DEVICE
- jpegdata = open(file, 'r').read()
- imgdata, width, height, bytesperpixel = decompress(jpegdata)
- gl.foreground()
- gl.prefsize(width, height)
- win = gl.winopen(file)
- if bytesperpixel == 1:
- gl.cmode()
- gl.pixmode(GL.PM_SIZE, 8)
- gl.gconfig()
- for i in range(256):
- gl.mapcolor(i, i, i, i)
- else:
- gl.RGBmode()
- gl.pixmode(GL.PM_SIZE, 32)
- gl.gconfig()
- gl.qdevice(DEVICE.REDRAW)
- gl.qdevice(DEVICE.ESCKEY)
- gl.qdevice(DEVICE.WINQUIT)
- gl.qdevice(DEVICE.WINSHUT)
- gl.lrectwrite(0, 0, width-1, height-1, imgdata)
- while 1:
- dev, val = gl.qread()
- if dev in (DEVICE.ESCKEY, DEVICE.WINSHUT, DEVICE.WINQUIT):
- break
- if dev == DEVICE.REDRAW:
- gl.lrectwrite(0, 0, width-1, height-1, imgdata)
- gl.winclose(win)
- # Now test the compression and write the result to a fixed filename
- newjpegdata = compress(imgdata, width, height, bytesperpixel)
- open('/tmp/j.jpg', 'w').write(newjpegdata)
diff --git a/Lib/plat-irix6/panel.py b/Lib/plat-irix6/panel.py
deleted file mode 100644
index 5a0d87e..0000000
--- a/Lib/plat-irix6/panel.py
+++ /dev/null
@@ -1,281 +0,0 @@
-# Module 'panel'
-#
-# Support for the Panel library.
-# Uses built-in module 'pnl'.
-# Applications should use 'panel.function' instead of 'pnl.function';
-# most 'pnl' functions are transparently exported by 'panel',
-# but dopanel() is overridden and you have to use this version
-# if you want to use callbacks.
-
-
-import pnl
-
-
-debug = 0
-
-
-# Test if an object is a list.
-#
-def is_list(x):
- return type(x) == type([])
-
-
-# Reverse a list.
-#
-def reverse(list):
- res = []
- for item in list:
- res.insert(0, item)
- return res
-
-
-# Get an attribute of a list, which may itself be another list.
-# Don't use 'prop' for name.
-#
-def getattrlist(list, name):
- for item in list:
- if item and is_list(item) and item[0] == name:
- return item[1:]
- return []
-
-
-# Get a property of a list, which may itself be another list.
-#
-def getproplist(list, name):
- for item in list:
- if item and is_list(item) and item[0] == 'prop':
- if len(item) > 1 and item[1] == name:
- return item[2:]
- return []
-
-
-# Test if an actuator description contains the property 'end-of-group'
-#
-def is_endgroup(list):
- x = getproplist(list, 'end-of-group')
- return (x and x[0] == '#t')
-
-
-# Neatly display an actuator definition given as S-expression
-# the prefix string is printed before each line.
-#
-def show_actuator(prefix, a):
- for item in a:
- if not is_list(item):
- print(prefix, item)
- elif item and item[0] == 'al':
- print(prefix, 'Subactuator list:')
- for a in item[1:]:
- show_actuator(prefix + ' ', a)
- elif len(item) == 2:
- print(prefix, item[0], '=>', item[1])
- elif len(item) == 3 and item[0] == 'prop':
- print(prefix, 'Prop', item[1], '=>', end=' ')
- print(item[2])
- else:
- print(prefix, '?', item)
-
-
-# Neatly display a panel.
-#
-def show_panel(prefix, p):
- for item in p:
- if not is_list(item):
- print(prefix, item)
- elif item and item[0] == 'al':
- print(prefix, 'Actuator list:')
- for a in item[1:]:
- show_actuator(prefix + ' ', a)
- elif len(item) == 2:
- print(prefix, item[0], '=>', item[1])
- elif len(item) == 3 and item[0] == 'prop':
- print(prefix, 'Prop', item[1], '=>', end=' ')
- print(item[2])
- else:
- print(prefix, '?', item)
-
-
-# Exception raised by build_actuator or build_panel.
-#
-panel_error = 'panel error'
-
-
-# Dummy callback used to initialize the callbacks.
-#
-def dummy_callback(arg):
- pass
-
-
-# Assign attributes to members of the target.
-# Attribute names in exclist are ignored.
-# The member name is the attribute name prefixed with the prefix.
-#
-def assign_members(target, attrlist, exclist, prefix):
- for item in attrlist:
- if is_list(item) and len(item) == 2 and item[0] not in exclist:
- name, value = item[0], item[1]
- ok = 1
- if value[0] in '-0123456789':
- value = eval(value)
- elif value[0] == '"':
- value = value[1:-1]
- elif value == 'move-then-resize':
- # Strange default set by Panel Editor...
- ok = 0
- else:
- print('unknown value', value, 'for', name)
- ok = 0
- if ok:
- lhs = 'target.' + prefix + name
- stmt = lhs + '=' + repr(value)
- if debug: print('exec', stmt)
- try:
- exec(stmt + '\n')
- except KeyboardInterrupt: # Don't catch this!
- raise KeyboardInterrupt
- except:
- print('assign failed:', stmt)
-
-
-# Build a real actuator from an actuator description.
-# Return a pair (actuator, name).
-#
-def build_actuator(descr):
- namelist = getattrlist(descr, 'name')
- if namelist:
- # Assume it is a string
- actuatorname = namelist[0][1:-1]
- else:
- actuatorname = ''
- type = descr[0]
- if type[:4] == 'pnl_': type = type[4:]
- act = pnl.mkact(type)
- act.downfunc = act.activefunc = act.upfunc = dummy_callback
- #
- assign_members(act, descr[1:], ['al', 'data', 'name'], '')
- #
- # Treat actuator-specific data
- #
- datalist = getattrlist(descr, 'data')
- prefix = ''
- if type[-4:] == 'puck':
- prefix = 'puck_'
- elif type == 'mouse':
- prefix = 'mouse_'
- assign_members(act, datalist, [], prefix)
- #
- return act, actuatorname
-
-
-# Build all sub-actuators and add them to the super-actuator.
-# The super-actuator must already have been added to the panel.
-# Sub-actuators with defined names are added as members to the panel
-# so they can be referenced as p.name.
-#
-# Note: I have no idea how panel.endgroup() works when applied
-# to a sub-actuator.
-#
-def build_subactuators(panel, super_act, al):
- #
- # This is nearly the same loop as below in build_panel(),
- # except a call is made to addsubact() instead of addact().
- #
- for a in al:
- act, name = build_actuator(a)
- act.addsubact(super_act)
- if name:
- stmt = 'panel.' + name + ' = act'
- if debug: print('exec', stmt)
- exec(stmt + '\n')
- if is_endgroup(a):
- panel.endgroup()
- sub_al = getattrlist(a, 'al')
- if sub_al:
- build_subactuators(panel, act, sub_al)
- #
-    # Fix the actuator to which we just added subactuators.
- # This can't hurt (I hope) and is needed for the scroll actuator.
- #
- super_act.fixact()
-
-
-# Build a real panel from a panel definition.
-# Return a panel object p, where for each named actuator a, p.name is a
-# reference to a.
-#
-def build_panel(descr):
- #
- # Sanity check
- #
- if (not descr) or descr[0] != 'panel':
- raise panel_error, 'panel description must start with "panel"'
- #
- if debug: show_panel('', descr)
- #
- # Create an empty panel
- #
- panel = pnl.mkpanel()
- #
- # Assign panel attributes
- #
- assign_members(panel, descr[1:], ['al'], '')
- #
- # Look for actuator list
- #
- al = getattrlist(descr, 'al')
- #
- # The order in which actuators are created is important
- # because of the endgroup() operator.
- # Unfortunately the Panel Editor outputs the actuator list
- # in reverse order, so we reverse it here.
- #
- al = reverse(al)
- #
- for a in al:
- act, name = build_actuator(a)
- act.addact(panel)
- if name:
- stmt = 'panel.' + name + ' = act'
- exec(stmt + '\n')
- if is_endgroup(a):
- panel.endgroup()
- sub_al = getattrlist(a, 'al')
- if sub_al:
- build_subactuators(panel, act, sub_al)
- #
- return panel
-
-
-# Wrapper around pnl.dopanel() which calls call-back functions.
-#
-def my_dopanel():
- # Extract only the first 4 elements to allow for future expansion
- a, down, active, up = pnl.dopanel()[:4]
- if down:
- down.downfunc(down)
- if active:
- active.activefunc(active)
- if up:
- up.upfunc(up)
- return a
-
-
-# Create one or more panels from a description file (S-expressions)
-# generated by the Panel Editor.
-#
-def defpanellist(file):
- import panelparser
- descrlist = panelparser.parse_file(open(file, 'r'))
- panellist = []
- for descr in descrlist:
- panellist.append(build_panel(descr))
- return panellist
-
-
-# Import everything from built-in method pnl, so the user can always
-# use panel.foo() instead of pnl.foo().
-# This gives *no* performance penalty once this module is imported.
-#
-from pnl import * # for export
-
-dopanel = my_dopanel # override pnl.dopanel
diff --git a/Lib/plat-irix6/panelparser.py b/Lib/plat-irix6/panelparser.py
deleted file mode 100644
index c831c49..0000000
--- a/Lib/plat-irix6/panelparser.py
+++ /dev/null
@@ -1,128 +0,0 @@
-# Module 'parser'
-#
-# Parse S-expressions output by the Panel Editor
-# (which is written in Scheme so it can't help writing S-expressions).
-#
-# See notes at end of file.
-
-
-whitespace = ' \t\n'
-operators = '()\''
-separators = operators + whitespace + ';' + '"'
-
-
-# Tokenize a string.
-# Return a list of tokens (strings).
-#
-def tokenize_string(s):
- tokens = []
- while s:
- c = s[:1]
- if c in whitespace:
- s = s[1:]
- elif c == ';':
- s = ''
- elif c == '"':
- n = len(s)
- i = 1
- while i < n:
- c = s[i]
- i = i+1
- if c == '"': break
- if c == '\\': i = i+1
- tokens.append(s[:i])
- s = s[i:]
- elif c in operators:
- tokens.append(c)
- s = s[1:]
- else:
- n = len(s)
- i = 1
- while i < n:
- if s[i] in separators: break
- i = i+1
- tokens.append(s[:i])
- s = s[i:]
- return tokens
-
-
-# Tokenize a whole file (given as file object, not as file name).
-# Return a list of tokens (strings).
-#
-def tokenize_file(fp):
- tokens = []
- while 1:
- line = fp.readline()
- if not line: break
- tokens = tokens + tokenize_string(line)
- return tokens
-
-
-# Exception raised by parse_exr.
-#
-syntax_error = 'syntax error'
-
-
-# Parse an S-expression.
-# Input is a list of tokens as returned by tokenize_*().
-# Return a pair (expr, tokens)
-# where expr is a list representing the s-expression,
-# and tokens contains the remaining tokens.
-# May raise syntax_error.
-#
-def parse_expr(tokens):
- if (not tokens) or tokens[0] != '(':
- raise syntax_error, 'expected "("'
- tokens = tokens[1:]
- expr = []
- while 1:
- if not tokens:
- raise syntax_error, 'missing ")"'
- if tokens[0] == ')':
- return expr, tokens[1:]
- elif tokens[0] == '(':
- subexpr, tokens = parse_expr(tokens)
- expr.append(subexpr)
- else:
- expr.append(tokens[0])
- tokens = tokens[1:]
-
-
-# Parse a file (given as file object, not as file name).
-# Return a list of parsed S-expressions found at the top level.
-#
-def parse_file(fp):
- tokens = tokenize_file(fp)
- exprlist = []
- while tokens:
- expr, tokens = parse_expr(tokens)
- exprlist.append(expr)
- return exprlist
-
-
-# EXAMPLE:
-#
-# The input
-# '(hip (hop hur-ray))'
-#
-# passed to tokenize_string() returns the token list
-# ['(', 'hip', '(', 'hop', 'hur-ray', ')', ')']
-#
-# When this is passed to parse_expr() it returns the expression
-# ['hip', ['hop', 'hur-ray']]
-# plus an empty token list (because there are no tokens left).
-#
-# When a file containing the example is passed to parse_file() it returns
-# a list whose only element is the output of parse_expr() above:
-# [['hip', ['hop', 'hur-ray']]]
-
-
-# TOKENIZING:
-#
-# Comments start with semicolon (;) and continue till the end of the line.
-#
-# Tokens are separated by whitespace, except the following characters
-# always form a separate token (outside strings):
-# ( ) '
-# Strings are enclosed in double quotes (") and backslash (\) is used
-# as escape character in strings.
diff --git a/Lib/rexec.py b/Lib/rexec.py
deleted file mode 100644
index c4ce1d0..0000000
--- a/Lib/rexec.py
+++ /dev/null
@@ -1,585 +0,0 @@
-"""Restricted execution facilities.
-
-The class RExec exports methods r_exec(), r_eval(), r_execfile(), and
-r_import(), which correspond roughly to the built-in operations
-exec, eval(), execfile() and import, but executing the code in an
-environment that only exposes those built-in operations that are
-deemed safe. To this end, a modest collection of 'fake' modules is
-created which mimics the standard modules by the same names. It is a
-policy decision which built-in modules and operations are made
-available; this module provides a reasonable default, but derived
-classes can change the policies e.g. by overriding or extending class
-variables like ok_builtin_modules or methods like make_sys().
-
-XXX To do:
-- r_open should allow writing tmp dir
-- r_exec etc. with explicit globals/locals? (Use rexec("exec ... in ...")?)
-
-"""
-
-
-import sys
-import __builtin__
-import os
-import ihooks
-import imp
-
-__all__ = ["RExec"]
-
-class FileBase:
-
- ok_file_methods = ('fileno', 'flush', 'isatty', 'read', 'readline',
- 'readlines', 'seek', 'tell', 'write', 'writelines',
- '__iter__')
-
-
-class FileWrapper(FileBase):
-
- # XXX This is just like a Bastion -- should use that!
-
- def __init__(self, f):
- for m in self.ok_file_methods:
- if not hasattr(self, m) and hasattr(f, m):
- setattr(self, m, getattr(f, m))
-
- def close(self):
- self.flush()
-
-
-TEMPLATE = """
-def %s(self, *args):
- return getattr(self.mod, self.name).%s(*args)
-"""
-
-class FileDelegate(FileBase):
-
- def __init__(self, mod, name):
- self.mod = mod
- self.name = name
-
- for m in FileBase.ok_file_methods + ('close',):
- exec(TEMPLATE % (m, m))
-
-
-class RHooks(ihooks.Hooks):
-
- def __init__(self, *args):
- # Hacks to support both old and new interfaces:
- # old interface was RHooks(rexec[, verbose])
- # new interface is RHooks([verbose])
- verbose = 0
- rexec = None
- if args and type(args[-1]) == type(0):
- verbose = args[-1]
- args = args[:-1]
- if args and hasattr(args[0], '__class__'):
- rexec = args[0]
- args = args[1:]
- if args:
- raise TypeError, "too many arguments"
- ihooks.Hooks.__init__(self, verbose)
- self.rexec = rexec
-
- def set_rexec(self, rexec):
- # Called by RExec instance to complete initialization
- self.rexec = rexec
-
- def get_suffixes(self):
- return self.rexec.get_suffixes()
-
- def is_builtin(self, name):
- return self.rexec.is_builtin(name)
-
- def init_builtin(self, name):
- m = __import__(name)
- return self.rexec.copy_except(m, ())
-
- def init_frozen(self, name): raise SystemError, "don't use this"
- def load_source(self, *args): raise SystemError, "don't use this"
- def load_compiled(self, *args): raise SystemError, "don't use this"
- def load_package(self, *args): raise SystemError, "don't use this"
-
- def load_dynamic(self, name, filename, file):
- return self.rexec.load_dynamic(name, filename, file)
-
- def add_module(self, name):
- return self.rexec.add_module(name)
-
- def modules_dict(self):
- return self.rexec.modules
-
- def default_path(self):
- return self.rexec.modules['sys'].path
-
-
-# XXX Backwards compatibility
-RModuleLoader = ihooks.FancyModuleLoader
-RModuleImporter = ihooks.ModuleImporter
-
-
-class RExec(ihooks._Verbose):
- """Basic restricted execution framework.
-
- Code executed in this restricted environment will only have access to
- modules and functions that are deemed safe; you can subclass RExec to
- add or remove capabilities as desired.
-
- The RExec class can prevent code from performing unsafe operations like
- reading or writing disk files, or using TCP/IP sockets. However, it does
- not protect against code using extremely large amounts of memory or
- processor time.
-
- """
-
- ok_path = tuple(sys.path) # That's a policy decision
-
- ok_builtin_modules = ('audioop', 'array', 'binascii',
- 'cmath', 'errno', 'imageop',
- 'marshal', 'math', 'md5', 'operator',
- 'parser', 'select',
- 'sha', '_sre', 'strop', 'struct', 'time',
- '_weakref')
-
- ok_posix_names = ('error', 'fstat', 'listdir', 'lstat', 'readlink',
- 'stat', 'times', 'uname', 'getpid', 'getppid',
- 'getcwd', 'getuid', 'getgid', 'geteuid', 'getegid')
-
- ok_sys_names = ('byteorder', 'copyright', 'exit', 'getdefaultencoding',
- 'getrefcount', 'hexversion', 'maxint', 'maxunicode',
- 'platform', 'ps1', 'ps2', 'version', 'version_info')
-
- nok_builtin_names = ('open', 'file', 'reload', '__import__')
-
- ok_file_types = (imp.C_EXTENSION, imp.PY_SOURCE)
-
- def __init__(self, hooks = None, verbose = 0):
- """Returns an instance of the RExec class.
-
- The hooks parameter is an instance of the RHooks class or a subclass
- of it. If it is omitted or None, the default RHooks class is
- instantiated.
-
- Whenever the RExec module searches for a module (even a built-in one)
- or reads a module's code, it doesn't actually go out to the file
- system itself. Rather, it calls methods of an RHooks instance that
- was passed to or created by its constructor. (Actually, the RExec
- object doesn't make these calls --- they are made by a module loader
- object that's part of the RExec object. This allows another level of
- flexibility, which can be useful when changing the mechanics of
- import within the restricted environment.)
-
- By providing an alternate RHooks object, we can control the file
- system accesses made to import a module, without changing the
- actual algorithm that controls the order in which those accesses are
- made. For instance, we could substitute an RHooks object that
- passes all filesystem requests to a file server elsewhere, via some
- RPC mechanism such as ILU. Grail's applet loader uses this to support
- importing applets from a URL for a directory.
-
- If the verbose parameter is true, additional debugging output may be
- sent to standard output.
-
- """
-
- raise RuntimeError, "This code is not secure in Python 2.2 and later"
-
- ihooks._Verbose.__init__(self, verbose)
- # XXX There's a circular reference here:
- self.hooks = hooks or RHooks(verbose)
- self.hooks.set_rexec(self)
- self.modules = {}
- self.ok_dynamic_modules = self.ok_builtin_modules
- list = []
- for mname in self.ok_builtin_modules:
- if mname in sys.builtin_module_names:
- list.append(mname)
- self.ok_builtin_modules = tuple(list)
- self.set_trusted_path()
- self.make_builtin()
- self.make_initial_modules()
- # make_sys must be last because it adds the already created
- # modules to its builtin_module_names
- self.make_sys()
- self.loader = RModuleLoader(self.hooks, verbose)
- self.importer = RModuleImporter(self.loader, verbose)
-
- def set_trusted_path(self):
- # Set the path from which dynamic modules may be loaded.
- # Those dynamic modules must also occur in ok_builtin_modules
- self.trusted_path = filter(os.path.isabs, sys.path)
-
- def load_dynamic(self, name, filename, file):
- if name not in self.ok_dynamic_modules:
- raise ImportError, "untrusted dynamic module: %s" % name
- if name in sys.modules:
- src = sys.modules[name]
- else:
- src = imp.load_dynamic(name, filename, file)
- dst = self.copy_except(src, [])
- return dst
-
- def make_initial_modules(self):
- self.make_main()
- self.make_osname()
-
- # Helpers for RHooks
-
- def get_suffixes(self):
- return [item # (suff, mode, type)
- for item in imp.get_suffixes()
- if item[2] in self.ok_file_types]
-
- def is_builtin(self, mname):
- return mname in self.ok_builtin_modules
-
- # The make_* methods create specific built-in modules
-
- def make_builtin(self):
- m = self.copy_except(__builtin__, self.nok_builtin_names)
- m.__import__ = self.r_import
- m.reload = self.r_reload
- m.open = m.file = self.r_open
-
- def make_main(self):
- m = self.add_module('__main__')
-
- def make_osname(self):
- osname = os.name
- src = __import__(osname)
- dst = self.copy_only(src, self.ok_posix_names)
- dst.environ = e = {}
- for key, value in os.environ.items():
- e[key] = value
-
- def make_sys(self):
- m = self.copy_only(sys, self.ok_sys_names)
- m.modules = self.modules
- m.argv = ['RESTRICTED']
- m.path = map(None, self.ok_path)
- m.exc_info = self.r_exc_info
- m = self.modules['sys']
- l = self.modules.keys() + list(self.ok_builtin_modules)
- l.sort()
- m.builtin_module_names = tuple(l)
-
- # The copy_* methods copy existing modules with some changes
-
- def copy_except(self, src, exceptions):
- dst = self.copy_none(src)
- for name in dir(src):
- setattr(dst, name, getattr(src, name))
- for name in exceptions:
- try:
- delattr(dst, name)
- except AttributeError:
- pass
- return dst
-
- def copy_only(self, src, names):
- dst = self.copy_none(src)
- for name in names:
- try:
- value = getattr(src, name)
- except AttributeError:
- continue
- setattr(dst, name, value)
- return dst
-
- def copy_none(self, src):
- m = self.add_module(src.__name__)
- m.__doc__ = src.__doc__
- return m
-
- # Add a module -- return an existing module or create one
-
- def add_module(self, mname):
- m = self.modules.get(mname)
- if m is None:
- self.modules[mname] = m = self.hooks.new_module(mname)
- m.__builtins__ = self.modules['__builtin__']
- return m
-
- # The r* methods are public interfaces
-
- def r_exec(self, code):
- """Execute code within a restricted environment.
-
- The code parameter must either be a string containing one or more
- lines of Python code, or a compiled code object, which will be
- executed in the restricted environment's __main__ module.
-
- """
- m = self.add_module('__main__')
- exec(code, m.__dict__)
-
- def r_eval(self, code):
- """Evaluate code within a restricted environment.
-
- The code parameter must either be a string containing a Python
- expression, or a compiled code object, which will be evaluated in
- the restricted environment's __main__ module. The value of the
- expression or code object will be returned.
-
- """
- m = self.add_module('__main__')
- return eval(code, m.__dict__)
-
- def r_execfile(self, file):
- """Execute the Python code in the file in the restricted
- environment's __main__ module.
-
- """
- m = self.add_module('__main__')
- execfile(file, m.__dict__)
-
- def r_import(self, mname, globals={}, locals={}, fromlist=[]):
- """Import a module, raising an ImportError exception if the module
- is considered unsafe.
-
- This method is implicitly called by code executing in the
- restricted environment. Overriding this method in a subclass is
- used to change the policies enforced by a restricted environment.
-
- """
- return self.importer.import_module(mname, globals, locals, fromlist)
-
- def r_reload(self, m):
- """Reload the module object, re-parsing and re-initializing it.
-
- This method is implicitly called by code executing in the
- restricted environment. Overriding this method in a subclass is
- used to change the policies enforced by a restricted environment.
-
- """
- return self.importer.reload(m)
-
- def r_unload(self, m):
- """Unload the module.
-
- Removes it from the restricted environment's sys.modules dictionary.
-
- This method is implicitly called by code executing in the
- restricted environment. Overriding this method in a subclass is
- used to change the policies enforced by a restricted environment.
-
- """
- return self.importer.unload(m)
-
- # The s_* methods are similar but also swap std{in,out,err}
-
- def make_delegate_files(self):
- s = self.modules['sys']
- self.delegate_stdin = FileDelegate(s, 'stdin')
- self.delegate_stdout = FileDelegate(s, 'stdout')
- self.delegate_stderr = FileDelegate(s, 'stderr')
- self.restricted_stdin = FileWrapper(sys.stdin)
- self.restricted_stdout = FileWrapper(sys.stdout)
- self.restricted_stderr = FileWrapper(sys.stderr)
-
- def set_files(self):
- if not hasattr(self, 'save_stdin'):
- self.save_files()
- if not hasattr(self, 'delegate_stdin'):
- self.make_delegate_files()
- s = self.modules['sys']
- s.stdin = self.restricted_stdin
- s.stdout = self.restricted_stdout
- s.stderr = self.restricted_stderr
- sys.stdin = self.delegate_stdin
- sys.stdout = self.delegate_stdout
- sys.stderr = self.delegate_stderr
-
- def reset_files(self):
- self.restore_files()
- s = self.modules['sys']
- self.restricted_stdin = s.stdin
- self.restricted_stdout = s.stdout
- self.restricted_stderr = s.stderr
-
-
- def save_files(self):
- self.save_stdin = sys.stdin
- self.save_stdout = sys.stdout
- self.save_stderr = sys.stderr
-
- def restore_files(self):
- sys.stdin = self.save_stdin
- sys.stdout = self.save_stdout
- sys.stderr = self.save_stderr
-
- def s_apply(self, func, args=(), kw={}):
- self.save_files()
- try:
- self.set_files()
- r = func(*args, **kw)
- finally:
- self.restore_files()
- return r
-
- def s_exec(self, *args):
- """Execute code within a restricted environment.
-
- Similar to the r_exec() method, but the code will be granted access
- to restricted versions of the standard I/O streams sys.stdin,
- sys.stderr, and sys.stdout.
-
- The code parameter must either be a string containing one or more
- lines of Python code, or a compiled code object, which will be
- executed in the restricted environment's __main__ module.
-
- """
- return self.s_apply(self.r_exec, args)
-
- def s_eval(self, *args):
- """Evaluate code within a restricted environment.
-
- Similar to the r_eval() method, but the code will be granted access
- to restricted versions of the standard I/O streams sys.stdin,
- sys.stderr, and sys.stdout.
-
- The code parameter must either be a string containing a Python
- expression, or a compiled code object, which will be evaluated in
- the restricted environment's __main__ module. The value of the
- expression or code object will be returned.
-
- """
- return self.s_apply(self.r_eval, args)
-
- def s_execfile(self, *args):
- """Execute the Python code in the file in the restricted
- environment's __main__ module.
-
- Similar to the r_execfile() method, but the code will be granted
- access to restricted versions of the standard I/O streams sys.stdin,
- sys.stderr, and sys.stdout.
-
- """
- return self.s_apply(self.r_execfile, args)
-
- def s_import(self, *args):
- """Import a module, raising an ImportError exception if the module
- is considered unsafe.
-
- This method is implicitly called by code executing in the
- restricted environment. Overriding this method in a subclass is
- used to change the policies enforced by a restricted environment.
-
- Similar to the r_import() method, but has access to restricted
- versions of the standard I/O streams sys.stdin, sys.stderr, and
- sys.stdout.
-
- """
- return self.s_apply(self.r_import, args)
-
- def s_reload(self, *args):
- """Reload the module object, re-parsing and re-initializing it.
-
- This method is implicitly called by code executing in the
- restricted environment. Overriding this method in a subclass is
- used to change the policies enforced by a restricted environment.
-
- Similar to the r_reload() method, but has access to restricted
- versions of the standard I/O streams sys.stdin, sys.stderr, and
- sys.stdout.
-
- """
- return self.s_apply(self.r_reload, args)
-
- def s_unload(self, *args):
- """Unload the module.
-
- Removes it from the restricted environment's sys.modules dictionary.
-
- This method is implicitly called by code executing in the
- restricted environment. Overriding this method in a subclass is
- used to change the policies enforced by a restricted environment.
-
- Similar to the r_unload() method, but has access to restricted
- versions of the standard I/O streams sys.stdin, sys.stderr, and
- sys.stdout.
-
- """
- return self.s_apply(self.r_unload, args)
-
- # Restricted open(...)
-
- def r_open(self, file, mode='r', buf=-1):
- """Method called when open() is called in the restricted environment.
-
- The arguments are identical to those of the open() function, and a
- file object (or a class instance compatible with file objects)
-        should be returned. RExec's default behaviour is to allow opening
- any file for reading, but forbidding any attempt to write a file.
-
- This method is implicitly called by code executing in the
- restricted environment. Overriding this method in a subclass is
- used to change the policies enforced by a restricted environment.
-
- """
- mode = str(mode)
- if mode not in ('r', 'rb'):
- raise IOError, "can't open files for writing in restricted mode"
- return open(file, mode, buf)
-
- # Restricted version of sys.exc_info()
-
- def r_exc_info(self):
- ty, va, tr = sys.exc_info()
- tr = None
- return ty, va, tr
-
-
-def test():
- import getopt, traceback
- opts, args = getopt.getopt(sys.argv[1:], 'vt:')
- verbose = 0
- trusted = []
- for o, a in opts:
- if o == '-v':
- verbose = verbose+1
- if o == '-t':
- trusted.append(a)
- r = RExec(verbose=verbose)
- if trusted:
- r.ok_builtin_modules = r.ok_builtin_modules + tuple(trusted)
- if args:
- r.modules['sys'].argv = args
- r.modules['sys'].path.insert(0, os.path.dirname(args[0]))
- else:
- r.modules['sys'].path.insert(0, "")
- fp = sys.stdin
- if args and args[0] != '-':
- try:
- fp = open(args[0])
- except IOError as msg:
- print("%s: can't open file %r" % (sys.argv[0], args[0]))
- return 1
- if fp.isatty():
- try:
- import readline
- except ImportError:
- pass
- import code
- class RestrictedConsole(code.InteractiveConsole):
- def runcode(self, co):
- self.locals['__builtins__'] = r.modules['__builtin__']
- r.s_apply(code.InteractiveConsole.runcode, (self, co))
- try:
- RestrictedConsole(r.modules['__main__'].__dict__).interact()
- except SystemExit as n:
- return n
- else:
- text = fp.read()
- fp.close()
- c = compile(text, fp.name, 'exec')
- try:
- r.s_exec(c)
- except SystemExit as n:
- return n
- except:
- traceback.print_exc()
- return 1
-
-
-if __name__ == '__main__':
- sys.exit(test())
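One pattern worth noting in the deleted module: FileDelegate generates its forwarding methods by filling a template string and exec-ing the result in the class body. A self-contained sketch of that idiom (the Forwarder class and its method list are illustrative, not part of the removed API):

_TEMPLATE = """
def %s(self, *args):
    return getattr(self.target, %r)(*args)
"""

class Forwarder:
    """Forward a fixed set of method calls to a wrapped object."""
    def __init__(self, target):
        self.target = target
    # exec in a class body binds the generated def in the class namespace,
    # exactly as FileDelegate does above
    for _name in ('read', 'readline', 'close'):
        exec(_TEMPLATE % (_name, _name))
    del _name

So Forwarder(fp).read() simply calls fp.read() on the wrapped object.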
diff --git a/Lib/test/test___all__.py b/Lib/test/test___all__.py
index 6003733..7a39ca3 100644
--- a/Lib/test/test___all__.py
+++ b/Lib/test/test___all__.py
@@ -35,7 +35,6 @@ class AllTest(unittest.TestCase):
import _socket
self.check_all("BaseHTTPServer")
- self.check_all("Bastion")
self.check_all("CGIHTTPServer")
self.check_all("ConfigParser")
self.check_all("Cookie")
@@ -124,7 +123,6 @@ class AllTest(unittest.TestCase):
self.check_all("random")
self.check_all("re")
self.check_all("repr")
- self.check_all("rexec")
self.check_all("rfc822")
self.check_all("rlcompleter")
self.check_all("robotparser")
diff --git a/Lib/test/test_bastion.py b/Lib/test/test_bastion.py
deleted file mode 100644
index 4760ec8..0000000
--- a/Lib/test/test_bastion.py
+++ /dev/null
@@ -1,3 +0,0 @@
-##import Bastion
-##
-##Bastion._test()
diff --git a/Lib/test/test_compiler.py b/Lib/test/test_compiler.py
deleted file mode 100644
index c55dc0e..0000000
--- a/Lib/test/test_compiler.py
+++ /dev/null
@@ -1,265 +0,0 @@
-import compiler
-from compiler.ast import flatten
-import os, sys, time, unittest
-import test.test_support
-from random import random
-
-# How much time in seconds can pass before we print a 'Still working' message.
-_PRINT_WORKING_MSG_INTERVAL = 5 * 60
-
-class TrivialContext(object):
- def __enter__(self):
- return self
- def __exit__(self, *exc_info):
- pass
-
-class CompilerTest(unittest.TestCase):
-
- def testCompileLibrary(self):
- # A simple but large test. Compile all the code in the
- # standard library and its test suite. This doesn't verify
- # that any of the code is correct, merely the compiler is able
- # to generate some kind of code for it.
-
- next_time = time.time() + _PRINT_WORKING_MSG_INTERVAL
- libdir = os.path.dirname(unittest.__file__)
- testdir = os.path.dirname(test.test_support.__file__)
-
- for dir in [libdir, testdir]:
- for basename in os.listdir(dir):
- # Print still working message since this test can be really slow
- if next_time <= time.time():
- next_time = time.time() + _PRINT_WORKING_MSG_INTERVAL
- print(' testCompileLibrary still working, be patient...', file=sys.__stdout__)
- sys.__stdout__.flush()
-
- if not basename.endswith(".py"):
- continue
- if not TEST_ALL and random() < 0.98:
- continue
- path = os.path.join(dir, basename)
- if test.test_support.verbose:
- print("compiling", path)
- f = open(path, "U")
- buf = f.read()
- f.close()
- if "badsyntax" in basename or "bad_coding" in basename:
- self.assertRaises(SyntaxError, compiler.compile,
- buf, basename, "exec")
- else:
- try:
- compiler.compile(buf, basename, "exec")
- except Exception as e:
- args = list(e.args) or [""]
- args[0] = "%s [in file %s]" % (args[0], basename)
- e.args = tuple(args)
- raise
-
- def testNewClassSyntax(self):
- compiler.compile("class foo():pass\n\n","<string>","exec")
-
- def testYieldExpr(self):
- compiler.compile("def g(): yield\n\n", "<string>", "exec")
-
- def testTryExceptFinally(self):
- # Test that except and finally clauses in one try stmt are recognized
- c = compiler.compile("try:\n 1/0\nexcept:\n e = 1\nfinally:\n f = 1",
- "<string>", "exec")
- dct = {}
- exec(c, dct)
- self.assertEquals(dct.get('e'), 1)
- self.assertEquals(dct.get('f'), 1)
-
- def testDefaultArgs(self):
- self.assertRaises(SyntaxError, compiler.parse, "def foo(a=1, b): pass")
-
- def testDocstrings(self):
- c = compiler.compile('"doc"', '<string>', 'exec')
- self.assert_('__doc__' in c.co_names)
- c = compiler.compile('def f():\n "doc"', '<string>', 'exec')
- g = {}
- exec(c, g)
- self.assertEquals(g['f'].__doc__, "doc")
-
- def testLineNo(self):
- # Test that all nodes except Module have a correct lineno attribute.
- filename = __file__
- if filename.endswith((".pyc", ".pyo")):
- filename = filename[:-1]
- tree = compiler.parseFile(filename)
- self.check_lineno(tree)
-
- def check_lineno(self, node):
- try:
- self._check_lineno(node)
- except AssertionError:
- print(node.__class__, node.lineno)
- raise
-
- def _check_lineno(self, node):
- if not node.__class__ in NOLINENO:
- self.assert_(isinstance(node.lineno, int),
- "lineno=%s on %s" % (node.lineno, node.__class__))
- self.assert_(node.lineno > 0,
- "lineno=%s on %s" % (node.lineno, node.__class__))
- for child in node.getChildNodes():
- self.check_lineno(child)
-
- def testFlatten(self):
- self.assertEquals(flatten([1, [2]]), [1, 2])
- self.assertEquals(flatten((1, (2,))), [1, 2])
-
- def testNestedScope(self):
- c = compiler.compile('def g():\n'
- ' a = 1\n'
- ' def f(): return a + 2\n'
- ' return f()\n'
- 'result = g()',
- '<string>',
- 'exec')
- dct = {}
- exec(c, dct)
- self.assertEquals(dct.get('result'), 3)
- c = compiler.compile('def g(a):\n'
- ' def f(): return a + 2\n'
- ' return f()\n'
- 'result = g(1)',
- '<string>',
- 'exec')
- dct = {}
- exec(c, dct)
- self.assertEquals(dct.get('result'), 3)
- c = compiler.compile('def g((a, b)):\n'
- ' def f(): return a + b\n'
- ' return f()\n'
- 'result = g((1, 2))',
- '<string>',
- 'exec')
- dct = {}
- exec(c, dct)
- self.assertEquals(dct.get('result'), 3)
-
- def testGenExp(self):
- c = compiler.compile('list((i,j) for i in range(3) if i < 3'
- ' for j in range(4) if j > 2)',
- '<string>',
- 'eval')
- self.assertEquals(eval(c), [(0, 3), (1, 3), (2, 3)])
-
- def testFuncAnnotations(self):
- testdata = [
- ('def f(a: 1): pass', {'a': 1}),
- ('''def f(a, (b:1, c:2, d), e:3=4, f=5,
- *g:6, h:7, i=8, j:9=10, **k:11) -> 12: pass
- ''', {'b': 1, 'c': 2, 'e': 3, 'g': 6, 'h': 7, 'j': 9,
- 'k': 11, 'return': 12}),
- ]
- for sourcecode, expected in testdata:
- # avoid IndentationError: unexpected indent from trailing lines
- sourcecode = sourcecode.rstrip()+'\n'
- c = compiler.compile(sourcecode, '<string>', 'exec')
- dct = {}
- exec(c, dct)
- self.assertEquals(dct['f'].__annotations__, expected)
-
- def testWith(self):
- # SF bug 1638243
- c = compiler.compile('from __future__ import with_statement\n'
- 'def f():\n'
- ' with TrivialContext():\n'
- ' return 1\n'
- 'result = f()',
- '<string>',
- 'exec' )
- dct = {'TrivialContext': TrivialContext}
- exec(c, dct)
- self.assertEquals(dct.get('result'), 1)
-
- def testWithAss(self):
- c = compiler.compile('from __future__ import with_statement\n'
- 'def f():\n'
- ' with TrivialContext() as tc:\n'
- ' return 1\n'
- 'result = f()',
- '<string>',
- 'exec' )
- dct = {'TrivialContext': TrivialContext}
- exec(c, dct)
- self.assertEquals(dct.get('result'), 1)
-
- def testBytesLiteral(self):
- c = compiler.compile("b'foo'", '<string>', 'eval')
- b = eval(c)
-
- c = compiler.compile('def f(b=b"foo"):\n'
- ' b[0] += 1\n'
- ' return b\n'
- 'f(); f(); result = f()\n',
- '<string>',
- 'exec')
- dct = {}
- exec(c, dct)
- self.assertEquals(dct.get('result'), b"ioo")
-
- c = compiler.compile('def f():\n'
- ' b = b"foo"\n'
- ' b[0] += 1\n'
- ' return b\n'
- 'f(); f(); result = f()\n',
- '<string>',
- 'exec')
- dct = {}
- exec(c, dct)
- self.assertEquals(dct.get('result'), b"goo")
-
-NOLINENO = (compiler.ast.Module, compiler.ast.Stmt, compiler.ast.Discard)
-
-###############################################################################
-# code below is just used to trigger some possible errors, for the benefit of
-# testLineNo
-###############################################################################
-
-class Toto:
- """docstring"""
- pass
-
-a, b = 2, 3
-[c, d] = 5, 6
-l = [(x, y) for x, y in zip(range(5), range(5,10))]
-l[0]
-l[3:4]
-d = {'a': 2}
-d = {}
-t = ()
-t = (1, 2)
-l = []
-l = [1, 2]
-if l:
- pass
-else:
- a, b = b, a
-
-try:
- print(yo)
-except:
- yo = 3
-else:
- yo += 3
-
-try:
- a += b
-finally:
- b = 0
-
-from math import *
-
-###############################################################################
-
-def test_main(all=False):
- global TEST_ALL
- TEST_ALL = all or test.test_support.is_resource_enabled("compiler")
- test.test_support.run_unittest(CompilerTest)
-
-if __name__ == "__main__":
- import sys
- test_main('all' in sys.argv)
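The deleted tests drove compiler.compile through the same behaviours the builtin compile() supports, so equivalent checks can go straight through the builtin. A minimal sketch reusing the nested-scope case from the test above:

# Compile and run a snippet with a closure over an enclosing local
code_obj = compile(
    'def g():\n'
    '    a = 1\n'
    '    def f(): return a + 2\n'
    '    return f()\n'
    'result = g()',
    '<string>', 'exec')
namespace = {}
exec(code_obj, namespace)
assert namespace['result'] == 3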
diff --git a/Lib/test/test_descr.py b/Lib/test/test_descr.py
index efdf9b0..4b6d734 100644
--- a/Lib/test/test_descr.py
+++ b/Lib/test/test_descr.py
@@ -2452,49 +2452,6 @@ def keywords():
raise TestFailed("expected TypeError from bogus keyword "
"argument to %r" % constructor)
-def restricted():
- # XXX This test is disabled because rexec is not deemed safe
- return
- import rexec
- if verbose:
- print("Testing interaction with restricted execution ...")
-
- sandbox = rexec.RExec()
-
- code1 = """f = open(%r, 'w')""" % TESTFN
- code2 = """f = open(%r, 'w')""" % TESTFN
- code3 = """\
-f = open(%r)
-t = type(f) # a sneaky way to get the file() constructor
-f.close()
-f = t(%r, 'w') # rexec can't catch this by itself
-""" % (TESTFN, TESTFN)
-
- f = open(TESTFN, 'w') # Create the file so code3 can find it.
- f.close()
-
- try:
- for code in code1, code2, code3:
- try:
- sandbox.r_exec(code)
- except IOError as msg:
- if str(msg).find("restricted") >= 0:
- outcome = "OK"
- else:
- outcome = "got an exception, but not an expected one"
- else:
- outcome = "expected a restricted-execution exception"
-
- if outcome != "OK":
- raise TestFailed("%s, in %r" % (outcome, code))
-
- finally:
- try:
- import os
- os.unlink(TESTFN)
- except:
- pass
-
def str_subclass_as_dict_key():
if verbose:
print("Testing a str subclass used as dict key ..")
@@ -4173,7 +4130,6 @@ def test_main():
supers()
inherits()
keywords()
- restricted()
str_subclass_as_dict_key()
classic_comparisons()
rich_comparisons()
diff --git a/Lib/test/test_importhooks.py b/Lib/test/test_importhooks.py
index 66c9258..02268ab 100644
--- a/Lib/test/test_importhooks.py
+++ b/Lib/test/test_importhooks.py
@@ -251,7 +251,7 @@ class ImportHooksTestCase(ImportHooksBaseTestCase):
i = ImpWrapper()
sys.meta_path.append(i)
sys.path_hooks.append(ImpWrapper)
- mnames = ("colorsys", "urlparse", "distutils.core", "compiler.misc")
+ mnames = ("colorsys", "urlparse", "distutils.core")
for mname in mnames:
parent = mname.split(".")[0]
for n in list(sys.modules.keys()):
diff --git a/Lib/test/test_md5.py b/Lib/test/test_md5.py
deleted file mode 100644
index 1f08568..0000000
--- a/Lib/test/test_md5.py
+++ /dev/null
@@ -1,58 +0,0 @@
-# Testing md5 module
-
-import unittest
-from md5 import md5
-from test import test_support
-
-def hexstr(s):
- import string
- h = string.hexdigits
- r = ''
- for c in s:
- i = ord(c)
- r = r + h[(i >> 4) & 0xF] + h[i & 0xF]
- return r
-
-class MD5_Test(unittest.TestCase):
-
- def md5test(self, s, expected):
- self.assertEqual(hexstr(md5(s).digest()), expected)
- self.assertEqual(md5(s).hexdigest(), expected)
-
- def test_basics(self):
- eq = self.md5test
- eq('', 'd41d8cd98f00b204e9800998ecf8427e')
- eq('a', '0cc175b9c0f1b6a831c399e269772661')
- eq('abc', '900150983cd24fb0d6963f7d28e17f72')
- eq('message digest', 'f96b697d7cb7938d525a2f31aaf161d0')
- eq('abcdefghijklmnopqrstuvwxyz', 'c3fcd3d76192e4007dfb496cca67e13b')
- eq('ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789',
- 'd174ab98d277d9f5a5611c2c9f419d9f')
- eq('12345678901234567890123456789012345678901234567890123456789012345678901234567890',
- '57edf4a22be3c955ac49da2e2107b67a')
-
- def test_hexdigest(self):
- # hexdigest is new with Python 2.0
- m = md5('testing the hexdigest method')
- h = m.hexdigest()
- self.assertEqual(hexstr(m.digest()), h)
-
- def test_large_update(self):
- aas = 'a' * 64
- bees = 'b' * 64
- cees = 'c' * 64
-
- m1 = md5()
- m1.update(aas)
- m1.update(bees)
- m1.update(cees)
-
- m2 = md5()
- m2.update(aas + bees + cees)
- self.assertEqual(m1.digest(), m2.digest())
-
-def test_main():
- test_support.run_unittest(MD5_Test)
-
-if __name__ == '__main__':
- test_main()
diff --git a/Lib/test/test_pep247.py b/Lib/test/test_pep247.py
index cbd071b..4ea747a 100644
--- a/Lib/test/test_pep247.py
+++ b/Lib/test/test_pep247.py
@@ -3,7 +3,7 @@
# hashing algorithms.
#
-import md5, sha, hmac
+import hmac
def check_hash_module(module, key=None):
assert hasattr(module, 'digest_size'), "Must have digest_size"
@@ -45,6 +45,4 @@ def check_hash_module(module, key=None):
if __name__ == '__main__':
- check_hash_module(md5)
- check_hash_module(sha)
check_hash_module(hmac, key='abc')
diff --git a/Lib/test/test_pyclbr.py b/Lib/test/test_pyclbr.py
index 1edda75..749c568 100644
--- a/Lib/test/test_pyclbr.py
+++ b/Lib/test/test_pyclbr.py
@@ -11,10 +11,6 @@ from unittest import TestCase
StaticMethodType = type(staticmethod(lambda: None))
ClassMethodType = type(classmethod(lambda c: None))
-# This next line triggers an error on old versions of pyclbr.
-
-from commands import getstatus
-
# Here we test the python class browser code.
#
# The main function in this suite, 'testModule', compares the output
diff --git a/Lib/test/test_sha.py b/Lib/test/test_sha.py
deleted file mode 100644
index ea224e4..0000000
--- a/Lib/test/test_sha.py
+++ /dev/null
@@ -1,52 +0,0 @@
-# Testing sha module (NIST's Secure Hash Algorithm)
-
-# use the three examples from Federal Information Processing Standards
-# Publication 180-1, Secure Hash Standard, 1995 April 17
-# http://www.itl.nist.gov/div897/pubs/fip180-1.htm
-
-import sha
-import unittest
-from test import test_support
-
-
-class SHATestCase(unittest.TestCase):
- def check(self, data, digest):
- # Check digest matches the expected value
- obj = sha.new(data)
- computed = obj.hexdigest()
- self.assert_(computed == digest)
-
- # Verify that the value doesn't change between two consecutive
- # digest operations.
- computed_again = obj.hexdigest()
- self.assert_(computed == computed_again)
-
- # Check hexdigest() output matches digest()'s output
- digest = obj.digest()
- hexd = ""
- for c in digest:
- hexd += '%02x' % ord(c)
- self.assert_(computed == hexd)
-
- def test_case_1(self):
- self.check("abc",
- "a9993e364706816aba3e25717850c26c9cd0d89d")
-
- def test_case_2(self):
- self.check("abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq",
- "84983e441c3bd26ebaae4aa1f95129e5e54670f1")
-
- def test_case_3(self):
- self.check("a" * 1000000,
- "34aa973cd4c4daa4f61eeb2bdbad27316534016f")
-
- def test_case_4(self):
- self.check(chr(0xAA) * 80,
- '4ca0ef38f1794b28a8f8ee110ee79d48ce13be25')
-
-def test_main():
- test_support.run_unittest(SHATestCase)
-
-
-if __name__ == "__main__":
- test_main()
diff --git a/Lib/test/test_sundry.py b/Lib/test/test_sundry.py
index ebbe915..a37aad1 100644
--- a/Lib/test/test_sundry.py
+++ b/Lib/test/test_sundry.py
@@ -49,7 +49,6 @@ import posixfile
import pstats
import py_compile
import pydoc
-import rexec
import rlcompleter
import sched
import smtplib
diff --git a/Lib/test/test_tarfile.py b/Lib/test/test_tarfile.py
index 08c7a88..312050b 100644
--- a/Lib/test/test_tarfile.py
+++ b/Lib/test/test_tarfile.py
@@ -5,7 +5,7 @@ import os
import shutil
import tempfile
import StringIO
-import md5
+from hashlib import md5
import errno
import unittest
@@ -25,7 +25,7 @@ except ImportError:
bz2 = None
def md5sum(data):
- return md5.new(data).hexdigest()
+ return md5(data).hexdigest()
def path(path):
return test_support.findfile(path)
diff --git a/Lib/test/test_transformer.py b/Lib/test/test_transformer.py
deleted file mode 100644
index 6f1c4f9..0000000
--- a/Lib/test/test_transformer.py
+++ /dev/null
@@ -1,35 +0,0 @@
-import unittest
-from test import test_support
-from compiler import transformer, ast
-from compiler import compile
-
-class Tests(unittest.TestCase):
-
- def testMultipleLHS(self):
- """ Test multiple targets on the left hand side. """
-
- snippets = ['a, b = 1, 2',
- '(a, b) = 1, 2',
- '((a, b), c) = (1, 2), 3']
-
- for s in snippets:
- a = transformer.parse(s)
- assert isinstance(a, ast.Module)
- child1 = a.getChildNodes()[0]
- assert isinstance(child1, ast.Stmt)
- child2 = child1.getChildNodes()[0]
- assert isinstance(child2, ast.Assign)
-
- # This actually tests the compiler, but it's a way to assure the ast
- # is correct
- c = compile(s, '<string>', 'single')
- vals = {}
- exec(c, vals)
- assert vals['a'] == 1
- assert vals['b'] == 2
-
-def test_main():
- test_support.run_unittest(Tests)
-
-if __name__ == "__main__":
- test_main()
diff --git a/Lib/types.py b/Lib/types.py
index 1d90012..1c396fa 100644
--- a/Lib/types.py
+++ b/Lib/types.py
@@ -31,11 +31,7 @@ DictType = DictionaryType = dict
def _f(): pass
FunctionType = type(_f)
LambdaType = type(lambda: None) # Same as FunctionType
-try:
- CodeType = type(_f.__code__)
-except RuntimeError:
- # Execution in restricted environment
- pass
+CodeType = type(_f.__code__)
def _g():
yield 1
@@ -55,14 +51,9 @@ ModuleType = type(sys)
try:
raise TypeError
except TypeError:
- try:
- tb = sys.exc_info()[2]
- TracebackType = type(tb)
- FrameType = type(tb.tb_frame)
- except AttributeError:
- # In the restricted environment, exc_info returns (None, None,
- # None) Then, tb.tb_frame gives an attribute error
- pass
+ tb = sys.exc_info()[2]
+ TracebackType = type(tb)
+ FrameType = type(tb.tb_frame)
tb = None; del tb
SliceType = slice
diff --git a/Lib/uuid.py b/Lib/uuid.py
index 3d4572c..029df51 100644
--- a/Lib/uuid.py
+++ b/Lib/uuid.py
@@ -535,8 +535,8 @@ def uuid1(node=None, clock_seq=None):
def uuid3(namespace, name):
"""Generate a UUID from the MD5 hash of a namespace UUID and a name."""
- import md5
- hash = md5.md5(namespace.bytes + name).digest()
+ import hashlib
+ hash = hashlib.md5(namespace.bytes + name).digest()
return UUID(bytes=hash[:16], version=3)
def uuid4():
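The uuid3() behaviour is unchanged by the switch to hashlib: the namespace UUID's bytes concatenated with the name are hashed with MD5 and the result is stamped as a version-3 UUID. A short usage sketch:

import uuid

# Name-based UUIDs are deterministic: same namespace and name, same UUID
u1 = uuid.uuid3(uuid.NAMESPACE_DNS, 'python.org')
u2 = uuid.uuid3(uuid.NAMESPACE_DNS, 'python.org')
assert u1 == u2 and u1.version == 3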
diff --git a/Misc/NEWS b/Misc/NEWS
index b9c8ef9..e25a613 100644
--- a/Misc/NEWS
+++ b/Misc/NEWS
@@ -26,6 +26,9 @@ TO DO
Core and Builtins
-----------------
+- Remove the f_restricted attribute from frames. This naturally leads to the
+ removal of PyEval_GetRestricted() and PyFrame_IsRestricted().
+
- PEP 3132 was accepted. That means that you can do ``a, *b = range(5)``
to assign 0 to a and [1, 2, 3, 4] to b.
@@ -175,11 +178,19 @@ Extension Modules
Library
-------
+- Remove the compiler package. Use the _ast module and (an eventual) AST ->
+  bytecode mechanism instead.
+
+- Remove md5 and sha. Both have been deprecated since Python 2.5.
+
+- Remove Bastion and rexec as they have been disabled since Python 2.3 (this
+  also leads to the removal of the C API support for restricted execution).
+
- Remove obsolete IRIX modules: al, cd, cl, fl, fm, gl, imgfile, sgi, sv.
-- Remove bsddb185 module it was obsolete.
+- Remove bsddb185 module; it was obsolete.
-- Remove commands.getstatus() it was obsolete.
+- Remove commands.getstatus(); it was obsolete.
- Remove functions in string and strop modules that are also string methods.
diff --git a/Modules/cPickle.c b/Modules/cPickle.c
index 639e68b..79ccb3e 100644
--- a/Modules/cPickle.c
+++ b/Modules/cPickle.c
@@ -2701,21 +2701,8 @@ newPicklerobject(PyObject *file, int proto)
}
}
- if (PyEval_GetRestricted()) {
- /* Restricted execution, get private tables */
- PyObject *m = PyImport_Import(copy_reg_str);
-
- if (m == NULL)
- goto err;
- self->dispatch_table = PyObject_GetAttr(m, dispatch_table_str);
- Py_DECREF(m);
- if (self->dispatch_table == NULL)
- goto err;
- }
- else {
- self->dispatch_table = dispatch_table;
- Py_INCREF(dispatch_table);
- }
+ self->dispatch_table = dispatch_table;
+ Py_INCREF(dispatch_table);
PyObject_GC_Track(self);
return self;
diff --git a/Objects/fileobject.c b/Objects/fileobject.c
index 50b3cd6..e195450 100644
--- a/Objects/fileobject.c
+++ b/Objects/fileobject.c
@@ -207,15 +207,6 @@ open_the_file(PyFileObject *f, char *name, char *mode)
goto cleanup;
}
- /* rexec.py can't stop a user from getting the file() constructor --
- all they have to do is get *any* file object f, and then do
- type(f). Here we prevent them from doing damage with it. */
- if (PyEval_GetRestricted()) {
- PyErr_SetString(PyExc_IOError,
- "file() constructor not accessible in restricted mode");
- f = NULL;
- goto cleanup;
- }
errno = 0;
#ifdef MS_WINDOWS
diff --git a/Objects/frameobject.c b/Objects/frameobject.c
index 4f195ee..f780b3a 100644
--- a/Objects/frameobject.c
+++ b/Objects/frameobject.c
@@ -340,18 +340,12 @@ frame_settrace(PyFrameObject *f, PyObject* v, void *closure)
return 0;
}
-static PyObject *
-frame_getrestricted(PyFrameObject *f, void *closure)
-{
- return PyBool_FromLong(PyFrame_IsRestricted(f));
-}
static PyGetSetDef frame_getsetlist[] = {
{"f_locals", (getter)frame_getlocals, NULL, NULL},
{"f_lineno", (getter)frame_getlineno,
(setter)frame_setlineno, NULL},
{"f_trace", (getter)frame_gettrace, (setter)frame_settrace, NULL},
- {"f_restricted",(getter)frame_getrestricted,NULL, NULL},
{0}
};
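With the getter dropped from frame_getsetlist, frame objects no longer expose f_restricted at the Python level, while the remaining descriptors (f_locals, f_lineno, f_trace) are untouched. A quick check from Python, assuming an interpreter built with this change:

import sys

frame = sys._getframe()
assert not hasattr(frame, 'f_restricted')   # removed by this patch
assert hasattr(frame, 'f_lineno')           # still present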
diff --git a/Objects/funcobject.c b/Objects/funcobject.c
index d835f89..adbb6d5 100644
--- a/Objects/funcobject.c
+++ b/Objects/funcobject.c
@@ -234,21 +234,9 @@ static PyMemberDef func_memberlist[] = {
{NULL} /* Sentinel */
};
-static int
-restricted(void)
-{
- if (!PyEval_GetRestricted())
- return 0;
- PyErr_SetString(PyExc_RuntimeError,
- "function attributes not accessible in restricted mode");
- return 1;
-}
-
static PyObject *
func_get_dict(PyFunctionObject *op)
{
- if (restricted())
- return NULL;
if (op->func_dict == NULL) {
op->func_dict = PyDict_New();
if (op->func_dict == NULL)
@@ -263,8 +251,6 @@ func_set_dict(PyFunctionObject *op, PyObject *value)
{
PyObject *tmp;
- if (restricted())
- return -1;
/* It is illegal to del f.func_dict */
if (value == NULL) {
PyErr_SetString(PyExc_TypeError,
@@ -287,8 +273,6 @@ func_set_dict(PyFunctionObject *op, PyObject *value)
static PyObject *
func_get_code(PyFunctionObject *op)
{
- if (restricted())
- return NULL;
Py_INCREF(op->func_code);
return op->func_code;
}
@@ -299,8 +283,6 @@ func_set_code(PyFunctionObject *op, PyObject *value)
PyObject *tmp;
Py_ssize_t nfree, nclosure;
- if (restricted())
- return -1;
/* Not legal to del f.func_code or to set it to anything
* other than a code object. */
if (value == NULL || !PyCode_Check(value)) {
@@ -338,8 +320,6 @@ func_set_name(PyFunctionObject *op, PyObject *value)
{
PyObject *tmp;
- if (restricted())
- return -1;
/* Not legal to del f.func_name or to set it to anything
* other than a string object. */
if (value == NULL || !PyString_Check(value)) {
@@ -357,8 +337,6 @@ func_set_name(PyFunctionObject *op, PyObject *value)
static PyObject *
func_get_defaults(PyFunctionObject *op)
{
- if (restricted())
- return NULL;
if (op->func_defaults == NULL) {
Py_INCREF(Py_None);
return Py_None;
@@ -372,8 +350,6 @@ func_set_defaults(PyFunctionObject *op, PyObject *value)
{
PyObject *tmp;
- if (restricted())
- return -1;
/* Legal to del f.func_defaults.
* Can only set func_defaults to NULL or a tuple. */
if (value == Py_None)
@@ -393,8 +369,6 @@ func_set_defaults(PyFunctionObject *op, PyObject *value)
static PyObject *
func_get_kwdefaults(PyFunctionObject *op)
{
- if (restricted())
- return NULL;
if (op->func_kwdefaults == NULL) {
Py_INCREF(Py_None);
return Py_None;
@@ -407,9 +381,6 @@ static int
func_set_kwdefaults(PyFunctionObject *op, PyObject *value)
{
PyObject *tmp;
-
- if (restricted())
- return -1;
if (value == Py_None)
value = NULL;
diff --git a/Objects/methodobject.c b/Objects/methodobject.c
index 862acd1..4204db7 100644
--- a/Objects/methodobject.c
+++ b/Objects/methodobject.c
@@ -158,11 +158,7 @@ static PyObject *
meth_get__self__(PyCFunctionObject *m, void *closure)
{
PyObject *self;
- if (PyEval_GetRestricted()) {
- PyErr_SetString(PyExc_RuntimeError,
- "method.__self__ not accessible in restricted mode");
- return NULL;
- }
+
self = m->m_self;
if (self == NULL)
self = Py_None;
diff --git a/Python/ceval.c b/Python/ceval.c
index 73e7ea9..3bb2178 100644
--- a/Python/ceval.c
+++ b/Python/ceval.c
@@ -3367,13 +3367,6 @@ PyEval_GetFrame(void)
}
int
-PyEval_GetRestricted(void)
-{
- PyFrameObject *current_frame = PyEval_GetFrame();
- return current_frame == NULL ? 0 : PyFrame_IsRestricted(current_frame);
-}
-
-int
PyEval_MergeCompilerFlags(PyCompilerFlags *cf)
{
PyFrameObject *current_frame = PyEval_GetFrame();
diff --git a/Python/marshal.c b/Python/marshal.c
index 72253ea..d00ac8d 100644
--- a/Python/marshal.c
+++ b/Python/marshal.c
@@ -798,13 +798,7 @@ r_object(RFILE *p)
return v3;
case TYPE_CODE:
- if (PyEval_GetRestricted()) {
- PyErr_SetString(PyExc_RuntimeError,
- "cannot unmarshal code objects in "
- "restricted execution mode");
- return NULL;
- }
- else {
+ {
int argcount;
int kwonlyargcount;
int nlocals;
@@ -823,7 +817,7 @@ r_object(RFILE *p)
v = NULL;
- /* XXX ignore long->int overflows for now */
+ /* XXX ignore long->int overflows for now */
argcount = (int)r_long(p);
kwonlyargcount = (int)r_long(p);
nlocals = (int)r_long(p);
@@ -876,8 +870,8 @@ r_object(RFILE *p)
Py_XDECREF(name);
Py_XDECREF(lnotab);
+ return v;
}
- return v;
default:
/* Bogus data got written, which isn't ideal.
diff --git a/Python/structmember.c b/Python/structmember.c
index 87c1641..b6cf2ac 100644
--- a/Python/structmember.c
+++ b/Python/structmember.c
@@ -54,11 +54,7 @@ PyObject *
PyMember_GetOne(const char *addr, PyMemberDef *l)
{
PyObject *v;
- if ((l->flags & READ_RESTRICTED) &&
- PyEval_GetRestricted()) {
- PyErr_SetString(PyExc_RuntimeError, "restricted attribute");
- return NULL;
- }
+
addr += l->offset;
switch (l->type) {
case T_BYTE:
@@ -167,10 +163,6 @@ PyMember_SetOne(char *addr, PyMemberDef *l, PyObject *v)
PyErr_SetString(PyExc_AttributeError, "readonly attribute");
return -1;
}
- if ((l->flags & WRITE_RESTRICTED) && PyEval_GetRestricted()) {
- PyErr_SetString(PyExc_RuntimeError, "restricted attribute");
- return -1;
- }
if (v == NULL && l->type != T_OBJECT_EX && l->type != T_OBJECT) {
PyErr_SetString(PyExc_TypeError,
"can't delete numeric/char attribute");
diff --git a/Tools/compiler/ACKS b/Tools/compiler/ACKS
deleted file mode 100644
index 5f97b15..0000000
--- a/Tools/compiler/ACKS
+++ /dev/null
@@ -1,8 +0,0 @@
-Fred L. Drake, Jr.
-Mark Hammond
-Shane Hathaway
-Neil Schemenauer
-Evan Simpson
-Greg Stein
-Bill Tutt
-Moshe Zadka
diff --git a/Tools/compiler/README b/Tools/compiler/README
deleted file mode 100644
index 1885dd9..0000000
--- a/Tools/compiler/README
+++ /dev/null
@@ -1,18 +0,0 @@
-This directory contains support tools for the Python compiler package,
-which is now part of the standard library.
-
-compile.py Demo that compiles a Python module into a .pyc file
- using the pure-Python compiler code.
-
-demo.py Prints the names of all the methods defined in a module,
- as a demonstration of walking through the abstract syntax
- tree produced by the parser.
-
-dumppyc.py Dumps the contents of a .pyc file, printing
- the attributes of the code object followed by a
- code disassembly.
-
-regrtest.py Runs the Python test suite using bytecode generated
- by the pure-Python compiler code instead of the
- builtin compiler.
-
diff --git a/Tools/compiler/ast.txt b/Tools/compiler/ast.txt
deleted file mode 100644
index d458b75..0000000
--- a/Tools/compiler/ast.txt
+++ /dev/null
@@ -1,105 +0,0 @@
-# This file describes the nodes of the AST in ast.py. The module is
-# generated by astgen.py.
-# The descriptions use the following special notation to describe
-# properties of the children:
-# * this child is not a node
-# ! this child is a sequence that contains nodes in it
-# & this child may be set to None
-# = ... a default value for the node constructor (optional args)
-#
-# If you add node types here, please be sure to update the list of
-# Node types in Doc/lib/asttable.tex.
-Module: doc*, node
-Stmt: nodes!
-Decorators: nodes!
-Function: decorators&, name*, arguments!, defaults!, kwonlyargs!, returns&, flags*, doc*, code
-Lambda: arguments!, defaults!, kwonlyargs!, flags*, code
-SimpleArg: name*, annotation&
-NestedArgs: args!
-Kwarg: arg, expr
-Class: name*, bases!, doc*, code
-Pass:
-Break:
-Continue:
-For: assign, list, body, else_&
-While: test, body, else_&
-With: expr, vars&, body
-If: tests!, else_&
-IfExp: test, then, else_
-From: modname*, names*, level*
-Import: names*
-Raise: expr1&, expr2&, expr3&
-TryFinally: body, final
-TryExcept: body, handlers!, else_&
-Return: value
-Yield: value
-Const: value*
-Discard: expr
-AugAssign: node, op*, expr
-Assign: nodes!, expr
-AssTuple: nodes!
-AssList: nodes!
-AssName: name*, flags*
-AssAttr: expr, attrname*, flags*
-ListComp: expr, quals!
-ListCompFor: assign, list, ifs!
-ListCompIf: test
-GenExpr: code
-GenExprInner: expr, quals!
-GenExprFor: assign, iter, ifs!
-GenExprIf: test
-List: nodes!
-Dict: items!
-Set: items!
-Not: expr
-Compare: expr, ops!
-Name: name*
-Global: names*
-Getattr: expr, attrname*
-CallFunc: node, args!, star_args& = None, dstar_args& = None
-Keyword: name*, expr
-Subscript: expr, flags*, subs!
-Sliceobj: nodes!
-Slice: expr, flags*, lower&, upper&
-Assert: test, fail&
-Tuple: nodes!
-Or: nodes!
-And: nodes!
-Bitor: nodes!
-Bitxor: nodes!
-Bitand: nodes!
-LeftShift: (left, right)
-RightShift: (left, right)
-Add: (left, right)
-Sub: (left, right)
-Mul: (left, right)
-Div: (left, right)
-Mod: (left, right)
-Power: (left, right)
-FloorDiv: (left, right)
-UnaryAdd: expr
-UnarySub: expr
-Invert: expr
-
-init(Function):
- self.varargs = self.kwargs = None
- if flags & CO_VARARGS:
- self.varargs = 1
- if flags & CO_VARKEYWORDS:
- self.kwargs = 1
-
-init(Lambda):
- self.varargs = self.kwargs = None
- if flags & CO_VARARGS:
- self.varargs = 1
- if flags & CO_VARKEYWORDS:
- self.kwargs = 1
- self.returns = None
-
-init(GenExpr):
- self.arguments = [SimpleArg('.0', None)]
- self.varargs = self.kwargs = None
- self.kwonlyargs = ()
-
-init(GenExprFor):
- self.is_outmost = False
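For illustration (this sketch is not part of the patch itself): a spec entry such as "Assign: nodes!, expr" above was expanded by astgen.py, shown next, into a Node subclass along roughly the following lines. The "!" marker makes the child a possibly nested sequence that gets flattened, while a bare name is a single child node; the Node base class and flatten helpers here are simplified stand-ins for the real definitions in the generator's prologue further down.

    # Approximate shape of the class generated for "Assign: nodes!, expr".
    class Node:                      # stand-in for the prologue's Node base class
        pass

    def flatten(seq):                # stand-in for the prologue's flatten()
        out = []
        for elt in seq:
            if isinstance(elt, (tuple, list)):
                out.extend(flatten(elt))
            else:
                out.append(elt)
        return out

    def flatten_nodes(seq):
        return [n for n in flatten(seq) if isinstance(n, Node)]

    class Assign(Node):
        def __init__(self, nodes, expr, lineno=None):
            self.nodes = nodes       # "nodes!": sequence, may contain nested nodes
            self.expr = expr         # bare name: a single child node
            self.lineno = lineno

        def getChildren(self):
            children = []
            children.extend(flatten(self.nodes))
            children.append(self.expr)
            return tuple(children)

        def getChildNodes(self):
            nodelist = []
            nodelist.extend(flatten_nodes(self.nodes))
            nodelist.append(self.expr)
            return tuple(nodelist)

        def __repr__(self):
            return "Assign(%s, %s)" % (repr(self.nodes), repr(self.expr))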
diff --git a/Tools/compiler/astgen.py b/Tools/compiler/astgen.py
deleted file mode 100644
index 601d2bd..0000000
--- a/Tools/compiler/astgen.py
+++ /dev/null
@@ -1,292 +0,0 @@
-"""Generate ast module from specification
-
-This script generates the ast module from a simple specification,
-which makes it easy to accommodate changes in the grammar. This
-approach would be quite reasonable if the grammar changed often.
-Instead, it is rather complex to generate the appropriate code. And
-the Node interface has changed more often than the grammar.
-"""
-
-import fileinput
-import getopt
-import re
-import sys
-from StringIO import StringIO
-
-SPEC = "ast.txt"
-COMMA = ", "
-
-def load_boilerplate(file):
- f = open(file)
- buf = f.read()
- f.close()
- i = buf.find('### ''PROLOGUE')
- j = buf.find('### ''EPILOGUE')
- pro = buf[i+12:j].strip()
- epi = buf[j+12:].strip()
- return pro, epi
-
-def strip_default(arg):
- """Return the argname from an 'arg = default' string"""
- i = arg.find('=')
- if i == -1:
- return arg
- t = arg[:i].strip()
- return t
-
-P_NODE = 1
-P_OTHER = 2
-P_NESTED = 3
-P_NONE = 4
-
-class NodeInfo:
- """Each instance describes a specific AST node"""
- def __init__(self, name, args):
- self.name = name
- self.args = args.strip()
- self.argnames = self.get_argnames()
- self.argprops = self.get_argprops()
- self.nargs = len(self.argnames)
- self.init = []
-
- def get_argnames(self):
- if '(' in self.args:
- i = self.args.find('(')
- j = self.args.rfind(')')
- args = self.args[i+1:j]
- else:
- args = self.args
- return [strip_default(arg.strip())
- for arg in args.split(',') if arg]
-
- def get_argprops(self):
- """Each argument can have a property like '*' or '!'
-
- XXX This method modifies the argnames in place!
- """
- d = {}
- hardest_arg = P_NODE
- for i in range(len(self.argnames)):
- arg = self.argnames[i]
- if arg.endswith('*'):
- arg = self.argnames[i] = arg[:-1]
- d[arg] = P_OTHER
- hardest_arg = max(hardest_arg, P_OTHER)
- elif arg.endswith('!'):
- arg = self.argnames[i] = arg[:-1]
- d[arg] = P_NESTED
- hardest_arg = max(hardest_arg, P_NESTED)
- elif arg.endswith('&'):
- arg = self.argnames[i] = arg[:-1]
- d[arg] = P_NONE
- hardest_arg = max(hardest_arg, P_NONE)
- else:
- d[arg] = P_NODE
- self.hardest_arg = hardest_arg
-
- if hardest_arg > P_NODE:
- self.args = self.args.replace('*', '')
- self.args = self.args.replace('!', '')
- self.args = self.args.replace('&', '')
-
- return d
-
- def gen_source(self):
- buf = StringIO()
- print("class %s(Node):" % self.name, file=buf)
- self._gen_init(buf)
- print(file=buf)
- self._gen_getChildren(buf)
- print(file=buf)
- self._gen_getChildNodes(buf)
- print(file=buf)
- self._gen_repr(buf)
- buf.seek(0, 0)
- return buf.read()
-
- def _gen_init(self, buf):
- if self.args:
- print(" def __init__(self, %s, lineno=None):" % self.args, file=buf)
- else:
- print(" def __init__(self, lineno=None):", file=buf)
- if self.argnames:
- for name in self.argnames:
- print(" self.%s = %s" % (name, name), file=buf)
- print(" self.lineno = lineno", file=buf)
- # Copy the lines in self.init, indented four spaces. The rstrip()
- # business is to get rid of the four spaces if line happens to be
- # empty, so that reindent.py is happy with the output.
- for line in self.init:
- print((" " + line).rstrip(), file=buf)
-
- def _gen_getChildren(self, buf):
- print(" def getChildren(self):", file=buf)
- if len(self.argnames) == 0:
- print(" return ()", file=buf)
- else:
- if self.hardest_arg < P_NESTED:
- clist = COMMA.join(["self.%s" % c
- for c in self.argnames])
- if self.nargs == 1:
- print(" return %s," % clist, file=buf)
- else:
- print(" return %s" % clist, file=buf)
- else:
- if len(self.argnames) == 1:
- print(" return tuple(flatten(self.%s))" % self.argnames[0], file=buf)
- else:
- print(" children = []", file=buf)
- template = " children.%s(%sself.%s%s)"
- for name in self.argnames:
- if self.argprops[name] == P_NESTED:
- print(template % ("extend", "flatten(",
- name, ")"), file=buf)
- else:
- print(template % ("append", "", name, ""), file=buf)
- print(" return tuple(children)", file=buf)
-
- def _gen_getChildNodes(self, buf):
- print(" def getChildNodes(self):", file=buf)
- if len(self.argnames) == 0:
- print(" return ()", file=buf)
- else:
- if self.hardest_arg < P_NESTED:
- clist = ["self.%s" % c
- for c in self.argnames
- if self.argprops[c] == P_NODE]
- if len(clist) == 0:
- print(" return ()", file=buf)
- elif len(clist) == 1:
- print(" return %s," % clist[0], file=buf)
- else:
- print(" return %s" % COMMA.join(clist), file=buf)
- else:
- print(" nodelist = []", file=buf)
- template = " nodelist.%s(%sself.%s%s)"
- for name in self.argnames:
- if self.argprops[name] == P_NONE:
- tmp = (" if self.%s is not None:\n"
- " nodelist.append(self.%s)")
- print(tmp % (name, name), file=buf)
- elif self.argprops[name] == P_NESTED:
- print(template % ("extend", "flatten_nodes(",
- name, ")"), file=buf)
- elif self.argprops[name] == P_NODE:
- print(template % ("append", "", name, ""), file=buf)
- print(" return tuple(nodelist)", file=buf)
-
- def _gen_repr(self, buf):
- print(" def __repr__(self):", file=buf)
- if self.argnames:
- fmt = COMMA.join(["%s"] * self.nargs)
- if '(' in self.args:
- fmt = '(%s)' % fmt
- vals = ["repr(self.%s)" % name for name in self.argnames]
- vals = COMMA.join(vals)
- if self.nargs == 1:
- vals = vals + ","
- print(' return "%s(%s)" %% (%s)' % \
- (self.name, fmt, vals), file=buf)
- else:
- print(' return "%s()"' % self.name, file=buf)
-
-rx_init = re.compile('init\((.*)\):')
-
-def parse_spec(file):
- classes = {}
- cur = None
- for line in fileinput.input(file):
- if line.strip().startswith('#'):
- continue
- mo = rx_init.search(line)
- if mo is None:
- if cur is None:
- # a normal entry
- try:
- name, args = line.split(':')
- except ValueError:
- continue
- classes[name] = NodeInfo(name, args)
- cur = None
- else:
- # some code for the __init__ method
- cur.init.append(line)
- else:
- # some extra code for a Node's __init__ method
- name = mo.group(1)
- cur = classes[name]
- return sorted(classes.values(), key=lambda n: n.name)
-
-def main():
- prologue, epilogue = load_boilerplate(sys.argv[-1])
- print(prologue)
- print()
- classes = parse_spec(SPEC)
- for info in classes:
- print(info.gen_source())
- print(epilogue)
-
-if __name__ == "__main__":
- main()
- sys.exit(0)
-
-### PROLOGUE
-"""Python abstract syntax node definitions
-
-This file is automatically generated by Tools/compiler/astgen.py
-"""
-from compiler.consts import CO_VARARGS, CO_VARKEYWORDS
-
-def flatten(seq):
- l = []
- for elt in seq:
- t = type(elt)
- if t is tuple or t is list:
- for elt2 in flatten(elt):
- l.append(elt2)
- else:
- l.append(elt)
- return l
-
-def flatten_nodes(seq):
- return [n for n in flatten(seq) if isinstance(n, Node)]
-
-nodes = {}
-
-class Node:
- """Abstract base class for ast nodes."""
- def getChildren(self):
- pass # implemented by subclasses
- def __iter__(self):
- for n in self.getChildren():
- yield n
- def asList(self): # for backwards compatibility
- return self.getChildren()
- def getChildNodes(self):
- pass # implemented by subclasses
-
-class EmptyNode(Node):
- def getChildNodes(self):
- return ()
- def getChildren(self):
- return ()
-
-class Expression(Node):
- # Expression is an artificial node class to support "eval"
- nodes["expression"] = "Expression"
- def __init__(self, node):
- self.node = node
-
- def getChildren(self):
- return self.node,
-
- def getChildNodes(self):
- return self.node,
-
- def __repr__(self):
- return "Expression(%s)" % (repr(self.node))
-
-### EPILOGUE
-for name, obj in globals().items():
- if isinstance(obj, type) and issubclass(obj, Node):
- nodes[name.lower()] = obj
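A usage note on the generator above: load_boilerplate() pulls the prologue and epilogue sections delimited by the "### PROLOGUE" / "### EPILOGUE" markers out of whatever file is named last on the command line, and astgen.py carries those markers itself, while parse_spec() reads ast.txt from the current directory. Regenerating the module was therefore presumably a matter of running the script on its own source from Tools/compiler and redirecting stdout, e.g. "python astgen.py astgen.py > ast.py" (the exact target path is not recorded in this patch).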
diff --git a/Tools/compiler/compile.py b/Tools/compiler/compile.py
deleted file mode 100644
index 51a9f0c..0000000
--- a/Tools/compiler/compile.py
+++ /dev/null
@@ -1,51 +0,0 @@
-import sys
-import getopt
-
-from compiler import compileFile, visitor
-
-import profile
-
-def main():
- VERBOSE = 0
- DISPLAY = 0
- PROFILE = 0
- CONTINUE = 0
- opts, args = getopt.getopt(sys.argv[1:], 'vqdcp')
- for k, v in opts:
- if k == '-v':
- VERBOSE = 1
- visitor.ASTVisitor.VERBOSE = visitor.ASTVisitor.VERBOSE + 1
- if k == '-q':
- if sys.platform[:3]=="win":
- f = open('nul', 'wb') # /dev/null fails on Windows...
- else:
- f = open('/dev/null', 'wb')
- sys.stdout = f
- if k == '-d':
- DISPLAY = 1
- if k == '-c':
- CONTINUE = 1
- if k == '-p':
- PROFILE = 1
- if not args:
- print "no files to compile"
- else:
- for filename in args:
- if VERBOSE:
- print filename
- try:
- if PROFILE:
- profile.run('compileFile(%r, %r)' % (filename, DISPLAY),
- filename + ".prof")
- else:
- compileFile(filename, DISPLAY)
-
- except SyntaxError as err:
- print err
- if err.lineno is not None:
- print err.lineno
- if not CONTINUE:
- sys.exit(-1)
-
-if __name__ == "__main__":
- main()
diff --git a/Tools/compiler/demo.py b/Tools/compiler/demo.py
deleted file mode 100755
index 61c54ee..0000000
--- a/Tools/compiler/demo.py
+++ /dev/null
@@ -1,38 +0,0 @@
-#! /usr/bin/env python
-
-"""Print names of all methods defined in module
-
-This script demonstrates use of the visitor interface of the compiler
-package.
-"""
-
-import compiler
-
-class MethodFinder:
- """Print the names of all the methods
-
- Each visit method takes two arguments, the node and its current
- scope. The scope is the name of the current class or None.
- """
-
- def visitClass(self, node, scope=None):
- self.visit(node.code, node.name)
-
- def visitFunction(self, node, scope=None):
- if scope is not None:
- print "%s.%s" % (scope, node.name)
- self.visit(node.code, None)
-
-def main(files):
- mf = MethodFinder()
- for file in files:
- f = open(file)
- buf = f.read()
- f.close()
- ast = compiler.parse(buf)
- compiler.walk(ast, mf)
-
-if __name__ == "__main__":
- import sys
-
- main(sys.argv[1:])
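A quick illustration of what demo.py prints (the input file here is hypothetical, not part of the patch): given a file spam.py containing

    class Spam:
        def eggs(self):
            pass

    def ham():
        pass

running "python demo.py spam.py" would print just "Spam.eggs". visitFunction() only prints when it was reached with a class name as its scope argument, so the top-level ham() is visited but not reported.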
diff --git a/Tools/compiler/dumppyc.py b/Tools/compiler/dumppyc.py
deleted file mode 100755
index 1258cce..0000000
--- a/Tools/compiler/dumppyc.py
+++ /dev/null
@@ -1,47 +0,0 @@
-#! /usr/bin/env python
-
-import marshal
-import os
-import dis
-import types
-
-def dump(obj):
- print obj
- for attr in dir(obj):
- if attr.startswith('co_'):
- val = getattr(obj, attr)
- print "\t", attr, repr(val)
-
-def loadCode(path):
- f = open(path)
- f.read(8)
- co = marshal.load(f)
- f.close()
- return co
-
-def walk(co, match=None):
- if match is None or co.co_name == match:
- dump(co)
- print
- dis.dis(co)
- for obj in co.co_consts:
- if type(obj) == types.CodeType:
- walk(obj, match)
-
-def load(filename, codename=None):
- co = loadCode(filename)
- walk(co, codename)
-
-if __name__ == "__main__":
- import sys
- if len(sys.argv) == 3:
- filename, codename = sys.argv[1:]
- else:
- filename = sys.argv[1]
- codename = None
- if filename.endswith('.py'):
- buf = open(filename).read()
- co = compile(buf, filename, "exec")
- walk(co)
- else:
- load(filename, codename)
diff --git a/Tools/compiler/regrtest.py b/Tools/compiler/regrtest.py
deleted file mode 100644
index d86d746..0000000
--- a/Tools/compiler/regrtest.py
+++ /dev/null
@@ -1,83 +0,0 @@
-"""Run the Python regression test using the compiler
-
-This test runs the standard Python test suite using bytecode generated
-by this compiler instead of by the builtin compiler.
-
-The regression test is run with the interpreter in verbose mode so
-that import problems can be observed easily.
-"""
-
-from compiler import compileFile
-
-import os
-import sys
-import test
-import tempfile
-
-def copy_test_suite():
- dest = tempfile.mkdtemp()
- os.system("cp -r %s/* %s" % (test.__path__[0], dest))
- print "Creating copy of test suite in", dest
- return dest
-
-def copy_library():
- dest = tempfile.mkdtemp()
- libdir = os.path.split(test.__path__[0])[0]
- print "Found standard library in", libdir
- print "Creating copy of standard library in", dest
- os.system("cp -r %s/* %s" % (libdir, dest))
- return dest
-
-def compile_files(dir):
- print "Compiling", dir, "\n\t",
- line_len = 10
- for file in os.listdir(dir):
- base, ext = os.path.splitext(file)
- if ext == '.py':
- source = os.path.join(dir, file)
- line_len = line_len + len(file) + 1
- if line_len > 75:
- print "\n\t",
- line_len = len(source) + 9
- print file,
- try:
- compileFile(source)
- except SyntaxError as err:
- print err
- continue
- # make sure the .pyc file is not over-written
- os.chmod(source + "c", 444)
- elif file == 'CVS':
- pass
- else:
- path = os.path.join(dir, file)
- if os.path.isdir(path):
- print
- print
- compile_files(path)
- print "\t",
- line_len = 10
- print
-
-def run_regrtest(lib_dir):
- test_dir = os.path.join(lib_dir, "test")
- os.chdir(test_dir)
- os.system("PYTHONPATH=%s %s -v regrtest.py" % (lib_dir, sys.executable))
-
-def cleanup(dir):
- os.system("rm -rf %s" % dir)
-
-def raw_input(prompt):
- sys.stdout.write(prompt)
- sys.stdout.flush()
- return sys.stdin.readline()
-
-def main():
- lib_dir = copy_library()
- compile_files(lib_dir)
- run_regrtest(lib_dir)
- raw_input("Cleanup?")
- cleanup(lib_dir)
-
-if __name__ == "__main__":
- main()
diff --git a/Tools/compiler/stacktest.py b/Tools/compiler/stacktest.py
deleted file mode 100644
index 4f4b161..0000000
--- a/Tools/compiler/stacktest.py
+++ /dev/null
@@ -1,43 +0,0 @@
-import compiler
-import dis
-import types
-
-def extract_code_objects(co):
- l = [co]
- for const in co.co_consts:
- if type(const) == types.CodeType:
- l.append(const)
- return l
-
-def compare(a, b):
- if not (a.co_name == "?" or a.co_name.startswith('<lambda')):
- assert a.co_name == b.co_name, (a, b)
- if a.co_stacksize != b.co_stacksize:
- print "stack mismatch %s: %d vs. %d" % (a.co_name,
- a.co_stacksize,
- b.co_stacksize)
- if a.co_stacksize > b.co_stacksize:
- print "good code"
- dis.dis(a)
- print "bad code"
- dis.dis(b)
- assert 0
-
-def main(files):
- for file in files:
- print file
- buf = open(file).read()
- try:
- co1 = compile(buf, file, "exec")
- except SyntaxError:
- print "skipped"
- continue
- co2 = compiler.compile(buf, file, "exec")
- co1l = extract_code_objects(co1)
- co2l = extract_code_objects(co2)
- for a, b in zip(co1l, co2l):
- compare(a, b)
-
-if __name__ == "__main__":
- import sys
- main(sys.argv[1:])