-rw-r--r--   Demo/imputil/importers.py    320
-rw-r--r--   Lib/httplib.py              1150
-rw-r--r--   Lib/imputil.py               975
3 files changed, 1227 insertions, 1218 deletions
diff --git a/Demo/imputil/importers.py b/Demo/imputil/importers.py
index cfb2daf..864ff02 100644
--- a/Demo/imputil/importers.py
+++ b/Demo/imputil/importers.py
@@ -34,70 +34,70 @@ _suffix = '.py' + _suffix_char
_c_suffixes = filter(lambda x: x[2] == imp.C_EXTENSION, imp.get_suffixes())
def _timestamp(pathname):
- "Return the file modification time as a Long."
- try:
- s = os.stat(pathname)
- except OSError:
- return None
- return long(s[8])
+ "Return the file modification time as a Long."
+ try:
+ s = os.stat(pathname)
+ except OSError:
+ return None
+ return long(s[8])
def _fs_import(dir, modname, fqname):
- "Fetch a module from the filesystem."
-
- pathname = os.path.join(dir, modname)
- if os.path.isdir(pathname):
- values = { '__pkgdir__' : pathname, '__path__' : [ pathname ] }
- ispkg = 1
- pathname = os.path.join(pathname, '__init__')
- else:
- values = { }
- ispkg = 0
-
- # look for dynload modules
- for desc in _c_suffixes:
- file = pathname + desc[0]
- try:
- fp = open(file, desc[1])
- except IOError:
- pass
- else:
- module = imp.load_module(fqname, fp, file, desc)
- values['__file__'] = file
- return 0, module, values
-
- t_py = _timestamp(pathname + '.py')
- t_pyc = _timestamp(pathname + _suffix)
- if t_py is None and t_pyc is None:
- return None
- code = None
- if t_py is None or (t_pyc is not None and t_pyc >= t_py):
- file = pathname + _suffix
- f = open(file, 'rb')
- if f.read(4) == imp.get_magic():
- t = struct.unpack('<I', f.read(4))[0]
- if t == t_py:
- code = marshal.load(f)
- f.close()
- if code is None:
- file = pathname + '.py'
- code = _compile(file, t_py)
-
- values['__file__'] = file
- return ispkg, code, values
+ "Fetch a module from the filesystem."
+
+ pathname = os.path.join(dir, modname)
+ if os.path.isdir(pathname):
+ values = { '__pkgdir__' : pathname, '__path__' : [ pathname ] }
+ ispkg = 1
+ pathname = os.path.join(pathname, '__init__')
+ else:
+ values = { }
+ ispkg = 0
+
+ # look for dynload modules
+ for desc in _c_suffixes:
+ file = pathname + desc[0]
+ try:
+ fp = open(file, desc[1])
+ except IOError:
+ pass
+ else:
+ module = imp.load_module(fqname, fp, file, desc)
+ values['__file__'] = file
+ return 0, module, values
+
+ t_py = _timestamp(pathname + '.py')
+ t_pyc = _timestamp(pathname + _suffix)
+ if t_py is None and t_pyc is None:
+ return None
+ code = None
+ if t_py is None or (t_pyc is not None and t_pyc >= t_py):
+ file = pathname + _suffix
+ f = open(file, 'rb')
+ if f.read(4) == imp.get_magic():
+ t = struct.unpack('<I', f.read(4))[0]
+ if t == t_py:
+ code = marshal.load(f)
+ f.close()
+ if code is None:
+ file = pathname + '.py'
+ code = _compile(file, t_py)
+
+ values['__file__'] = file
+ return ispkg, code, values
######################################################################
#
# Simple function-based importer
#
class FuncImporter(imputil.Importer):
- "Importer subclass to use a supplied function rather than method overrides."
- def __init__(self, func):
- self.func = func
- def get_code(self, parent, modname, fqname):
- return self.func(parent, modname, fqname)
+ "Importer subclass to delegate to a function rather than method overrides."
+ def __init__(self, func):
+ self.func = func
+ def get_code(self, parent, modname, fqname):
+ return self.func(parent, modname, fqname)
def install_with(func):
- FuncImporter(func).install()
+ FuncImporter(func).install()
######################################################################
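[Editor's note, not part of the patch] The bytecode check in _fs_import above (read imp.get_magic(), then a 4-byte little-endian mtime, then the marshalled code object) can be illustrated in isolation. A minimal sketch under the same Python 2-era assumptions; load_cached_code and its path arguments are hypothetical names:

    import imp, marshal, os, struct

    def load_cached_code(py_path, pyc_path):
        "Return the code object from pyc_path if it is fresh, else None."
        src_mtime = long(os.stat(py_path)[8])        # same index _timestamp() uses
        f = open(pyc_path, 'rb')
        try:
            if f.read(4) != imp.get_magic():
                return None                          # built by another interpreter
            if struct.unpack('<I', f.read(4))[0] != src_mtime:
                return None                          # stale relative to the .py
            return marshal.load(f)                   # code object follows the header
        finally:
            f.close()
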
@@ -105,79 +105,79 @@ def install_with(func):
# Base class for archive-based importing
#
class PackageArchiveImporter(imputil.Importer):
- """Importer subclass to import from (file) archives.
-
- This Importer handles imports of the style <archive>.<subfile>, where
- <archive> can be located using a subclass-specific mechanism and the
- <subfile> is found in the archive using a subclass-specific mechanism.
-
- This class defines two hooks for subclasses: one to locate an archive
- (and possibly return some context for future subfile lookups), and one
- to locate subfiles.
- """
-
- def get_code(self, parent, modname, fqname):
- if parent:
- # the Importer._finish_import logic ensures that we handle imports
- # under the top level module (package / archive).
- assert parent.__importer__ == self
-
- # if a parent "package" is provided, then we are importing a sub-file
- # from the archive.
- result = self.get_subfile(parent.__archive__, modname)
- if result is None:
- return None
- if isinstance(result, _TupleType):
- assert len(result) == 2
- return (0,) + result
- return 0, result, {}
-
- # no parent was provided, so the archive should exist somewhere on the
- # default "path".
- archive = self.get_archive(modname)
- if archive is None:
- return None
- return 1, "", {'__archive__':archive}
-
- def get_archive(self, modname):
- """Get an archive of modules.
-
- This method should locate an archive and return a value which can be
- used by get_subfile to load modules from it. The value may be a simple
- pathname, an open file, or a complex object that caches information
- for future imports.
-
- Return None if the archive was not found.
+ """Importer subclass to import from (file) archives.
+
+ This Importer handles imports of the style <archive>.<subfile>, where
+ <archive> can be located using a subclass-specific mechanism and the
+ <subfile> is found in the archive using a subclass-specific mechanism.
+
+ This class defines two hooks for subclasses: one to locate an archive
+ (and possibly return some context for future subfile lookups), and one
+ to locate subfiles.
"""
- raise RuntimeError, "get_archive not implemented"
- def get_subfile(self, archive, modname):
- """Get code from a subfile in the specified archive.
+ def get_code(self, parent, modname, fqname):
+ if parent:
+ # the Importer._finish_import logic ensures that we handle imports
+ # under the top level module (package / archive).
+ assert parent.__importer__ == self
- Given the specified archive (as returned by get_archive()), locate
- and return a code object for the specified module name.
+ # if a parent "package" is provided, then we are importing a
+ # sub-file from the archive.
+ result = self.get_subfile(parent.__archive__, modname)
+ if result is None:
+ return None
+ if isinstance(result, _TupleType):
+ assert len(result) == 2
+ return (0,) + result
+ return 0, result, {}
- A 2-tuple may be returned, consisting of a code object and a dict
- of name/values to place into the target module.
+ # no parent was provided, so the archive should exist somewhere on the
+ # default "path".
+ archive = self.get_archive(modname)
+ if archive is None:
+ return None
+ return 1, "", {'__archive__':archive}
- Return None if the subfile was not found.
- """
- raise RuntimeError, "get_subfile not implemented"
+ def get_archive(self, modname):
+ """Get an archive of modules.
+
+ This method should locate an archive and return a value which can be
+ used by get_subfile to load modules from it. The value may be a simple
+ pathname, an open file, or a complex object that caches information
+ for future imports.
+
+ Return None if the archive was not found.
+ """
+ raise RuntimeError, "get_archive not implemented"
+
+ def get_subfile(self, archive, modname):
+ """Get code from a subfile in the specified archive.
+
+ Given the specified archive (as returned by get_archive()), locate
+ and return a code object for the specified module name.
+
+ A 2-tuple may be returned, consisting of a code object and a dict
+ of name/values to place into the target module.
+
+ Return None if the subfile was not found.
+ """
+ raise RuntimeError, "get_subfile not implemented"
class PackageArchive(PackageArchiveImporter):
- "PackageArchiveImporter subclass that refers to a specific archive."
+ "PackageArchiveImporter subclass that refers to a specific archive."
- def __init__(self, modname, archive_pathname):
- self.__modname = modname
- self.__path = archive_pathname
+ def __init__(self, modname, archive_pathname):
+ self.__modname = modname
+ self.__path = archive_pathname
- def get_archive(self, modname):
- if modname == self.__modname:
- return self.__path
- return None
+ def get_archive(self, modname):
+ if modname == self.__modname:
+ return self.__path
+ return None
- # get_subfile is passed the full pathname of the archive
+ # get_subfile is passed the full pathname of the archive
######################################################################
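[Editor's note, not part of the patch] The two hooks documented by PackageArchiveImporter above can be made concrete with a hypothetical subclass whose "archive" is simply a dict of module sources; DictArchiveImporter and its names are illustrative only, assuming it lives alongside the classes in importers.py:

    class DictArchiveImporter(PackageArchiveImporter):
        "Hypothetical archive importer: the archive value is a dict of sources."

        def __init__(self, pkgname, sources):
            self.__pkgname = pkgname
            self.__sources = sources                 # e.g. {'util': 'x = 1\n'}

        def get_archive(self, modname):
            # return any value that get_subfile() knows how to use
            if modname == self.__pkgname:
                return self.__sources
            return None

        def get_subfile(self, archive, modname):
            src = archive.get(modname)
            if src is None:
                return None                          # not in this archive
            # a bare code object means "not a package" (see get_code above)
            return compile(src, '<archive: %s>' % modname, 'exec')

Placed on sys.path like the other Importer instances in this file, such a subclass would have get_archive() resolve the top-level name and get_subfile() resolve names under it.
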
@@ -185,26 +185,26 @@ class PackageArchive(PackageArchiveImporter):
# Emulate the standard directory-based import mechanism
#
class DirectoryImporter(imputil.Importer):
- "Importer subclass to emulate the standard importer."
+ "Importer subclass to emulate the standard importer."
- def __init__(self, dir):
- self.dir = dir
+ def __init__(self, dir):
+ self.dir = dir
- def get_code(self, parent, modname, fqname):
- if parent:
- dir = parent.__pkgdir__
- else:
- dir = self.dir
+ def get_code(self, parent, modname, fqname):
+ if parent:
+ dir = parent.__pkgdir__
+ else:
+ dir = self.dir
- # Return the module (and other info) if found in the specified
- # directory. Otherwise, return None.
- return _fs_import(dir, modname, fqname)
+ # Return the module (and other info) if found in the specified
+ # directory. Otherwise, return None.
+ return _fs_import(dir, modname, fqname)
- def __repr__(self):
- return '<%s.%s for "%s" at 0x%x>' % (self.__class__.__module__,
- self.__class__.__name__,
- self.dir,
- id(self))
+ def __repr__(self):
+ return '<%s.%s for "%s" at 0x%x>' % (self.__class__.__module__,
+ self.__class__.__name__,
+ self.dir,
+ id(self))
######################################################################
@@ -212,37 +212,37 @@ class DirectoryImporter(imputil.Importer):
# Emulate the standard path-style import mechanism
#
class PathImporter(imputil.Importer):
- def __init__(self, path=sys.path):
- self.path = path
-
- def get_code(self, parent, modname, fqname):
- if parent:
- # we are looking for a module inside of a specific package
- return _fs_import(parent.__pkgdir__, modname, fqname)
-
- # scan sys.path, looking for the requested module
- for dir in self.path:
- if isinstance(dir, _StringType):
- result = _fs_import(dir, modname, fqname)
- if result:
- return result
-
- # not found
- return None
+ def __init__(self, path=sys.path):
+ self.path = path
+
+ def get_code(self, parent, modname, fqname):
+ if parent:
+ # we are looking for a module inside of a specific package
+ return _fs_import(parent.__pkgdir__, modname, fqname)
+
+ # scan sys.path, looking for the requested module
+ for dir in self.path:
+ if isinstance(dir, _StringType):
+ result = _fs_import(dir, modname, fqname)
+ if result:
+ return result
+
+ # not found
+ return None
######################################################################
def _test_dir():
- "Debug/test function to create DirectoryImporters from sys.path."
- imputil.ImportManager().install()
- path = sys.path[:]
- path.reverse()
- for d in path:
- sys.path.insert(0, DirectoryImporter(d))
- sys.path.insert(0, imputil.BuiltinImporter())
+ "Debug/test function to create DirectoryImporters from sys.path."
+ imputil.ImportManager().install()
+ path = sys.path[:]
+ path.reverse()
+ for d in path:
+ sys.path.insert(0, DirectoryImporter(d))
+ sys.path.insert(0, imputil.BuiltinImporter())
def _test_revamp():
- "Debug/test function for the revamped import system."
- imputil.ImportManager().install()
- sys.path.insert(0, PathImporter())
- sys.path.insert(0, imputil.BuiltinImporter())
+ "Debug/test function for the revamped import system."
+ imputil.ImportManager().install()
+ sys.path.insert(0, PathImporter())
+ sys.path.insert(0, imputil.BuiltinImporter())
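[Editor's note, not part of the patch] The _test_dir/_test_revamp helpers above show how the demo is wired up: install an ImportManager, then push Importer instances onto sys.path ahead of the ordinary string entries. A minimal sketch using only APIs that appear in this patch (the module name importers assumes the Demo file is importable):

    import sys
    import imputil
    from importers import PathImporter

    imputil.ImportManager().install()              # hook __import__
    sys.path.insert(0, PathImporter())             # emulate path-style imports
    sys.path.insert(0, imputil.BuiltinImporter())  # keep builtins resolvable

    import string                                  # now served by the importers
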
diff --git a/Lib/httplib.py b/Lib/httplib.py
index 5c0dacd..2a9546f 100644
--- a/Lib/httplib.py
+++ b/Lib/httplib.py
@@ -77,9 +77,9 @@ import string
import mimetools
try:
- from cStringIO import StringIO
+ from cStringIO import StringIO
except ImportError:
- from StringIO import StringIO
+ from StringIO import StringIO
HTTP_PORT = 80
HTTPS_PORT = 443
@@ -93,632 +93,636 @@ _CS_REQ_SENT = 'Request-sent'
class HTTPResponse:
- def __init__(self, sock):
- self.fp = sock.makefile('rb', 0)
-
- self.msg = None
-
- # from the Status-Line of the response
- self.version = _UNKNOWN # HTTP-Version
- self.status = _UNKNOWN # Status-Code
- self.reason = _UNKNOWN # Reason-Phrase
-
- self.chunked = _UNKNOWN # is "chunked" being used?
- self.chunk_left = _UNKNOWN # bytes left to read in current chunk
- self.length = _UNKNOWN # number of bytes left in response
- self.will_close = _UNKNOWN # connection will close at end of response
-
- def begin(self):
- if self.msg is not None:
- # we've already started reading the response
- return
-
- line = self.fp.readline()
- try:
- [version, status, reason] = string.split(line, None, 2)
- except ValueError:
- try:
- [version, status] = string.split(line, None, 1)
- reason = ""
- except ValueError:
- self.close()
- raise BadStatusLine(line)
- if version[:5] != 'HTTP/':
- self.close()
- raise BadStatusLine(line)
-
- self.status = status = int(status)
- self.reason = string.strip(reason)
-
- if version == 'HTTP/1.0':
- self.version = 10
- elif version[:7] == 'HTTP/1.':
- self.version = 11 # use HTTP/1.1 code for HTTP/1.x where x>=1
- else:
- raise UnknownProtocol(version)
-
- self.msg = mimetools.Message(self.fp, 0)
-
- # don't let the msg keep an fp
- self.msg.fp = None
-
- # are we using the chunked-style of transfer encoding?
- tr_enc = self.msg.getheader('transfer-encoding')
- if tr_enc:
- if string.lower(tr_enc) != 'chunked':
- raise UnknownTransferEncoding()
- self.chunked = 1
- self.chunk_left = None
- else:
- self.chunked = 0
-
- # will the connection close at the end of the response?
- conn = self.msg.getheader('connection')
- if conn:
- conn = string.lower(conn)
- # a "Connection: close" will always close the connection. if we
- # don't see that and this is not HTTP/1.1, then the connection will
- # close unless we see a Keep-Alive header.
- self.will_close = string.find(conn, 'close') != -1 or \
- ( self.version != 11 and \
- not self.msg.getheader('keep-alive') )
- else:
- # for HTTP/1.1, the connection will always remain open
- # otherwise, it will remain open IFF we see a Keep-Alive header
- self.will_close = self.version != 11 and \
- not self.msg.getheader('keep-alive')
-
- # do we have a Content-Length?
- # NOTE: RFC 2616, S4.4, #3 says we ignore this if tr_enc is "chunked"
- length = self.msg.getheader('content-length')
- if length and not self.chunked:
- self.length = int(length)
- else:
- self.length = None
-
- # does the body have a fixed length? (of zero)
- if (status == 204 or # No Content
- status == 304 or # Not Modified
- 100 <= status < 200): # 1xx codes
- self.length = 0
-
- # if the connection remains open, and we aren't using chunked, and
- # a content-length was not provided, then assume that the connection
- # WILL close.
- if not self.will_close and \
- not self.chunked and \
- self.length is None:
- self.will_close = 1
-
- # if there is no body, then close NOW. read() may never be called, thus
- # we will never mark self as closed.
- if self.length == 0:
- self.close()
-
- def close(self):
- if self.fp:
- self.fp.close()
- self.fp = None
-
- def isclosed(self):
- # NOTE: it is possible that we will not ever call self.close(). This
- # case occurs when will_close is TRUE, length is None, and we
- # read up to the last byte, but NOT past it.
- #
- # IMPLIES: if will_close is FALSE, then self.close() will ALWAYS be
- # called, meaning self.isclosed() is meaningful.
- return self.fp is None
-
- def read(self, amt=None):
- if self.fp is None:
- return ''
-
- if self.chunked:
- chunk_left = self.chunk_left
- value = ''
- while 1:
- if chunk_left is None:
- line = self.fp.readline()
- i = string.find(line, ';')
- if i >= 0:
- line = line[:i] # strip chunk-extensions
- chunk_left = string.atoi(line, 16)
- if chunk_left == 0:
- break
- if amt is None:
- value = value + self._safe_read(chunk_left)
- elif amt < chunk_left:
- value = value + self._safe_read(amt)
- self.chunk_left = chunk_left - amt
- return value
- elif amt == chunk_left:
- value = value + self._safe_read(amt)
- self._safe_read(2) # toss the CRLF at the end of the chunk
- self.chunk_left = None
- return value
- else:
- value = value + self._safe_read(chunk_left)
- amt = amt - chunk_left
+ def __init__(self, sock):
+ self.fp = sock.makefile('rb', 0)
+
+ self.msg = None
+
+ # from the Status-Line of the response
+ self.version = _UNKNOWN # HTTP-Version
+ self.status = _UNKNOWN # Status-Code
+ self.reason = _UNKNOWN # Reason-Phrase
- # we read the whole chunk, get another
- self._safe_read(2) # toss the CRLF at the end of the chunk
- chunk_left = None
+ self.chunked = _UNKNOWN # is "chunked" being used?
+ self.chunk_left = _UNKNOWN # bytes left to read in current chunk
+ self.length = _UNKNOWN # number of bytes left in response
+ self.will_close = _UNKNOWN # conn will close at end of response
+
+ def begin(self):
+ if self.msg is not None:
+ # we've already started reading the response
+ return
- # read and discard trailer up to the CRLF terminator
- ### note: we shouldn't have any trailers!
- while 1:
line = self.fp.readline()
- if line == '\r\n':
- break
-
- # we read everything; close the "file"
- self.close()
-
- return value
-
- elif amt is None:
- # unbounded read
- if self.will_close:
- s = self.fp.read()
- else:
- s = self._safe_read(self.length)
- self.close() # we read everything
- return s
-
- if self.length is not None:
- if amt > self.length:
- # clip the read to the "end of response"
- amt = self.length
- self.length = self.length - amt
-
- # we do not use _safe_read() here because this may be a .will_close
- # connection, and the user is reading more bytes than will be provided
- # (for example, reading in 1k chunks)
- s = self.fp.read(amt)
-
- # close our "file" if we know we should
- ### I'm not sure about the len(s) < amt part; we should be safe because
- ### we shouldn't be using non-blocking sockets
- if self.length == 0 or len(s) < amt:
- self.close()
-
- return s
-
- def _safe_read(self, amt):
- """Read the number of bytes requested, compensating for partial reads.
-
- Normally, we have a blocking socket, but a read() can be interrupted
- by a signal (resulting in a partial read).
-
- Note that we cannot distinguish between EOF and an interrupt when zero
- bytes have been read. IncompleteRead() will be raised in this situation.
-
- This function should be used when <amt> bytes "should" be present for
- reading. If the bytes are truly not available (due to EOF), then the
- IncompleteRead exception can be used to detect the problem.
- """
- s = ''
- while amt > 0:
- chunk = self.fp.read(amt)
- if not chunk:
- raise IncompleteRead(s)
- s = s + chunk
- amt = amt - len(chunk)
- return s
+ try:
+ [version, status, reason] = string.split(line, None, 2)
+ except ValueError:
+ try:
+ [version, status] = string.split(line, None, 1)
+ reason = ""
+ except ValueError:
+ self.close()
+ raise BadStatusLine(line)
+ if version[:5] != 'HTTP/':
+ self.close()
+ raise BadStatusLine(line)
+
+ self.status = status = int(status)
+ self.reason = string.strip(reason)
+
+ if version == 'HTTP/1.0':
+ self.version = 10
+ elif version[:7] == 'HTTP/1.':
+ self.version = 11 # use HTTP/1.1 code for HTTP/1.x where x>=1
+ else:
+ raise UnknownProtocol(version)
- def getheader(self, name, default=None):
- if self.msg is None:
- raise ResponseNotReady()
- return self.msg.getheader(name, default)
+ self.msg = mimetools.Message(self.fp, 0)
+
+ # don't let the msg keep an fp
+ self.msg.fp = None
+
+ # are we using the chunked-style of transfer encoding?
+ tr_enc = self.msg.getheader('transfer-encoding')
+ if tr_enc:
+ if string.lower(tr_enc) != 'chunked':
+ raise UnknownTransferEncoding()
+ self.chunked = 1
+ self.chunk_left = None
+ else:
+ self.chunked = 0
+
+ # will the connection close at the end of the response?
+ conn = self.msg.getheader('connection')
+ if conn:
+ conn = string.lower(conn)
+ # a "Connection: close" will always close the connection. if we
+ # don't see that and this is not HTTP/1.1, then the connection will
+ # close unless we see a Keep-Alive header.
+ self.will_close = string.find(conn, 'close') != -1 or \
+ ( self.version != 11 and \
+ not self.msg.getheader('keep-alive') )
+ else:
+ # for HTTP/1.1, the connection will always remain open
+ # otherwise, it will remain open IFF we see a Keep-Alive header
+ self.will_close = self.version != 11 and \
+ not self.msg.getheader('keep-alive')
+
+ # do we have a Content-Length?
+ # NOTE: RFC 2616, S4.4, #3 says we ignore this if tr_enc is "chunked"
+ length = self.msg.getheader('content-length')
+ if length and not self.chunked:
+ self.length = int(length)
+ else:
+ self.length = None
+
+ # does the body have a fixed length? (of zero)
+ if (status == 204 or # No Content
+ status == 304 or # Not Modified
+ 100 <= status < 200): # 1xx codes
+ self.length = 0
+
+ # if the connection remains open, and we aren't using chunked, and
+ # a content-length was not provided, then assume that the connection
+ # WILL close.
+ if not self.will_close and \
+ not self.chunked and \
+ self.length is None:
+ self.will_close = 1
+
+ # if there is no body, then close NOW. read() may never be called, thus
+ # we will never mark self as closed.
+ if self.length == 0:
+ self.close()
+
+ def close(self):
+ if self.fp:
+ self.fp.close()
+ self.fp = None
+
+ def isclosed(self):
+ # NOTE: it is possible that we will not ever call self.close(). This
+ # case occurs when will_close is TRUE, length is None, and we
+ # read up to the last byte, but NOT past it.
+ #
+ # IMPLIES: if will_close is FALSE, then self.close() will ALWAYS be
+ # called, meaning self.isclosed() is meaningful.
+ return self.fp is None
+
+ def read(self, amt=None):
+ if self.fp is None:
+ return ''
+
+ if self.chunked:
+ chunk_left = self.chunk_left
+ value = ''
+ while 1:
+ if chunk_left is None:
+ line = self.fp.readline()
+ i = string.find(line, ';')
+ if i >= 0:
+ line = line[:i] # strip chunk-extensions
+ chunk_left = string.atoi(line, 16)
+ if chunk_left == 0:
+ break
+ if amt is None:
+ value = value + self._safe_read(chunk_left)
+ elif amt < chunk_left:
+ value = value + self._safe_read(amt)
+ self.chunk_left = chunk_left - amt
+ return value
+ elif amt == chunk_left:
+ value = value + self._safe_read(amt)
+ self._safe_read(2) # toss the CRLF at the end of the chunk
+ self.chunk_left = None
+ return value
+ else:
+ value = value + self._safe_read(chunk_left)
+ amt = amt - chunk_left
+
+ # we read the whole chunk, get another
+ self._safe_read(2) # toss the CRLF at the end of the chunk
+ chunk_left = None
+
+ # read and discard trailer up to the CRLF terminator
+ ### note: we shouldn't have any trailers!
+ while 1:
+ line = self.fp.readline()
+ if line == '\r\n':
+ break
+
+ # we read everything; close the "file"
+ self.close()
+
+ return value
+
+ elif amt is None:
+ # unbounded read
+ if self.will_close:
+ s = self.fp.read()
+ else:
+ s = self._safe_read(self.length)
+ self.close() # we read everything
+ return s
+
+ if self.length is not None:
+ if amt > self.length:
+ # clip the read to the "end of response"
+ amt = self.length
+ self.length = self.length - amt
+
+ # we do not use _safe_read() here because this may be a .will_close
+ # connection, and the user is reading more bytes than will be provided
+ # (for example, reading in 1k chunks)
+ s = self.fp.read(amt)
+
+ # close our "file" if we know we should
+ ### I'm not sure about the len(s) < amt part; we should be safe because
+ ### we shouldn't be using non-blocking sockets
+ if self.length == 0 or len(s) < amt:
+ self.close()
+
+ return s
+
+ def _safe_read(self, amt):
+ """Read the number of bytes requested, compensating for partial reads.
+
+ Normally, we have a blocking socket, but a read() can be interrupted
+ by a signal (resulting in a partial read).
+
+ Note that we cannot distinguish between EOF and an interrupt when zero
+ bytes have been read. IncompleteRead() will be raised in this
+ situation.
+
+ This function should be used when <amt> bytes "should" be present for
+ reading. If the bytes are truly not available (due to EOF), then the
+ IncompleteRead exception can be used to detect the problem.
+ """
+ s = ''
+ while amt > 0:
+ chunk = self.fp.read(amt)
+ if not chunk:
+ raise IncompleteRead(s)
+ s = s + chunk
+ amt = amt - len(chunk)
+ return s
+
+ def getheader(self, name, default=None):
+ if self.msg is None:
+ raise ResponseNotReady()
+ return self.msg.getheader(name, default)
class HTTPConnection:
- _http_vsn = 11
- _http_vsn_str = 'HTTP/1.1'
-
- response_class = HTTPResponse
- default_port = HTTP_PORT
- auto_open = 1
-
- def __init__(self, host, port=None):
- self.sock = None
- self.__response = None
- self.__state = _CS_IDLE
-
- self._set_hostport(host, port)
-
- def _set_hostport(self, host, port):
- if port is None:
- i = string.find(host, ':')
- if i >= 0:
- port = int(host[i+1:])
- host = host[:i]
- else:
- port = self.default_port
- self.host = host
- self.port = port
-
- def connect(self):
- """Connect to the host and port specified in __init__."""
- self.sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
- self.sock.connect((self.host, self.port))
-
- def close(self):
- """Close the connection to the HTTP server."""
- if self.sock:
- self.sock.close() # close it manually... there may be other refs
- self.sock = None
- if self.__response:
- self.__response.close()
- self.__response = None
- self.__state = _CS_IDLE
-
- def send(self, str):
- """Send `str' to the server."""
- if self.sock is None:
- if self.auto_open:
- self.connect()
- else:
- raise NotConnected()
-
- # send the data to the server. if we get a broken pipe, then close
- # the socket. we want to reconnect when somebody tries to send again.
- #
- # NOTE: we DO propagate the error, though, because we cannot simply
- # ignore the error... the caller will know if they can retry.
- try:
- self.sock.send(str)
- except socket.error, v:
- if v[0] == 32: # Broken pipe
- self.close()
- raise
-
- def putrequest(self, method, url):
- """Send a request to the server.
-
- `method' specifies an HTTP request method, e.g. 'GET'.
- `url' specifies the object being requested, e.g. '/index.html'.
- """
+ _http_vsn = 11
+ _http_vsn_str = 'HTTP/1.1'
+
+ response_class = HTTPResponse
+ default_port = HTTP_PORT
+ auto_open = 1
+
+ def __init__(self, host, port=None):
+ self.sock = None
+ self.__response = None
+ self.__state = _CS_IDLE
+
+ self._set_hostport(host, port)
+
+ def _set_hostport(self, host, port):
+ if port is None:
+ i = string.find(host, ':')
+ if i >= 0:
+ port = int(host[i+1:])
+ host = host[:i]
+ else:
+ port = self.default_port
+ self.host = host
+ self.port = port
+
+ def connect(self):
+ """Connect to the host and port specified in __init__."""
+ self.sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
+ self.sock.connect((self.host, self.port))
+
+ def close(self):
+ """Close the connection to the HTTP server."""
+ if self.sock:
+ self.sock.close() # close it manually... there may be other refs
+ self.sock = None
+ if self.__response:
+ self.__response.close()
+ self.__response = None
+ self.__state = _CS_IDLE
+
+ def send(self, str):
+ """Send `str' to the server."""
+ if self.sock is None:
+ if self.auto_open:
+ self.connect()
+ else:
+ raise NotConnected()
+
+ # send the data to the server. if we get a broken pipe, then close
+ # the socket. we want to reconnect when somebody tries to send again.
+ #
+ # NOTE: we DO propagate the error, though, because we cannot simply
+ # ignore the error... the caller will know if they can retry.
+ try:
+ self.sock.send(str)
+ except socket.error, v:
+ if v[0] == 32: # Broken pipe
+ self.close()
+ raise
+
+ def putrequest(self, method, url):
+ """Send a request to the server.
+
+ `method' specifies an HTTP request method, e.g. 'GET'.
+ `url' specifies the object being requested, e.g. '/index.html'.
+ """
+
+ # check if a prior response has been completed
+ if self.__response and self.__response.isclosed():
+ self.__response = None
+
+ #
+ # in certain cases, we cannot issue another request on this connection.
+ # this occurs when:
+ # 1) we are in the process of sending a request. (_CS_REQ_STARTED)
+ # 2) a response to a previous request has signalled that it is going
+ # to close the connection upon completion.
+ # 3) the headers for the previous response have not been read, thus
+ # we cannot determine whether point (2) is true. (_CS_REQ_SENT)
+ #
+ # if there is no prior response, then we can request at will.
+ #
+ # if point (2) is true, then we will have passed the socket to the
+ # response (effectively meaning, "there is no prior response"), and
+ # will open a new one when a new request is made.
+ #
+ # Note: if a prior response exists, then we *can* start a new request.
+ # We are not allowed to begin fetching the response to this new
+ # request, however, until that prior response is complete.
+ #
+ if self.__state == _CS_IDLE:
+ self.__state = _CS_REQ_STARTED
+ else:
+ raise CannotSendRequest()
+
+ if not url:
+ url = '/'
+ str = '%s %s %s\r\n' % (method, url, self._http_vsn_str)
+
+ try:
+ self.send(str)
+ except socket.error, v:
+ # trap 'Broken pipe' if we're allowed to automatically reconnect
+ if v[0] != 32 or not self.auto_open:
+ raise
+ # try one more time (the socket was closed; this will reopen)
+ self.send(str)
+
+ if self._http_vsn == 11:
+ # Issue some standard headers for better HTTP/1.1 compliance
+
+ # this header is issued *only* for HTTP/1.1 connections. more
+ # specifically, this means it is only issued when the client uses
+ # the new HTTPConnection() class. backwards-compat clients will
+ # be using HTTP/1.0 and those clients may be issuing this header
+ # themselves. we should NOT issue it twice; some web servers (such
+ # as Apache) barf when they see two Host: headers
+ self.putheader('Host', self.host)
+
+ # note: we are assuming that clients will not attempt to set these
+ # headers since *this* library must deal with the
+ # consequences. this also means that when the supporting
+ # libraries are updated to recognize other forms, then this
+ # code should be changed (removed or updated).
+
+ # we only want a Content-Encoding of "identity" since we don't
+ # support encodings such as x-gzip or x-deflate.
+ self.putheader('Accept-Encoding', 'identity')
+
+ # we can accept "chunked" Transfer-Encodings, but no others
+ # NOTE: no TE header implies *only* "chunked"
+ #self.putheader('TE', 'chunked')
+
+ # if TE is supplied in the header, then it must appear in a
+ # Connection header.
+ #self.putheader('Connection', 'TE')
- # check if a prior response has been completed
- if self.__response and self.__response.isclosed():
- self.__response = None
-
- #
- # in certain cases, we cannot issue another request on this connection.
- # this occurs when:
- # 1) we are in the process of sending a request. (_CS_REQ_STARTED)
- # 2) a response to a previous request has signalled that it is going
- # to close the connection upon completion.
- # 3) the headers for the previous response have not been read, thus
- # we cannot determine whether point (2) is true. (_CS_REQ_SENT)
- #
- # if there is no prior response, then we can request at will.
- #
- # if point (2) is true, then we will have passed the socket to the
- # response (effectively meaning, "there is no prior response"), and will
- # open a new one when a new request is made.
- #
- # Note: if a prior response exists, then we *can* start a new request.
- # We are not allowed to begin fetching the response to this new
- # request, however, until that prior response is complete.
- #
- if self.__state == _CS_IDLE:
- self.__state = _CS_REQ_STARTED
- else:
- raise CannotSendRequest()
-
- if not url:
- url = '/'
- str = '%s %s %s\r\n' % (method, url, self._http_vsn_str)
-
- try:
- self.send(str)
- except socket.error, v:
- # trap 'Broken pipe' if we're allowed to automatically reconnect
- if v[0] != 32 or not self.auto_open:
- raise
- # try one more time (the socket was closed; this will reopen)
- self.send(str)
-
- if self._http_vsn == 11:
- # Issue some standard headers for better HTTP/1.1 compliance
-
- # this header is issued *only* for HTTP/1.1 connections. more
- # specifically, this means it is only issued when the client uses
- # the new HTTPConnection() class. backwards-compat clients will
- # be using HTTP/1.0 and those clients may be issuing this header
- # themselves. we should NOT issue it twice; some web servers (such
- # as Apache) barf when they see two Host: headers
- self.putheader('Host', self.host)
-
- # note: we are assuming that clients will not attempt to set these
- # headers since *this* library must deal with the consequences.
- # this also means that when the supporting libraries are
- # updated to recognize other forms, then this code should be
- # changed (removed or updated).
-
- # we only want a Content-Encoding of "identity" since we don't
- # support encodings such as x-gzip or x-deflate.
- self.putheader('Accept-Encoding', 'identity')
-
- # we can accept "chunked" Transfer-Encodings, but no others
- # NOTE: no TE header implies *only* "chunked"
- #self.putheader('TE', 'chunked')
-
- # if TE is supplied in the header, then it must appear in a
- # Connection header.
- #self.putheader('Connection', 'TE')
-
- else:
- # For HTTP/1.0, the server will assume "not chunked"
- pass
-
- def putheader(self, header, value):
- """Send a request header line to the server.
-
- For example: h.putheader('Accept', 'text/html')
- """
- if self.__state != _CS_REQ_STARTED:
- raise CannotSendHeader()
-
- str = '%s: %s\r\n' % (header, value)
- self.send(str)
-
- def endheaders(self):
- """Indicate that the last header line has been sent to the server."""
-
- if self.__state == _CS_REQ_STARTED:
- self.__state = _CS_REQ_SENT
- else:
- raise CannotSendHeader()
-
- self.send('\r\n')
-
- def request(self, method, url, body=None, headers={}):
- """Send a complete request to the server."""
-
- try:
- self._send_request(method, url, body, headers)
- except socket.error, v:
- # trap 'Broken pipe' if we're allowed to automatically reconnect
- if v[0] != 32 or not self.auto_open:
- raise
- # try one more time
- self._send_request(method, url, body, headers)
-
- def _send_request(self, method, url, body, headers):
- self.putrequest(method, url)
-
- if body:
- self.putheader('Content-Length', str(len(body)))
- for hdr, value in headers.items():
- self.putheader(hdr, value)
- self.endheaders()
-
- if body:
- self.send(body)
-
- def getresponse(self):
- "Get the response from the server."
-
- # check if a prior response has been completed
- if self.__response and self.__response.isclosed():
- self.__response = None
-
- #
- # if a prior response exists, then it must be completed (otherwise, we
- # cannot read this response's header to determine the connection-close
- # behavior)
- #
- # note: if a prior response existed, but was connection-close, then the
- # socket and response were made independent of this HTTPConnection object
- # since a new request requires that we open a whole new connection
- #
- # this means the prior response had one of two states:
- # 1) will_close: this connection was reset and the prior socket and
- # response operate independently
- # 2) persistent: the response was retained and we await its isclosed()
- # status to become true.
- #
- if self.__state != _CS_REQ_SENT or self.__response:
- raise ResponseNotReady()
-
- response = self.response_class(self.sock)
-
- response.begin()
- self.__state = _CS_IDLE
-
- if response.will_close:
- # this effectively passes the connection to the response
- self.close()
- else:
- # remember this, so we can tell when it is complete
- self.__response = response
-
- return response
+ else:
+ # For HTTP/1.0, the server will assume "not chunked"
+ pass
+
+ def putheader(self, header, value):
+ """Send a request header line to the server.
+
+ For example: h.putheader('Accept', 'text/html')
+ """
+ if self.__state != _CS_REQ_STARTED:
+ raise CannotSendHeader()
+
+ str = '%s: %s\r\n' % (header, value)
+ self.send(str)
+
+ def endheaders(self):
+ """Indicate that the last header line has been sent to the server."""
+
+ if self.__state == _CS_REQ_STARTED:
+ self.__state = _CS_REQ_SENT
+ else:
+ raise CannotSendHeader()
+
+ self.send('\r\n')
+
+ def request(self, method, url, body=None, headers={}):
+ """Send a complete request to the server."""
+
+ try:
+ self._send_request(method, url, body, headers)
+ except socket.error, v:
+ # trap 'Broken pipe' if we're allowed to automatically reconnect
+ if v[0] != 32 or not self.auto_open:
+ raise
+ # try one more time
+ self._send_request(method, url, body, headers)
+
+ def _send_request(self, method, url, body, headers):
+ self.putrequest(method, url)
+
+ if body:
+ self.putheader('Content-Length', str(len(body)))
+ for hdr, value in headers.items():
+ self.putheader(hdr, value)
+ self.endheaders()
+
+ if body:
+ self.send(body)
+
+ def getresponse(self):
+ "Get the response from the server."
+
+ # check if a prior response has been completed
+ if self.__response and self.__response.isclosed():
+ self.__response = None
+
+ #
+ # if a prior response exists, then it must be completed (otherwise, we
+ # cannot read this response's header to determine the connection-close
+ # behavior)
+ #
+ # note: if a prior response existed, but was connection-close, then the
+ # socket and response were made independent of this HTTPConnection
+ # object since a new request requires that we open a whole new
+ # connection
+ #
+ # this means the prior response had one of two states:
+ # 1) will_close: this connection was reset and the prior socket and
+ # response operate independently
+ # 2) persistent: the response was retained and we await its
+ # isclosed() status to become true.
+ #
+ if self.__state != _CS_REQ_SENT or self.__response:
+ raise ResponseNotReady()
+
+ response = self.response_class(self.sock)
+
+ response.begin()
+ self.__state = _CS_IDLE
+
+ if response.will_close:
+ # this effectively passes the connection to the response
+ self.close()
+ else:
+ # remember this, so we can tell when it is complete
+ self.__response = response
+
+ return response
class FakeSocket:
- def __init__(self, sock, ssl):
- self.__sock = sock
- self.__ssl = ssl
+ def __init__(self, sock, ssl):
+ self.__sock = sock
+ self.__ssl = ssl
- def makefile(self, mode): # hopefully, never have to write
- if mode != 'r' and mode != 'rb':
- raise UnimplementedFileMode()
+ def makefile(self, mode): # hopefully, never have to write
+ if mode != 'r' and mode != 'rb':
+ raise UnimplementedFileMode()
- msgbuf = ""
- while 1:
- try:
- msgbuf = msgbuf + self.__ssl.read()
- except socket.sslerror, msg:
- break
- return StringIO(msgbuf)
+ msgbuf = ""
+ while 1:
+ try:
+ msgbuf = msgbuf + self.__ssl.read()
+ except socket.sslerror, msg:
+ break
+ return StringIO(msgbuf)
- def send(self, stuff, flags = 0):
- return self.__ssl.write(stuff)
+ def send(self, stuff, flags = 0):
+ return self.__ssl.write(stuff)
- def recv(self, len = 1024, flags = 0):
- return self.__ssl.read(len)
+ def recv(self, len = 1024, flags = 0):
+ return self.__ssl.read(len)
- def __getattr__(self, attr):
- return getattr(self.__sock, attr)
+ def __getattr__(self, attr):
+ return getattr(self.__sock, attr)
class HTTPSConnection(HTTPConnection):
- "This class allows communication via SSL."
-
- default_port = HTTPS_PORT
-
- def __init__(self, host, port=None, **x509):
- keys = x509.keys()
- try:
- keys.remove('key_file')
- except ValueError:
- pass
- try:
- keys.remove('cert_file')
- except ValueError:
- pass
- if keys:
- raise IllegalKeywordArgument()
- HTTPConnection.__init__(self, host, port)
- self.key_file = x509.get('key_file')
- self.cert_file = x509.get('cert_file')
-
- def connect(self):
- "Connect to a host on a given (SSL) port."
-
- sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
- sock.connect((self.host, self.port))
- ssl = socket.ssl(sock, self.key_file, self.cert_file)
- self.sock = FakeSocket(sock, ssl)
+ "This class allows communication via SSL."
+
+ default_port = HTTPS_PORT
+
+ def __init__(self, host, port=None, **x509):
+ keys = x509.keys()
+ try:
+ keys.remove('key_file')
+ except ValueError:
+ pass
+ try:
+ keys.remove('cert_file')
+ except ValueError:
+ pass
+ if keys:
+ raise IllegalKeywordArgument()
+ HTTPConnection.__init__(self, host, port)
+ self.key_file = x509.get('key_file')
+ self.cert_file = x509.get('cert_file')
+
+ def connect(self):
+ "Connect to a host on a given (SSL) port."
+
+ sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
+ sock.connect((self.host, self.port))
+ ssl = socket.ssl(sock, self.key_file, self.cert_file)
+ self.sock = FakeSocket(sock, ssl)
class HTTP(HTTPConnection):
- "Compatibility class with httplib.py from 1.5."
+ "Compatibility class with httplib.py from 1.5."
- _http_vsn = 10
- _http_vsn_str = 'HTTP/1.0'
+ _http_vsn = 10
+ _http_vsn_str = 'HTTP/1.0'
- debuglevel = 0
+ debuglevel = 0
- def __init__(self, host='', port=None, **x509):
- "Provide a default host, since the superclass requires one."
+ def __init__(self, host='', port=None, **x509):
+ "Provide a default host, since the superclass requires one."
- # some joker passed 0 explicitly, meaning default port
- if port == 0:
- port = None
+ # some joker passed 0 explicitly, meaning default port
+ if port == 0:
+ port = None
- # Note that we may pass an empty string as the host; this will throw
- # an error when we attempt to connect. Presumably, the client code
- # will call connect before then, with a proper host.
- HTTPConnection.__init__(self, host, port)
+ # Note that we may pass an empty string as the host; this will throw
+ # an error when we attempt to connect. Presumably, the client code
+ # will call connect before then, with a proper host.
+ HTTPConnection.__init__(self, host, port)
- # we never actually use these for anything, but we keep them here for
- # compatibility with post-1.5.2 CVS.
- self.key_file = x509.get('key_file')
- self.cert_file = x509.get('cert_file')
+ # we never actually use these for anything, but we keep them here for
+ # compatibility with post-1.5.2 CVS.
+ self.key_file = x509.get('key_file')
+ self.cert_file = x509.get('cert_file')
- self.file = None
+ self.file = None
- def connect(self, host=None, port=None):
- "Accept arguments to set the host/port, since the superclass doesn't."
+ def connect(self, host=None, port=None):
+ "Accept arguments to set the host/port, since the superclass doesn't."
- if host is not None:
- self._set_hostport(host, port)
- HTTPConnection.connect(self)
+ if host is not None:
+ self._set_hostport(host, port)
+ HTTPConnection.connect(self)
- def set_debuglevel(self, debuglevel):
- "The class no longer supports the debuglevel."
- pass
+ def set_debuglevel(self, debuglevel):
+ "The class no longer supports the debuglevel."
+ pass
- def getfile(self):
- "Provide a getfile, since the superclass' does not use this concept."
- return self.file
+ def getfile(self):
+ "Provide a getfile, since the superclass' does not use this concept."
+ return self.file
- def putheader(self, header, *values):
- "The superclass allows only one value argument."
- HTTPConnection.putheader(self, header, string.joinfields(values, '\r\n\t'))
+ def putheader(self, header, *values):
+ "The superclass allows only one value argument."
+ HTTPConnection.putheader(self, header,
+ string.joinfields(values, '\r\n\t'))
- def getreply(self):
- """Compat definition since superclass does not define it.
+ def getreply(self):
+ """Compat definition since superclass does not define it.
- Returns a tuple consisting of:
- - server status code (e.g. '200' if all goes well)
- - server "reason" corresponding to status code
- - any RFC822 headers in the response from the server
- """
- try:
- response = self.getresponse()
- except BadStatusLine, e:
- ### hmm. if getresponse() ever closes the socket on a bad request,
- ### then we are going to have problems with self.sock
+ Returns a tuple consisting of:
+ - server status code (e.g. '200' if all goes well)
+ - server "reason" corresponding to status code
+ - any RFC822 headers in the response from the server
+ """
+ try:
+ response = self.getresponse()
+ except BadStatusLine, e:
+ ### hmm. if getresponse() ever closes the socket on a bad request,
+ ### then we are going to have problems with self.sock
- ### should we keep this behavior? do people use it?
- # keep the socket open (as a file), and return it
- self.file = self.sock.makefile('rb', 0)
+ ### should we keep this behavior? do people use it?
+ # keep the socket open (as a file), and return it
+ self.file = self.sock.makefile('rb', 0)
- # close our socket -- we want to restart after any protocol error
- self.close()
+ # close our socket -- we want to restart after any protocol error
+ self.close()
- self.headers = None
- return -1, e.line, None
+ self.headers = None
+ return -1, e.line, None
- self.headers = response.msg
- self.file = response.fp
- return response.status, response.reason, response.msg
+ self.headers = response.msg
+ self.file = response.fp
+ return response.status, response.reason, response.msg
- def close(self):
- HTTPConnection.close(self)
+ def close(self):
+ HTTPConnection.close(self)
- # note that self.file == response.fp, which gets closed by the
- # superclass. just clear the object ref here.
- ### hmm. messy. if status==-1, then self.file is owned by us.
- ### well... we aren't explicitly closing, but losing this ref will do it
- self.file = None
+ # note that self.file == response.fp, which gets closed by the
+ # superclass. just clear the object ref here.
+ ### hmm. messy. if status==-1, then self.file is owned by us.
+ ### well... we aren't explicitly closing, but losing this ref will
+ ### do it
+ self.file = None
class HTTPException(Exception):
- pass
+ pass
class NotConnected(HTTPException):
- pass
+ pass
class UnknownProtocol(HTTPException):
- def __init__(self, version):
- self.version = version
+ def __init__(self, version):
+ self.version = version
class UnknownTransferEncoding(HTTPException):
- pass
+ pass
class IllegalKeywordArgument(HTTPException):
- pass
+ pass
class UnimplementedFileMode(HTTPException):
- pass
+ pass
class IncompleteRead(HTTPException):
- def __init__(self, partial):
- self.partial = partial
+ def __init__(self, partial):
+ self.partial = partial
class ImproperConnectionState(HTTPException):
- pass
+ pass
class CannotSendRequest(ImproperConnectionState):
- pass
+ pass
class CannotSendHeader(ImproperConnectionState):
- pass
+ pass
class ResponseNotReady(ImproperConnectionState):
- pass
+ pass
class BadStatusLine(HTTPException):
- def __init__(self, line):
- self.line = line
+ def __init__(self, line):
+ self.line = line
# for backwards compatibility
error = HTTPException
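[Editor's note, not part of the patch] In normal use the new HTTP/1.1 classes above are driven through request()/getresponse(). A minimal sketch using only methods shown in this hunk; the host and URL are just examples:

    conn = HTTPConnection('www.python.org')        # port defaults to HTTP_PORT
    conn.request('GET', '/index.html')             # putrequest + headers + endheaders
    resp = conn.getresponse()
    print resp.status, resp.reason                 # e.g. 200 OK
    print resp.getheader('content-type')
    body = resp.read()                             # honours chunked/content-length
    conn.close()
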
@@ -760,19 +764,19 @@ def test():
print h.getfile().read()
if hasattr(socket, 'ssl'):
- host = 'www.c2.net'
- hs = HTTPS()
- hs.connect(host)
- hs.putrequest('GET', selector)
- hs.endheaders()
- status, reason, headers = hs.getreply()
- print 'status =', status
- print 'reason =', reason
- print
- if headers:
- for header in headers.headers: print string.strip(header)
- print
- print hs.getfile().read()
+ host = 'www.c2.net'
+ hs = HTTPS()
+ hs.connect(host)
+ hs.putrequest('GET', selector)
+ hs.endheaders()
+ status, reason, headers = hs.getreply()
+ print 'status =', status
+ print 'reason =', reason
+ print
+ if headers:
+ for header in headers.headers: print string.strip(header)
+ print
+ print hs.getfile().read()
if __name__ == '__main__':
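[Editor's note, not part of the patch] The chunked Transfer-Encoding handling in HTTPResponse.read() above reduces to: read a hexadecimal chunk-size line, drop any extensions after ';', read that many bytes plus a trailing CRLF, stop at a zero-size chunk, then discard trailers. A minimal sketch of that wire format against a blocking file-like object; read_chunked is a hypothetical helper and it omits the _safe_read() partial-read handling the real code uses:

    import string

    def read_chunked(fp):
        "Decode a chunked body from a file-like object; trailers are discarded."
        body = ''
        while 1:
            line = fp.readline()                   # chunk-size [; extensions] CRLF
            i = string.find(line, ';')
            if i >= 0:
                line = line[:i]                    # strip chunk-extensions
            size = string.atoi(line, 16)           # chunk-size is hexadecimal
            if size == 0:
                break                              # last-chunk reached
            body = body + fp.read(size)
            fp.read(2)                             # CRLF after the chunk data
        while 1:                                   # discard trailer lines
            if fp.readline() == '\r\n':
                break
        return body
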
diff --git a/Lib/imputil.py b/Lib/imputil.py
index b2a643a..d442d9f 100644
--- a/Lib/imputil.py
+++ b/Lib/imputil.py
@@ -18,346 +18,351 @@ _StringType = type('')
_ModuleType = type(sys) ### doesn't work in JPython...
class ImportManager:
- "Manage the import process."
-
- def install(self, namespace=vars(__builtin__)):
- "Install this ImportManager into the specified namespace."
-
- if isinstance(namespace, _ModuleType):
- namespace = vars(namespace)
-
- ### Note that we have no notion of "uninstall" or "chaining"
-
- namespace['__import__'] = self._import_hook
- ### fix this
- #namespace['reload'] = self._reload_hook
-
- def add_suffix(self, suffix, importFunc):
- assert callable(importFunc)
- self.fs_imp.add_suffix(suffix, importFunc)
-
- ######################################################################
- #
- # PRIVATE METHODS
- #
-
- clsFilesystemImporter = None
-
- def __init__(self, fs_imp=None):
- # we're definitely going to be importing something in the future,
- # so let's just load the OS-related facilities.
- if not _os_stat:
- _os_bootstrap()
-
- # This is the Importer that we use for grabbing stuff from the
- # filesystem. It defines one more method (import_from_dir) for our use.
- if not fs_imp:
- cls = self.clsFilesystemImporter or _FilesystemImporter
- fs_imp = cls()
- self.fs_imp = fs_imp
-
- # Initialize the set of suffixes that we recognize and import.
- # The default will import dynamic-load modules first, followed by
- # .py files (or a .py file's cached bytecode)
- for desc in imp.get_suffixes():
- if desc[2] == imp.C_EXTENSION:
- self.add_suffix(desc[0], DynLoadSuffixImporter(desc).import_file)
- self.add_suffix('.py', py_suffix_importer)
-
- def _import_hook(self, fqname, globals=None, locals=None, fromlist=None):
- """Python calls this hook to locate and import a module."""
-
- parts = strop.split(fqname, '.')
-
- # determine the context of this import
- parent = self._determine_import_context(globals)
-
- # if there is a parent, then its importer should manage this import
- if parent:
- module = parent.__importer__._do_import(parent, parts, fromlist)
- if module:
- return module
+ "Manage the import process."
- # has the top module already been imported?
- try:
- top_module = sys.modules[parts[0]]
- except KeyError:
+ def install(self, namespace=vars(__builtin__)):
+ "Install this ImportManager into the specified namespace."
- # look for the topmost module
- top_module = self._import_top_module(parts[0])
- if not top_module:
- # the topmost module wasn't found at all.
- raise ImportError, 'No module named ' + fqname
+ if isinstance(namespace, _ModuleType):
+ namespace = vars(namespace)
- # fast-path simple imports
- if len(parts) == 1:
- if not fromlist:
- return top_module
+ ### Note that we have no notion of "uninstall" or "chaining"
- if not top_module.__dict__.get('__ispkg__'):
- # __ispkg__ isn't defined (the module was not imported by us), or
- # it is zero.
- #
- # In the former case, there is no way that we could import
- # sub-modules that occur in the fromlist (but we can't raise an
- # error because it may just be names) because we don't know how
- # to deal with packages that were imported by other systems.
- #
- # In the latter case (__ispkg__ == 0), there can't be any sub-
- # modules present, so we can just return.
- #
- # In both cases, since len(parts) == 1, the top_module is also
- # the "bottom" which is the defined return when a fromlist exists.
- return top_module
-
- importer = top_module.__dict__.get('__importer__')
- if importer:
- return importer._finish_import(top_module, parts[1:], fromlist)
-
- # If the importer does not exist, then we have to bail. A missing importer
- # means that something else imported the module, and we have no knowledge
- # of how to get sub-modules out of the thing.
- raise ImportError, 'No module named ' + fqname
-
- def _determine_import_context(self, globals):
- """Returns the context in which a module should be imported.
-
- The context could be a loaded (package) module and the imported module
- will be looked for within that package. The context could also be None,
- meaning there is no context -- the module should be looked for as a
- "top-level" module.
- """
+ namespace['__import__'] = self._import_hook
+ ### fix this
+ #namespace['reload'] = self._reload_hook
- if not globals or not globals.get('__importer__'):
- # globals does not refer to one of our modules or packages. That
- # implies there is no relative import context (as far as we are
- # concerned), and it should just pick it off the standard path.
- return None
-
- # The globals refer to a module or package of ours. It will define
- # the context of the new import. Get the module/package fqname.
- parent_fqname = globals['__name__']
-
- # if a package is performing the import, then return itself (imports
- # refer to pkg contents)
- if globals['__ispkg__']:
- parent = sys.modules[parent_fqname]
- assert globals is parent.__dict__
- return parent
-
- i = strop.rfind(parent_fqname, '.')
-
- # a module outside of a package has no particular import context
- if i == -1:
- return None
-
- # if a module in a package is performing the import, then return the
- # package (imports refer to siblings)
- parent_fqname = parent_fqname[:i]
- parent = sys.modules[parent_fqname]
- assert parent.__name__ == parent_fqname
- return parent
-
- def _import_top_module(self, name):
- # scan sys.path looking for a location in the filesystem that contains
- # the module, or an Importer object that can import the module.
- for item in sys.path:
- if isinstance(item, _StringType):
- module = self.fs_imp.import_from_dir(item, name)
- else:
- module = item.import_top(name)
- if module:
- return module
- return None
+ def add_suffix(self, suffix, importFunc):
+ assert callable(importFunc)
+ self.fs_imp.add_suffix(suffix, importFunc)
- def _reload_hook(self, module):
- "Python calls this hook to reload a module."
+ ######################################################################
+ #
+ # PRIVATE METHODS
+ #
- # reloading of a module may or may not be possible (depending on the
- # importer), but at least we can validate that it's ours to reload
- importer = module.__dict__.get('__importer__')
- if not importer:
- ### oops. now what...
- pass
+ clsFilesystemImporter = None
+
+ def __init__(self, fs_imp=None):
+ # we're definitely going to be importing something in the future,
+ # so let's just load the OS-related facilities.
+ if not _os_stat:
+ _os_bootstrap()
+
+ # This is the Importer that we use for grabbing stuff from the
+ # filesystem. It defines one more method (import_from_dir) for our use.
+ if not fs_imp:
+ cls = self.clsFilesystemImporter or _FilesystemImporter
+ fs_imp = cls()
+ self.fs_imp = fs_imp
+
+ # Initialize the set of suffixes that we recognize and import.
+ # The default will import dynamic-load modules first, followed by
+ # .py files (or a .py file's cached bytecode)
+ for desc in imp.get_suffixes():
+ if desc[2] == imp.C_EXTENSION:
+ self.add_suffix(desc[0],
+ DynLoadSuffixImporter(desc).import_file)
+ self.add_suffix('.py', py_suffix_importer)
+
+ def _import_hook(self, fqname, globals=None, locals=None, fromlist=None):
+ """Python calls this hook to locate and import a module."""
+
+ parts = strop.split(fqname, '.')
+
+ # determine the context of this import
+ parent = self._determine_import_context(globals)
+
+ # if there is a parent, then its importer should manage this import
+ if parent:
+ module = parent.__importer__._do_import(parent, parts, fromlist)
+ if module:
+ return module
+
+ # has the top module already been imported?
+ try:
+ top_module = sys.modules[parts[0]]
+ except KeyError:
+
+ # look for the topmost module
+ top_module = self._import_top_module(parts[0])
+ if not top_module:
+ # the topmost module wasn't found at all.
+ raise ImportError, 'No module named ' + fqname
+
+ # fast-path simple imports
+ if len(parts) == 1:
+ if not fromlist:
+ return top_module
+
+ if not top_module.__dict__.get('__ispkg__'):
+ # __ispkg__ isn't defined (the module was not imported by us),
+ # or it is zero.
+ #
+ # In the former case, there is no way that we could import
+ # sub-modules that occur in the fromlist (but we can't raise an
+ # error because it may just be names) because we don't know how
+ # to deal with packages that were imported by other systems.
+ #
+ # In the latter case (__ispkg__ == 0), there can't be any sub-
+ # modules present, so we can just return.
+ #
+ # In both cases, since len(parts) == 1, the top_module is also
+ # the "bottom" which is the defined return when a fromlist
+ # exists.
+ return top_module
+
+ importer = top_module.__dict__.get('__importer__')
+ if importer:
+ return importer._finish_import(top_module, parts[1:], fromlist)
+
+ # If the importer does not exist, then we have to bail. A missing
+ # importer means that something else imported the module, and we have
+ # no knowledge of how to get sub-modules out of the thing.
+ raise ImportError, 'No module named ' + fqname
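# Illustrative sketch, not part of this patch: the two statement forms the
# hook distinguishes, driven directly.  'distutils' is only used as a stdlib
# package that is unlikely to have been imported already; if something else
# imported it first, the manager bails out with ImportError as described above.
import sys
from imputil import ImportManager

mgr = ImportManager()
top = mgr._import_hook('distutils.core')                     # 'import distutils.core'
assert top is sys.modules['distutils']                       # binds the top module
pkg = mgr._import_hook('distutils', None, None, ('core',))   # 'from distutils import core'
assert pkg is sys.modules['distutils'] and hasattr(pkg, 'core')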
- # okay. it is using the imputil system, and we must delegate it, but
- # we don't know what to do (yet)
- ### we should blast the module dict and do another get_code(). need to
- ### flesh this out and add proper docco...
- raise SystemError, "reload not yet implemented"
+ def _determine_import_context(self, globals):
+ """Returns the context in which a module should be imported.
+
+ The context could be a loaded (package) module and the imported module
+ will be looked for within that package. The context could also be None,
+ meaning there is no context -- the module should be looked for as a
+ "top-level" module.
+ """
+
+ if not globals or not globals.get('__importer__'):
+ # globals does not refer to one of our modules or packages. That
+ # implies there is no relative import context (as far as we are
+ # concerned), and it should just pick it off the standard path.
+ return None
+
+ # The globals refer to a module or package of ours. It will define
+ # the context of the new import. Get the module/package fqname.
+ parent_fqname = globals['__name__']
+
+ # if a package is performing the import, then return itself (imports
+ # refer to pkg contents)
+ if globals['__ispkg__']:
+ parent = sys.modules[parent_fqname]
+ assert globals is parent.__dict__
+ return parent
+
+ i = strop.rfind(parent_fqname, '.')
+
+ # a module outside of a package has no particular import context
+ if i == -1:
+ return None
+
+ # if a module in a package is performing the import, then return the
+ # package (imports refer to siblings)
+ parent_fqname = parent_fqname[:i]
+ parent = sys.modules[parent_fqname]
+ assert parent.__name__ == parent_fqname
+ return parent
+
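# Illustrative sketch, not part of this patch: the context rule above restated
# on plain strings (the real code uses strop because it runs before the string
# module can be imported).
def _context_name(fqname, ispkg):
    if ispkg:
        return fqname               # a package is its own import context
    i = fqname.rfind('.')
    if i == -1:
        return None                 # top-level module: no context
    return fqname[:i]               # module in a package: the enclosing package

assert _context_name('pkg', 1) == 'pkg'
assert _context_name('pkg.sub.mod', 0) == 'pkg.sub'
assert _context_name('toplevel', 0) is None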
+ def _import_top_module(self, name):
+ # scan sys.path looking for a location in the filesystem that contains
+ # the module, or an Importer object that can import the module.
+ for item in sys.path:
+ if isinstance(item, _StringType):
+ module = self.fs_imp.import_from_dir(item, name)
+ else:
+ module = item.import_top(name)
+ if module:
+ return module
+ return None
+
+ def _reload_hook(self, module):
+ "Python calls this hook to reload a module."
+
+ # reloading of a module may or may not be possible (depending on the
+ # importer), but at least we can validate that it's ours to reload
+ importer = module.__dict__.get('__importer__')
+ if not importer:
+ ### oops. now what...
+ pass
+
+ # okay. it is using the imputil system, and we must delegate it, but
+ # we don't know what to do (yet)
+ ### we should blast the module dict and do another get_code(). need to
+ ### flesh this out and add proper docco...
+ raise SystemError, "reload not yet implemented"
class Importer:
- "Base class for replacing standard import functions."
-
- def import_top(self, name):
- "Import a top-level module."
- return self._import_one(None, name, name)
-
- ######################################################################
- #
- # PRIVATE METHODS
- #
- def _finish_import(self, top, parts, fromlist):
- # if "a.b.c" was provided, then load the ".b.c" portion down from
- # below the top-level module.
- bottom = self._load_tail(top, parts)
-
- # if the form is "import a.b.c", then return "a"
- if not fromlist:
- # no fromlist: return the top of the import tree
- return top
-
- # the top module was imported by self.
- #
- # this means that the bottom module was also imported by self (just
- # now, or in the past and we fetched it from sys.modules).
- #
- # since we imported/handled the bottom module, this means that we can
- # also handle its fromlist (and reliably use __ispkg__).
+ "Base class for replacing standard import functions."
- # if the bottom node is a package, then (potentially) import some modules.
- #
- # note: if it is not a package, then "fromlist" refers to names in
- # the bottom module rather than modules.
- # note: for a mix of names and modules in the fromlist, we will
- # import all modules and insert those into the namespace of
- # the package module. Python will pick up all fromlist names
- # from the bottom (package) module; some will be modules that
- # we imported and stored in the namespace, others are expected
- # to be present already.
- if bottom.__ispkg__:
- self._import_fromlist(bottom, fromlist)
-
- # if the form is "from a.b import c, d" then return "b"
- return bottom
-
- def _import_one(self, parent, modname, fqname):
- "Import a single module."
-
- # has the module already been imported?
- try:
- return sys.modules[fqname]
- except KeyError:
- pass
-
- # load the module's code, or fetch the module itself
- result = self.get_code(parent, modname, fqname)
- if result is None:
- return None
+ def import_top(self, name):
+ "Import a top-level module."
+ return self._import_one(None, name, name)
- module = self._process_result(result, fqname)
+ ######################################################################
+ #
+ # PRIVATE METHODS
+ #
+ def _finish_import(self, top, parts, fromlist):
+ # if "a.b.c" was provided, then load the ".b.c" portion down from
+ # below the top-level module.
+ bottom = self._load_tail(top, parts)
- # insert the module into its parent
- if parent:
- setattr(parent, modname, module)
- return module
+ # if the form is "import a.b.c", then return "a"
+ if not fromlist:
+ # no fromlist: return the top of the import tree
+ return top
- def _process_result(self, (ispkg, code, values), fqname):
- # did get_code() return an actual module? (rather than a code object)
- is_module = isinstance(code, _ModuleType)
+ # the top module was imported by self.
+ #
+ # this means that the bottom module was also imported by self (just
+ # now, or in the past and we fetched it from sys.modules).
+ #
+ # since we imported/handled the bottom module, this means that we can
+ # also handle its fromlist (and reliably use __ispkg__).
- # use the returned module, or create a new one to exec code into
- if is_module:
- module = code
- else:
- module = imp.new_module(fqname)
+ # if the bottom node is a package, then (potentially) import some
+ # modules.
+ #
+ # note: if it is not a package, then "fromlist" refers to names in
+ # the bottom module rather than modules.
+ # note: for a mix of names and modules in the fromlist, we will
+ # import all modules and insert those into the namespace of
+ # the package module. Python will pick up all fromlist names
+ # from the bottom (package) module; some will be modules that
+ # we imported and stored in the namespace, others are expected
+ # to be present already.
+ if bottom.__ispkg__:
+ self._import_fromlist(bottom, fromlist)
+
+ # if the form is "from a.b import c, d" then return "b"
+ return bottom
+
+ def _import_one(self, parent, modname, fqname):
+ "Import a single module."
+
+ # has the module already been imported?
+ try:
+ return sys.modules[fqname]
+ except KeyError:
+ pass
+
+ # load the module's code, or fetch the module itself
+ result = self.get_code(parent, modname, fqname)
+ if result is None:
+ return None
+
+ module = self._process_result(result, fqname)
+
+ # insert the module into its parent
+ if parent:
+ setattr(parent, modname, module)
+ return module
- ### record packages a bit differently??
- module.__importer__ = self
- module.__ispkg__ = ispkg
+ def _process_result(self, (ispkg, code, values), fqname):
+ # did get_code() return an actual module? (rather than a code object)
+ is_module = isinstance(code, _ModuleType)
- # insert additional values into the module (before executing the code)
- module.__dict__.update(values)
+ # use the returned module, or create a new one to exec code into
+ if is_module:
+ module = code
+ else:
+ module = imp.new_module(fqname)
- # the module is almost ready... make it visible
- sys.modules[fqname] = module
+ ### record packages a bit differently??
+ module.__importer__ = self
+ module.__ispkg__ = ispkg
- # execute the code within the module's namespace
- if not is_module:
- exec code in module.__dict__
+ # insert additional values into the module (before executing the code)
+ module.__dict__.update(values)
- return module
+ # the module is almost ready... make it visible
+ sys.modules[fqname] = module
- def _load_tail(self, m, parts):
- """Import the rest of the modules, down from the top-level module.
+ # execute the code within the module's namespace
+ if not is_module:
+ exec code in module.__dict__
- Returns the last module in the dotted list of modules.
- """
- for part in parts:
- fqname = "%s.%s" % (m.__name__, part)
- m = self._import_one(m, part, fqname)
- if not m:
- raise ImportError, "No module named " + fqname
- return m
-
- def _import_fromlist(self, package, fromlist):
- 'Import any sub-modules in the "from" list.'
-
- # if '*' is present in the fromlist, then look for the '__all__' variable
- # to find additional items (modules) to import.
- if '*' in fromlist:
- fromlist = list(fromlist) + list(package.__dict__.get('__all__', []))
-
- for sub in fromlist:
- # if the name is already present, then don't try to import it (it
- # might not be a module!).
- if sub != '*' and not hasattr(package, sub):
- subname = "%s.%s" % (package.__name__, sub)
- submod = self._import_one(package, sub, subname)
- if not submod:
- raise ImportError, "cannot import name " + subname
-
- def _do_import(self, parent, parts, fromlist):
- """Attempt to import the module relative to parent.
-
- This method is used when the import context specifies that <self>
- imported the parent module.
- """
- top_name = parts[0]
- top_fqname = parent.__name__ + '.' + top_name
- top_module = self._import_one(parent, top_name, top_fqname)
- if not top_module:
- # this importer and parent could not find the module (relatively)
- return None
-
- return self._finish_import(top_module, parts[1:], fromlist)
+ return module
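# Illustrative sketch, not part of this patch: the essence of the code-object
# branch of _process_result() above -- build a fresh module, publish it in
# sys.modules *before* executing, then exec the code in its namespace.  The
# module name 'demo_mod' is hypothetical.
import imp, sys

code = compile("X = 42\n", '<demo>', 'exec')
module = imp.new_module('demo_mod')
module.__dict__.update({ '__file__' : '<demo>' })
sys.modules['demo_mod'] = module
exec code in module.__dict__
assert module.X == 42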
- ######################################################################
- #
- # METHODS TO OVERRIDE
- #
- def get_code(self, parent, modname, fqname):
- """Find and retrieve the code for the given module.
+ def _load_tail(self, m, parts):
+ """Import the rest of the modules, down from the top-level module.
+
+ Returns the last module in the dotted list of modules.
+ """
+ for part in parts:
+ fqname = "%s.%s" % (m.__name__, part)
+ m = self._import_one(m, part, fqname)
+ if not m:
+ raise ImportError, "No module named " + fqname
+ return m
+
+ def _import_fromlist(self, package, fromlist):
+ 'Import any sub-modules in the "from" list.'
+
+ # if '*' is present in the fromlist, then look for the '__all__'
+ # variable to find additional items (modules) to import.
+ if '*' in fromlist:
+ fromlist = list(fromlist) + \
+ list(package.__dict__.get('__all__', []))
+
+ for sub in fromlist:
+ # if the name is already present, then don't try to import it (it
+ # might not be a module!).
+ if sub != '*' and not hasattr(package, sub):
+ subname = "%s.%s" % (package.__name__, sub)
+ submod = self._import_one(package, sub, subname)
+ if not submod:
+ raise ImportError, "cannot import name " + subname
+
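# Illustrative sketch, not part of this patch: the '*' expansion performed
# above, restated on a plain dictionary standing in for a package namespace.
def _expand_fromlist(fromlist, pkg_dict):
    if '*' in fromlist:
        fromlist = list(fromlist) + list(pkg_dict.get('__all__', []))
    # only names that are not already attributes get imported as sub-modules
    return [name for name in fromlist if name != '*' and name not in pkg_dict]

assert _expand_fromlist(('*',), { '__all__' : ['a', 'b'], 'a' : None }) == ['b']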
+ def _do_import(self, parent, parts, fromlist):
+ """Attempt to import the module relative to parent.
+
+ This method is used when the import context specifies that <self>
+ imported the parent module.
+ """
+ top_name = parts[0]
+ top_fqname = parent.__name__ + '.' + top_name
+ top_module = self._import_one(parent, top_name, top_fqname)
+ if not top_module:
+ # this importer and parent could not find the module (relatively)
+ return None
+
+ return self._finish_import(top_module, parts[1:], fromlist)
+
+ ######################################################################
+ #
+ # METHODS TO OVERRIDE
+ #
+ def get_code(self, parent, modname, fqname):
+ """Find and retrieve the code for the given module.
- parent specifies a parent module to define a context for importing. It
- may be None, indicating no particular context for the search.
+ parent specifies a parent module to define a context for importing. It
+ may be None, indicating no particular context for the search.
- modname specifies a single module (not dotted) within the parent.
+ modname specifies a single module (not dotted) within the parent.
- fqname specifies the fully-qualified module name. This is a (potentially)
- dotted name from the "root" of the module namespace down to the modname.
- If there is no parent, then modname==fqname.
+ fqname specifies the fully-qualified module name. This is a
+ (potentially) dotted name from the "root" of the module namespace
+ down to the modname.
+ If there is no parent, then modname==fqname.
- This method should return None, or a 3-tuple.
+ This method should return None, or a 3-tuple.
- * If the module was not found, then None should be returned.
+ * If the module was not found, then None should be returned.
- * The first item of the 2- or 3-tuple should be the integer 0 or 1,
- specifying whether the module that was found is a package or not.
+      * The first item of the 3-tuple should be the integer 0 or 1,
+        specifying whether the module that was found is a package or not.
- * The second item is the code object for the module (it will be
- executed within the new module's namespace). This item can also
- be a fully-loaded module object (e.g. loaded from a shared lib).
+ * The second item is the code object for the module (it will be
+ executed within the new module's namespace). This item can also
+ be a fully-loaded module object (e.g. loaded from a shared lib).
- * The third item is a dictionary of name/value pairs that will be
- inserted into new module before the code object is executed. This
- is provided in case the module's code expects certain values (such
- as where the module was found). When the second item is a module
- object, then these names/values will be inserted *after* the module
- has been loaded/initialized.
- """
- raise RuntimeError, "get_code not implemented"
+ * The third item is a dictionary of name/value pairs that will be
+        inserted into the new module before the code object is executed. This
+ is provided in case the module's code expects certain values (such
+ as where the module was found). When the second item is a module
+ object, then these names/values will be inserted *after* the module
+ has been loaded/initialized.
+ """
+ raise RuntimeError, "get_code not implemented"
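# Illustrative sketch, not part of this patch: a minimal Importer subclass
# honouring the get_code() contract documented above.  It serves modules out
# of an in-memory mapping of source strings; the mapping and the module name
# 'hello' are hypothetical.
import sys
from imputil import ImportManager, Importer

class DictImporter(Importer):
    def __init__(self, sources):
        self.sources = sources              # fqname -> source text
    def get_code(self, parent, modname, fqname):
        try:
            source = self.sources[fqname]
        except KeyError:
            return None                     # not ours; let other importers try
        code = compile(source, '<%s>' % fqname, 'exec')
        return 0, code, { '__file__' : '<%s>' % fqname }

sys.path.insert(0, DictImporter({ 'hello' : "greeting = 'hi'\n" }))
ImportManager().install()
import hello
assert hello.greeting == 'hi'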
######################################################################
@@ -372,99 +377,99 @@ _suffix_char = __debug__ and 'c' or 'o'
_suffix = '.py' + _suffix_char
def _compile(pathname, timestamp):
- """Compile (and cache) a Python source file.
-
- The file specified by <pathname> is compiled to a code object and
- returned.
-
- Presuming the appropriate privileges exist, the bytecodes will be
- saved back to the filesystem for future imports. The source file's
- modification timestamp must be provided as a Long value.
- """
- codestring = open(pathname, 'r').read()
- if codestring and codestring[-1] != '\n':
- codestring = codestring + '\n'
- code = __builtin__.compile(codestring, pathname, 'exec')
-
- # try to cache the compiled code
- try:
- f = open(pathname + _suffix_char, 'wb')
- except IOError:
- pass
- else:
- f.write('\0\0\0\0')
- f.write(struct.pack('<I', timestamp))
- marshal.dump(code, f)
- f.flush()
- f.seek(0, 0)
- f.write(imp.get_magic())
- f.close()
-
- return code
+ """Compile (and cache) a Python source file.
+
+ The file specified by <pathname> is compiled to a code object and
+ returned.
+
+ Presuming the appropriate privileges exist, the bytecodes will be
+ saved back to the filesystem for future imports. The source file's
+ modification timestamp must be provided as a Long value.
+ """
+ codestring = open(pathname, 'r').read()
+ if codestring and codestring[-1] != '\n':
+ codestring = codestring + '\n'
+ code = __builtin__.compile(codestring, pathname, 'exec')
+
+ # try to cache the compiled code
+ try:
+ f = open(pathname + _suffix_char, 'wb')
+ except IOError:
+ pass
+ else:
+ f.write('\0\0\0\0')
+ f.write(struct.pack('<I', timestamp))
+ marshal.dump(code, f)
+ f.flush()
+ f.seek(0, 0)
+ f.write(imp.get_magic())
+ f.close()
+
+ return code
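# Illustrative sketch, not part of this patch: reading back the cache file
# layout produced by _compile() above -- four magic bytes, a 4-byte
# little-endian source timestamp, then the marshalled code object.
import imp, marshal, struct

def read_cached(path, t_py):
    f = open(path, 'rb')
    try:
        if f.read(4) != imp.get_magic():
            return None                     # wrong interpreter version
        if struct.unpack('<I', f.read(4))[0] != t_py:
            return None                     # stale relative to the source
        return marshal.load(f)
    finally:
        f.close()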
_os_stat = _os_path_join = None
def _os_bootstrap():
- "Set up 'os' module replacement functions for use during import bootstrap."
-
- names = sys.builtin_module_names
-
- join = None
- if 'posix' in names:
- sep = '/'
- from posix import stat
- elif 'nt' in names:
- sep = '\\'
- from nt import stat
- elif 'dos' in names:
- sep = '\\'
- from dos import stat
- elif 'os2' in names:
- sep = '\\'
- from os2 import stat
- elif 'mac' in names:
- from mac import stat
- def join(a, b):
- if a == '':
- return b
- path = s
- if ':' not in a:
- a = ':' + a
- if a[-1:] <> ':':
- a = a + ':'
- return a + b
- else:
- raise ImportError, 'no os specific module found'
-
- if join is None:
- def join(a, b, sep=sep):
- if a == '':
- return b
- lastchar = a[-1:]
- if lastchar == '/' or lastchar == sep:
- return a + b
- return a + sep + b
-
- global _os_stat
- _os_stat = stat
-
- global _os_path_join
- _os_path_join = join
+ "Set up 'os' module replacement functions for use during import bootstrap."
+
+ names = sys.builtin_module_names
+
+ join = None
+ if 'posix' in names:
+ sep = '/'
+ from posix import stat
+ elif 'nt' in names:
+ sep = '\\'
+ from nt import stat
+ elif 'dos' in names:
+ sep = '\\'
+ from dos import stat
+ elif 'os2' in names:
+ sep = '\\'
+ from os2 import stat
+ elif 'mac' in names:
+ from mac import stat
+ def join(a, b):
+ if a == '':
+ return b
+ if ':' not in a:
+ a = ':' + a
+ if a[-1:] <> ':':
+ a = a + ':'
+ return a + b
+ else:
+ raise ImportError, 'no os specific module found'
+
+ if join is None:
+ def join(a, b, sep=sep):
+ if a == '':
+ return b
+ lastchar = a[-1:]
+ if lastchar == '/' or lastchar == sep:
+ return a + b
+ return a + sep + b
+
+ global _os_stat
+ _os_stat = stat
+
+ global _os_path_join
+ _os_path_join = join
def _os_path_isdir(pathname):
- "Local replacement for os.path.isdir()."
- try:
- s = _os_stat(pathname)
- except OSError:
- return None
- return (s[0] & 0170000) == 0040000
+ "Local replacement for os.path.isdir()."
+ try:
+ s = _os_stat(pathname)
+ except OSError:
+ return None
+ return (s[0] & 0170000) == 0040000
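# Illustrative sketch, not part of this patch: the octal mask test above is
# the classic S_ISDIR check (0170000 is S_IFMT, 0040000 is S_IFDIR), so it
# agrees with the stat module's helper.
import os, stat

s = os.stat('.')
assert ((s[0] & 0170000) == 0040000) == stat.S_ISDIR(s[0])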
def _timestamp(pathname):
- "Return the file modification time as a Long."
- try:
- s = _os_stat(pathname)
- except OSError:
- return None
- return long(s[8])
+ "Return the file modification time as a Long."
+ try:
+ s = _os_stat(pathname)
+ except OSError:
+ return None
+ return long(s[8])
######################################################################
@@ -472,23 +477,23 @@ def _timestamp(pathname):
# Emulate the import mechanism for builtin and frozen modules
#
class BuiltinImporter(Importer):
- def get_code(self, parent, modname, fqname):
- if parent:
- # these modules definitely do not occur within a package context
- return None
-
- # look for the module
- if imp.is_builtin(modname):
- type = imp.C_BUILTIN
- elif imp.is_frozen(modname):
- type = imp.PY_FROZEN
- else:
- # not found
- return None
+ def get_code(self, parent, modname, fqname):
+ if parent:
+ # these modules definitely do not occur within a package context
+ return None
+
+ # look for the module
+ if imp.is_builtin(modname):
+ type = imp.C_BUILTIN
+ elif imp.is_frozen(modname):
+ type = imp.PY_FROZEN
+ else:
+ # not found
+ return None
- # got it. now load and return it.
- module = imp.load_module(modname, None, modname, ('', '', type))
- return 0, module, { }
+ # got it. now load and return it.
+ module = imp.load_module(modname, None, modname, ('', '', type))
+ return 0, module, { }
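# Illustrative sketch, not part of this patch: for a builtin such as
# 'marshal', get_code() above hands back an already-initialized module object
# (not a code object), so the values dictionary stays empty.
from imputil import BuiltinImporter

ispkg, module, values = BuiltinImporter().get_code(None, 'marshal', 'marshal')
assert ispkg == 0 and module.__name__ == 'marshal' and values == { }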
######################################################################
@@ -496,49 +501,49 @@ class BuiltinImporter(Importer):
# Internal importer used for importing from the filesystem
#
class _FilesystemImporter(Importer):
- def __init__(self):
- self.suffixes = [ ]
-
- def add_suffix(self, suffix, importFunc):
- assert callable(importFunc)
- self.suffixes.append((suffix, importFunc))
-
- def import_from_dir(self, dir, fqname):
- result = self._import_pathname(_os_path_join(dir, fqname), fqname)
- if result:
- return self._process_result(result, fqname)
- return None
-
- def get_code(self, parent, modname, fqname):
- # This importer is never used with an empty parent. Its existence is
- # private to the ImportManager. The ImportManager uses the
- # import_from_dir() method to import top-level modules/packages.
- # This method is only used when we look for a module within a package.
- assert parent
-
- return self._import_pathname(_os_path_join(parent.__pkgdir__, modname),
- fqname)
-
- def _import_pathname(self, pathname, fqname):
- if _os_path_isdir(pathname):
- result = self._import_pathname(_os_path_join(pathname, '__init__'),
+ def __init__(self):
+ self.suffixes = [ ]
+
+ def add_suffix(self, suffix, importFunc):
+ assert callable(importFunc)
+ self.suffixes.append((suffix, importFunc))
+
+ def import_from_dir(self, dir, fqname):
+ result = self._import_pathname(_os_path_join(dir, fqname), fqname)
+ if result:
+ return self._process_result(result, fqname)
+ return None
+
+ def get_code(self, parent, modname, fqname):
+ # This importer is never used with an empty parent. Its existence is
+ # private to the ImportManager. The ImportManager uses the
+ # import_from_dir() method to import top-level modules/packages.
+ # This method is only used when we look for a module within a package.
+ assert parent
+
+ return self._import_pathname(_os_path_join(parent.__pkgdir__, modname),
fqname)
- if result:
- values = result[2]
- values['__pkgdir__'] = pathname
- values['__path__'] = [ pathname ]
- return 1, result[1], values
- return None
-
- for suffix, importFunc in self.suffixes:
- filename = pathname + suffix
- try:
- finfo = _os_stat(filename)
- except OSError:
- pass
- else:
- return importFunc(filename, finfo, fqname)
- return None
+
+ def _import_pathname(self, pathname, fqname):
+ if _os_path_isdir(pathname):
+ result = self._import_pathname(_os_path_join(pathname, '__init__'),
+ fqname)
+ if result:
+ values = result[2]
+ values['__pkgdir__'] = pathname
+ values['__path__'] = [ pathname ]
+ return 1, result[1], values
+ return None
+
+ for suffix, importFunc in self.suffixes:
+ filename = pathname + suffix
+ try:
+ finfo = _os_stat(filename)
+ except OSError:
+ pass
+ else:
+ return importFunc(filename, finfo, fqname)
+ return None
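# Illustrative sketch, not part of this patch: driving the filesystem importer
# directly.  _os_bootstrap() must have run first (ImportManager.__init__
# normally takes care of that); '/some/dir' and 'mymod' are hypothetical, and
# import_from_dir() simply returns None when nothing matches.
from imputil import _FilesystemImporter, _os_bootstrap, py_suffix_importer

_os_bootstrap()
fs = _FilesystemImporter()
fs.add_suffix('.py', py_suffix_importer)
module = fs.import_from_dir('/some/dir', 'mymod')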
######################################################################
#
@@ -546,49 +551,49 @@ class _FilesystemImporter(Importer):
#
def py_suffix_importer(filename, finfo, fqname):
- file = filename[:-3] + _suffix
- t_py = long(finfo[8])
- t_pyc = _timestamp(file)
-
- code = None
- if t_pyc is not None and t_pyc >= t_py:
- f = open(file, 'rb')
- if f.read(4) == imp.get_magic():
- t = struct.unpack('<I', f.read(4))[0]
- if t == t_py:
- code = marshal.load(f)
- f.close()
- if code is None:
- file = filename
- code = _compile(file, t_py)
-
- return 0, code, { '__file__' : file }
+ file = filename[:-3] + _suffix
+ t_py = long(finfo[8])
+ t_pyc = _timestamp(file)
+
+ code = None
+ if t_pyc is not None and t_pyc >= t_py:
+ f = open(file, 'rb')
+ if f.read(4) == imp.get_magic():
+ t = struct.unpack('<I', f.read(4))[0]
+ if t == t_py:
+ code = marshal.load(f)
+ f.close()
+ if code is None:
+ file = filename
+ code = _compile(file, t_py)
+
+ return 0, code, { '__file__' : file }
class DynLoadSuffixImporter:
- def __init__(self, desc):
- self.desc = desc
+ def __init__(self, desc):
+ self.desc = desc
- def import_file(self, filename, finfo, fqname):
- fp = open(filename, self.desc[1])
- module = imp.load_module(fqname, fp, filename, self.desc)
- module.__file__ = filename
- return 0, module, { }
+ def import_file(self, filename, finfo, fqname):
+ fp = open(filename, self.desc[1])
+ module = imp.load_module(fqname, fp, filename, self.desc)
+ module.__file__ = filename
+ return 0, module, { }
######################################################################
def _print_importers():
- items = sys.modules.items()
- items.sort()
- for name, module in items:
- if module:
- print name, module.__dict__.get('__importer__', '-- no importer')
- else:
- print name, '-- non-existent module'
+ items = sys.modules.items()
+ items.sort()
+ for name, module in items:
+ if module:
+ print name, module.__dict__.get('__importer__', '-- no importer')
+ else:
+ print name, '-- non-existent module'
def _test_revamp():
- ImportManager().install()
- sys.path.insert(0, BuiltinImporter())
+ ImportManager().install()
+ sys.path.insert(0, BuiltinImporter())
######################################################################