Diffstat (limited to 'Lib')
-rw-r--r--  Lib/_osx_support.py | 2
-rw-r--r--  Lib/abc.py | 6
-rw-r--r--  Lib/argparse.py | 19
-rw-r--r--  Lib/ast.py | 6
-rw-r--r--  Lib/bz2.py | 44
-rw-r--r--  Lib/collections/__init__.py | 3
-rw-r--r--  Lib/collections/abc.py | 3
-rw-r--r--  Lib/compileall.py | 2
-rw-r--r--  Lib/concurrent/futures/_base.py | 3
-rw-r--r--  Lib/concurrent/futures/process.py | 6
-rw-r--r--  Lib/concurrent/futures/thread.py | 2
-rw-r--r--  Lib/dbm/__init__.py | 6
-rw-r--r--  Lib/decimal.py | 4
-rw-r--r--  Lib/difflib.py | 21
-rw-r--r--  Lib/distutils/__init__.py | 2
-rw-r--r--  Lib/distutils/ccompiler.py | 5
-rw-r--r--  Lib/distutils/command/bdist.py | 3
-rw-r--r--  Lib/distutils/command/bdist_dumb.py | 8
-rw-r--r--  Lib/distutils/command/build_ext.py | 26
-rw-r--r--  Lib/distutils/command/install.py | 15
-rw-r--r--  Lib/distutils/emxccompiler.py | 315
-rw-r--r--  Lib/distutils/spawn.py | 24
-rw-r--r--  Lib/distutils/sysconfig.py | 24
-rw-r--r--  Lib/distutils/tests/test_bdist_dumb.py | 2
-rw-r--r--  Lib/distutils/tests/test_install.py | 2
-rw-r--r--  Lib/distutils/tests/test_util.py | 2
-rw-r--r--  Lib/distutils/util.py | 6
-rw-r--r--  Lib/doctest.py | 8
-rw-r--r--  Lib/email/_header_value_parser.py | 3
-rw-r--r--  Lib/email/iterators.py | 6
-rw-r--r--  Lib/filecmp.py | 6
-rw-r--r--  Lib/fractions.py | 14
-rw-r--r--  Lib/functools.py | 28
-rw-r--r--  Lib/getpass.py | 2
-rw-r--r--  Lib/glob.py | 3
-rw-r--r--  Lib/hashlib.py | 15
-rw-r--r--  Lib/http/cookiejar.py | 3
-rw-r--r--  Lib/http/server.py | 4
-rw-r--r--  Lib/idlelib/FileList.py | 2
-rw-r--r--  Lib/idlelib/GrepDialog.py | 2
-rw-r--r--  Lib/idlelib/IOBinding.py | 2
-rw-r--r--  Lib/idlelib/NEWS.txt | 2
-rw-r--r--  Lib/idlelib/PathBrowser.py | 2
-rw-r--r--  Lib/idlelib/PyShell.py | 10
-rw-r--r--  Lib/idlelib/TreeWidget.py | 4
-rw-r--r--  Lib/idlelib/help.txt | 2
-rw-r--r--  Lib/idlelib/idlever.py | 2
-rw-r--r--  Lib/importlib/_bootstrap.py | 23
-rw-r--r--  Lib/importlib/abc.py | 177
-rw-r--r--  Lib/json/__init__.py | 25
-rw-r--r--  Lib/json/decoder.py | 14
-rw-r--r--  Lib/json/encoder.py | 29
-rw-r--r--  Lib/json/tool.py | 3
-rw-r--r--  Lib/lib2to3/btm_utils.py | 6
-rw-r--r--  Lib/lib2to3/fixer_util.py | 23
-rw-r--r--  Lib/lib2to3/fixes/fix_intern.py | 21
-rw-r--r--  Lib/lib2to3/fixes/fix_reload.py | 28
-rw-r--r--  Lib/lib2to3/pytree.py | 9
-rw-r--r--  Lib/lib2to3/tests/test_fixers.py | 59
-rw-r--r--  Lib/logging/__init__.py | 31
-rw-r--r--  Lib/logging/config.py | 72
-rw-r--r--  Lib/logging/handlers.py | 48
-rw-r--r--  Lib/lzma.py | 130
-rw-r--r--  Lib/mailbox.py | 12
-rw-r--r--  Lib/mimetypes.py | 6
-rw-r--r--  Lib/multiprocessing/__init__.py | 7
-rw-r--r--  Lib/multiprocessing/forking.py | 22
-rw-r--r--  Lib/multiprocessing/queues.py | 4
-rw-r--r--  Lib/ntpath.py | 6
-rw-r--r--  Lib/os.py | 32
-rw-r--r--  Lib/os2emxpath.py | 158
-rw-r--r--  Lib/pkgutil.py | 6
-rw-r--r--  Lib/plat-os2emx/IN.py | 82
-rw-r--r--  Lib/plat-os2emx/SOCKET.py | 106
-rw-r--r--  Lib/plat-os2emx/_emx_link.py | 79
-rw-r--r--  Lib/plat-os2emx/grp.py | 182
-rw-r--r--  Lib/plat-os2emx/pwd.py | 208
-rwxr-xr-x  Lib/plat-os2emx/regen | 7
-rwxr-xr-x  Lib/platform.py | 10
-rw-r--r--  Lib/poplib.py | 91
-rw-r--r--  Lib/pty.py | 1
-rwxr-xr-x  Lib/pydoc.py | 2
-rw-r--r--  Lib/random.py | 7
-rw-r--r--  Lib/shelve.py | 9
-rw-r--r--  Lib/shutil.py | 18
-rw-r--r--  Lib/site.py | 23
-rw-r--r--  Lib/smtplib.py | 8
-rw-r--r--  Lib/subprocess.py | 28
-rw-r--r--  Lib/sysconfig.py | 22
-rw-r--r--  Lib/tarfile.py | 11
-rw-r--r--  Lib/telnetlib.py | 4
-rw-r--r--  Lib/test/json_tests/test_indent.py | 4
-rw-r--r--  Lib/test/mock_socket.py | 8
-rwxr-xr-x  Lib/test/regrtest.py | 364
-rw-r--r--  Lib/test/support.py | 20
-rw-r--r--  Lib/test/test_abc.py | 13
-rw-r--r--  Lib/test/test_argparse.py | 32
-rw-r--r--  Lib/test/test_ast.py | 13
-rw-r--r--  Lib/test/test_builtin.py | 5
-rw-r--r--  Lib/test/test_bytes.py | 23
-rw-r--r--  Lib/test/test_bz2.py | 136
-rw-r--r--  Lib/test/test_cmd_line.py | 19
-rw-r--r--  Lib/test/test_cmd_line_script.py | 9
-rw-r--r--  Lib/test/test_complex.py | 2
-rw-r--r--  Lib/test/test_concurrent_futures.py | 22
-rw-r--r--  Lib/test/test_devpoll.py | 2
-rw-r--r--  Lib/test/test_doctest.py | 47
-rw-r--r--  Lib/test/test_enumerate.py | 10
-rw-r--r--  Lib/test/test_epoll.py | 7
-rw-r--r--  Lib/test/test_exceptions.py | 17
-rw-r--r--  Lib/test/test_fcntl.py | 15
-rw-r--r--  Lib/test/test_fileio.py | 2
-rw-r--r--  Lib/test/test_format.py | 16
-rw-r--r--  Lib/test/test_fractions.py | 32
-rw-r--r--  Lib/test/test_functools.py | 208
-rw-r--r--  Lib/test/test_gc.py | 26
-rw-r--r--  Lib/test/test_generators.py | 21
-rw-r--r--  Lib/test/test_genericpath.py | 24
-rw-r--r--  Lib/test/test_hashlib.py | 127
-rw-r--r--  Lib/test/test_httpservers.py | 11
-rw-r--r--  Lib/test/test_imp.py | 14
-rw-r--r--  Lib/test/test_importlib/import_/test_fromlist.py | 2
-rw-r--r--  Lib/test/test_importlib/source/test_abc_loader.py | 518
-rw-r--r--  Lib/test/test_importlib/test_abc.py | 18
-rw-r--r--  Lib/test/test_io.py | 10
-rw-r--r--  Lib/test/test_iterlen.py | 62
-rw-r--r--  Lib/test/test_itertools.py | 5
-rw-r--r--  Lib/test/test_logging.py | 165
-rw-r--r--  Lib/test/test_mailbox.py | 2
-rw-r--r--  Lib/test/test_mimetypes.py | 2
-rw-r--r--  Lib/test/test_mmap.py | 4
-rw-r--r--  Lib/test/test_multiprocessing.py | 172
-rw-r--r--  Lib/test/test_openpty.py | 2
-rw-r--r--  Lib/test/test_operator.py | 25
-rw-r--r--  Lib/test/test_os.py | 98
-rw-r--r--  Lib/test/test_pep277.py | 4
-rw-r--r--  Lib/test/test_poll.py | 21
-rw-r--r--  Lib/test/test_poplib.py | 157
-rw-r--r--  Lib/test/test_random.py | 33
-rw-r--r--  Lib/test/test_range.py | 2
-rw-r--r--  Lib/test/test_select.py | 4
-rw-r--r--  Lib/test/test_set.py | 2
-rw-r--r--  Lib/test/test_shelve.py | 13
-rw-r--r--  Lib/test/test_shutil.py | 17
-rw-r--r--  Lib/test/test_signal.py | 9
-rw-r--r--  Lib/test/test_site.py | 6
-rw-r--r--  Lib/test/test_slice.py | 114
-rw-r--r--  Lib/test/test_smtplib.py | 10
-rw-r--r--  Lib/test/test_socketserver.py | 16
-rw-r--r--  Lib/test/test_subprocess.py | 18
-rw-r--r--  Lib/test/test_sundry.py | 34
-rw-r--r--  Lib/test/test_syntax.py | 4
-rw-r--r--  Lib/test/test_sys.py | 31
-rw-r--r--  Lib/test/test_sysconfig.py | 44
-rw-r--r--  Lib/test/test_tarfile.py | 4
-rw-r--r--  Lib/test/test_tempfile.py | 8
-rw-r--r--  Lib/test/test_thread.py | 2
-rw-r--r--  Lib/test/test_threading.py | 6
-rw-r--r--  Lib/test/test_threadsignals.py | 2
-rw-r--r--  Lib/test/test_unicode.py | 30
-rw-r--r--  Lib/test/test_unicodedata.py | 2
-rw-r--r--  Lib/test/test_urllib.py | 92
-rw-r--r--  Lib/test/test_urllib2.py | 44
-rw-r--r--  Lib/test/test_urllib2net.py | 2
-rw-r--r--  Lib/test/test_venv.py | 59
-rw-r--r--  Lib/test/test_wait3.py | 12
-rw-r--r--  Lib/test/test_weakref.py | 140
-rw-r--r--  Lib/test/test_winreg.py | 12
-rw-r--r--  Lib/tkinter/__init__.py | 39
-rw-r--r--  Lib/tkinter/filedialog.py | 4
-rw-r--r--  Lib/traceback.py | 3
-rw-r--r--  Lib/unittest/loader.py | 3
-rw-r--r--  Lib/urllib/error.py | 8
-rw-r--r--  Lib/urllib/request.py | 75
-rw-r--r--  Lib/venv/__init__.py | 23
-rw-r--r--  Lib/venv/scripts/posix/activate.csh | 37
-rw-r--r--  Lib/venv/scripts/posix/activate.fish | 74
-rw-r--r--  Lib/weakref.py | 59
-rw-r--r--  Lib/webbrowser.py | 11
-rw-r--r--  Lib/xml/etree/ElementPath.py | 6
-rw-r--r--  Lib/xml/etree/ElementTree.py | 6
-rw-r--r--  Lib/zipfile.py | 1
182 files changed, 2792 insertions, 3375 deletions
diff --git a/Lib/_osx_support.py b/Lib/_osx_support.py
index b3aad56..1076778 100644
--- a/Lib/_osx_support.py
+++ b/Lib/_osx_support.py
@@ -38,7 +38,7 @@ def _find_executable(executable, path=None):
paths = path.split(os.pathsep)
base, ext = os.path.splitext(executable)
- if (sys.platform == 'win32' or os.name == 'os2') and (ext != '.exe'):
+ if (sys.platform == 'win32') and (ext != '.exe'):
executable = executable + '.exe'
if not os.path.isfile(executable):
diff --git a/Lib/abc.py b/Lib/abc.py
index 09778e8..e807895 100644
--- a/Lib/abc.py
+++ b/Lib/abc.py
@@ -226,3 +226,9 @@ class ABCMeta(type):
# No dice; update negative cache
cls._abc_negative_cache.add(subclass)
return False
+
+class ABC(metaclass=ABCMeta):
+ """Helper class that provides a standard way to create an ABC using
+ inheritance.
+ """
+ pass
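
A minimal sketch of how the new abc.ABC helper can be used in place of the
explicit metaclass spelling (the Serializer/JSONSerializer names below are
hypothetical):

    from abc import ABC, abstractmethod

    class Serializer(ABC):              # equivalent to metaclass=ABCMeta
        @abstractmethod
        def dumps(self, obj):
            """Return obj encoded as a str."""

    class JSONSerializer(Serializer):
        def dumps(self, obj):
            import json
            return json.dumps(obj)

    s = JSONSerializer()                # OK: all abstract methods implemented
    # Serializer() would raise TypeError: can't instantiate abstract class
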
diff --git a/Lib/argparse.py b/Lib/argparse.py
index f25b1b6..3b1a079 100644
--- a/Lib/argparse.py
+++ b/Lib/argparse.py
@@ -606,8 +606,7 @@ class HelpFormatter(object):
pass
else:
self._indent()
- for subaction in get_subactions():
- yield subaction
+ yield from get_subactions()
self._dedent()
def _split_lines(self, text, width):
@@ -1141,11 +1140,17 @@ class FileType(object):
same values as the builtin open() function.
- bufsize -- The file's desired buffer size. Accepts the same values as
the builtin open() function.
+ - encoding -- The file's encoding. Accepts the same values as the
+ the builtin open() function.
+ - errors -- A string indicating how encoding and decoding errors are to
+ be handled. Accepts the same value as the builtin open() function.
"""
- def __init__(self, mode='r', bufsize=-1):
+ def __init__(self, mode='r', bufsize=-1, encoding=None, errors=None):
self._mode = mode
self._bufsize = bufsize
+ self._encoding = encoding
+ self._errors = errors
def __call__(self, string):
# the special argument "-" means sys.std{in,out}
@@ -1160,14 +1165,18 @@ class FileType(object):
# all other arguments are used as file names
try:
- return open(string, self._mode, self._bufsize)
+ return open(string, self._mode, self._bufsize, self._encoding,
+ self._errors)
except IOError as e:
message = _("can't open '%s': %s")
raise ArgumentTypeError(message % (string, e))
def __repr__(self):
args = self._mode, self._bufsize
- args_str = ', '.join(repr(arg) for arg in args if arg != -1)
+ kwargs = [('encoding', self._encoding), ('errors', self._errors)]
+ args_str = ', '.join([repr(arg) for arg in args if arg != -1] +
+ ['%s=%r' % (kw, arg) for kw, arg in kwargs
+ if arg is not None])
return '%s(%s)' % (type(self).__name__, args_str)
# ===========================
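
A hedged usage sketch of the new FileType parameters (the argument and file
names are made up for illustration):

    import argparse

    parser = argparse.ArgumentParser()
    parser.add_argument('logfile',
                        type=argparse.FileType('r', encoding='latin-1',
                                               errors='replace'))
    args = parser.parse_args(['events.log'])
    # args.logfile is an open text file using the requested encoding/errors
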
diff --git a/Lib/ast.py b/Lib/ast.py
index 13f59f9..02c3b28 100644
--- a/Lib/ast.py
+++ b/Lib/ast.py
@@ -42,7 +42,6 @@ def literal_eval(node_or_string):
Python literal structures: strings, bytes, numbers, tuples, lists, dicts,
sets, booleans, and None.
"""
- _safe_names = {'None': None, 'True': True, 'False': False}
if isinstance(node_or_string, str):
node_or_string = parse(node_or_string, mode='eval')
if isinstance(node_or_string, Expression):
@@ -61,9 +60,8 @@ def literal_eval(node_or_string):
elif isinstance(node, Dict):
return dict((_convert(k), _convert(v)) for k, v
in zip(node.keys, node.values))
- elif isinstance(node, Name):
- if node.id in _safe_names:
- return _safe_names[node.id]
+ elif isinstance(node, NameConstant):
+ return node.value
elif isinstance(node, UnaryOp) and \
isinstance(node.op, (UAdd, USub)) and \
isinstance(node.operand, (Num, UnaryOp, BinOp)):
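
The behaviour of literal_eval for well-formed input is unchanged; True, False
and None are simply matched via the NameConstant node instead of a name table.
A quick sketch:

    import ast

    ast.literal_eval("{'enabled': True, 'retries': None, 'level': -3}")
    # -> {'enabled': True, 'retries': None, 'level': -3}
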
diff --git a/Lib/bz2.py b/Lib/bz2.py
index c307507..6e6a2b9 100644
--- a/Lib/bz2.py
+++ b/Lib/bz2.py
@@ -9,7 +9,6 @@ __all__ = ["BZ2File", "BZ2Compressor", "BZ2Decompressor",
__author__ = "Nadeem Vawda <nadeem.vawda@gmail.com>"
-import builtins
import io
import warnings
@@ -28,6 +27,8 @@ _MODE_WRITE = 3
_BUFFER_SIZE = 8192
+_builtin_open = open
+
class BZ2File(io.BufferedIOBase):
@@ -43,12 +44,13 @@ class BZ2File(io.BufferedIOBase):
def __init__(self, filename, mode="r", buffering=None, compresslevel=9):
"""Open a bzip2-compressed file.
- If filename is a str or bytes object, is gives the name of the file to
- be opened. Otherwise, it should be a file object, which will be used to
- read or write the compressed data.
+ If filename is a str or bytes object, it gives the name
+ of the file to be opened. Otherwise, it should be a file object,
+ which will be used to read or write the compressed data.
- mode can be 'r' for reading (default), 'w' for (over)writing, or 'a' for
- appending. These can equivalently be given as 'rb', 'wb', and 'ab'.
+ mode can be 'r' for reading (default), 'w' for (over)writing,
+ or 'a' for appending. These can equivalently be given as 'rb',
+ 'wb', and 'ab'.
buffering is ignored. Its use is deprecated.
@@ -90,10 +92,10 @@ class BZ2File(io.BufferedIOBase):
mode_code = _MODE_WRITE
self._compressor = BZ2Compressor(compresslevel)
else:
- raise ValueError("Invalid mode: {!r}".format(mode))
+ raise ValueError("Invalid mode: %r" % (mode,))
if isinstance(filename, (str, bytes)):
- self._fp = builtins.open(filename, mode)
+ self._fp = _builtin_open(filename, mode)
self._closefp = True
self._mode = mode_code
elif hasattr(filename, "read") or hasattr(filename, "write"):
@@ -189,15 +191,17 @@ class BZ2File(io.BufferedIOBase):
if not rawblock:
if self._decompressor.eof:
+ # End-of-stream marker and end of file. We're good.
self._mode = _MODE_READ_EOF
self._size = self._pos
return False
else:
+ # Problem - we were expecting more compressed data.
raise EOFError("Compressed file ended before the "
"end-of-stream marker was reached")
- # Continue to next stream.
if self._decompressor.eof:
+ # Continue to next stream.
self._decompressor = BZ2Decompressor()
self._buffer = self._decompressor.decompress(rawblock)
@@ -412,7 +416,7 @@ class BZ2File(io.BufferedIOBase):
self._read_all(return_data=False)
offset = self._size + offset
else:
- raise ValueError("Invalid value for whence: {}".format(whence))
+ raise ValueError("Invalid value for whence: %s" % (whence,))
# Make it so that offset is the number of bytes to skip forward.
if offset < self._pos:
@@ -436,20 +440,20 @@ def open(filename, mode="rb", compresslevel=9,
encoding=None, errors=None, newline=None):
"""Open a bzip2-compressed file in binary or text mode.
- The filename argument can be an actual filename (a str or bytes object), or
- an existing file object to read from or write to.
+ The filename argument can be an actual filename (a str or bytes
+ object), or an existing file object to read from or write to.
- The mode argument can be "r", "rb", "w", "wb", "a" or "ab" for binary mode,
- or "rt", "wt" or "at" for text mode. The default mode is "rb", and the
- default compresslevel is 9.
+ The mode argument can be "r", "rb", "w", "wb", "a" or "ab" for
+ binary mode, or "rt", "wt" or "at" for text mode. The default mode
+ is "rb", and the default compresslevel is 9.
- For binary mode, this function is equivalent to the BZ2File constructor:
- BZ2File(filename, mode, compresslevel). In this case, the encoding, errors
- and newline arguments must not be provided.
+ For binary mode, this function is equivalent to the BZ2File
+ constructor: BZ2File(filename, mode, compresslevel). In this case,
+ the encoding, errors and newline arguments must not be provided.
For text mode, a BZ2File object is created, and wrapped in an
- io.TextIOWrapper instance with the specified encoding, error handling
- behavior, and line ending(s).
+ io.TextIOWrapper instance with the specified encoding, error
+ handling behavior, and line ending(s).
"""
if "t" in mode:
diff --git a/Lib/collections/__init__.py b/Lib/collections/__init__.py
index e5f9599..53083e4 100644
--- a/Lib/collections/__init__.py
+++ b/Lib/collections/__init__.py
@@ -228,8 +228,7 @@ class OrderedDict(dict):
'''
if isinstance(other, OrderedDict):
- return len(self)==len(other) and \
- all(map(_eq, self.items(), other.items()))
+ return dict.__eq__(self, other) and all(map(_eq, self, other))
return dict.__eq__(self, other)
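
The order-sensitive comparison this hunk optimizes can be sketched as:

    from collections import OrderedDict

    a = OrderedDict([('x', 1), ('y', 2)])
    b = OrderedDict([('y', 2), ('x', 1)])

    a == b            # False: same items, different order
    a == dict(b)      # True: comparison with a plain dict ignores order
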
diff --git a/Lib/collections/abc.py b/Lib/collections/abc.py
index c23b7dd..18c07bf 100644
--- a/Lib/collections/abc.py
+++ b/Lib/collections/abc.py
@@ -430,8 +430,7 @@ class KeysView(MappingView, Set):
return key in self._mapping
def __iter__(self):
- for key in self._mapping:
- yield key
+ yield from self._mapping
KeysView.register(dict_keys)
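
Many of the hunks in this changeset apply the same mechanical rewrite:
delegating to a sub-iterator with "yield from" instead of an explicit loop.
A minimal equivalence sketch:

    def keys_old(mapping):
        for key in mapping:
            yield key

    def keys_new(mapping):
        yield from mapping    # same values; also forwards send()/throw()

    list(keys_old({'a': 1})) == list(keys_new({'a': 1}))   # True
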
diff --git a/Lib/compileall.py b/Lib/compileall.py
index d3cff6a..fd22fc3c 100644
--- a/Lib/compileall.py
+++ b/Lib/compileall.py
@@ -209,7 +209,7 @@ def main():
with (sys.stdin if args.flist=='-' else open(args.flist)) as f:
for line in f:
compile_dests.append(line.strip())
- except EnvironmentError:
+ except OSError:
print("Error reading file list {}".format(args.flist))
return False
diff --git a/Lib/concurrent/futures/_base.py b/Lib/concurrent/futures/_base.py
index 1e098be..883d993 100644
--- a/Lib/concurrent/futures/_base.py
+++ b/Lib/concurrent/futures/_base.py
@@ -198,8 +198,7 @@ def as_completed(fs, timeout=None):
waiter = _create_and_install_waiters(fs, _AS_COMPLETED)
try:
- for future in finished:
- yield future
+ yield from finished
while pending:
if timeout is None:
diff --git a/Lib/concurrent/futures/process.py b/Lib/concurrent/futures/process.py
index 04238a7..3c20b93 100644
--- a/Lib/concurrent/futures/process.py
+++ b/Lib/concurrent/futures/process.py
@@ -40,7 +40,7 @@ Local worker thread:
Process #1..n:
- reads _CallItems from "Call Q", executes the calls, and puts the resulting
- _ResultItems in "Request Q"
+ _ResultItems in "Result Q"
"""
__author__ = 'Brian Quinlan (brian@sweetapp.com)'
@@ -240,6 +240,8 @@ def _queue_management_worker(executor_reference,
"terminated abruptly while the future was "
"running or pending."
))
+ # Delete references to object. See issue16284
+ del work_item
pending_work_items.clear()
# Terminate remaining workers forcibly: the queues or their
# locks may be in a dirty state and block forever.
@@ -264,6 +266,8 @@ def _queue_management_worker(executor_reference,
work_item.future.set_exception(result_item.exception)
else:
work_item.future.set_result(result_item.result)
+ # Delete references to object. See issue16284
+ del work_item
# Check whether we should start shutting down.
executor = executor_reference()
# No more work items can be added if:
diff --git a/Lib/concurrent/futures/thread.py b/Lib/concurrent/futures/thread.py
index 95bb682..f9beb0f 100644
--- a/Lib/concurrent/futures/thread.py
+++ b/Lib/concurrent/futures/thread.py
@@ -63,6 +63,8 @@ def _worker(executor_reference, work_queue):
work_item = work_queue.get(block=True)
if work_item is not None:
work_item.run()
+ # Delete references to object. See issue16284
+ del work_item
continue
executor = executor_reference()
# Exit if:
diff --git a/Lib/dbm/__init__.py b/Lib/dbm/__init__.py
index 813a29d..3bff170 100644
--- a/Lib/dbm/__init__.py
+++ b/Lib/dbm/__init__.py
@@ -106,10 +106,8 @@ def whichdb(filename):
try:
f = io.open(filename + ".pag", "rb")
f.close()
- # dbm linked with gdbm on OS/2 doesn't have .dir file
- if not (ndbm.library == "GNU gdbm" and sys.platform == "os2emx"):
- f = io.open(filename + ".dir", "rb")
- f.close()
+ f = io.open(filename + ".dir", "rb")
+ f.close()
return "dbm.ndbm"
except IOError:
# some dbm emulations based on Berkeley DB generate a .db file
diff --git a/Lib/decimal.py b/Lib/decimal.py
index 746b34a..25f8fbc 100644
--- a/Lib/decimal.py
+++ b/Lib/decimal.py
@@ -703,8 +703,7 @@ class Decimal(object):
raise TypeError("Cannot convert %r to Decimal" % value)
- # @classmethod, but @decorator is not valid Python 2.3 syntax, so
- # don't use it (see notes on Py2.3 compatibility at top of file)
+ @classmethod
def from_float(cls, f):
"""Converts a float to a decimal number, exactly.
@@ -743,7 +742,6 @@ class Decimal(object):
return result
else:
return cls(result)
- from_float = classmethod(from_float)
def _isnan(self):
"""Returns whether the number is not actually one.
diff --git a/Lib/difflib.py b/Lib/difflib.py
index ae377d7..db5f82e 100644
--- a/Lib/difflib.py
+++ b/Lib/difflib.py
@@ -922,8 +922,7 @@ class Differ:
else:
raise ValueError('unknown tag %r' % (tag,))
- for line in g:
- yield line
+ yield from g
def _dump(self, tag, x, lo, hi):
"""Generate comparison results for a same-tagged range."""
@@ -942,8 +941,7 @@ class Differ:
second = self._dump('+', b, blo, bhi)
for g in first, second:
- for line in g:
- yield line
+ yield from g
def _fancy_replace(self, a, alo, ahi, b, blo, bhi):
r"""
@@ -997,8 +995,7 @@ class Differ:
# no non-identical "pretty close" pair
if eqi is None:
# no identical pair either -- treat it as a straight replace
- for line in self._plain_replace(a, alo, ahi, b, blo, bhi):
- yield line
+ yield from self._plain_replace(a, alo, ahi, b, blo, bhi)
return
# no close pair, but an identical pair -- synch up on that
best_i, best_j, best_ratio = eqi, eqj, 1.0
@@ -1010,8 +1007,7 @@ class Differ:
# identical
# pump out diffs from before the synch point
- for line in self._fancy_helper(a, alo, best_i, b, blo, best_j):
- yield line
+ yield from self._fancy_helper(a, alo, best_i, b, blo, best_j)
# do intraline marking on the synch pair
aelt, belt = a[best_i], b[best_j]
@@ -1033,15 +1029,13 @@ class Differ:
btags += ' ' * lb
else:
raise ValueError('unknown tag %r' % (tag,))
- for line in self._qformat(aelt, belt, atags, btags):
- yield line
+ yield from self._qformat(aelt, belt, atags, btags)
else:
# the synch pair is identical
yield ' ' + aelt
# pump out diffs from after the synch point
- for line in self._fancy_helper(a, best_i+1, ahi, b, best_j+1, bhi):
- yield line
+ yield from self._fancy_helper(a, best_i+1, ahi, b, best_j+1, bhi)
def _fancy_helper(self, a, alo, ahi, b, blo, bhi):
g = []
@@ -1053,8 +1047,7 @@ class Differ:
elif blo < bhi:
g = self._dump('+', b, blo, bhi)
- for line in g:
- yield line
+ yield from g
def _qformat(self, aline, bline, atags, btags):
r"""
diff --git a/Lib/distutils/__init__.py b/Lib/distutils/__init__.py
index 345ac4f..70a72b0 100644
--- a/Lib/distutils/__init__.py
+++ b/Lib/distutils/__init__.py
@@ -13,5 +13,5 @@ used from a setup script as
# Updated automatically by the Python release process.
#
#--start constants--
-__version__ = "3.3.0"
+__version__ = "3.4.0a0"
#--end constants--
diff --git a/Lib/distutils/ccompiler.py b/Lib/distutils/ccompiler.py
index c795c95..911e84d 100644
--- a/Lib/distutils/ccompiler.py
+++ b/Lib/distutils/ccompiler.py
@@ -351,7 +351,7 @@ class CCompiler:
return macros, objects, extra, pp_opts, build
def _get_cc_args(self, pp_opts, debug, before):
- # works for unixccompiler, emxccompiler, cygwinccompiler
+ # works for unixccompiler, cygwinccompiler
cc_args = pp_opts + ['-c']
if debug:
cc_args[:0] = ['-g']
@@ -926,7 +926,6 @@ _default_compilers = (
# on a cygwin built python we can use gcc like an ordinary UNIXish
# compiler
('cygwin.*', 'unix'),
- ('os2emx', 'emx'),
# OS name mappings
('posix', 'unix'),
@@ -968,8 +967,6 @@ compiler_class = { 'unix': ('unixccompiler', 'UnixCCompiler',
"Mingw32 port of GNU C Compiler for Win32"),
'bcpp': ('bcppcompiler', 'BCPPCompiler',
"Borland C++ Compiler"),
- 'emx': ('emxccompiler', 'EMXCCompiler',
- "EMX port of GNU C Compiler for OS/2"),
}
def show_compilers():
diff --git a/Lib/distutils/command/bdist.py b/Lib/distutils/command/bdist.py
index c5188eb..38b169a 100644
--- a/Lib/distutils/command/bdist.py
+++ b/Lib/distutils/command/bdist.py
@@ -52,8 +52,7 @@ class bdist(Command):
# This won't do in reality: will need to distinguish RPM-ish Linux,
# Debian-ish Linux, Solaris, FreeBSD, ..., Windows, Mac OS.
default_format = {'posix': 'gztar',
- 'nt': 'zip',
- 'os2': 'zip'}
+ 'nt': 'zip'}
# Establish the preferred order (for the --help-formats option).
format_commands = ['rpm', 'gztar', 'bztar', 'ztar', 'tar',
diff --git a/Lib/distutils/command/bdist_dumb.py b/Lib/distutils/command/bdist_dumb.py
index 1ab09d1..eefdfea 100644
--- a/Lib/distutils/command/bdist_dumb.py
+++ b/Lib/distutils/command/bdist_dumb.py
@@ -38,8 +38,7 @@ class bdist_dumb(Command):
boolean_options = ['keep-temp', 'skip-build', 'relative']
default_format = { 'posix': 'gztar',
- 'nt': 'zip',
- 'os2': 'zip' }
+ 'nt': 'zip' }
def initialize_options(self):
self.bdist_dir = None
@@ -85,11 +84,6 @@ class bdist_dumb(Command):
archive_basename = "%s.%s" % (self.distribution.get_fullname(),
self.plat_name)
- # OS/2 objects to any ":" characters in a filename (such as when
- # a timestamp is used in a version) so change them to hyphens.
- if os.name == "os2":
- archive_basename = archive_basename.replace(":", "-")
-
pseudoinstall_root = os.path.join(self.dist_dir, archive_basename)
if not self.relative:
archive_root = self.bdist_dir
diff --git a/Lib/distutils/command/build_ext.py b/Lib/distutils/command/build_ext.py
index b1d951e..f7c71b3 100644
--- a/Lib/distutils/command/build_ext.py
+++ b/Lib/distutils/command/build_ext.py
@@ -230,11 +230,6 @@ class build_ext(Command):
self.library_dirs.append(os.path.join(sys.exec_prefix,
'PC', 'VC6'))
- # OS/2 (EMX) doesn't support Debug vs Release builds, but has the
- # import libraries in its "Config" subdirectory
- if os.name == 'os2':
- self.library_dirs.append(os.path.join(sys.exec_prefix, 'Config'))
-
# for extensions under Cygwin and AtheOS Python's library directory must be
# appended to library_dirs
if sys.platform[:6] == 'cygwin' or sys.platform[:6] == 'atheos':
@@ -620,9 +615,6 @@ class build_ext(Command):
return fn
else:
return "swig.exe"
- elif os.name == "os2":
- # assume swig available in the PATH.
- return "swig.exe"
else:
raise DistutilsPlatformError(
"I don't know how to find (much less run) SWIG "
@@ -673,9 +665,6 @@ class build_ext(Command):
"""
from distutils.sysconfig import get_config_var
ext_path = ext_name.split('.')
- # OS/2 has an 8 character module (extension) limit :-(
- if os.name == "os2":
- ext_path[len(ext_path) - 1] = ext_path[len(ext_path) - 1][:8]
# extensions in debug_mode are named 'module_d.pyd' under windows
so_ext = get_config_var('SO')
if os.name == 'nt' and self.debug:
@@ -696,7 +685,7 @@ class build_ext(Command):
def get_libraries(self, ext):
"""Return the list of libraries to link against when building a
shared extension. On most platforms, this is just 'ext.libraries';
- on Windows and OS/2, we add the Python library (eg. python20.dll).
+ on Windows, we add the Python library (eg. python20.dll).
"""
# The python library is always needed on Windows. For MSVC, this
# is redundant, since the library is mentioned in a pragma in
@@ -716,19 +705,6 @@ class build_ext(Command):
return ext.libraries + [pythonlib]
else:
return ext.libraries
- elif sys.platform == "os2emx":
- # EMX/GCC requires the python library explicitly, and I
- # believe VACPP does as well (though not confirmed) - AIM Apr01
- template = "python%d%d"
- # debug versions of the main DLL aren't supported, at least
- # not at this time - AIM Apr01
- #if self.debug:
- # template = template + '_d'
- pythonlib = (template %
- (sys.hexversion >> 24, (sys.hexversion >> 16) & 0xff))
- # don't extend ext.libraries, it may be shared with other
- # extensions, it is a reference to the original list
- return ext.libraries + [pythonlib]
elif sys.platform[:6] == "cygwin":
template = "python%d.%d"
pythonlib = (template %
diff --git a/Lib/distutils/command/install.py b/Lib/distutils/command/install.py
index 0161898..04326a1 100644
--- a/Lib/distutils/command/install.py
+++ b/Lib/distutils/command/install.py
@@ -58,13 +58,6 @@ INSTALL_SCHEMES = {
'data' : '$base',
},
'nt': WINDOWS_SCHEME,
- 'os2': {
- 'purelib': '$base/Lib/site-packages',
- 'platlib': '$base/Lib/site-packages',
- 'headers': '$base/Include/$dist_name',
- 'scripts': '$base/Scripts',
- 'data' : '$base',
- },
}
# user site schemes
@@ -86,14 +79,6 @@ if HAS_USER_SITE:
'data' : '$userbase',
}
- INSTALL_SCHEMES['os2_home'] = {
- 'purelib': '$usersite',
- 'platlib': '$usersite',
- 'headers': '$userbase/include/python$py_version_short/$dist_name',
- 'scripts': '$userbase/bin',
- 'data' : '$userbase',
- }
-
# The keys to an installation scheme; if any new types of files are to be
# installed, be sure to add an entry to every installation scheme above,
# and to SCHEME_KEYS here.
diff --git a/Lib/distutils/emxccompiler.py b/Lib/distutils/emxccompiler.py
deleted file mode 100644
index 3675f8d..0000000
--- a/Lib/distutils/emxccompiler.py
+++ /dev/null
@@ -1,315 +0,0 @@
-"""distutils.emxccompiler
-
-Provides the EMXCCompiler class, a subclass of UnixCCompiler that
-handles the EMX port of the GNU C compiler to OS/2.
-"""
-
-# issues:
-#
-# * OS/2 insists that DLLs can have names no longer than 8 characters
-# We put export_symbols in a def-file, as though the DLL can have
-# an arbitrary length name, but truncate the output filename.
-#
-# * only use OMF objects and use LINK386 as the linker (-Zomf)
-#
-# * always build for multithreading (-Zmt) as the accompanying OS/2 port
-# of Python is only distributed with threads enabled.
-#
-# tested configurations:
-#
-# * EMX gcc 2.81/EMX 0.9d fix03
-
-import os,sys,copy
-from distutils.ccompiler import gen_preprocess_options, gen_lib_options
-from distutils.unixccompiler import UnixCCompiler
-from distutils.file_util import write_file
-from distutils.errors import DistutilsExecError, CompileError, UnknownFileError
-from distutils import log
-
-class EMXCCompiler (UnixCCompiler):
-
- compiler_type = 'emx'
- obj_extension = ".obj"
- static_lib_extension = ".lib"
- shared_lib_extension = ".dll"
- static_lib_format = "%s%s"
- shared_lib_format = "%s%s"
- res_extension = ".res" # compiled resource file
- exe_extension = ".exe"
-
- def __init__ (self,
- verbose=0,
- dry_run=0,
- force=0):
-
- UnixCCompiler.__init__ (self, verbose, dry_run, force)
-
- (status, details) = check_config_h()
- self.debug_print("Python's GCC status: %s (details: %s)" %
- (status, details))
- if status is not CONFIG_H_OK:
- self.warn(
- "Python's pyconfig.h doesn't seem to support your compiler. " +
- ("Reason: %s." % details) +
- "Compiling may fail because of undefined preprocessor macros.")
-
- (self.gcc_version, self.ld_version) = \
- get_versions()
- self.debug_print(self.compiler_type + ": gcc %s, ld %s\n" %
- (self.gcc_version,
- self.ld_version) )
-
- # Hard-code GCC because that's what this is all about.
- # XXX optimization, warnings etc. should be customizable.
- self.set_executables(compiler='gcc -Zomf -Zmt -O3 -fomit-frame-pointer -mprobe -Wall',
- compiler_so='gcc -Zomf -Zmt -O3 -fomit-frame-pointer -mprobe -Wall',
- linker_exe='gcc -Zomf -Zmt -Zcrtdll',
- linker_so='gcc -Zomf -Zmt -Zcrtdll -Zdll')
-
- # want the gcc library statically linked (so that we don't have
- # to distribute a version dependent on the compiler we have)
- self.dll_libraries=["gcc"]
-
- # __init__ ()
-
- def _compile(self, obj, src, ext, cc_args, extra_postargs, pp_opts):
- if ext == '.rc':
- # gcc requires '.rc' compiled to binary ('.res') files !!!
- try:
- self.spawn(["rc", "-r", src])
- except DistutilsExecError as msg:
- raise CompileError(msg)
- else: # for other files use the C-compiler
- try:
- self.spawn(self.compiler_so + cc_args + [src, '-o', obj] +
- extra_postargs)
- except DistutilsExecError as msg:
- raise CompileError(msg)
-
- def link (self,
- target_desc,
- objects,
- output_filename,
- output_dir=None,
- libraries=None,
- library_dirs=None,
- runtime_library_dirs=None,
- export_symbols=None,
- debug=0,
- extra_preargs=None,
- extra_postargs=None,
- build_temp=None,
- target_lang=None):
-
- # use separate copies, so we can modify the lists
- extra_preargs = copy.copy(extra_preargs or [])
- libraries = copy.copy(libraries or [])
- objects = copy.copy(objects or [])
-
- # Additional libraries
- libraries.extend(self.dll_libraries)
-
- # handle export symbols by creating a def-file
- # with executables this only works with gcc/ld as linker
- if ((export_symbols is not None) and
- (target_desc != self.EXECUTABLE)):
- # (The linker doesn't do anything if output is up-to-date.
- # So it would probably better to check if we really need this,
- # but for this we had to insert some unchanged parts of
- # UnixCCompiler, and this is not what we want.)
-
- # we want to put some files in the same directory as the
- # object files are, build_temp doesn't help much
- # where are the object files
- temp_dir = os.path.dirname(objects[0])
- # name of dll to give the helper files the same base name
- (dll_name, dll_extension) = os.path.splitext(
- os.path.basename(output_filename))
-
- # generate the filenames for these files
- def_file = os.path.join(temp_dir, dll_name + ".def")
-
- # Generate .def file
- contents = [
- "LIBRARY %s INITINSTANCE TERMINSTANCE" % \
- os.path.splitext(os.path.basename(output_filename))[0],
- "DATA MULTIPLE NONSHARED",
- "EXPORTS"]
- for sym in export_symbols:
- contents.append(' "%s"' % sym)
- self.execute(write_file, (def_file, contents),
- "writing %s" % def_file)
-
- # next add options for def-file and to creating import libraries
- # for gcc/ld the def-file is specified as any other object files
- objects.append(def_file)
-
- #end: if ((export_symbols is not None) and
- # (target_desc != self.EXECUTABLE or self.linker_dll == "gcc")):
-
- # who wants symbols and a many times larger output file
- # should explicitly switch the debug mode on
- # otherwise we let dllwrap/ld strip the output file
- # (On my machine: 10KB < stripped_file < ??100KB
- # unstripped_file = stripped_file + XXX KB
- # ( XXX=254 for a typical python extension))
- if not debug:
- extra_preargs.append("-s")
-
- UnixCCompiler.link(self,
- target_desc,
- objects,
- output_filename,
- output_dir,
- libraries,
- library_dirs,
- runtime_library_dirs,
- None, # export_symbols, we do this in our def-file
- debug,
- extra_preargs,
- extra_postargs,
- build_temp,
- target_lang)
-
- # link ()
-
- # -- Miscellaneous methods -----------------------------------------
-
- # override the object_filenames method from CCompiler to
- # support rc and res-files
- def object_filenames (self,
- source_filenames,
- strip_dir=0,
- output_dir=''):
- if output_dir is None: output_dir = ''
- obj_names = []
- for src_name in source_filenames:
- # use normcase to make sure '.rc' is really '.rc' and not '.RC'
- (base, ext) = os.path.splitext (os.path.normcase(src_name))
- if ext not in (self.src_extensions + ['.rc']):
- raise UnknownFileError("unknown file type '%s' (from '%s')" % \
- (ext, src_name))
- if strip_dir:
- base = os.path.basename (base)
- if ext == '.rc':
- # these need to be compiled to object files
- obj_names.append (os.path.join (output_dir,
- base + self.res_extension))
- else:
- obj_names.append (os.path.join (output_dir,
- base + self.obj_extension))
- return obj_names
-
- # object_filenames ()
-
- # override the find_library_file method from UnixCCompiler
- # to deal with file naming/searching differences
- def find_library_file(self, dirs, lib, debug=0):
- shortlib = '%s.lib' % lib
- longlib = 'lib%s.lib' % lib # this form very rare
-
- # get EMX's default library directory search path
- try:
- emx_dirs = os.environ['LIBRARY_PATH'].split(';')
- except KeyError:
- emx_dirs = []
-
- for dir in dirs + emx_dirs:
- shortlibp = os.path.join(dir, shortlib)
- longlibp = os.path.join(dir, longlib)
- if os.path.exists(shortlibp):
- return shortlibp
- elif os.path.exists(longlibp):
- return longlibp
-
- # Oops, didn't find it in *any* of 'dirs'
- return None
-
-# class EMXCCompiler
-
-
-# Because these compilers aren't configured in Python's pyconfig.h file by
-# default, we should at least warn the user if he is using a unmodified
-# version.
-
-CONFIG_H_OK = "ok"
-CONFIG_H_NOTOK = "not ok"
-CONFIG_H_UNCERTAIN = "uncertain"
-
-def check_config_h():
-
- """Check if the current Python installation (specifically, pyconfig.h)
- appears amenable to building extensions with GCC. Returns a tuple
- (status, details), where 'status' is one of the following constants:
- CONFIG_H_OK
- all is well, go ahead and compile
- CONFIG_H_NOTOK
- doesn't look good
- CONFIG_H_UNCERTAIN
- not sure -- unable to read pyconfig.h
- 'details' is a human-readable string explaining the situation.
-
- Note there are two ways to conclude "OK": either 'sys.version' contains
- the string "GCC" (implying that this Python was built with GCC), or the
- installed "pyconfig.h" contains the string "__GNUC__".
- """
-
- # XXX since this function also checks sys.version, it's not strictly a
- # "pyconfig.h" check -- should probably be renamed...
-
- from distutils import sysconfig
- # if sys.version contains GCC then python was compiled with
- # GCC, and the pyconfig.h file should be OK
- if sys.version.find("GCC") >= 0:
- return (CONFIG_H_OK, "sys.version mentions 'GCC'")
-
- fn = sysconfig.get_config_h_filename()
- try:
- # It would probably better to read single lines to search.
- # But we do this only once, and it is fast enough
- f = open(fn)
- try:
- s = f.read()
- finally:
- f.close()
-
- except IOError as exc:
- # if we can't read this file, we cannot say it is wrong
- # the compiler will complain later about this file as missing
- return (CONFIG_H_UNCERTAIN,
- "couldn't read '%s': %s" % (fn, exc.strerror))
-
- else:
- # "pyconfig.h" contains an "#ifdef __GNUC__" or something similar
- if s.find("__GNUC__") >= 0:
- return (CONFIG_H_OK, "'%s' mentions '__GNUC__'" % fn)
- else:
- return (CONFIG_H_NOTOK, "'%s' does not mention '__GNUC__'" % fn)
-
-
-def get_versions():
- """ Try to find out the versions of gcc and ld.
- If not possible it returns None for it.
- """
- from distutils.version import StrictVersion
- from distutils.spawn import find_executable
- import re
-
- gcc_exe = find_executable('gcc')
- if gcc_exe:
- out = os.popen(gcc_exe + ' -dumpversion','r')
- try:
- out_string = out.read()
- finally:
- out.close()
- result = re.search('(\d+\.\d+\.\d+)', out_string, re.ASCII)
- if result:
- gcc_version = StrictVersion(result.group(1))
- else:
- gcc_version = None
- else:
- gcc_version = None
- # EMX ld has no way of reporting version number, and we use GCC
- # anyway - so we can link OMF DLLs
- ld_version = None
- return (gcc_version, ld_version)
diff --git a/Lib/distutils/spawn.py b/Lib/distutils/spawn.py
index f58c55f..b1c5a44 100644
--- a/Lib/distutils/spawn.py
+++ b/Lib/distutils/spawn.py
@@ -32,8 +32,6 @@ def spawn(cmd, search_path=1, verbose=0, dry_run=0):
_spawn_posix(cmd, search_path, dry_run=dry_run)
elif os.name == 'nt':
_spawn_nt(cmd, search_path, dry_run=dry_run)
- elif os.name == 'os2':
- _spawn_os2(cmd, search_path, dry_run=dry_run)
else:
raise DistutilsPlatformError(
"don't know how to spawn programs on platform '%s'" % os.name)
@@ -74,26 +72,6 @@ def _spawn_nt(cmd, search_path=1, verbose=0, dry_run=0):
raise DistutilsExecError(
"command '%s' failed with exit status %d" % (cmd[0], rc))
-def _spawn_os2(cmd, search_path=1, verbose=0, dry_run=0):
- executable = cmd[0]
- if search_path:
- # either we find one or it stays the same
- executable = find_executable(executable) or executable
- log.info(' '.join([executable] + cmd[1:]))
- if not dry_run:
- # spawnv for OS/2 EMX requires a full path to the .exe
- try:
- rc = os.spawnv(os.P_WAIT, executable, cmd)
- except OSError as exc:
- # this seems to happen when the command isn't found
- raise DistutilsExecError(
- "command '%s' failed: %s" % (cmd[0], exc.args[-1]))
- if rc != 0:
- # and this reflects the command running but failing
- log.debug("command '%s' failed with exit status %d" % (cmd[0], rc))
- raise DistutilsExecError(
- "command '%s' failed with exit status %d" % (cmd[0], rc))
-
if sys.platform == 'darwin':
from distutils import sysconfig
_cfg_target = None
@@ -180,7 +158,7 @@ def find_executable(executable, path=None):
paths = path.split(os.pathsep)
base, ext = os.path.splitext(executable)
- if (sys.platform == 'win32' or os.name == 'os2') and (ext != '.exe'):
+ if (sys.platform == 'win32') and (ext != '.exe'):
executable = executable + '.exe'
if not os.path.isfile(executable):
diff --git a/Lib/distutils/sysconfig.py b/Lib/distutils/sysconfig.py
index 317640c..3b6549f 100644
--- a/Lib/distutils/sysconfig.py
+++ b/Lib/distutils/sysconfig.py
@@ -110,8 +110,6 @@ def get_python_inc(plat_specific=0, prefix=None):
return os.path.join(prefix, "include", python_dir)
elif os.name == "nt":
return os.path.join(prefix, "include")
- elif os.name == "os2":
- return os.path.join(prefix, "Include")
else:
raise DistutilsPlatformError(
"I don't know where Python installs its C header files "
@@ -153,11 +151,6 @@ def get_python_lib(plat_specific=0, standard_lib=0, prefix=None):
return prefix
else:
return os.path.join(prefix, "Lib", "site-packages")
- elif os.name == "os2":
- if standard_lib:
- return os.path.join(prefix, "Lib")
- else:
- return os.path.join(prefix, "Lib", "site-packages")
else:
raise DistutilsPlatformError(
"I don't know where Python installs its library "
@@ -492,23 +485,6 @@ def _init_nt():
_config_vars = g
-def _init_os2():
- """Initialize the module as appropriate for OS/2"""
- g = {}
- # set basic install directories
- g['LIBDEST'] = get_python_lib(plat_specific=0, standard_lib=1)
- g['BINLIBDEST'] = get_python_lib(plat_specific=1, standard_lib=1)
-
- # XXX hmmm.. a normal install puts include files here
- g['INCLUDEPY'] = get_python_inc(plat_specific=0)
-
- g['SO'] = '.pyd'
- g['EXE'] = ".exe"
-
- global _config_vars
- _config_vars = g
-
-
def get_config_vars(*args):
"""With no arguments, return a dictionary of all configuration
variables relevant for the current platform. Generally this includes
diff --git a/Lib/distutils/tests/test_bdist_dumb.py b/Lib/distutils/tests/test_bdist_dumb.py
index 1037d82..761051c 100644
--- a/Lib/distutils/tests/test_bdist_dumb.py
+++ b/Lib/distutils/tests/test_bdist_dumb.py
@@ -75,8 +75,6 @@ class BuildDumbTestCase(support.TempdirManager,
# see what we have
dist_created = os.listdir(os.path.join(pkg_dir, 'dist'))
base = "%s.%s.zip" % (dist.get_fullname(), cmd.plat_name)
- if os.name == 'os2':
- base = base.replace(':', '-')
self.assertEqual(dist_created, [base])
diff --git a/Lib/distutils/tests/test_install.py b/Lib/distutils/tests/test_install.py
index cb2e1f2..47d630c 100644
--- a/Lib/distutils/tests/test_install.py
+++ b/Lib/distutils/tests/test_install.py
@@ -94,7 +94,7 @@ class InstallTestCase(support.TempdirManager,
self.addCleanup(cleanup)
- for key in ('nt_user', 'unix_user', 'os2_home'):
+ for key in ('nt_user', 'unix_user'):
self.assertIn(key, INSTALL_SCHEMES)
dist = Distribution({'name': 'xx'})
diff --git a/Lib/distutils/tests/test_util.py b/Lib/distutils/tests/test_util.py
index eac9b51..b73cfe9 100644
--- a/Lib/distutils/tests/test_util.py
+++ b/Lib/distutils/tests/test_util.py
@@ -236,7 +236,7 @@ class UtilTestCase(support.EnvironGuard, unittest.TestCase):
self.assertRaises(DistutilsPlatformError,
change_root, 'c:\\root', 'its\\here')
- # XXX platforms to be covered: os2, mac
+ # XXX platforms to be covered: mac
def test_check_environ(self):
util._environ_checked = 0
diff --git a/Lib/distutils/util.py b/Lib/distutils/util.py
index 67d8166..ba09ac4 100644
--- a/Lib/distutils/util.py
+++ b/Lib/distutils/util.py
@@ -154,12 +154,6 @@ def change_root (new_root, pathname):
path = path[1:]
return os.path.join(new_root, path)
- elif os.name == 'os2':
- (drive, path) = os.path.splitdrive(pathname)
- if path[0] == os.sep:
- path = path[1:]
- return os.path.join(new_root, path)
-
else:
raise DistutilsPlatformError("nothing known about platform '%s'" % os.name)
diff --git a/Lib/doctest.py b/Lib/doctest.py
index 3af05fb..16a732d 100644
--- a/Lib/doctest.py
+++ b/Lib/doctest.py
@@ -62,6 +62,7 @@ __all__ = [
'REPORT_NDIFF',
'REPORT_ONLY_FIRST_FAILURE',
'REPORTING_FLAGS',
+ 'FAIL_FAST',
# 1. Utility Functions
# 2. Example & DocTest
'Example',
@@ -150,11 +151,13 @@ REPORT_UDIFF = register_optionflag('REPORT_UDIFF')
REPORT_CDIFF = register_optionflag('REPORT_CDIFF')
REPORT_NDIFF = register_optionflag('REPORT_NDIFF')
REPORT_ONLY_FIRST_FAILURE = register_optionflag('REPORT_ONLY_FIRST_FAILURE')
+FAIL_FAST = register_optionflag('FAIL_FAST')
REPORTING_FLAGS = (REPORT_UDIFF |
REPORT_CDIFF |
REPORT_NDIFF |
- REPORT_ONLY_FIRST_FAILURE)
+ REPORT_ONLY_FIRST_FAILURE |
+ FAIL_FAST)
# Special string markers for use in `want` strings:
BLANKLINE_MARKER = '<BLANKLINE>'
@@ -1342,6 +1345,9 @@ class DocTestRunner:
else:
assert False, ("unknown outcome", outcome)
+ if failures and self.optionflags & FAIL_FAST:
+ break
+
# Restore the option flags (in case they were modified)
self.optionflags = original_optionflags
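
A hedged sketch of the new FAIL_FAST flag in use:

    import doctest

    # Stop at the first failing example instead of reporting all failures.
    doctest.testmod(optionflags=doctest.FAIL_FAST)
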
diff --git a/Lib/email/_header_value_parser.py b/Lib/email/_header_value_parser.py
index 1924ed1..b596462 100644
--- a/Lib/email/_header_value_parser.py
+++ b/Lib/email/_header_value_parser.py
@@ -367,8 +367,7 @@ class TokenList(list):
yield (indent + ' !! invalid element in token '
'list: {!r}'.format(token))
else:
- for line in token._pp(indent+' '):
- yield line
+ yield from token._pp(indent+' ')
if self.defects:
extra = ' Defects: {}'.format(self.defects)
else:
diff --git a/Lib/email/iterators.py b/Lib/email/iterators.py
index 3adc4a0..b5502ee 100644
--- a/Lib/email/iterators.py
+++ b/Lib/email/iterators.py
@@ -26,8 +26,7 @@ def walk(self):
yield self
if self.is_multipart():
for subpart in self.get_payload():
- for subsubpart in subpart.walk():
- yield subsubpart
+ yield from subpart.walk()
@@ -40,8 +39,7 @@ def body_line_iterator(msg, decode=False):
for subpart in msg.walk():
payload = subpart.get_payload(decode=decode)
if isinstance(payload, str):
- for line in StringIO(payload):
- yield line
+ yield from StringIO(payload)
def typed_subpart_iterator(msg, maintype='text', subtype=None):
diff --git a/Lib/filecmp.py b/Lib/filecmp.py
index f5cea1d..5bbd0e8 100644
--- a/Lib/filecmp.py
+++ b/Lib/filecmp.py
@@ -147,12 +147,12 @@ class dircmp:
ok = 1
try:
a_stat = os.stat(a_path)
- except os.error as why:
+ except OSError as why:
# print('Can\'t stat', a_path, ':', why.args[1])
ok = 0
try:
b_stat = os.stat(b_path)
- except os.error as why:
+ except OSError as why:
# print('Can\'t stat', b_path, ':', why.args[1])
ok = 0
@@ -268,7 +268,7 @@ def cmpfiles(a, b, common, shallow=True):
def _cmp(a, b, sh, abs=abs, cmp=cmp):
try:
return not abs(cmp(a, b, sh))
- except os.error:
+ except OSError:
return 2
diff --git a/Lib/fractions.py b/Lib/fractions.py
index 8be52d2..79e83ff 100644
--- a/Lib/fractions.py
+++ b/Lib/fractions.py
@@ -182,8 +182,10 @@ class Fraction(numbers.Rational):
elif not isinstance(f, float):
raise TypeError("%s.from_float() only takes floats, not %r (%s)" %
(cls.__name__, f, type(f).__name__))
- if math.isnan(f) or math.isinf(f):
- raise TypeError("Cannot convert %r to %s." % (f, cls.__name__))
+ if math.isnan(f):
+ raise ValueError("Cannot convert %r to %s." % (f, cls.__name__))
+ if math.isinf(f):
+ raise OverflowError("Cannot convert %r to %s." % (f, cls.__name__))
return cls(*f.as_integer_ratio())
@classmethod
@@ -196,9 +198,11 @@ class Fraction(numbers.Rational):
raise TypeError(
"%s.from_decimal() only takes Decimals, not %r (%s)" %
(cls.__name__, dec, type(dec).__name__))
- if not dec.is_finite():
- # Catches infinities and nans.
- raise TypeError("Cannot convert %s to %s." % (dec, cls.__name__))
+ if dec.is_infinite():
+ raise OverflowError(
+ "Cannot convert %s to %s." % (dec, cls.__name__))
+ if dec.is_nan():
+ raise ValueError("Cannot convert %s to %s." % (dec, cls.__name__))
sign, digits, exp = dec.as_tuple()
digits = int(''.join(map(str, digits)))
if sign:
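
The exception changes can be sketched as:

    from fractions import Fraction

    Fraction.from_float(0.5)            # Fraction(1, 2)
    Fraction.from_float(float('nan'))   # now raises ValueError (was TypeError)
    Fraction.from_float(float('inf'))   # now raises OverflowError (was TypeError)
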
diff --git a/Lib/functools.py b/Lib/functools.py
index 226a46e..60d2cf1 100644
--- a/Lib/functools.py
+++ b/Lib/functools.py
@@ -11,7 +11,10 @@
__all__ = ['update_wrapper', 'wraps', 'WRAPPER_ASSIGNMENTS', 'WRAPPER_UPDATES',
'total_ordering', 'cmp_to_key', 'lru_cache', 'reduce', 'partial']
-from _functools import partial, reduce
+try:
+ from _functools import reduce
+except ImportError:
+ pass
from collections import namedtuple
try:
from _thread import allocate_lock as Lock
@@ -137,6 +140,29 @@ except ImportError:
################################################################################
+### partial() argument application
+################################################################################
+
+def partial(func, *args, **keywords):
+ """new function with partial application of the given arguments
+ and keywords.
+ """
+ def newfunc(*fargs, **fkeywords):
+ newkeywords = keywords.copy()
+ newkeywords.update(fkeywords)
+ return func(*(args + fargs), **newkeywords)
+ newfunc.func = func
+ newfunc.args = args
+ newfunc.keywords = keywords
+ return newfunc
+
+try:
+ from _functools import partial
+except ImportError:
+ pass
+
+
+################################################################################
### LRU Cache function decorator
################################################################################
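
The pure-Python fallback mirrors the core behaviour of the C implementation;
a quick sketch of what either version provides (power/square are hypothetical
names):

    from functools import partial

    def power(base, exponent):
        return base ** exponent

    square = partial(power, exponent=2)
    square(5)                                   # 25
    square.func, square.args, square.keywords   # (power, (), {'exponent': 2})
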
diff --git a/Lib/getpass.py b/Lib/getpass.py
index 0044742..53689e9 100644
--- a/Lib/getpass.py
+++ b/Lib/getpass.py
@@ -47,7 +47,7 @@ def unix_getpass(prompt='Password: ', stream=None):
input = tty
if not stream:
stream = tty
- except EnvironmentError as e:
+ except OSError as e:
# If that fails, see if stdin can be controlled.
try:
fd = sys.stdin.fileno()
diff --git a/Lib/glob.py b/Lib/glob.py
index 58888d6..3c9c30c 100644
--- a/Lib/glob.py
+++ b/Lib/glob.py
@@ -26,8 +26,7 @@ def iglob(pathname):
return
dirname, basename = os.path.split(pathname)
if not dirname:
- for name in glob1(None, basename):
- yield name
+ yield from glob1(None, basename)
return
# `os.path.split()` returns the argument itself as a dirname if it is a
# drive or UNC path. Prevent an infinite recursion if a drive or UNC path
diff --git a/Lib/hashlib.py b/Lib/hashlib.py
index 21454c7..a1bd8b2 100644
--- a/Lib/hashlib.py
+++ b/Lib/hashlib.py
@@ -54,7 +54,8 @@ More condensed:
# This tuple and __get_builtin_constructor() must be modified if a new
# always available algorithm is added.
-__always_supported = ('md5', 'sha1', 'sha224', 'sha256', 'sha384', 'sha512')
+__always_supported = ('md5', 'sha1', 'sha224', 'sha256', 'sha384', 'sha512',
+ 'sha3_224', 'sha3_256', 'sha3_384', 'sha3_512')
algorithms_guaranteed = set(__always_supported)
algorithms_available = set(__always_supported)
@@ -85,6 +86,18 @@ def __get_builtin_constructor(name):
return _sha512.sha512
elif bs == '384':
return _sha512.sha384
+ elif name in {'sha3_224', 'sha3_256', 'sha3_384', 'sha3_512',
+ 'SHA3_224', 'SHA3_256', 'SHA3_384', 'SHA3_512'}:
+ import _sha3
+ bs = name[5:]
+ if bs == '224':
+ return _sha3.sha3_224
+ elif bs == '256':
+ return _sha3.sha3_256
+ elif bs == '384':
+ return _sha3.sha3_384
+ elif bs == '512':
+ return _sha3.sha3_512
except ImportError:
pass # no extension module, this hash is unsupported.
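
Assuming the _sha3 extension module is available in this build, the new
constructors are reachable through the usual hashlib interface:

    import hashlib

    h = hashlib.new('sha3_256')
    h.update(b'some data')
    h.hexdigest()

    # or via the named constructor added to __always_supported:
    hashlib.sha3_512(b'some data').hexdigest()
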
diff --git a/Lib/http/cookiejar.py b/Lib/http/cookiejar.py
index 901e762..a77dc3f 100644
--- a/Lib/http/cookiejar.py
+++ b/Lib/http/cookiejar.py
@@ -1193,8 +1193,7 @@ def deepvalues(mapping):
pass
else:
mapping = True
- for subobj in deepvalues(obj):
- yield subobj
+ yield from deepvalues(obj)
if not mapping:
yield obj
diff --git a/Lib/http/server.py b/Lib/http/server.py
index c4ac703..7167142 100644
--- a/Lib/http/server.py
+++ b/Lib/http/server.py
@@ -425,12 +425,14 @@ class BaseHTTPRequestHandler(socketserver.StreamRequestHandler):
# using _quote_html to prevent Cross Site Scripting attacks (see bug #1100201)
content = (self.error_message_format %
{'code': code, 'message': _quote_html(message), 'explain': explain})
+ body = content.encode('UTF-8', 'replace')
self.send_response(code, message)
self.send_header("Content-Type", self.error_content_type)
self.send_header('Connection', 'close')
+ self.send_header('Content-Length', int(len(body)))
self.end_headers()
if self.command != 'HEAD' and code >= 200 and code not in (204, 304):
- self.wfile.write(content.encode('UTF-8', 'replace'))
+ self.wfile.write(body)
def send_response(self, code, message=None):
"""Add the response header to the headers buffer and log the
diff --git a/Lib/idlelib/FileList.py b/Lib/idlelib/FileList.py
index 37a337e..a9989a8 100644
--- a/Lib/idlelib/FileList.py
+++ b/Lib/idlelib/FileList.py
@@ -103,7 +103,7 @@ class FileList:
if not os.path.isabs(filename):
try:
pwd = os.getcwd()
- except os.error:
+ except OSError:
pass
else:
filename = os.path.join(pwd, filename)
diff --git a/Lib/idlelib/GrepDialog.py b/Lib/idlelib/GrepDialog.py
index 27fcc33..1450f7a 100644
--- a/Lib/idlelib/GrepDialog.py
+++ b/Lib/idlelib/GrepDialog.py
@@ -110,7 +110,7 @@ class GrepDialog(SearchDialogBase):
def findfiles(self, dir, base, rec):
try:
names = os.listdir(dir or os.curdir)
- except os.error as msg:
+ except OSError as msg:
print(msg)
return []
list = []
diff --git a/Lib/idlelib/IOBinding.py b/Lib/idlelib/IOBinding.py
index ec50eb2..62c62b0 100644
--- a/Lib/idlelib/IOBinding.py
+++ b/Lib/idlelib/IOBinding.py
@@ -503,7 +503,7 @@ class IOBinding:
else:
try:
pwd = os.getcwd()
- except os.error:
+ except OSError:
pwd = ""
return pwd, ""
diff --git a/Lib/idlelib/NEWS.txt b/Lib/idlelib/NEWS.txt
index f6e8917d..d193b0a 100644
--- a/Lib/idlelib/NEWS.txt
+++ b/Lib/idlelib/NEWS.txt
@@ -1,4 +1,4 @@
-What's New in IDLE 3.3.1?
+What's New in IDLE 3.4.0?
=========================
- Issue #16226: Fix IDLE Path Browser crash.
diff --git a/Lib/idlelib/PathBrowser.py b/Lib/idlelib/PathBrowser.py
index 55bf1aa..ab05c67 100644
--- a/Lib/idlelib/PathBrowser.py
+++ b/Lib/idlelib/PathBrowser.py
@@ -45,7 +45,7 @@ class DirBrowserTreeItem(TreeItem):
def GetSubList(self):
try:
names = os.listdir(self.dir or os.curdir)
- except os.error:
+ except OSError:
return []
packages = []
for name in names:
diff --git a/Lib/idlelib/PyShell.py b/Lib/idlelib/PyShell.py
index 3b78f38..bff52a9 100644
--- a/Lib/idlelib/PyShell.py
+++ b/Lib/idlelib/PyShell.py
@@ -1014,7 +1014,10 @@ class PyShell(OutputWindow):
self.close()
return False
else:
- nosub = "==== No Subprocess ===="
+ nosub = ("==== No Subprocess ====\n\n" +
+ "WARNING: Running IDLE without a Subprocess is deprecated\n" +
+ "and will be removed in a later version. See Help/IDLE Help\n" +
+ "for details.\n\n")
sys.displayhook = rpc.displayhook
self.write("Python %s on %s\n%s\n%s" %
@@ -1314,7 +1317,8 @@ USAGE: idle [-deins] [-t title] [file]*
idle [-dns] [-t title] - [arg]*
-h print this help message and exit
- -n run IDLE without a subprocess (see Help/IDLE Help for details)
+ -n run IDLE without a subprocess (DEPRECATED,
+ see Help/IDLE Help for details)
The following options will override the IDLE 'settings' configuration:
@@ -1392,6 +1396,8 @@ def main():
if o == '-i':
enable_shell = True
if o == '-n':
+ print(" Warning: running IDLE without a subprocess is deprecated.",
+ file=sys.stderr)
use_subprocess = False
if o == '-r':
script = a
diff --git a/Lib/idlelib/TreeWidget.py b/Lib/idlelib/TreeWidget.py
index d4e524b..833896c 100644
--- a/Lib/idlelib/TreeWidget.py
+++ b/Lib/idlelib/TreeWidget.py
@@ -382,7 +382,7 @@ class FileTreeItem(TreeItem):
try:
os.rename(self.path, newpath)
self.path = newpath
- except os.error:
+ except OSError:
pass
def GetIconName(self):
@@ -395,7 +395,7 @@ class FileTreeItem(TreeItem):
def GetSubList(self):
try:
names = os.listdir(self.path)
- except os.error:
+ except OSError:
return []
names.sort(key = os.path.normcase)
sublist = []
diff --git a/Lib/idlelib/help.txt b/Lib/idlelib/help.txt
index 815ee40..ef90a3a 100644
--- a/Lib/idlelib/help.txt
+++ b/Lib/idlelib/help.txt
@@ -274,7 +274,7 @@ Command line usage:
Enter idle -h at the command prompt to get a usage message.
-Running without a subprocess:
+Running without a subprocess: (DEPRECATED)
If IDLE is started with the -n command line switch it will run in a
single process and will not create the subprocess which runs the RPC
diff --git a/Lib/idlelib/idlever.py b/Lib/idlelib/idlever.py
index 8d8317d..efe96a4 100644
--- a/Lib/idlelib/idlever.py
+++ b/Lib/idlelib/idlever.py
@@ -1 +1 @@
-IDLE_VERSION = "3.3.0"
+IDLE_VERSION = "3.4.0a0"
diff --git a/Lib/importlib/_bootstrap.py b/Lib/importlib/_bootstrap.py
index a18ccc4..0c6252e 100644
--- a/Lib/importlib/_bootstrap.py
+++ b/Lib/importlib/_bootstrap.py
@@ -237,7 +237,7 @@ class _ModuleLock:
self.wakeup.release()
def __repr__(self):
- return "_ModuleLock(%r) at %d" % (self.name, id(self))
+ return "_ModuleLock({!r}) at {}".format(self.name, id(self))
class _DummyModuleLock:
@@ -258,7 +258,7 @@ class _DummyModuleLock:
self.count -= 1
def __repr__(self):
- return "_DummyModuleLock(%r) at %d" % (self.name, id(self))
+ return "_DummyModuleLock({!r}) at {}".format(self.name, id(self))
# The following two functions are for consumption by Python/import.c.
@@ -931,6 +931,14 @@ class SourceLoader(_LoaderBasics):
raise ImportError("Failed to decode source file",
name=fullname) from exc
+ def source_to_code(self, data, path):
+ """Return the code object compiled from source.
+
+ The 'data' argument can be any object type that compile() supports.
+ """
+ return _call_with_frames_removed(compile, data, path, 'exec',
+ dont_inherit=True)
+
def get_code(self, fullname):
"""Concrete implementation of InspectLoader.get_code.
@@ -976,9 +984,7 @@ class SourceLoader(_LoaderBasics):
raise ImportError(msg.format(bytecode_path),
name=fullname, path=bytecode_path)
source_bytes = self.get_data(source_path)
- code_object = _call_with_frames_removed(compile,
- source_bytes, source_path, 'exec',
- dont_inherit=True)
+ code_object = self.source_to_code(source_bytes, source_path)
_verbose_message('code object from {}', source_path)
if (not sys.dont_write_bytecode and bytecode_path is not None and
source_mtime is not None):
@@ -1439,7 +1445,7 @@ class FileFinder:
return path_hook_for_FileFinder
def __repr__(self):
- return "FileFinder(%r)" % (self.path,)
+ return "FileFinder({!r})".format(self.path)
# Import itself ###############################################################
@@ -1714,7 +1720,7 @@ def _setup(sys_module, _imp_module):
builtin_module = sys.modules[builtin_name]
setattr(self_module, builtin_name, builtin_module)
- os_details = ('posix', ['/']), ('nt', ['\\', '/']), ('os2', ['\\', '/'])
+ os_details = ('posix', ['/']), ('nt', ['\\', '/'])
for builtin_os, path_separators in os_details:
# Assumption made in _path_join()
assert all(len(sep) == 1 for sep in path_separators)
@@ -1725,9 +1731,6 @@ def _setup(sys_module, _imp_module):
else:
try:
os_module = BuiltinImporter.load_module(builtin_os)
- # TODO: rip out os2 code after 3.3 is released as per PEP 11
- if builtin_os == 'os2' and 'EMX GCC' in sys.version:
- path_sep = path_separators[1]
break
except ImportError:
continue
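
The new source_to_code() hook isolates the compile() step of SourceLoader, so get_code() above now funnels through a single overridable method. A minimal sketch of a subclass using the hook (the tracing print and the paths are illustrative, not part of the patch):

from importlib.machinery import SourceFileLoader

class TracingSourceLoader(SourceFileLoader):
    # Illustrative subclass: customize how source turns into a code object
    # without reimplementing get_code() or the bytecode handling.
    def source_to_code(self, data, path):
        print('compiling', path)
        # 'data' may be anything compile() accepts (bytes, str, AST).
        return compile(data, path, 'exec', dont_inherit=True)

# Usage (path is illustrative):
#   loader = TracingSourceLoader('mymod', '/tmp/mymod.py')
#   module = loader.load_module('mymod')
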
diff --git a/Lib/importlib/abc.py b/Lib/importlib/abc.py
index 387567a..11e45f4 100644
--- a/Lib/importlib/abc.py
+++ b/Lib/importlib/abc.py
@@ -225,180 +225,3 @@ class SourceLoader(_bootstrap.SourceLoader, ResourceLoader, ExecutionLoader):
raise NotImplementedError
_register(SourceLoader, machinery.SourceFileLoader)
-
-class PyLoader(SourceLoader):
-
- """Implement the deprecated PyLoader ABC in terms of SourceLoader.
-
- This class has been deprecated! It is slated for removal in Python 3.4.
- If compatibility with Python 3.1 is not needed then implement the
- SourceLoader ABC instead of this class. If Python 3.1 compatibility is
- needed, then use the following idiom to have a single class that is
- compatible with Python 3.1 onwards::
-
- try:
- from importlib.abc import SourceLoader
- except ImportError:
- from importlib.abc import PyLoader as SourceLoader
-
-
- class CustomLoader(SourceLoader):
- def get_filename(self, fullname):
- # Implement ...
-
- def source_path(self, fullname):
- '''Implement source_path in terms of get_filename.'''
- try:
- return self.get_filename(fullname)
- except ImportError:
- return None
-
- def is_package(self, fullname):
- filename = os.path.basename(self.get_filename(fullname))
- return os.path.splitext(filename)[0] == '__init__'
-
- """
-
- @abc.abstractmethod
- def is_package(self, fullname):
- raise NotImplementedError
-
- @abc.abstractmethod
- def source_path(self, fullname):
- """Abstract method. Accepts a str module name and returns the path to
- the source code for the module."""
- raise NotImplementedError
-
- def get_filename(self, fullname):
- """Implement get_filename in terms of source_path.
-
- As get_filename should only return a source file path there is no
- chance of the path not existing but loading still being possible, so
- ImportError should propagate instead of being turned into returning
- None.
-
- """
- warnings.warn("importlib.abc.PyLoader is deprecated and is "
- "slated for removal in Python 3.4; "
- "use SourceLoader instead. "
- "See the importlib documentation on how to be "
- "compatible with Python 3.1 onwards.",
- DeprecationWarning)
- path = self.source_path(fullname)
- if path is None:
- raise ImportError(name=fullname)
- else:
- return path
-
-
-class PyPycLoader(PyLoader):
-
- """Abstract base class to assist in loading source and bytecode by
- requiring only back-end storage methods to be implemented.
-
- This class has been deprecated! Removal is slated for Python 3.4. Implement
- the SourceLoader ABC instead. If Python 3.1 compatibility is needed, see
- PyLoader.
-
- The methods get_code, get_source, and load_module are implemented for the
- user.
-
- """
-
- def get_filename(self, fullname):
- """Return the source or bytecode file path."""
- path = self.source_path(fullname)
- if path is not None:
- return path
- path = self.bytecode_path(fullname)
- if path is not None:
- return path
- raise ImportError("no source or bytecode path available for "
- "{0!r}".format(fullname), name=fullname)
-
- def get_code(self, fullname):
- """Get a code object from source or bytecode."""
- warnings.warn("importlib.abc.PyPycLoader is deprecated and slated for "
- "removal in Python 3.4; use SourceLoader instead. "
- "If Python 3.1 compatibility is required, see the "
- "latest documentation for PyLoader.",
- DeprecationWarning)
- source_timestamp = self.source_mtime(fullname)
- # Try to use bytecode if it is available.
- bytecode_path = self.bytecode_path(fullname)
- if bytecode_path:
- data = self.get_data(bytecode_path)
- try:
- magic = data[:4]
- if len(magic) < 4:
- raise ImportError(
- "bad magic number in {}".format(fullname),
- name=fullname, path=bytecode_path)
- raw_timestamp = data[4:8]
- if len(raw_timestamp) < 4:
- raise EOFError("bad timestamp in {}".format(fullname))
- pyc_timestamp = _bootstrap._r_long(raw_timestamp)
- raw_source_size = data[8:12]
- if len(raw_source_size) != 4:
- raise EOFError("bad file size in {}".format(fullname))
- # Source size is unused as the ABC does not provide a way to
- # get the size of the source ahead of reading it.
- bytecode = data[12:]
- # Verify that the magic number is valid.
- if imp.get_magic() != magic:
- raise ImportError(
- "bad magic number in {}".format(fullname),
- name=fullname, path=bytecode_path)
- # Verify that the bytecode is not stale (only matters when
- # there is source to fall back on.
- if source_timestamp:
- if pyc_timestamp < source_timestamp:
- raise ImportError("bytecode is stale", name=fullname,
- path=bytecode_path)
- except (ImportError, EOFError):
- # If source is available give it a shot.
- if source_timestamp is not None:
- pass
- else:
- raise
- else:
- # Bytecode seems fine, so try to use it.
- return marshal.loads(bytecode)
- elif source_timestamp is None:
- raise ImportError("no source or bytecode available to create code "
- "object for {0!r}".format(fullname),
- name=fullname)
- # Use the source.
- source_path = self.source_path(fullname)
- if source_path is None:
- message = "a source path must exist to load {0}".format(fullname)
- raise ImportError(message, name=fullname)
- source = self.get_data(source_path)
- code_object = compile(source, source_path, 'exec', dont_inherit=True)
- # Generate bytecode and write it out.
- if not sys.dont_write_bytecode:
- data = bytearray(imp.get_magic())
- data.extend(_bootstrap._w_long(source_timestamp))
- data.extend(_bootstrap._w_long(len(source) & 0xFFFFFFFF))
- data.extend(marshal.dumps(code_object))
- self.write_bytecode(fullname, data)
- return code_object
-
- @abc.abstractmethod
- def source_mtime(self, fullname):
- """Abstract method. Accepts a str filename and returns an int
- modification time for the source of the module."""
- raise NotImplementedError
-
- @abc.abstractmethod
- def bytecode_path(self, fullname):
- """Abstract method. Accepts a str filename and returns the str pathname
- to the bytecode for the module."""
- raise NotImplementedError
-
- @abc.abstractmethod
- def write_bytecode(self, fullname, bytecode):
- """Abstract method. Accepts a str filename and bytes object
- representing the bytecode for the module. Returns a boolean
- representing whether the bytecode was written or not."""
- raise NotImplementedError
diff --git a/Lib/json/__init__.py b/Lib/json/__init__.py
index 44f49c4..3b23224 100644
--- a/Lib/json/__init__.py
+++ b/Lib/json/__init__.py
@@ -39,8 +39,7 @@ Compact encoding::
Pretty printing::
>>> import json
- >>> print(json.dumps({'4': 5, '6': 7}, sort_keys=True,
- ... indent=4, separators=(',', ': ')))
+ >>> print(json.dumps({'4': 5, '6': 7}, sort_keys=True, indent=4))
{
"4": 5,
"6": 7
@@ -146,13 +145,12 @@ def dump(obj, fp, skipkeys=False, ensure_ascii=True, check_circular=True,
If ``indent`` is a non-negative integer, then JSON array elements and
object members will be pretty-printed with that indent level. An indent
level of 0 will only insert newlines. ``None`` is the most compact
- representation. Since the default item separator is ``', '``, the
- output might include trailing whitespace when ``indent`` is specified.
- You can use ``separators=(',', ': ')`` to avoid this.
+ representation.
- If ``separators`` is an ``(item_separator, dict_separator)`` tuple
- then it will be used instead of the default ``(', ', ': ')`` separators.
- ``(',', ':')`` is the most compact JSON representation.
+ If specified, ``separators`` should be an ``(item_separator, key_separator)``
+ tuple. The default is ``(', ', ': ')`` if *indent* is ``None`` and
+ ``(',', ': ')`` otherwise. To get the most compact JSON representation,
+ you should specify ``(',', ':')`` to eliminate whitespace.
``default(obj)`` is a function that should return a serializable version
of obj or raise TypeError. The default simply raises TypeError.
@@ -209,13 +207,12 @@ def dumps(obj, skipkeys=False, ensure_ascii=True, check_circular=True,
If ``indent`` is a non-negative integer, then JSON array elements and
object members will be pretty-printed with that indent level. An indent
level of 0 will only insert newlines. ``None`` is the most compact
- representation. Since the default item separator is ``', '``, the
- output might include trailing whitespace when ``indent`` is specified.
- You can use ``separators=(',', ': ')`` to avoid this.
+ representation.
- If ``separators`` is an ``(item_separator, dict_separator)`` tuple
- then it will be used instead of the default ``(', ', ': ')`` separators.
- ``(',', ':')`` is the most compact JSON representation.
+ If specified, ``separators`` should be an ``(item_separator, key_separator)``
+ tuple. The default is ``(', ', ': ')`` if *indent* is ``None`` and
+ ``(',', ': ')`` otherwise. To get the most compact JSON representation,
+ you should specify ``(',', ':')`` to eliminate whitespace.
``default(obj)`` is a function that should return a serializable version
of obj or raise TypeError. The default simply raises TypeError.
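
The docstring updates reflect the encoder change further down: when indent is given, the item separator now defaults to ',' with no trailing space, so pretty printing no longer needs an explicit separators argument. For instance:

import json

data = {'4': 5, '6': 7}

# Indented output no longer carries trailing spaces after each item, so
# separators=(',', ': ') is no longer needed for clean pretty printing.
print(json.dumps(data, sort_keys=True, indent=4))

# The most compact form still takes explicit separators.
print(json.dumps(data, sort_keys=True, separators=(',', ':')))
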
diff --git a/Lib/json/decoder.py b/Lib/json/decoder.py
index 07fd696..9b7438c 100644
--- a/Lib/json/decoder.py
+++ b/Lib/json/decoder.py
@@ -1,9 +1,6 @@
"""Implementation of JSONDecoder
"""
-import binascii
import re
-import sys
-import struct
from json import scanner
try:
@@ -15,14 +12,9 @@ __all__ = ['JSONDecoder']
FLAGS = re.VERBOSE | re.MULTILINE | re.DOTALL
-def _floatconstants():
- _BYTES = binascii.unhexlify(b'7FF80000000000007FF0000000000000')
- if sys.byteorder != 'big':
- _BYTES = _BYTES[:8][::-1] + _BYTES[8:][::-1]
- nan, inf = struct.unpack('dd', _BYTES)
- return nan, inf, -inf
-
-NaN, PosInf, NegInf = _floatconstants()
+NaN = float('nan')
+PosInf = float('inf')
+NegInf = float('-inf')
def linecol(doc, pos):
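
float('nan') and float('inf') have been reliable since Python 2.6, so the byte-swapping construction is no longer needed; the constants still back the decoder's handling of the NaN/Infinity literals. For example:

import json, math

values = json.loads('[NaN, Infinity, -Infinity]')
assert math.isnan(values[0])
assert values[1] == float('inf') and values[2] == float('-inf')
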
diff --git a/Lib/json/encoder.py b/Lib/json/encoder.py
index e1ed21f..93b5ea7 100644
--- a/Lib/json/encoder.py
+++ b/Lib/json/encoder.py
@@ -125,14 +125,12 @@ class JSONEncoder(object):
If indent is a non-negative integer, then JSON array
elements and object members will be pretty-printed with that
indent level. An indent level of 0 will only insert newlines.
- None is the most compact representation. Since the default
- item separator is ', ', the output might include trailing
- whitespace when indent is specified. You can use
- separators=(',', ': ') to avoid this.
+ None is the most compact representation.
- If specified, separators should be a (item_separator, key_separator)
- tuple. The default is (', ', ': '). To get the most compact JSON
- representation you should specify (',', ':') to eliminate whitespace.
+ If specified, separators should be an (item_separator, key_separator)
+ tuple. The default is (', ', ': ') if *indent* is ``None`` and
+ (',', ': ') otherwise. To get the most compact JSON representation,
+ you should specify (',', ':') to eliminate whitespace.
If specified, default is a function that gets called for objects
that can't otherwise be serialized. It should return a JSON encodable
@@ -148,6 +146,8 @@ class JSONEncoder(object):
self.indent = indent
if separators is not None:
self.item_separator, self.key_separator = separators
+ elif indent is not None:
+ self.item_separator = ','
if default is not None:
self.default = default
@@ -308,8 +308,7 @@ def _make_iterencode(markers, _default, _encoder, _indent, _floatstr,
chunks = _iterencode_dict(value, _current_indent_level)
else:
chunks = _iterencode(value, _current_indent_level)
- for chunk in chunks:
- yield chunk
+ yield from chunks
if newline_indent is not None:
_current_indent_level -= 1
yield '\n' + _indent * _current_indent_level
@@ -384,8 +383,7 @@ def _make_iterencode(markers, _default, _encoder, _indent, _floatstr,
chunks = _iterencode_dict(value, _current_indent_level)
else:
chunks = _iterencode(value, _current_indent_level)
- for chunk in chunks:
- yield chunk
+ yield from chunks
if newline_indent is not None:
_current_indent_level -= 1
yield '\n' + _indent * _current_indent_level
@@ -407,11 +405,9 @@ def _make_iterencode(markers, _default, _encoder, _indent, _floatstr,
elif isinstance(o, float):
yield _floatstr(o)
elif isinstance(o, (list, tuple)):
- for chunk in _iterencode_list(o, _current_indent_level):
- yield chunk
+ yield from _iterencode_list(o, _current_indent_level)
elif isinstance(o, dict):
- for chunk in _iterencode_dict(o, _current_indent_level):
- yield chunk
+ yield from _iterencode_dict(o, _current_indent_level)
else:
if markers is not None:
markerid = id(o)
@@ -419,8 +415,7 @@ def _make_iterencode(markers, _default, _encoder, _indent, _floatstr,
raise ValueError("Circular reference detected")
markers[markerid] = o
o = _default(o)
- for chunk in _iterencode(o, _current_indent_level):
- yield chunk
+ yield from _iterencode(o, _current_indent_level)
if markers is not None:
del markers[markerid]
return _iterencode
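
The remaining encoder changes are a mechanical swap of the explicit delegation loops for PEP 380's yield from; the sequence of yielded chunks is unchanged. The equivalence in isolation:

def delegate_old(iterables):
    for it in iterables:
        for chunk in it:      # explicit re-yield loop (pre-patch style)
            yield chunk

def delegate_new(iterables):
    for it in iterables:
        yield from it         # PEP 380 delegation (post-patch style)

assert (list(delegate_old([[1, 2], [3]])) ==
        list(delegate_new([[1, 2], [3]])) == [1, 2, 3])
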
diff --git a/Lib/json/tool.py b/Lib/json/tool.py
index 0f108c6..9ab6d65 100644
--- a/Lib/json/tool.py
+++ b/Lib/json/tool.py
@@ -31,8 +31,7 @@ def main():
except ValueError as e:
raise SystemExit(e)
with outfile:
- json.dump(obj, outfile, sort_keys=True,
- indent=4, separators=(',', ': '))
+ json.dump(obj, outfile, sort_keys=True, indent=4)
outfile.write('\n')
diff --git a/Lib/lib2to3/btm_utils.py b/Lib/lib2to3/btm_utils.py
index 2276dc9..339750e 100644
--- a/Lib/lib2to3/btm_utils.py
+++ b/Lib/lib2to3/btm_utils.py
@@ -96,8 +96,7 @@ class MinNode(object):
def leaves(self):
"Generator that returns the leaves of the tree"
for child in self.children:
- for x in child.leaves():
- yield x
+ yield from child.leaves()
if not self.children:
yield self
@@ -277,7 +276,6 @@ def rec_test(sequence, test_func):
sub-iterables"""
for x in sequence:
if isinstance(x, (list, tuple)):
- for y in rec_test(x, test_func):
- yield y
+ yield from rec_test(x, test_func)
else:
yield test_func(x)
diff --git a/Lib/lib2to3/fixer_util.py b/Lib/lib2to3/fixer_util.py
index 60d219f..6e259c5 100644
--- a/Lib/lib2to3/fixer_util.py
+++ b/Lib/lib2to3/fixer_util.py
@@ -129,6 +129,29 @@ def FromImport(package_name, name_leafs):
imp = Node(syms.import_from, children)
return imp
+def ImportAndCall(node, results, names):
+ """Returns an import statement and calls a method
+ of the module:
+
+ import module
+ module.name()"""
+ obj = results["obj"].clone()
+ if obj.type == syms.arglist:
+ newarglist = obj.clone()
+ else:
+ newarglist = Node(syms.arglist, [obj.clone()])
+ after = results["after"]
+ if after:
+ after = [n.clone() for n in after]
+ new = Node(syms.power,
+ Attr(Name(names[0]), Name(names[1])) +
+ [Node(syms.trailer,
+ [results["lpar"].clone(),
+ newarglist,
+ results["rpar"].clone()])] + after)
+ new.prefix = node.prefix
+ return new
+
###########################################################
### Determine whether a node represents a given literal
diff --git a/Lib/lib2to3/fixes/fix_intern.py b/Lib/lib2to3/fixes/fix_intern.py
index 6be11cd..fb2973c 100644
--- a/Lib/lib2to3/fixes/fix_intern.py
+++ b/Lib/lib2to3/fixes/fix_intern.py
@@ -6,9 +6,8 @@
intern(s) -> sys.intern(s)"""
# Local imports
-from .. import pytree
from .. import fixer_base
-from ..fixer_util import Name, Attr, touch_import
+from ..fixer_util import ImportAndCall, touch_import
class FixIntern(fixer_base.BaseFix):
@@ -26,21 +25,7 @@ class FixIntern(fixer_base.BaseFix):
"""
def transform(self, node, results):
- syms = self.syms
- obj = results["obj"].clone()
- if obj.type == syms.arglist:
- newarglist = obj.clone()
- else:
- newarglist = pytree.Node(syms.arglist, [obj.clone()])
- after = results["after"]
- if after:
- after = [n.clone() for n in after]
- new = pytree.Node(syms.power,
- Attr(Name("sys"), Name("intern")) +
- [pytree.Node(syms.trailer,
- [results["lpar"].clone(),
- newarglist,
- results["rpar"].clone()])] + after)
- new.prefix = node.prefix
+ names = ('sys', 'intern')
+ new = ImportAndCall(node, results, names)
touch_import(None, 'sys', node)
return new
diff --git a/Lib/lib2to3/fixes/fix_reload.py b/Lib/lib2to3/fixes/fix_reload.py
new file mode 100644
index 0000000..1855357
--- /dev/null
+++ b/Lib/lib2to3/fixes/fix_reload.py
@@ -0,0 +1,28 @@
+"""Fixer for reload().
+
+reload(s) -> imp.reload(s)"""
+
+# Local imports
+from .. import fixer_base
+from ..fixer_util import ImportAndCall, touch_import
+
+
+class FixReload(fixer_base.BaseFix):
+ BM_compatible = True
+ order = "pre"
+
+ PATTERN = """
+ power< 'reload'
+ trailer< lpar='('
+ ( not(arglist | argument<any '=' any>) obj=any
+ | obj=arglist<(not argument<any '=' any>) any ','> )
+ rpar=')' >
+ after=any*
+ >
+ """
+
+ def transform(self, node, results):
+ names = ('imp', 'reload')
+ new = ImportAndCall(node, results, names)
+ touch_import(None, 'imp', node)
+ return new
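
The new fixer mirrors fix_intern, delegating the tree surgery to ImportAndCall and adding the import via touch_import. A sketch of exercising it through lib2to3's refactoring machinery (the input string is illustrative):

from lib2to3.refactor import RefactoringTool

tool = RefactoringTool(['lib2to3.fixes.fix_reload'])
tree = tool.refactor_string('reload(mymodule)\n', '<example>')
print(tree)   # roughly: "import imp\nimp.reload(mymodule)\n"
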
diff --git a/Lib/lib2to3/pytree.py b/Lib/lib2to3/pytree.py
index 17cbf0a..c4a1be3 100644
--- a/Lib/lib2to3/pytree.py
+++ b/Lib/lib2to3/pytree.py
@@ -194,8 +194,7 @@ class Base(object):
def leaves(self):
for child in self.children:
- for x in child.leaves():
- yield x
+ yield from child.leaves()
def depth(self):
if self.parent is None:
@@ -274,16 +273,14 @@ class Node(Base):
def post_order(self):
"""Return a post-order iterator for the tree."""
for child in self.children:
- for node in child.post_order():
- yield node
+ yield from child.post_order()
yield self
def pre_order(self):
"""Return a pre-order iterator for the tree."""
yield self
for child in self.children:
- for node in child.pre_order():
- yield node
+ yield from child.pre_order()
def _prefix_getter(self):
"""
diff --git a/Lib/lib2to3/tests/test_fixers.py b/Lib/lib2to3/tests/test_fixers.py
index 914b3bf..d7659fa 100644
--- a/Lib/lib2to3/tests/test_fixers.py
+++ b/Lib/lib2to3/tests/test_fixers.py
@@ -282,6 +282,65 @@ class Test_apply(FixerTestCase):
b = """f(*args, **kwds)"""
self.check(a, b)
+class Test_reload(FixerTestCase):
+ fixer = "reload"
+
+ def test(self):
+ b = """reload(a)"""
+ a = """import imp\nimp.reload(a)"""
+ self.check(b, a)
+
+ def test_comment(self):
+ b = """reload( a ) # comment"""
+ a = """import imp\nimp.reload( a ) # comment"""
+ self.check(b, a)
+
+ # PEP 8 comments
+ b = """reload( a ) # comment"""
+ a = """import imp\nimp.reload( a ) # comment"""
+ self.check(b, a)
+
+ def test_space(self):
+ b = """reload( a )"""
+ a = """import imp\nimp.reload( a )"""
+ self.check(b, a)
+
+ b = """reload( a)"""
+ a = """import imp\nimp.reload( a)"""
+ self.check(b, a)
+
+ b = """reload(a )"""
+ a = """import imp\nimp.reload(a )"""
+ self.check(b, a)
+
+ def test_unchanged(self):
+ s = """reload(a=1)"""
+ self.unchanged(s)
+
+ s = """reload(f, g)"""
+ self.unchanged(s)
+
+ s = """reload(f, *h)"""
+ self.unchanged(s)
+
+ s = """reload(f, *h, **i)"""
+ self.unchanged(s)
+
+ s = """reload(f, **i)"""
+ self.unchanged(s)
+
+ s = """reload(*h, **i)"""
+ self.unchanged(s)
+
+ s = """reload(*h)"""
+ self.unchanged(s)
+
+ s = """reload(**i)"""
+ self.unchanged(s)
+
+ s = """reload()"""
+ self.unchanged(s)
+
class Test_intern(FixerTestCase):
fixer = "intern"
diff --git a/Lib/logging/__init__.py b/Lib/logging/__init__.py
index e79018f..9242023 100644
--- a/Lib/logging/__init__.py
+++ b/Lib/logging/__init__.py
@@ -67,7 +67,7 @@ else: #pragma: no cover
"""Return the frame object for the caller's stack frame."""
try:
raise Exception
- except:
+ except Exception:
return sys.exc_info()[2].tb_frame.f_back
# _srcfile is only used in conjunction with sys._getframe().
@@ -879,16 +879,27 @@ class Handler(Filterer):
The record which was being processed is passed in to this method.
"""
if raiseExceptions and sys.stderr: # see issue 13807
- ei = sys.exc_info()
+ t, v, tb = sys.exc_info()
try:
- traceback.print_exception(ei[0], ei[1], ei[2],
- None, sys.stderr)
- sys.stderr.write('Logged from file %s, line %s\n' % (
- record.filename, record.lineno))
+ sys.stderr.write('--- Logging error ---\n')
+ traceback.print_exception(t, v, tb, None, sys.stderr)
+ sys.stderr.write('Call stack:\n')
+ # Walk the stack frame up until we're out of logging,
+ # so as to print the calling context.
+ frame = tb.tb_frame
+ while (frame and os.path.dirname(frame.f_code.co_filename) ==
+ __path__[0]):
+ frame = frame.f_back
+ if frame:
+ traceback.print_stack(frame, file=sys.stderr)
+ else:
+ # couldn't find the right stack frame, for some reason
+ sys.stderr.write('Logged from file %s, line %s\n' % (
+ record.filename, record.lineno))
except IOError: #pragma: no cover
pass # see issue 5971
finally:
- del ei
+ del t, v, tb
class StreamHandler(Handler):
"""
@@ -938,9 +949,7 @@ class StreamHandler(Handler):
stream.write(msg)
stream.write(self.terminator)
self.flush()
- except (KeyboardInterrupt, SystemExit): #pragma: no cover
- raise
- except:
+ except Exception:
self.handleError(record)
class FileHandler(StreamHandler):
@@ -1837,7 +1846,7 @@ def shutdown(handlerList=_handlerList):
pass
finally:
h.release()
- except:
+ except: # ignore everything, as we're shutting down
if raiseExceptions:
raise
#else, swallow
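
Besides narrowing the bare except clauses to except Exception (so KeyboardInterrupt and SystemExit propagate), handleError() now prints a "--- Logging error ---" banner, the traceback, and the call stack of the frame that issued the bad logging call. A quick way to see the new output (the mismatched format string is deliberate):

import logging

logging.basicConfig()
log = logging.getLogger('demo')

# The second call raises inside Handler.emit() because the format string
# expects two arguments; handleError() reports it on stderr with the new
# "--- Logging error ---" banner and the calling stack.
log.error('this one is fine: %s', 'ok')
log.error('two args expected: %s %s', 'only one')
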
diff --git a/Lib/logging/config.py b/Lib/logging/config.py
index 5ef5c91..ef7d2bc 100644
--- a/Lib/logging/config.py
+++ b/Lib/logging/config.py
@@ -1,4 +1,4 @@
-# Copyright 2001-2010 by Vinay Sajip. All Rights Reserved.
+# Copyright 2001-2012 by Vinay Sajip. All Rights Reserved.
#
# Permission to use, copy, modify, and distribute this software and its
# documentation for any purpose and without fee is hereby granted,
@@ -19,7 +19,7 @@ Configuration functions for the logging package for Python. The core package
is based on PEP 282 and comments thereto in comp.lang.python, and influenced
by Apache's log4j system.
-Copyright (C) 2001-2010 Vinay Sajip. All Rights Reserved.
+Copyright (C) 2001-2012 Vinay Sajip. All Rights Reserved.
To use, simply 'import logging' and log away!
"""
@@ -61,11 +61,14 @@ def fileConfig(fname, defaults=None, disable_existing_loggers=True):
"""
import configparser
- cp = configparser.ConfigParser(defaults)
- if hasattr(fname, 'readline'):
- cp.read_file(fname)
+ if isinstance(fname, configparser.RawConfigParser):
+ cp = fname
else:
- cp.read(fname)
+ cp = configparser.ConfigParser(defaults)
+ if hasattr(fname, 'readline'):
+ cp.read_file(fname)
+ else:
+ cp.read(fname)
formatters = _create_formatters(cp)
@@ -707,6 +710,7 @@ class DictConfigurator(BaseConfigurator):
'address' in config:
config['address'] = self.as_tuple(config['address'])
factory = klass
+ props = config.pop('.', None)
kwargs = dict([(k, config[k]) for k in config if valid_ident(k)])
try:
result = factory(**kwargs)
@@ -725,6 +729,9 @@ class DictConfigurator(BaseConfigurator):
result.setLevel(logging._checkLevel(level))
if filters:
self.add_filters(result, filters)
+ if props:
+ for name, value in props.items():
+ setattr(result, name, value)
return result
def add_handlers(self, logger, handlers):
@@ -773,7 +780,7 @@ def dictConfig(config):
dictConfigClass(config).configure()
-def listen(port=DEFAULT_LOGGING_CONFIG_PORT):
+def listen(port=DEFAULT_LOGGING_CONFIG_PORT, verify=None):
"""
Start up a socket server on the specified port, and listen for new
configurations.
@@ -782,6 +789,15 @@ def listen(port=DEFAULT_LOGGING_CONFIG_PORT):
Returns a Thread object on which you can call start() to start the server,
and which you can join() when appropriate. To stop the server, call
stopListening().
+
+ Use the ``verify`` argument to verify any bytes received across the wire
+ from a client. If specified, it should be a callable which receives a
+ single argument - the bytes of configuration data received across the
+ network - and it should return either ``None``, to indicate that the
+ passed in bytes could not be verified and should be discarded, or a
+ byte string which is then passed to the configuration machinery as
+ normal. Note that you can return transformed bytes, e.g. by decrypting
+ the bytes passed in.
"""
if not thread: #pragma: no cover
raise NotImplementedError("listen() needs threading to work")
@@ -809,22 +825,23 @@ def listen(port=DEFAULT_LOGGING_CONFIG_PORT):
chunk = self.connection.recv(slen)
while len(chunk) < slen:
chunk = chunk + conn.recv(slen - len(chunk))
- chunk = chunk.decode("utf-8")
- try:
- import json
- d =json.loads(chunk)
- assert isinstance(d, dict)
- dictConfig(d)
- except:
- #Apply new configuration.
-
- file = io.StringIO(chunk)
+ if self.server.verify is not None:
+ chunk = self.server.verify(chunk)
+ if chunk is not None: # verified, can process
+ chunk = chunk.decode("utf-8")
try:
- fileConfig(file)
- except (KeyboardInterrupt, SystemExit): #pragma: no cover
- raise
- except:
- traceback.print_exc()
+ import json
+ d = json.loads(chunk)
+ assert isinstance(d, dict)
+ dictConfig(d)
+ except Exception:
+ #Apply new configuration.
+
+ file = io.StringIO(chunk)
+ try:
+ fileConfig(file)
+ except Exception:
+ traceback.print_exc()
if self.server.ready:
self.server.ready.set()
except socket.error as e:
@@ -843,13 +860,14 @@ def listen(port=DEFAULT_LOGGING_CONFIG_PORT):
allow_reuse_address = 1
def __init__(self, host='localhost', port=DEFAULT_LOGGING_CONFIG_PORT,
- handler=None, ready=None):
+ handler=None, ready=None, verify=None):
ThreadingTCPServer.__init__(self, (host, port), handler)
logging._acquireLock()
self.abort = 0
logging._releaseLock()
self.timeout = 1
self.ready = ready
+ self.verify = verify
def serve_until_stopped(self):
import select
@@ -867,16 +885,18 @@ def listen(port=DEFAULT_LOGGING_CONFIG_PORT):
class Server(threading.Thread):
- def __init__(self, rcvr, hdlr, port):
+ def __init__(self, rcvr, hdlr, port, verify):
super(Server, self).__init__()
self.rcvr = rcvr
self.hdlr = hdlr
self.port = port
+ self.verify = verify
self.ready = threading.Event()
def run(self):
server = self.rcvr(port=self.port, handler=self.hdlr,
- ready=self.ready)
+ ready=self.ready,
+ verify=self.verify)
if self.port == 0:
self.port = server.server_address[1]
self.ready.set()
@@ -886,7 +906,7 @@ def listen(port=DEFAULT_LOGGING_CONFIG_PORT):
logging._releaseLock()
server.serve_until_stopped()
- return Server(ConfigSocketReceiver, ConfigStreamHandler, port)
+ return Server(ConfigSocketReceiver, ConfigStreamHandler, port, verify)
def stopListening():
"""
diff --git a/Lib/logging/handlers.py b/Lib/logging/handlers.py
index f286cd6..4cbc320 100644
--- a/Lib/logging/handlers.py
+++ b/Lib/logging/handlers.py
@@ -72,9 +72,7 @@ class BaseRotatingHandler(logging.FileHandler):
if self.shouldRollover(record):
self.doRollover()
logging.FileHandler.emit(self, record)
- except (KeyboardInterrupt, SystemExit): #pragma: no cover
- raise
- except:
+ except Exception:
self.handleError(record)
def rotation_filename(self, default_name):
@@ -496,15 +494,7 @@ class SocketHandler(logging.Handler):
A factory method which allows subclasses to define the precise
type of socket they want.
"""
- s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
- if hasattr(s, 'settimeout'):
- s.settimeout(timeout)
- try:
- s.connect((self.host, self.port))
- return s
- except socket.error:
- s.close()
- raise
+ return socket.create_connection((self.host, self.port), timeout=timeout)
def createSocket(self):
"""
@@ -548,15 +538,7 @@ class SocketHandler(logging.Handler):
#but are still unable to connect.
if self.sock:
try:
- if hasattr(self.sock, "sendall"):
- self.sock.sendall(s)
- else: #pragma: no cover
- sentsofar = 0
- left = len(s)
- while left > 0:
- sent = self.sock.send(s[sentsofar:])
- sentsofar = sentsofar + sent
- left = left - sent
+ self.sock.sendall(s)
except socket.error: #pragma: no cover
self.sock.close()
self.sock = None # so we can call createSocket next time
@@ -607,9 +589,7 @@ class SocketHandler(logging.Handler):
try:
s = self.makePickle(record)
self.send(s)
- except (KeyboardInterrupt, SystemExit): #pragma: no cover
- raise
- except:
+ except Exception:
self.handleError(record)
def close(self):
@@ -869,9 +849,7 @@ class SysLogHandler(logging.Handler):
self.socket.sendto(msg, self.address)
else:
self.socket.sendall(msg)
- except (KeyboardInterrupt, SystemExit): #pragma: no cover
- raise
- except:
+ except Exception:
self.handleError(record)
class SMTPHandler(logging.Handler):
@@ -949,9 +927,7 @@ class SMTPHandler(logging.Handler):
smtp.login(self.username, self.password)
smtp.sendmail(self.fromaddr, self.toaddrs, msg)
smtp.quit()
- except (KeyboardInterrupt, SystemExit): #pragma: no cover
- raise
- except:
+ except Exception:
self.handleError(record)
class NTEventLogHandler(logging.Handler):
@@ -1036,9 +1012,7 @@ class NTEventLogHandler(logging.Handler):
type = self.getEventType(record)
msg = self.format(record)
self._welu.ReportEvent(self.appname, id, cat, type, [msg])
- except (KeyboardInterrupt, SystemExit): #pragma: no cover
- raise
- except:
+ except Exception:
self.handleError(record)
def close(self):
@@ -1123,9 +1097,7 @@ class HTTPHandler(logging.Handler):
if self.method == "POST":
h.send(data.encode('utf-8'))
h.getresponse() #can't do anything with the result
- except (KeyboardInterrupt, SystemExit): #pragma: no cover
- raise
- except:
+ except Exception:
self.handleError(record)
class BufferingHandler(logging.Handler):
@@ -1305,9 +1277,7 @@ class QueueHandler(logging.Handler):
"""
try:
self.enqueue(self.prepare(record))
- except (KeyboardInterrupt, SystemExit): #pragma: no cover
- raise
- except:
+ except Exception:
self.handleError(record)
if threading:
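
makeSocket() now defers to socket.create_connection(), which resolves the address (IPv4 or IPv6) and applies the timeout, and send() relies on sendall() unconditionally. Typical use is unchanged:

import logging, logging.handlers

handler = logging.handlers.SocketHandler(
    'localhost', logging.handlers.DEFAULT_TCP_LOGGING_PORT)
log = logging.getLogger('net')
log.addHandler(handler)

# Records are pickled and sent over the TCP connection established by
# socket.create_connection(); if no listener is running, the handler
# silently retries on later records.
log.error('sent as a pickled LogRecord')
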
diff --git a/Lib/lzma.py b/Lib/lzma.py
index 1a1b065..b2e2f7e 100644
--- a/Lib/lzma.py
+++ b/Lib/lzma.py
@@ -55,7 +55,7 @@ class LZMAFile(io.BufferedIOBase):
be an existing file object to read from or write to.
mode can be "r" for reading (default), "w" for (over)writing, or
- "a" for appending. These can equivalently be given as "rb", "wb",
+ "a" for appending. These can equivalently be given as "rb", "wb"
and "ab" respectively.
format specifies the container format to use for the file.
@@ -110,7 +110,8 @@ class LZMAFile(io.BufferedIOBase):
# stream will need a separate decompressor object.
self._init_args = {"format":format, "filters":filters}
self._decompressor = LZMADecompressor(**self._init_args)
- self._buffer = None
+ self._buffer = b""
+ self._buffer_offset = 0
elif mode in ("w", "wb", "a", "ab"):
if format is None:
format = FORMAT_XZ
@@ -143,7 +144,7 @@ class LZMAFile(io.BufferedIOBase):
try:
if self._mode in (_MODE_READ, _MODE_READ_EOF):
self._decompressor = None
- self._buffer = None
+ self._buffer = b""
elif self._mode == _MODE_WRITE:
self._fp.write(self._compressor.flush())
self._compressor = None
@@ -187,15 +188,18 @@ class LZMAFile(io.BufferedIOBase):
raise ValueError("I/O operation on closed file")
def _check_can_read(self):
- if not self.readable():
+ if self._mode not in (_MODE_READ, _MODE_READ_EOF):
+ self._check_not_closed()
raise io.UnsupportedOperation("File not open for reading")
def _check_can_write(self):
- if not self.writable():
+ if self._mode != _MODE_WRITE:
+ self._check_not_closed()
raise io.UnsupportedOperation("File not open for writing")
def _check_can_seek(self):
- if not self.readable():
+ if self._mode not in (_MODE_READ, _MODE_READ_EOF):
+ self._check_not_closed()
raise io.UnsupportedOperation("Seeking is only supported "
"on files open for reading")
if not self._fp.seekable():
@@ -204,16 +208,13 @@ class LZMAFile(io.BufferedIOBase):
# Fill the readahead buffer if it is empty. Returns False on EOF.
def _fill_buffer(self):
+ if self._mode == _MODE_READ_EOF:
+ return False
# Depending on the input data, our call to the decompressor may not
# return any data. In this case, try again after reading another block.
- while True:
- if self._buffer:
- return True
-
- if self._decompressor.unused_data:
- rawblock = self._decompressor.unused_data
- else:
- rawblock = self._fp.read(_BUFFER_SIZE)
+ while self._buffer_offset == len(self._buffer):
+ rawblock = (self._decompressor.unused_data or
+ self._fp.read(_BUFFER_SIZE))
if not rawblock:
if self._decompressor.eof:
@@ -229,30 +230,48 @@ class LZMAFile(io.BufferedIOBase):
self._decompressor = LZMADecompressor(**self._init_args)
self._buffer = self._decompressor.decompress(rawblock)
+ self._buffer_offset = 0
+ return True
# Read data until EOF.
# If return_data is false, consume the data without returning it.
def _read_all(self, return_data=True):
+ # The loop assumes that _buffer_offset is 0. Ensure that this is true.
+ self._buffer = self._buffer[self._buffer_offset:]
+ self._buffer_offset = 0
+
blocks = []
while self._fill_buffer():
if return_data:
blocks.append(self._buffer)
self._pos += len(self._buffer)
- self._buffer = None
+ self._buffer = b""
if return_data:
return b"".join(blocks)
# Read a block of up to n bytes.
# If return_data is false, consume the data without returning it.
def _read_block(self, n, return_data=True):
+ # If we have enough data buffered, return immediately.
+ end = self._buffer_offset + n
+ if end <= len(self._buffer):
+ data = self._buffer[self._buffer_offset : end]
+ self._buffer_offset = end
+ self._pos += len(data)
+ return data if return_data else None
+
+ # The loop assumes that _buffer_offset is 0. Ensure that this is true.
+ self._buffer = self._buffer[self._buffer_offset:]
+ self._buffer_offset = 0
+
blocks = []
while n > 0 and self._fill_buffer():
if n < len(self._buffer):
data = self._buffer[:n]
- self._buffer = self._buffer[n:]
+ self._buffer_offset = n
else:
data = self._buffer
- self._buffer = None
+ self._buffer = b""
if return_data:
blocks.append(data)
self._pos += len(data)
@@ -267,9 +286,9 @@ class LZMAFile(io.BufferedIOBase):
The exact number of bytes returned is unspecified.
"""
self._check_can_read()
- if self._mode == _MODE_READ_EOF or not self._fill_buffer():
+ if not self._fill_buffer():
return b""
- return self._buffer
+ return self._buffer[self._buffer_offset:]
def read(self, size=-1):
"""Read up to size uncompressed bytes from the file.
@@ -278,7 +297,7 @@ class LZMAFile(io.BufferedIOBase):
Returns b"" if the file is already at EOF.
"""
self._check_can_read()
- if self._mode == _MODE_READ_EOF or size == 0:
+ if size == 0:
return b""
elif size < 0:
return self._read_all()
@@ -295,18 +314,40 @@ class LZMAFile(io.BufferedIOBase):
# this does not give enough data for the decompressor to make progress.
# In this case we make multiple reads, to avoid returning b"".
self._check_can_read()
- if (size == 0 or self._mode == _MODE_READ_EOF or
- not self._fill_buffer()):
+ if (size == 0 or
+ # Only call _fill_buffer() if the buffer is actually empty.
+ # This gives a significant speedup if *size* is small.
+ (self._buffer_offset == len(self._buffer) and not self._fill_buffer())):
return b""
- if 0 < size < len(self._buffer):
- data = self._buffer[:size]
- self._buffer = self._buffer[size:]
+ if size > 0:
+ data = self._buffer[self._buffer_offset :
+ self._buffer_offset + size]
+ self._buffer_offset += len(data)
else:
- data = self._buffer
- self._buffer = None
+ data = self._buffer[self._buffer_offset:]
+ self._buffer = b""
+ self._buffer_offset = 0
self._pos += len(data)
return data
+ def readline(self, size=-1):
+ """Read a line of uncompressed bytes from the file.
+
+ The terminating newline (if present) is retained. If size is
+ non-negative, no more than size bytes will be read (in which
+ case the line may be incomplete). Returns b'' if already at EOF.
+ """
+ self._check_can_read()
+ # Shortcut for the common case - the whole line is in the buffer.
+ if size < 0:
+ end = self._buffer.find(b"\n", self._buffer_offset) + 1
+ if end > 0:
+ line = self._buffer[self._buffer_offset : end]
+ self._buffer_offset = end
+ self._pos += len(line)
+ return line
+ return io.BufferedIOBase.readline(self, size)
+
def write(self, data):
"""Write a bytes object to the file.
@@ -326,7 +367,8 @@ class LZMAFile(io.BufferedIOBase):
self._mode = _MODE_READ
self._pos = 0
self._decompressor = LZMADecompressor(**self._init_args)
- self._buffer = None
+ self._buffer = b""
+ self._buffer_offset = 0
def seek(self, offset, whence=0):
"""Change the file position.
@@ -365,8 +407,7 @@ class LZMAFile(io.BufferedIOBase):
offset -= self._pos
# Read and discard data until we reach the desired position.
- if self._mode != _MODE_READ_EOF:
- self._read_block(offset, return_data=False)
+ self._read_block(offset, return_data=False)
return self._pos
@@ -381,23 +422,24 @@ def open(filename, mode="rb", *,
encoding=None, errors=None, newline=None):
"""Open an LZMA-compressed file in binary or text mode.
- filename can be either an actual file name (given as a str or bytes object),
- in which case the named file is opened, or it can be an existing file object
- to read from or write to.
+ filename can be either an actual file name (given as a str or bytes
+ object), in which case the named file is opened, or it can be an
+ existing file object to read from or write to.
- The mode argument can be "r", "rb" (default), "w", "wb", "a", or "ab" for
- binary mode, or "rt", "wt" or "at" for text mode.
+ The mode argument can be "r", "rb" (default), "w", "wb", "a" or "ab"
+ for binary mode, or "rt", "wt" or "at" for text mode.
- The format, check, preset and filters arguments specify the compression
- settings, as for LZMACompressor, LZMADecompressor and LZMAFile.
+ The format, check, preset and filters arguments specify the
+ compression settings, as for LZMACompressor, LZMADecompressor and
+ LZMAFile.
- For binary mode, this function is equivalent to the LZMAFile constructor:
- LZMAFile(filename, mode, ...). In this case, the encoding, errors and
- newline arguments must not be provided.
+ For binary mode, this function is equivalent to the LZMAFile
+ constructor: LZMAFile(filename, mode, ...). In this case, the
+ encoding, errors and newline arguments must not be provided.
For text mode, a LZMAFile object is created, and wrapped in an
- io.TextIOWrapper instance with the specified encoding, error handling
- behavior, and line ending(s).
+ io.TextIOWrapper instance with the specified encoding, error
+ handling behavior, and line ending(s).
"""
if "t" in mode:
@@ -427,7 +469,7 @@ def compress(data, format=FORMAT_XZ, check=-1, preset=None, filters=None):
Refer to LZMACompressor's docstring for a description of the
optional arguments *format*, *check*, *preset* and *filters*.
- For incremental compression, use an LZMACompressor object instead.
+ For incremental compression, use an LZMACompressor instead.
"""
comp = LZMACompressor(format, check, preset, filters)
return comp.compress(data) + comp.flush()
@@ -439,7 +481,7 @@ def decompress(data, format=FORMAT_AUTO, memlimit=None, filters=None):
Refer to LZMADecompressor's docstring for a description of the
optional arguments *format*, *check* and *filters*.
- For incremental decompression, use a LZMADecompressor object instead.
+ For incremental decompression, use an LZMADecompressor instead.
"""
results = []
while True:
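
The read path now tracks a (buffer, offset) pair instead of repeatedly re-slicing the buffer, and readline() gains a fast path when the whole line is already buffered, which makes line-by-line iteration much cheaper. A small round trip (the file name is illustrative):

import lzma

with lzma.open('example.xz', 'wb') as f:          # 'example.xz' is illustrative
    f.write(b'first line\nsecond line\n')

with lzma.LZMAFile('example.xz') as f:
    for line in f:     # iteration goes through the new buffered readline()
        print(line)
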
diff --git a/Lib/mailbox.py b/Lib/mailbox.py
index d3bf3fd..01f3551 100644
--- a/Lib/mailbox.py
+++ b/Lib/mailbox.py
@@ -22,9 +22,6 @@ import email.generator
import io
import contextlib
try:
- if sys.platform == 'os2emx':
- # OS/2 EMX fcntl() not adequate
- raise ImportError
import fcntl
except ImportError:
fcntl = None
@@ -631,8 +628,7 @@ class _singlefileMailbox(Mailbox):
def iterkeys(self):
"""Return an iterator over keys."""
self._lookup()
- for key in self._toc.keys():
- yield key
+ yield from self._toc.keys()
def __contains__(self, key):
"""Return True if the keyed message exists, False otherwise."""
@@ -711,8 +707,7 @@ class _singlefileMailbox(Mailbox):
try:
os.rename(new_file.name, self._path)
except OSError as e:
- if e.errno == errno.EEXIST or \
- (os.name == 'os2' and e.errno == errno.EACCES):
+ if e.errno == errno.EEXIST:
os.remove(self._path)
os.rename(new_file.name, self._path)
else:
@@ -2097,8 +2092,7 @@ def _lock_file(f, dotlock=True):
os.rename(pre_lock.name, f.name + '.lock')
dotlock_done = True
except OSError as e:
- if e.errno == errno.EEXIST or \
- (os.name == 'os2' and e.errno == errno.EACCES):
+ if e.errno == errno.EEXIST:
os.remove(pre_lock.name)
raise ExternalClashError('dot lock unavailable: %s' %
f.name)
diff --git a/Lib/mimetypes.py b/Lib/mimetypes.py
index 3f0bd0e..40faf8c 100644
--- a/Lib/mimetypes.py
+++ b/Lib/mimetypes.py
@@ -243,7 +243,7 @@ class MimeTypes:
while True:
try:
ctype = _winreg.EnumKey(mimedb, i)
- except EnvironmentError:
+ except OSError:
break
else:
yield ctype
@@ -256,7 +256,7 @@ class MimeTypes:
with _winreg.OpenKey(mimedb, ctype) as key:
suffix, datatype = _winreg.QueryValueEx(key,
'Extension')
- except EnvironmentError:
+ except OSError:
continue
if datatype != _winreg.REG_SZ:
continue
@@ -378,12 +378,14 @@ def _default_mime_types():
'.taz': '.tar.gz',
'.tz': '.tar.gz',
'.tbz2': '.tar.bz2',
+ '.txz': '.tar.xz',
}
encodings_map = {
'.gz': 'gzip',
'.Z': 'compress',
'.bz2': 'bzip2',
+ '.xz': 'xz',
}
# Before adding new types, make sure they are either registered with IANA,
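
With the new entries, .xz joins gzip and bzip2 as a recognized encoding and .txz expands to .tar.xz:

import mimetypes

print(mimetypes.guess_type('backup.tar.xz'))   # ('application/x-tar', 'xz')
print(mimetypes.guess_type('backup.txz'))      # ('application/x-tar', 'xz')
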
diff --git a/Lib/multiprocessing/__init__.py b/Lib/multiprocessing/__init__.py
index 1f3e67c..efad532 100644
--- a/Lib/multiprocessing/__init__.py
+++ b/Lib/multiprocessing/__init__.py
@@ -40,6 +40,13 @@ from multiprocessing.process import Process, current_process, active_children
from multiprocessing.util import SUBDEBUG, SUBWARNING
#
+# Alias for main module -- will be reset by bootstrapping child processes
+#
+
+if '__main__' in sys.modules:
+ sys.modules['__mp_main__'] = sys.modules['__main__']
+
+#
# Exceptions
#
diff --git a/Lib/multiprocessing/forking.py b/Lib/multiprocessing/forking.py
index c5501a2..a0b3d68 100644
--- a/Lib/multiprocessing/forking.py
+++ b/Lib/multiprocessing/forking.py
@@ -442,27 +442,17 @@ def prepare(data):
dirs = [os.path.dirname(main_path)]
assert main_name not in sys.modules, main_name
+ sys.modules.pop('__mp_main__', None)
file, path_name, etc = imp.find_module(main_name, dirs)
try:
- # We would like to do "imp.load_module('__main__', ...)"
- # here. However, that would cause 'if __name__ ==
- # "__main__"' clauses to be executed.
+ # We should not do 'imp.load_module("__main__", ...)'
+ # since that would execute 'if __name__ == "__main__"'
+ # clauses, potentially causing a pseudo fork bomb.
main_module = imp.load_module(
- '__parents_main__', file, path_name, etc
+ '__mp_main__', file, path_name, etc
)
finally:
if file:
file.close()
- sys.modules['__main__'] = main_module
- main_module.__name__ = '__main__'
-
- # Try to make the potentially picklable objects in
- # sys.modules['__main__'] realize they are in the main
- # module -- somewhat ugly.
- for obj in list(main_module.__dict__.values()):
- try:
- if obj.__module__ == '__parents_main__':
- obj.__module__ = '__main__'
- except Exception:
- pass
+ sys.modules['__main__'] = sys.modules['__mp_main__'] = main_module
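
Together with the __mp_main__ alias added to multiprocessing/__init__.py, child processes now import the parent's script under the name __mp_main__ instead of re-executing it as __main__, avoiding the pseudo fork bomb mentioned in the comment above. The user-facing idiom is unchanged:

import multiprocessing

def square(x):
    return x * x

if __name__ == '__main__':
    # In the children this script is imported as '__mp_main__', so the
    # guarded block below runs only in the parent process.
    with multiprocessing.Pool(2) as pool:
        print(pool.map(square, range(5)))
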
diff --git a/Lib/multiprocessing/queues.py b/Lib/multiprocessing/queues.py
index 37271fb..f6f02b6 100644
--- a/Lib/multiprocessing/queues.py
+++ b/Lib/multiprocessing/queues.py
@@ -243,10 +243,14 @@ class Queue(object):
if wacquire is None:
send(obj)
+ # Delete references to object. See issue16284
+ del obj
else:
wacquire()
try:
send(obj)
+ # Delete references to object. See issue16284
+ del obj
finally:
wrelease()
except IndexError:
diff --git a/Lib/ntpath.py b/Lib/ntpath.py
index 826be87..f70193f 100644
--- a/Lib/ntpath.py
+++ b/Lib/ntpath.py
@@ -30,9 +30,6 @@ altsep = '/'
defpath = '.;C:\\bin'
if 'ce' in sys.builtin_module_names:
defpath = '\\Windows'
-elif 'os2' in sys.builtin_module_names:
- # OS/2 w/ VACPP
- altsep = '/'
devnull = 'nul'
def _get_empty(path):
@@ -320,8 +317,7 @@ def dirname(p):
def islink(path):
"""Test whether a path is a symbolic link.
- This will always return false for Windows prior to 6.0
- and for OS/2.
+ This will always return false for Windows prior to 6.0.
"""
try:
st = os.lstat(path)
diff --git a/Lib/os.py b/Lib/os.py
index 84eeaeb..8241f36 100644
--- a/Lib/os.py
+++ b/Lib/os.py
@@ -1,9 +1,9 @@
r"""OS routines for Mac, NT, or Posix depending on what system we're on.
This exports:
- - all functions from posix, nt, os2, or ce, e.g. unlink, stat, etc.
+ - all functions from posix, nt or ce, e.g. unlink, stat, etc.
- os.path is either posixpath or ntpath
- - os.name is either 'posix', 'nt', 'os2' or 'ce'.
+ - os.name is either 'posix', 'nt' or 'ce'.
- os.curdir is a string representing the current directory ('.' or ':')
- os.pardir is a string representing the parent directory ('..' or '::')
- os.sep is the (or a most common) pathname separator ('/' or ':' or '\\')
@@ -81,30 +81,6 @@ elif 'nt' in _names:
except ImportError:
pass
-elif 'os2' in _names:
- name = 'os2'
- linesep = '\r\n'
- from os2 import *
- try:
- from os2 import _exit
- __all__.append('_exit')
- except ImportError:
- pass
- if sys.version.find('EMX GCC') == -1:
- import ntpath as path
- else:
- import os2emxpath as path
- from _emx_link import link
-
- import os2
- __all__.extend(_get_exports_list(os2))
- del os2
-
- try:
- from os2 import _have_functions
- except ImportError:
- pass
-
elif 'ce' in _names:
name = 'ce'
linesep = '\r\n'
@@ -715,7 +691,7 @@ else:
__all__.append("unsetenv")
def _createenviron():
- if name in ('os2', 'nt'):
+ if name == 'nt':
# Where Env Var Names Must Be UPPERCASE
def check_str(value):
if not isinstance(value, str):
@@ -755,7 +731,7 @@ def getenv(key, default=None):
key, default and the result are str."""
return environ.get(key, default)
-supports_bytes_environ = name not in ('os2', 'nt')
+supports_bytes_environ = (name != 'nt')
__all__.extend(("getenv", "supports_bytes_environ"))
if supports_bytes_environ:
diff --git a/Lib/os2emxpath.py b/Lib/os2emxpath.py
deleted file mode 100644
index 0ccbf8a..0000000
--- a/Lib/os2emxpath.py
+++ /dev/null
@@ -1,158 +0,0 @@
-# Module 'os2emxpath' -- common operations on OS/2 pathnames
-"""Common pathname manipulations, OS/2 EMX version.
-
-Instead of importing this module directly, import os and refer to this
-module as os.path.
-"""
-
-import os
-import stat
-from genericpath import *
-from ntpath import (expanduser, expandvars, isabs, islink, splitdrive,
- splitext, split)
-
-__all__ = ["normcase","isabs","join","splitdrive","split","splitext",
- "basename","dirname","commonprefix","getsize","getmtime",
- "getatime","getctime", "islink","exists","lexists","isdir","isfile",
- "ismount","expanduser","expandvars","normpath","abspath",
- "splitunc","curdir","pardir","sep","pathsep","defpath","altsep",
- "extsep","devnull","realpath","supports_unicode_filenames"]
-
-# strings representing various path-related bits and pieces
-curdir = '.'
-pardir = '..'
-extsep = '.'
-sep = '/'
-altsep = '\\'
-pathsep = ';'
-defpath = '.;C:\\bin'
-devnull = 'nul'
-
-# Normalize the case of a pathname and map slashes to backslashes.
-# Other normalizations (such as optimizing '../' away) are not done
-# (this is done by normpath).
-
-def normcase(s):
- """Normalize case of pathname.
-
- Makes all characters lowercase and all altseps into seps."""
- if not isinstance(s, (bytes, str)):
- raise TypeError("normcase() argument must be str or bytes, "
- "not '{}'".format(s.__class__.__name__))
- return s.replace('\\', '/').lower()
-
-
-# Join two (or more) paths.
-
-def join(a, *p):
- """Join two or more pathname components, inserting sep as needed"""
- path = a
- for b in p:
- if isabs(b):
- path = b
- elif path == '' or path[-1:] in '/\\:':
- path = path + b
- else:
- path = path + '/' + b
- return path
-
-
-# Parse UNC paths
-def splitunc(p):
- """Split a pathname into UNC mount point and relative path specifiers.
-
- Return a 2-tuple (unc, rest); either part may be empty.
- If unc is not empty, it has the form '//host/mount' (or similar
- using backslashes). unc+rest is always the input path.
- Paths containing drive letters never have an UNC part.
- """
- if p[1:2] == ':':
- return '', p # Drive letter present
- firstTwo = p[0:2]
- if firstTwo == '/' * 2 or firstTwo == '\\' * 2:
- # is a UNC path:
- # vvvvvvvvvvvvvvvvvvvv equivalent to drive letter
- # \\machine\mountpoint\directories...
- # directory ^^^^^^^^^^^^^^^
- normp = normcase(p)
- index = normp.find('/', 2)
- if index == -1:
- ##raise RuntimeError, 'illegal UNC path: "' + p + '"'
- return ("", p)
- index = normp.find('/', index + 1)
- if index == -1:
- index = len(p)
- return p[:index], p[index:]
- return '', p
-
-
-# Return the tail (basename) part of a path.
-
-def basename(p):
- """Returns the final component of a pathname"""
- return split(p)[1]
-
-
-# Return the head (dirname) part of a path.
-
-def dirname(p):
- """Returns the directory component of a pathname"""
- return split(p)[0]
-
-
-# alias exists to lexists
-lexists = exists
-
-
-# Is a path a directory?
-
-# Is a path a mount point? Either a root (with or without drive letter)
-# or an UNC path with at most a / or \ after the mount point.
-
-def ismount(path):
- """Test whether a path is a mount point (defined as root of drive)"""
- unc, rest = splitunc(path)
- if unc:
- return rest in ("", "/", "\\")
- p = splitdrive(path)[1]
- return len(p) == 1 and p[0] in '/\\'
-
-
-# Normalize a path, e.g. A//B, A/./B and A/foo/../B all become A/B.
-
-def normpath(path):
- """Normalize path, eliminating double slashes, etc."""
- path = path.replace('\\', '/')
- prefix, path = splitdrive(path)
- while path[:1] == '/':
- prefix = prefix + '/'
- path = path[1:]
- comps = path.split('/')
- i = 0
- while i < len(comps):
- if comps[i] == '.':
- del comps[i]
- elif comps[i] == '..' and i > 0 and comps[i-1] not in ('', '..'):
- del comps[i-1:i+1]
- i = i - 1
- elif comps[i] == '' and i > 0 and comps[i-1] != '':
- del comps[i]
- else:
- i = i + 1
- # If the path is now empty, substitute '.'
- if not prefix and not comps:
- comps.append('.')
- return prefix + '/'.join(comps)
-
-
-# Return an absolute path.
-def abspath(path):
- """Return the absolute version of a path"""
- if not isabs(path):
- path = join(os.getcwd(), path)
- return normpath(path)
-
-# realpath is a no-op on systems without islink support
-realpath = abspath
-
-supports_unicode_filenames = False
diff --git a/Lib/pkgutil.py b/Lib/pkgutil.py
index 8bdeb32..c695cf1 100644
--- a/Lib/pkgutil.py
+++ b/Lib/pkgutil.py
@@ -121,8 +121,7 @@ def walk_packages(path=None, prefix='', onerror=None):
# don't traverse path items we've seen before
path = [p for p in path if not seen(p)]
- for item in walk_packages(path, name+'.', onerror):
- yield item
+ yield from walk_packages(path, name+'.', onerror)
def iter_modules(path=None, prefix=''):
@@ -456,8 +455,7 @@ def iter_importers(fullname=""):
if path is None:
return
else:
- for importer in sys.meta_path:
- yield importer
+ yield from sys.meta_path
path = sys.path
for item in path:
yield get_importer(item)
diff --git a/Lib/plat-os2emx/IN.py b/Lib/plat-os2emx/IN.py
deleted file mode 100644
index 753ae24..0000000
--- a/Lib/plat-os2emx/IN.py
+++ /dev/null
@@ -1,82 +0,0 @@
-# Generated by h2py from f:/emx/include/netinet/in.h
-
-# Included from sys/param.h
-PAGE_SIZE = 0x1000
-HZ = 100
-MAXNAMLEN = 260
-MAXPATHLEN = 260
-def htonl(X): return _swapl(X)
-
-def ntohl(X): return _swapl(X)
-
-def htons(X): return _swaps(X)
-
-def ntohs(X): return _swaps(X)
-
-IPPROTO_IP = 0
-IPPROTO_ICMP = 1
-IPPROTO_IGMP = 2
-IPPROTO_GGP = 3
-IPPROTO_TCP = 6
-IPPROTO_EGP = 8
-IPPROTO_PUP = 12
-IPPROTO_UDP = 17
-IPPROTO_IDP = 22
-IPPROTO_TP = 29
-IPPROTO_EON = 80
-IPPROTO_RAW = 255
-IPPROTO_MAX = 256
-IPPORT_RESERVED = 1024
-IPPORT_USERRESERVED = 5000
-def IN_CLASSA(i): return (((int)(i) & 0x80000000) == 0)
-
-IN_CLASSA_NET = 0xff000000
-IN_CLASSA_NSHIFT = 24
-IN_CLASSA_HOST = 0x00ffffff
-IN_CLASSA_MAX = 128
-def IN_CLASSB(i): return (((int)(i) & 0xc0000000) == 0x80000000)
-
-IN_CLASSB_NET = 0xffff0000
-IN_CLASSB_NSHIFT = 16
-IN_CLASSB_HOST = 0x0000ffff
-IN_CLASSB_MAX = 65536
-def IN_CLASSC(i): return (((int)(i) & 0xe0000000) == 0xc0000000)
-
-IN_CLASSC_NET = 0xffffff00
-IN_CLASSC_NSHIFT = 8
-IN_CLASSC_HOST = 0x000000ff
-def IN_CLASSD(i): return (((int)(i) & 0xf0000000) == 0xe0000000)
-
-IN_CLASSD_NET = 0xf0000000
-IN_CLASSD_NSHIFT = 28
-IN_CLASSD_HOST = 0x0fffffff
-def IN_MULTICAST(i): return IN_CLASSD(i)
-
-def IN_EXPERIMENTAL(i): return (((int)(i) & 0xe0000000) == 0xe0000000)
-
-def IN_BADCLASS(i): return (((int)(i) & 0xf0000000) == 0xf0000000)
-
-INADDR_ANY = 0x00000000
-INADDR_LOOPBACK = 0x7f000001
-INADDR_BROADCAST = 0xffffffff
-INADDR_NONE = 0xffffffff
-INADDR_UNSPEC_GROUP = 0xe0000000
-INADDR_ALLHOSTS_GROUP = 0xe0000001
-INADDR_MAX_LOCAL_GROUP = 0xe00000ff
-IN_LOOPBACKNET = 127
-IP_OPTIONS = 1
-IP_MULTICAST_IF = 2
-IP_MULTICAST_TTL = 3
-IP_MULTICAST_LOOP = 4
-IP_ADD_MEMBERSHIP = 5
-IP_DROP_MEMBERSHIP = 6
-IP_HDRINCL = 2
-IP_TOS = 3
-IP_TTL = 4
-IP_RECVOPTS = 5
-IP_RECVRETOPTS = 6
-IP_RECVDSTADDR = 7
-IP_RETOPTS = 8
-IP_DEFAULT_MULTICAST_TTL = 1
-IP_DEFAULT_MULTICAST_LOOP = 1
-IP_MAX_MEMBERSHIPS = 20
diff --git a/Lib/plat-os2emx/SOCKET.py b/Lib/plat-os2emx/SOCKET.py
deleted file mode 100644
index dac594a..0000000
--- a/Lib/plat-os2emx/SOCKET.py
+++ /dev/null
@@ -1,106 +0,0 @@
-# Generated by h2py from f:/emx/include/sys/socket.h
-
-# Included from sys/types.h
-FD_SETSIZE = 256
-
-# Included from sys/uio.h
-FREAD = 1
-FWRITE = 2
-SOCK_STREAM = 1
-SOCK_DGRAM = 2
-SOCK_RAW = 3
-SOCK_RDM = 4
-SOCK_SEQPACKET = 5
-SO_DEBUG = 0x0001
-SO_ACCEPTCONN = 0x0002
-SO_REUSEADDR = 0x0004
-SO_KEEPALIVE = 0x0008
-SO_DONTROUTE = 0x0010
-SO_BROADCAST = 0x0020
-SO_USELOOPBACK = 0x0040
-SO_LINGER = 0x0080
-SO_OOBINLINE = 0x0100
-SO_L_BROADCAST = 0x0200
-SO_RCV_SHUTDOWN = 0x0400
-SO_SND_SHUTDOWN = 0x0800
-SO_SNDBUF = 0x1001
-SO_RCVBUF = 0x1002
-SO_SNDLOWAT = 0x1003
-SO_RCVLOWAT = 0x1004
-SO_SNDTIMEO = 0x1005
-SO_RCVTIMEO = 0x1006
-SO_ERROR = 0x1007
-SO_TYPE = 0x1008
-SO_OPTIONS = 0x1010
-SOL_SOCKET = 0xffff
-AF_UNSPEC = 0
-AF_UNIX = 1
-AF_INET = 2
-AF_IMPLINK = 3
-AF_PUP = 4
-AF_CHAOS = 5
-AF_NS = 6
-AF_NBS = 7
-AF_ISO = 7
-AF_OSI = AF_ISO
-AF_ECMA = 8
-AF_DATAKIT = 9
-AF_CCITT = 10
-AF_SNA = 11
-AF_DECnet = 12
-AF_DLI = 13
-AF_LAT = 14
-AF_HYLINK = 15
-AF_APPLETALK = 16
-AF_NB = 17
-AF_NETBIOS = AF_NB
-AF_OS2 = AF_UNIX
-AF_MAX = 18
-PF_UNSPEC = AF_UNSPEC
-PF_UNIX = AF_UNIX
-PF_INET = AF_INET
-PF_IMPLINK = AF_IMPLINK
-PF_PUP = AF_PUP
-PF_CHAOS = AF_CHAOS
-PF_NS = AF_NS
-PF_NBS = AF_NBS
-PF_ISO = AF_ISO
-PF_OSI = AF_ISO
-PF_ECMA = AF_ECMA
-PF_DATAKIT = AF_DATAKIT
-PF_CCITT = AF_CCITT
-PF_SNA = AF_SNA
-PF_DECnet = AF_DECnet
-PF_DLI = AF_DLI
-PF_LAT = AF_LAT
-PF_HYLINK = AF_HYLINK
-PF_APPLETALK = AF_APPLETALK
-PF_NB = AF_NB
-PF_NETBIOS = AF_NB
-PF_OS2 = AF_UNIX
-PF_MAX = AF_MAX
-SOMAXCONN = 5
-MSG_OOB = 0x1
-MSG_PEEK = 0x2
-MSG_DONTROUTE = 0x4
-MSG_EOR = 0x8
-MSG_TRUNC = 0x10
-MSG_CTRUNC = 0x20
-MSG_WAITALL = 0x40
-MSG_MAXIOVLEN = 16
-SCM_RIGHTS = 0x01
-MT_FREE = 0
-MT_DATA = 1
-MT_HEADER = 2
-MT_SOCKET = 3
-MT_PCB = 4
-MT_RTABLE = 5
-MT_HTABLE = 6
-MT_ATABLE = 7
-MT_SONAME = 8
-MT_ZOMBIE = 9
-MT_SOOPTS = 10
-MT_FTABLE = 11
-MT_RIGHTS = 12
-MT_IFADDR = 13
-MAXSOCKETS = 2048
diff --git a/Lib/plat-os2emx/_emx_link.py b/Lib/plat-os2emx/_emx_link.py
deleted file mode 100644
index 01e6b54..0000000
--- a/Lib/plat-os2emx/_emx_link.py
+++ /dev/null
@@ -1,79 +0,0 @@
-# _emx_link.py
-
-# Written by Andrew I MacIntyre, December 2002.
-
-"""_emx_link.py is a simplistic emulation of the Unix link(2) library routine
-for creating so-called hard links. It is intended to be imported into
-the os module in place of the unimplemented (on OS/2) Posix link()
-function (os.link()).
-
-We do this on OS/2 by implementing a file copy, with link(2) semantics:-
- - the target cannot already exist;
- - we hope that the actual file open (if successful) is actually
- atomic...
-
-Limitations of this approach/implementation include:-
- - no support for correct link counts (EMX stat(target).st_nlink
- is always 1);
- - thread safety undefined;
- - default file permissions (r+w) used, can't be over-ridden;
- - implemented in Python so comparatively slow, especially for large
- source files;
- - need sufficient free disk space to store the copy.
-
-Behaviour:-
- - any exception should propagate to the caller;
- - want target to be an exact copy of the source, so use binary mode;
- - returns None, same as os.link() which is implemented in posixmodule.c;
- - target removed in the event of a failure where possible;
- - given the motivation to write this emulation came from trying to
- support a Unix resource lock implementation, where minimal overhead
- during creation of the target is desirable and the files are small,
- we read a source block before attempting to create the target so that
- we're ready to immediately write some data into it.
-"""
-
-import os
-import errno
-
-__all__ = ['link']
-
-def link(source, target):
- """link(source, target) -> None
-
- Attempt to hard link the source file to the target file name.
- On OS/2, this creates a complete copy of the source file.
- """
-
- s = os.open(source, os.O_RDONLY | os.O_BINARY)
- if os.isatty(s):
- raise OSError(errno.EXDEV, 'Cross-device link')
- data = os.read(s, 1024)
-
- try:
- t = os.open(target, os.O_WRONLY | os.O_BINARY | os.O_CREAT | os.O_EXCL)
- except OSError:
- os.close(s)
- raise
-
- try:
- while data:
- os.write(t, data)
- data = os.read(s, 1024)
- except OSError:
- os.close(s)
- os.close(t)
- os.unlink(target)
- raise
-
- os.close(s)
- os.close(t)
-
-if __name__ == '__main__':
- import sys
- try:
- link(sys.argv[1], sys.argv[2])
- except IndexError:
- print('Usage: emx_link <source> <target>')
- except OSError:
- print('emx_link: %s' % str(sys.exc_info()[1]))
diff --git a/Lib/plat-os2emx/grp.py b/Lib/plat-os2emx/grp.py
deleted file mode 100644
index ee63ef8..0000000
--- a/Lib/plat-os2emx/grp.py
+++ /dev/null
@@ -1,182 +0,0 @@
-# this module is an OS/2 oriented replacement for the grp standard
-# extension module.
-
-# written by Andrew MacIntyre, April 2001.
-# updated July 2003, adding field accessor support
-
-# note that this implementation checks whether ":" or ";" as used as
-# the field separator character.
-
-"""Replacement for grp standard extension module, intended for use on
-OS/2 and similar systems which don't normally have an /etc/group file.
-
-The standard Unix group database is an ASCII text file with 4 fields per
-record (line), separated by a colon:
- - group name (string)
- - group password (optional encrypted string)
- - group id (integer)
- - group members (comma delimited list of userids, with no spaces)
-
-Note that members are only included in the group file for groups that
-aren't their primary groups.
-(see the section 8.2 of the Python Library Reference)
-
-This implementation differs from the standard Unix implementation by
-allowing use of the platform's native path separator character - ';' on OS/2,
-DOS and MS-Windows - as the field separator in addition to the Unix
-standard ":".
-
-The module looks for the group database at the following locations
-(in order first to last):
- - ${ETC_GROUP} (or %ETC_GROUP%)
- - ${ETC}/group (or %ETC%/group)
- - ${PYTHONHOME}/Etc/group (or %PYTHONHOME%/Etc/group)
-
-Classes
--------
-
-None
-
-Functions
----------
-
-getgrgid(gid) - return the record for group-id gid as a 4-tuple
-
-getgrnam(name) - return the record for group 'name' as a 4-tuple
-
-getgrall() - return a list of 4-tuples, each tuple being one record
- (NOTE: the order is arbitrary)
-
-Attributes
-----------
-
-group_file - the path of the group database file
-
-"""
-
-import os
-
-# try and find the group file
-__group_path = []
-if 'ETC_GROUP' in os.environ:
- __group_path.append(os.environ['ETC_GROUP'])
-if 'ETC' in os.environ:
- __group_path.append('%s/group' % os.environ['ETC'])
-if 'PYTHONHOME' in os.environ:
- __group_path.append('%s/Etc/group' % os.environ['PYTHONHOME'])
-
-group_file = None
-for __i in __group_path:
- try:
- __f = open(__i, 'r')
- __f.close()
- group_file = __i
- break
- except:
- pass
-
-# decide what field separator we can try to use - Unix standard, with
-# the platform's path separator as an option. No special field conversion
-# handlers are required for the group file.
-__field_sep = [':']
-if os.pathsep:
- if os.pathsep != ':':
- __field_sep.append(os.pathsep)
-
-# helper routine to identify which separator character is in use
-def __get_field_sep(record):
- fs = None
- for c in __field_sep:
- # there should be 3 delimiter characters (for 4 fields)
- if record.count(c) == 3:
- fs = c
- break
- if fs:
- return fs
- else:
- raise KeyError('>> group database fields not delimited <<')
-
-# class to match the new record field name accessors.
-# the resulting object is intended to behave like a read-only tuple,
-# with each member also accessible by a field name.
-class Group:
- def __init__(self, name, passwd, gid, mem):
- self.__dict__['gr_name'] = name
- self.__dict__['gr_passwd'] = passwd
- self.__dict__['gr_gid'] = gid
- self.__dict__['gr_mem'] = mem
- self.__dict__['_record'] = (self.gr_name, self.gr_passwd,
- self.gr_gid, self.gr_mem)
-
- def __len__(self):
- return 4
-
- def __getitem__(self, key):
- return self._record[key]
-
- def __setattr__(self, name, value):
- raise AttributeError('attribute read-only: %s' % name)
-
- def __repr__(self):
- return str(self._record)
-
- def __cmp__(self, other):
- this = str(self._record)
- if this == other:
- return 0
- elif this < other:
- return -1
- else:
- return 1
-
-
-# read the whole file, parsing each entry into tuple form
-# with dictionaries to speed recall by GID or group name
-def __read_group_file():
- if group_file:
- group = open(group_file, 'r')
- else:
- raise KeyError('>> no group database <<')
- gidx = {}
- namx = {}
- sep = None
- while 1:
- entry = group.readline().strip()
- if len(entry) > 3:
- if sep is None:
- sep = __get_field_sep(entry)
- fields = entry.split(sep)
- fields[2] = int(fields[2])
- fields[3] = [f.strip() for f in fields[3].split(',')]
- record = Group(*fields)
- if fields[2] not in gidx:
- gidx[fields[2]] = record
- if fields[0] not in namx:
- namx[fields[0]] = record
- elif len(entry) > 0:
- pass # skip empty or malformed records
- else:
- break
- group.close()
- if len(gidx) == 0:
- raise KeyError
- return (gidx, namx)
-
-# return the group database entry by GID
-def getgrgid(gid):
- g, n = __read_group_file()
- return g[gid]
-
-# return the group database entry by group name
-def getgrnam(name):
- g, n = __read_group_file()
- return n[name]
-
-# return all the group database entries
-def getgrall():
- g, n = __read_group_file()
- return g.values()
-
-# test harness
-if __name__ == '__main__':
- getgrall()
diff --git a/Lib/plat-os2emx/pwd.py b/Lib/plat-os2emx/pwd.py
deleted file mode 100644
index 2cb077f..0000000
--- a/Lib/plat-os2emx/pwd.py
+++ /dev/null
@@ -1,208 +0,0 @@
-# this module is an OS/2 oriented replacement for the pwd standard
-# extension module.
-
-# written by Andrew MacIntyre, April 2001.
-# updated July 2003, adding field accessor support
-
-# note that this implementation checks whether ":" or ";" as used as
-# the field separator character. Path conversions are are applied when
-# the database uses ":" as the field separator character.
-
-"""Replacement for pwd standard extension module, intended for use on
-OS/2 and similar systems which don't normally have an /etc/passwd file.
-
-The standard Unix password database is an ASCII text file with 7 fields
-per record (line), separated by a colon:
- - user name (string)
- - password (encrypted string, or "*" or "")
- - user id (integer)
- - group id (integer)
- - description (usually user's name)
- - home directory (path to user's home directory)
- - shell (path to the user's login shell)
-
-(see the section 8.1 of the Python Library Reference)
-
-This implementation differs from the standard Unix implementation by
-allowing use of the platform's native path separator character - ';' on OS/2,
-DOS and MS-Windows - as the field separator in addition to the Unix
-standard ":". Additionally, when ":" is the separator path conversions
-are applied to deal with any munging of the drive letter reference.
-
-The module looks for the password database at the following locations
-(in order first to last):
- - ${ETC_PASSWD} (or %ETC_PASSWD%)
- - ${ETC}/passwd (or %ETC%/passwd)
- - ${PYTHONHOME}/Etc/passwd (or %PYTHONHOME%/Etc/passwd)
-
-Classes
--------
-
-None
-
-Functions
----------
-
-getpwuid(uid) - return the record for user-id uid as a 7-tuple
-
-getpwnam(name) - return the record for user 'name' as a 7-tuple
-
-getpwall() - return a list of 7-tuples, each tuple being one record
- (NOTE: the order is arbitrary)
-
-Attributes
-----------
-
-passwd_file - the path of the password database file
-
-"""
-
-import os
-
-# try and find the passwd file
-__passwd_path = []
-if 'ETC_PASSWD' in os.environ:
- __passwd_path.append(os.environ['ETC_PASSWD'])
-if 'ETC' in os.environ:
- __passwd_path.append('%s/passwd' % os.environ['ETC'])
-if 'PYTHONHOME' in os.environ:
- __passwd_path.append('%s/Etc/passwd' % os.environ['PYTHONHOME'])
-
-passwd_file = None
-for __i in __passwd_path:
- try:
- __f = open(__i, 'r')
- __f.close()
- passwd_file = __i
- break
- except:
- pass
-
-# path conversion handlers
-def __nullpathconv(path):
- return path.replace(os.altsep, os.sep)
-
-def __unixpathconv(path):
- # two known drive letter variations: "x;" and "$x"
- if path[0] == '$':
- conv = path[1] + ':' + path[2:]
- elif path[1] == ';':
- conv = path[0] + ':' + path[2:]
- else:
- conv = path
- return conv.replace(os.altsep, os.sep)
-
-# decide what field separator we can try to use - Unix standard, with
-# the platform's path separator as an option. No special field conversion
-# handler is required when using the platform's path separator as field
-# separator, but are required for the home directory and shell fields when
-# using the standard Unix (":") field separator.
-__field_sep = {':': __unixpathconv}
-if os.pathsep:
- if os.pathsep != ':':
- __field_sep[os.pathsep] = __nullpathconv
-
-# helper routine to identify which separator character is in use
-def __get_field_sep(record):
- fs = None
- for c in __field_sep.keys():
- # there should be 6 delimiter characters (for 7 fields)
- if record.count(c) == 6:
- fs = c
- break
- if fs:
- return fs
- else:
- raise KeyError('>> passwd database fields not delimited <<')
-
-# class to match the new record field name accessors.
-# the resulting object is intended to behave like a read-only tuple,
-# with each member also accessible by a field name.
-class Passwd:
- def __init__(self, name, passwd, uid, gid, gecos, dir, shell):
- self.__dict__['pw_name'] = name
- self.__dict__['pw_passwd'] = passwd
- self.__dict__['pw_uid'] = uid
- self.__dict__['pw_gid'] = gid
- self.__dict__['pw_gecos'] = gecos
- self.__dict__['pw_dir'] = dir
- self.__dict__['pw_shell'] = shell
- self.__dict__['_record'] = (self.pw_name, self.pw_passwd,
- self.pw_uid, self.pw_gid,
- self.pw_gecos, self.pw_dir,
- self.pw_shell)
-
- def __len__(self):
- return 7
-
- def __getitem__(self, key):
- return self._record[key]
-
- def __setattr__(self, name, value):
- raise AttributeError('attribute read-only: %s' % name)
-
- def __repr__(self):
- return str(self._record)
-
- def __cmp__(self, other):
- this = str(self._record)
- if this == other:
- return 0
- elif this < other:
- return -1
- else:
- return 1
-
-
-# read the whole file, parsing each entry into tuple form
-# with dictionaries to speed recall by UID or passwd name
-def __read_passwd_file():
- if passwd_file:
- passwd = open(passwd_file, 'r')
- else:
- raise KeyError('>> no password database <<')
- uidx = {}
- namx = {}
- sep = None
- while True:
- entry = passwd.readline().strip()
- if len(entry) > 6:
- if sep is None:
- sep = __get_field_sep(entry)
- fields = entry.split(sep)
- for i in (2, 3):
- fields[i] = int(fields[i])
- for i in (5, 6):
- fields[i] = __field_sep[sep](fields[i])
- record = Passwd(*fields)
- if fields[2] not in uidx:
- uidx[fields[2]] = record
- if fields[0] not in namx:
- namx[fields[0]] = record
- elif len(entry) > 0:
- pass # skip empty or malformed records
- else:
- break
- passwd.close()
- if len(uidx) == 0:
- raise KeyError
- return (uidx, namx)
-
-# return the passwd database entry by UID
-def getpwuid(uid):
- u, n = __read_passwd_file()
- return u[uid]
-
-# return the passwd database entry by passwd name
-def getpwnam(name):
- u, n = __read_passwd_file()
- return n[name]
-
-# return all the passwd database entries
-def getpwall():
- u, n = __read_passwd_file()
- return n.values()
-
-# test harness
-if __name__ == '__main__':
- getpwall()
diff --git a/Lib/plat-os2emx/regen b/Lib/plat-os2emx/regen
deleted file mode 100755
index 3ecd2a8..0000000
--- a/Lib/plat-os2emx/regen
+++ /dev/null
@@ -1,7 +0,0 @@
-#! /bin/sh
-export INCLUDE=$C_INCLUDE_PATH
-set -v
-python.exe ../../Tools/scripts/h2py.py $C_INCLUDE_PATH/fcntl.h
-python.exe ../../Tools/scripts/h2py.py $C_INCLUDE_PATH/sys/socket.h
-python.exe ../../Tools/scripts/h2py.py -i '(u_long)' $C_INCLUDE_PATH/netinet/in.h
-#python.exe ../../Tools/scripts/h2py.py $C_INCLUDE_PATH/termios.h
diff --git a/Lib/platform.py b/Lib/platform.py
index 2b8a24a..db08eab 100755
--- a/Lib/platform.py
+++ b/Lib/platform.py
@@ -122,7 +122,7 @@ try:
except AttributeError:
# os.devnull was added in Python 2.4, so emulate it for earlier
# Python versions
- if sys.platform in ('dos','win32','win16','os2'):
+ if sys.platform in ('dos','win32','win16'):
# Use the old CP/M NUL as device name
DEV_NULL = 'NUL'
else:
@@ -403,13 +403,13 @@ _ver_output = re.compile(r'(?:([\w ]+) ([\w.]+) '
def _syscmd_ver(system='', release='', version='',
- supported_platforms=('win32','win16','dos','os2')):
+ supported_platforms=('win32','win16','dos')):
""" Tries to figure out the OS version used and returns
a tuple (system,release,version).
It uses the "ver" shell command for this which is known
- to exists on Windows, DOS and OS/2. XXX Others too ?
+ to exist on Windows and DOS. XXX Others too ?
In case this fails, the given parameters are used as
defaults.
@@ -901,7 +901,7 @@ def _syscmd_uname(option,default=''):
""" Interface to the system's uname command.
"""
- if sys.platform in ('dos','win32','win16','os2'):
+ if sys.platform in ('dos','win32','win16'):
# XXX Others too ?
return default
try:
@@ -924,7 +924,7 @@ def _syscmd_file(target,default=''):
default in case the command should fail.
"""
- if sys.platform in ('dos','win32','win16','os2'):
+ if sys.platform in ('dos','win32','win16'):
# XXX Others too ?
return default
target = _follow_symlinks(target)
diff --git a/Lib/poplib.py b/Lib/poplib.py
index d42d9dd..cbd1df9 100644
--- a/Lib/poplib.py
+++ b/Lib/poplib.py
@@ -13,7 +13,15 @@ Based on the J. Myers POP3 draft, Jan. 96
# Imports
-import re, socket
+import errno
+import re
+import socket
+
+try:
+ import ssl
+ HAVE_SSL = True
+except ImportError:
+ HAVE_SSL = False
__all__ = ["POP3","error_proto"]
@@ -55,6 +63,8 @@ class POP3:
APOP name digest apop(name, digest)
TOP msg n top(msg, n)
UIDL [msg] uidl(msg = None)
+ CAPA capa()
+ STLS stls()
Raises one exception: 'error_proto'.
@@ -81,6 +91,7 @@ class POP3:
timeout=socket._GLOBAL_DEFAULT_TIMEOUT):
self.host = host
self.port = port
+ self._tls_established = False
self.sock = self._create_socket(timeout)
self.file = self.sock.makefile('rb')
self._debugging = 0
@@ -259,7 +270,14 @@ class POP3:
if self.file is not None:
self.file.close()
if self.sock is not None:
- self.sock.close()
+ try:
+ self.sock.shutdown(socket.SHUT_RDWR)
+ except socket.error as e:
+ # The server might already have closed the connection
+ if e.errno != errno.ENOTCONN:
+ raise
+ finally:
+ self.sock.close()
self.file = self.sock = None
#__del__ = quit
@@ -315,21 +333,71 @@ class POP3:
return self._shortcmd('UIDL %s' % which)
return self._longcmd('UIDL')
-try:
- import ssl
-except ImportError:
- pass
-else:
+
+ def capa(self):
+ """Return server capabilities (RFC 2449) as a dictionary
+ >>> c=poplib.POP3('localhost')
+ >>> c.capa()
+ {'IMPLEMENTATION': ['Cyrus', 'POP3', 'server', 'v2.2.12'],
+ 'TOP': [], 'LOGIN-DELAY': ['0'], 'AUTH-RESP-CODE': [],
+ 'EXPIRE': ['NEVER'], 'USER': [], 'STLS': [], 'PIPELINING': [],
+ 'UIDL': [], 'RESP-CODES': []}
+ >>>
+
+ Really, according to RFC 2449, the Cyrus folks should avoid
+ splitting the implementation string into multiple arguments...
+ """
+ def _parsecap(line):
+ lst = line.decode('ascii').split()
+ return lst[0], lst[1:]
+
+ caps = {}
+ try:
+ resp = self._longcmd('CAPA')
+ rawcaps = resp[1]
+ for capline in rawcaps:
+ capnm, capargs = _parsecap(capline)
+ caps[capnm] = capargs
+ except error_proto as _err:
+ raise error_proto('-ERR CAPA not supported by server')
+ return caps
+
+
+ def stls(self, context=None):
+ """Start a TLS session on the active connection as specified in RFC 2595.
+
+ context - an ssl.SSLContext
+ """
+ if not HAVE_SSL:
+ raise error_proto('-ERR TLS support missing')
+ if self._tls_established:
+ raise error_proto('-ERR TLS session already established')
+ caps = self.capa()
+ if 'STLS' not in caps:
+ raise error_proto('-ERR STLS not supported by server')
+ if context is None:
+ context = ssl.SSLContext(ssl.PROTOCOL_SSLv23)
+ context.options |= ssl.OP_NO_SSLv2
+ resp = self._shortcmd('STLS')
+ self.sock = context.wrap_socket(self.sock)
+ self.file = self.sock.makefile('rb')
+ self._tls_established = True
+ return resp
+
+
+if HAVE_SSL:
class POP3_SSL(POP3):
"""POP3 client class over SSL connection
- Instantiate with: POP3_SSL(hostname, port=995, keyfile=None, certfile=None)
+ Instantiate with: POP3_SSL(hostname, port=995, keyfile=None, certfile=None,
+ context=None)
hostname - the hostname of the pop3 over ssl server
port - port number
keyfile - PEM formatted file that contains your private key
certfile - PEM formatted certificate chain file
+ context - an ssl.SSLContext
See the methods of the parent class POP3 for more documentation.
"""
@@ -355,6 +423,13 @@ else:
sock = ssl.wrap_socket(sock, self.keyfile, self.certfile)
return sock
+ def stls(self, keyfile=None, certfile=None, context=None):
+ """The method unconditionally raises an exception since the
+ STLS command doesn't make any sense on an already established
+ SSL/TLS session.
+ """
+ raise error_proto('-ERR TLS session already established')
+
__all__.append("POP3_SSL")
if __name__ == "__main__":
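For illustration, a minimal sketch of how the new capa()/stls() methods added above could be used (the hostname and credentials are hypothetical):

    import poplib, ssl

    pop = poplib.POP3('mail.example.com', timeout=10)
    if 'STLS' in pop.capa():                 # RFC 2449 capability query
        pop.stls(context=ssl.SSLContext(ssl.PROTOCOL_SSLv23))  # RFC 2595 upgrade
    pop.user('alice')
    pop.pass_('secret')
    pop.quit()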
diff --git a/Lib/pty.py b/Lib/pty.py
index 3ccf619..3b79202 100644
--- a/Lib/pty.py
+++ b/Lib/pty.py
@@ -178,3 +178,4 @@ def spawn(argv, master_read=_read, stdin_read=_read):
tty.tcsetattr(STDIN_FILENO, tty.TCSAFLUSH, mode)
os.close(master_fd)
+ return os.waitpid(pid, 0)[1]
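With spawn() now returning the status from os.waitpid(), callers can inspect the child's exit code; a small sketch, assuming a POSIX system with a 'true' utility on PATH:

    import os, pty

    status = pty.spawn(['true'])
    if os.WIFEXITED(status):
        print('child exit code:', os.WEXITSTATUS(status))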
diff --git a/Lib/pydoc.py b/Lib/pydoc.py
index fa531e9..490eb21 100755
--- a/Lib/pydoc.py
+++ b/Lib/pydoc.py
@@ -1393,7 +1393,7 @@ def getpager():
return lambda text: pipepager(text, os.environ['PAGER'])
if os.environ.get('TERM') in ('dumb', 'emacs'):
return plainpager
- if sys.platform == 'win32' or sys.platform.startswith('os2'):
+ if sys.platform == 'win32':
return lambda text: tempfilepager(plain(text), 'more <')
if hasattr(os, 'system') and os.system('(less) 2>/dev/null') == 0:
return lambda text: pipepager(text, 'less')
diff --git a/Lib/random.py b/Lib/random.py
index 6388f29..2ad3809 100644
--- a/Lib/random.py
+++ b/Lib/random.py
@@ -252,10 +252,11 @@ class Random(_random.Random):
return seq[i]
def shuffle(self, x, random=None, int=int):
- """x, random=random.random -> shuffle list x in place; return None.
+ """Shuffle list x in place, and return None.
- Optional arg random is a 0-argument function returning a random
- float in [0.0, 1.0); by default, the standard random.random.
+ Optional argument random is a 0-argument function returning a
+ random float in [0.0, 1.0); if it is the default None, the
+ standard random.random will be used.
"""
randbelow = self._randbelow
diff --git a/Lib/shelve.py b/Lib/shelve.py
index cc1815e..cef580e 100644
--- a/Lib/shelve.py
+++ b/Lib/shelve.py
@@ -61,7 +61,7 @@ from io import BytesIO
import collections
-__all__ = ["Shelf","BsdDbShelf","DbfilenameShelf","open"]
+__all__ = ["Shelf", "BsdDbShelf", "DbfilenameShelf", "open"]
class _ClosedDict(collections.MutableMapping):
'Marker for a closed dict. Access attempts raise a ValueError.'
@@ -131,6 +131,12 @@ class Shelf(collections.MutableMapping):
except KeyError:
pass
+ def __enter__(self):
+ return self
+
+ def __exit__(self, type, value, traceback):
+ self.close()
+
def close(self):
self.sync()
try:
@@ -147,6 +153,7 @@ class Shelf(collections.MutableMapping):
def __del__(self):
if not hasattr(self, 'writeback'):
# __init__ didn't succeed, so don't bother closing
+ # see http://bugs.python.org/issue1339007 for details
return
self.close()
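With __enter__/__exit__ added above, a Shelf can now be used as a context manager; a minimal sketch (the file name is hypothetical):

    import shelve

    with shelve.open('example_shelf') as db:
        db['answer'] = 42
    # the shelf is synced and closed automatically when the block exits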
diff --git a/Lib/shutil.py b/Lib/shutil.py
index 9c66008..1eb4733 100644
--- a/Lib/shutil.py
+++ b/Lib/shutil.py
@@ -39,17 +39,20 @@ __all__ = ["copyfileobj", "copyfile", "copymode", "copystat", "copy", "copy2",
"ignore_patterns", "chown", "which"]
# disk_usage is added later, if available on the platform
-class Error(EnvironmentError):
+class Error(OSError):
pass
-class SpecialFileError(EnvironmentError):
+class SameFileError(Error):
+ """Raised when source and destination are the same file."""
+
+class SpecialFileError(OSError):
"""Raised when trying to do a kind of operation (e.g. copying) which is
not supported on a special file (e.g. a named pipe)"""
-class ExecError(EnvironmentError):
+class ExecError(OSError):
"""Raised when a command could not be executed"""
-class ReadError(EnvironmentError):
+class ReadError(OSError):
"""Raised when an archive cannot be read"""
class RegistryError(Exception):
@@ -90,7 +93,7 @@ def copyfile(src, dst, *, follow_symlinks=True):
"""
if _samefile(src, dst):
- raise Error("`%s` and `%s` are the same file" % (src, dst))
+ raise SameFileError("{!r} and {!r} are the same file".format(src, dst))
for fn in [src, dst]:
try:
@@ -215,6 +218,9 @@ def copy(src, dst, *, follow_symlinks=True):
If follow_symlinks is false, symlinks won't be followed. This
resembles GNU's "cp -P src dst".
+ If source and destination are the same file, a SameFileError will be
+ raised.
+
"""
if os.path.isdir(dst):
dst = os.path.join(dst, os.path.basename(src))
@@ -323,7 +329,7 @@ def copytree(src, dst, symlinks=False, ignore=None, copy_function=copy2,
# continue with other files
except Error as err:
errors.extend(err.args[0])
- except EnvironmentError as why:
+ except OSError as why:
errors.append((srcname, dstname, str(why)))
try:
copystat(src, dst)
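With the new SameFileError introduced above, callers can catch the specific failure instead of the generic shutil.Error; a short sketch with a hypothetical path:

    import shutil

    try:
        shutil.copyfile('data.txt', 'data.txt')
    except shutil.SameFileError as exc:
        print('copy skipped:', exc)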
diff --git a/Lib/site.py b/Lib/site.py
index b751006..5d63c75 100644
--- a/Lib/site.py
+++ b/Lib/site.py
@@ -300,9 +300,7 @@ def getsitepackages(prefixes=None):
continue
seen.add(prefix)
- if sys.platform in ('os2emx', 'riscos'):
- sitepackages.append(os.path.join(prefix, "Lib", "site-packages"))
- elif os.sep == '/':
+ if os.sep == '/':
sitepackages.append(os.path.join(prefix, "lib",
"python" + sys.version[:3],
"site-packages"))
@@ -329,23 +327,6 @@ def addsitepackages(known_paths, prefixes=None):
return known_paths
-def setBEGINLIBPATH():
- """The OS/2 EMX port has optional extension modules that do double duty
- as DLLs (and must use the .DLL file extension) for other extensions.
- The library search path needs to be amended so these will be found
- during module import. Use BEGINLIBPATH so that these are at the start
- of the library search path.
-
- """
- dllpath = os.path.join(sys.prefix, "Lib", "lib-dynload")
- libpath = os.environ['BEGINLIBPATH'].split(';')
- if libpath[-1]:
- libpath.append(dllpath)
- else:
- libpath[-1] = dllpath
- os.environ['BEGINLIBPATH'] = ';'.join(libpath)
-
-
def setquit():
"""Define new builtins 'quit' and 'exit'.
@@ -593,8 +574,6 @@ def main():
ENABLE_USER_SITE = check_enableusersite()
known_paths = addusersitepackages(known_paths)
known_paths = addsitepackages(known_paths)
- if sys.platform == 'os2emx':
- setBEGINLIBPATH()
setquit()
setcopyright()
sethelper()
diff --git a/Lib/smtplib.py b/Lib/smtplib.py
index c949d77..44a144c 100644
--- a/Lib/smtplib.py
+++ b/Lib/smtplib.py
@@ -309,7 +309,7 @@ class SMTP:
try:
port = int(port)
except ValueError:
- raise socket.error("nonnumeric port")
+ raise OSError("nonnumeric port")
if not port:
port = self.default_port
if self.debuglevel > 0:
@@ -330,7 +330,7 @@ class SMTP:
s = s.encode("ascii")
try:
self.sock.sendall(s)
- except socket.error:
+ except OSError:
self.close()
raise SMTPServerDisconnected('Server not connected')
else:
@@ -363,7 +363,7 @@ class SMTP:
while 1:
try:
line = self.file.readline()
- except socket.error as e:
+ except OSError as e:
self.close()
raise SMTPServerDisconnected("Connection unexpectedly closed: "
+ str(e))
@@ -920,7 +920,7 @@ class LMTP(SMTP):
self.sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
self.file = None
self.sock.connect(host)
- except socket.error:
+ except OSError:
if self.debuglevel > 0:
print('connect fail:', host, file=stderr)
if self.sock:
diff --git a/Lib/subprocess.py b/Lib/subprocess.py
index d3d90ca..50d7cfd 100644
--- a/Lib/subprocess.py
+++ b/Lib/subprocess.py
@@ -396,8 +396,6 @@ if mswindows:
hStdOutput = None
hStdError = None
wShowWindow = 0
- class pywintypes:
- error = IOError
else:
import select
_has_poll = hasattr(select, 'poll')
@@ -821,7 +819,7 @@ class Popen(object):
for f in filter(None, (self.stdin, self.stdout, self.stderr)):
try:
f.close()
- except EnvironmentError:
+ except OSError:
pass # Ignore EBADF or other errors.
# Make sure the child pipes are closed as well.
@@ -835,7 +833,7 @@ class Popen(object):
for fd in to_close:
try:
os.close(fd)
- except EnvironmentError:
+ except OSError:
pass
raise
@@ -1042,9 +1040,9 @@ class Popen(object):
w9xpopen = os.path.join(os.path.dirname(sys.base_exec_prefix),
"w9xpopen.exe")
if not os.path.exists(w9xpopen):
- raise RuntimeError("Cannot locate w9xpopen.exe, which is "
- "needed for Popen to work with your "
- "shell or platform.")
+ raise SubprocessError(
+ "Cannot locate w9xpopen.exe, which is needed for "
+ "Popen to work with your shell or platform.")
return w9xpopen
@@ -1102,12 +1100,6 @@ class Popen(object):
env,
cwd,
startupinfo)
- except pywintypes.error as e:
- # Translate pywintypes.error to WindowsError, which is
- # a subclass of OSError. FIXME: We should really
- # translate errno using _sys_errlist (or similar), but
- # how can this be done from Python?
- raise WindowsError(*e.args)
finally:
# Child is launched. Close the parent's copy of those pipe
# handles that only the child should have open. You need
@@ -1412,13 +1404,13 @@ class Popen(object):
exception_name, hex_errno, err_msg = (
errpipe_data.split(b':', 2))
except ValueError:
- exception_name = b'RuntimeError'
+ exception_name = b'SubprocessError'
hex_errno = b'0'
err_msg = (b'Bad exception data from child: ' +
repr(errpipe_data))
child_exception_type = getattr(
builtins, exception_name.decode('ascii'),
- RuntimeError)
+ SubprocessError)
err_msg = err_msg.decode(errors="surrogatepass")
if issubclass(child_exception_type, OSError) and hex_errno:
errno_num = int(hex_errno, 16)
@@ -1448,7 +1440,7 @@ class Popen(object):
self.returncode = _WEXITSTATUS(sts)
else:
# Should never happen
- raise RuntimeError("Unknown child exit status!")
+ raise SubprocessError("Unknown child exit status!")
def _internal_poll(self, _deadstate=None, _waitpid=os.waitpid,
@@ -1622,7 +1614,7 @@ class Popen(object):
raise TimeoutExpired(self.args, orig_timeout)
try:
ready = poller.poll(timeout)
- except select.error as e:
+ except OSError as e:
if e.args[0] == errno.EINTR:
continue
raise
@@ -1690,7 +1682,7 @@ class Popen(object):
(rlist, wlist, xlist) = \
select.select(self._read_set, self._write_set, [],
timeout)
- except select.error as e:
+ except OSError as e:
if e.args[0] == errno.EINTR:
continue
raise
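Earlier in this subprocess patch, RuntimeError is replaced by SubprocessError for child-related failures, so callers can catch it (or subclasses such as CalledProcessError); a sketch assuming a POSIX 'false' utility:

    import subprocess

    try:
        subprocess.check_call(['false'])
    except subprocess.SubprocessError as exc:   # CalledProcessError is a subclass
        print('child failed:', exc)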
diff --git a/Lib/sysconfig.py b/Lib/sysconfig.py
index 71da1db..295a0a6 100644
--- a/Lib/sysconfig.py
+++ b/Lib/sysconfig.py
@@ -52,25 +52,6 @@ _INSTALL_SCHEMES = {
'scripts': '{base}/Scripts',
'data': '{base}',
},
- 'os2': {
- 'stdlib': '{installed_base}/Lib',
- 'platstdlib': '{base}/Lib',
- 'purelib': '{base}/Lib/site-packages',
- 'platlib': '{base}/Lib/site-packages',
- 'include': '{installed_base}/Include',
- 'platinclude': '{installed_base}/Include',
- 'scripts': '{base}/Scripts',
- 'data': '{base}',
- },
- 'os2_home': {
- 'stdlib': '{userbase}/lib/python{py_version_short}',
- 'platstdlib': '{userbase}/lib/python{py_version_short}',
- 'purelib': '{userbase}/lib/python{py_version_short}/site-packages',
- 'platlib': '{userbase}/lib/python{py_version_short}/site-packages',
- 'include': '{userbase}/include/python{py_version_short}',
- 'scripts': '{userbase}/bin',
- 'data': '{userbase}',
- },
'nt_user': {
'stdlib': '{userbase}/Python{py_version_nodot}',
'platstdlib': '{userbase}/Python{py_version_nodot}',
@@ -210,7 +191,6 @@ def _getuserbase():
def joinuser(*args):
return os.path.expanduser(os.path.join(*args))
- # what about 'os2emx', 'riscos' ?
if os.name == "nt":
base = os.environ.get("APPDATA") or "~"
if env_base:
@@ -551,7 +531,7 @@ def get_config_vars(*args):
# sys.abiflags may not be defined on all platforms.
_CONFIG_VARS['abiflags'] = ''
- if os.name in ('nt', 'os2'):
+ if os.name == 'nt':
_init_non_posix(_CONFIG_VARS)
if os.name == 'posix':
_init_posix(_CONFIG_VARS)
diff --git a/Lib/tarfile.py b/Lib/tarfile.py
index 11b4b68..c0ae8b1 100644
--- a/Lib/tarfile.py
+++ b/Lib/tarfile.py
@@ -2022,7 +2022,7 @@ class TarFile(object):
try:
self._extract_member(tarinfo, os.path.join(path, tarinfo.name),
set_attrs=set_attrs)
- except EnvironmentError as e:
+ except OSError as e:
if self.errorlevel > 0:
raise
else:
@@ -2211,9 +2211,8 @@ class TarFile(object):
if tarinfo.issym() and hasattr(os, "lchown"):
os.lchown(targetpath, u, g)
else:
- if sys.platform != "os2emx":
- os.chown(targetpath, u, g)
- except EnvironmentError as e:
+ os.chown(targetpath, u, g)
+ except OSError as e:
raise ExtractError("could not change owner")
def chmod(self, tarinfo, targetpath):
@@ -2222,7 +2221,7 @@ class TarFile(object):
if hasattr(os, 'chmod'):
try:
os.chmod(targetpath, tarinfo.mode)
- except EnvironmentError as e:
+ except OSError as e:
raise ExtractError("could not change mode")
def utime(self, tarinfo, targetpath):
@@ -2232,7 +2231,7 @@ class TarFile(object):
return
try:
os.utime(targetpath, (tarinfo.mtime, tarinfo.mtime))
- except EnvironmentError as e:
+ except OSError as e:
raise ExtractError("could not change modification time")
#--------------------------------------------------------------------------
diff --git a/Lib/telnetlib.py b/Lib/telnetlib.py
index a59693e..b7c57da 100644
--- a/Lib/telnetlib.py
+++ b/Lib/telnetlib.py
@@ -313,7 +313,7 @@ class Telnet:
while i < 0 and not self.eof:
try:
ready = poller.poll(call_timeout)
- except select.error as e:
+ except OSError as e:
if e.errno == errno.EINTR:
if timeout is not None:
elapsed = time() - time_start
@@ -683,7 +683,7 @@ class Telnet:
while not m and not self.eof:
try:
ready = poller.poll(call_timeout)
- except select.error as e:
+ except OSError as e:
if e.errno == errno.EINTR:
if timeout is not None:
elapsed = time() - time_start
diff --git a/Lib/test/json_tests/test_indent.py b/Lib/test/json_tests/test_indent.py
index 4c70646..4eb4f89 100644
--- a/Lib/test/json_tests/test_indent.py
+++ b/Lib/test/json_tests/test_indent.py
@@ -32,6 +32,8 @@ class TestIndent:
d1 = self.dumps(h)
d2 = self.dumps(h, indent=2, sort_keys=True, separators=(',', ': '))
d3 = self.dumps(h, indent='\t', sort_keys=True, separators=(',', ': '))
+ d4 = self.dumps(h, indent=2, sort_keys=True)
+ d5 = self.dumps(h, indent='\t', sort_keys=True)
h1 = self.loads(d1)
h2 = self.loads(d2)
@@ -42,6 +44,8 @@ class TestIndent:
self.assertEqual(h3, h)
self.assertEqual(d2, expect.expandtabs(2))
self.assertEqual(d3, expect)
+ self.assertEqual(d4, d2)
+ self.assertEqual(d5, d3)
def test_indent0(self):
h = {3: 1}
diff --git a/Lib/test/mock_socket.py b/Lib/test/mock_socket.py
index d09e78c..8ef0ec8 100644
--- a/Lib/test/mock_socket.py
+++ b/Lib/test/mock_socket.py
@@ -140,12 +140,8 @@ def gethostbyname(name):
return ""
-class gaierror(Exception):
- pass
-
-
-class error(Exception):
- pass
+gaierror = socket_module.gaierror
+error = socket_module.error
# Constants
diff --git a/Lib/test/regrtest.py b/Lib/test/regrtest.py
index 8f3b689..5d9d82d 100755
--- a/Lib/test/regrtest.py
+++ b/Lib/test/regrtest.py
@@ -615,7 +615,7 @@ def main(tests=None, testdir=None, verbose=0, quiet=False,
sys.exit(2)
from queue import Queue
from subprocess import Popen, PIPE
- debug_output_pat = re.compile(r"\[\d+ refs\]$")
+ debug_output_pat = re.compile(r"\[\d+ refs, \d+ blocks\]$")
output = Queue()
pending = MultiprocessTests(tests)
opt_args = support.args_from_interpreter_flags()
@@ -763,20 +763,6 @@ def main(tests=None, testdir=None, verbose=0, quiet=False,
print(count(len(skipped), "test"), "skipped:")
printlist(skipped)
- e = _ExpectedSkips()
- plat = sys.platform
- if e.isvalid():
- surprise = set(skipped) - e.getexpected() - set(resource_denieds)
- if surprise:
- print(count(len(surprise), "skip"), \
- "unexpected on", plat + ":")
- printlist(surprise)
- else:
- print("Those skips are all expected on", plat + ".")
- else:
- print("Ask someone to teach regrtest.py about which tests are")
- print("expected to get skipped on", plat + ".")
-
if verbose2 and bad:
print("Re-running failed tests in verbose mode")
for test in bad:
@@ -1210,8 +1196,7 @@ def runtest_inner(test, verbose, quiet,
abstest = 'test.' + test
with saved_test_environment(test, verbose, quiet) as environment:
start_time = time.time()
- the_package = __import__(abstest, globals(), locals(), [])
- the_module = getattr(the_package, test)
+ the_module = importlib.import_module(abstest)
# If the test has a test_main, that will run the appropriate
# tests. If not, use normal unittest test loading.
test_runner = getattr(the_module, "test_main", None)
@@ -1335,33 +1320,50 @@ def dash_R(the_module, test, indirect_test, huntrleaks):
del sys.modules[the_module.__name__]
exec('import ' + the_module.__name__)
- deltas = []
nwarmup, ntracked, fname = huntrleaks
fname = os.path.join(support.SAVEDCWD, fname)
repcount = nwarmup + ntracked
+ rc_deltas = [0] * repcount
+ alloc_deltas = [0] * repcount
+
print("beginning", repcount, "repetitions", file=sys.stderr)
print(("1234567890"*(repcount//10 + 1))[:repcount], file=sys.stderr)
sys.stderr.flush()
- dash_R_cleanup(fs, ps, pic, zdc, abcs)
for i in range(repcount):
- rc_before = sys.gettotalrefcount()
run_the_test()
+ alloc_after, rc_after = dash_R_cleanup(fs, ps, pic, zdc, abcs)
sys.stderr.write('.')
sys.stderr.flush()
- dash_R_cleanup(fs, ps, pic, zdc, abcs)
- rc_after = sys.gettotalrefcount()
if i >= nwarmup:
- deltas.append(rc_after - rc_before)
+ rc_deltas[i] = rc_after - rc_before
+ alloc_deltas[i] = alloc_after - alloc_before
+ alloc_before, rc_before = alloc_after, rc_after
print(file=sys.stderr)
- if any(deltas):
- msg = '%s leaked %s references, sum=%s' % (test, deltas, sum(deltas))
- print(msg, file=sys.stderr)
- sys.stderr.flush()
- with open(fname, "a") as refrep:
- print(msg, file=refrep)
- refrep.flush()
- return True
- return False
+ # These checkers return False on success, True on failure
+ def check_rc_deltas(deltas):
+ return any(deltas)
+ def check_alloc_deltas(deltas):
+ # At least 1/3rd of 0s
+ if 3 * deltas.count(0) < len(deltas):
+ return True
+ # Nothing other than 1s, 0s and -1s
+ if not set(deltas) <= {1,0,-1}:
+ return True
+ return False
+ failed = False
+ for deltas, item_name, checker in [
+ (rc_deltas, 'references', check_rc_deltas),
+ (alloc_deltas, 'memory blocks', check_alloc_deltas)]:
+ if checker(deltas):
+ msg = '%s leaked %s %s, sum=%s' % (
+ test, deltas[nwarmup:], item_name, sum(deltas))
+ print(msg, file=sys.stderr)
+ sys.stderr.flush()
+ with open(fname, "a") as refrep:
+ print(msg, file=refrep)
+ refrep.flush()
+ failed = True
+ return failed
def dash_R_cleanup(fs, ps, pic, zdc, abcs):
import gc, copyreg
@@ -1427,8 +1429,11 @@ def dash_R_cleanup(fs, ps, pic, zdc, abcs):
else:
ctypes._reset_cache()
- # Collect cyclic trash.
+ # Collect cyclic trash and read memory statistics immediately after.
+ func1 = sys.getallocatedblocks
+ func2 = sys.gettotalrefcount
gc.collect()
+ return func1(), func2()
def warm_caches():
# char cache
@@ -1471,299 +1476,6 @@ def printlist(x, width=70, indent=4):
print(fill(' '.join(str(elt) for elt in sorted(x)), width,
initial_indent=blanks, subsequent_indent=blanks))
-# Map sys.platform to a string containing the basenames of tests
-# expected to be skipped on that platform.
-#
-# Special cases:
-# test_pep277
-# The _ExpectedSkips constructor adds this to the set of expected
-# skips if not os.path.supports_unicode_filenames.
-# test_timeout
-# Controlled by test_timeout.skip_expected. Requires the network
-# resource and a socket module.
-#
-# Tests that are expected to be skipped everywhere except on one platform
-# are also handled separately.
-
-_expectations = (
- ('win32',
- """
- test__locale
- test_crypt
- test_curses
- test_dbm
- test_devpoll
- test_fcntl
- test_fork1
- test_epoll
- test_dbm_gnu
- test_dbm_ndbm
- test_grp
- test_ioctl
- test_largefile
- test_kqueue
- test_openpty
- test_ossaudiodev
- test_pipes
- test_poll
- test_posix
- test_pty
- test_pwd
- test_resource
- test_signal
- test_syslog
- test_threadsignals
- test_wait3
- test_wait4
- """),
- ('linux',
- """
- test_curses
- test_devpoll
- test_largefile
- test_kqueue
- test_ossaudiodev
- """),
- ('unixware',
- """
- test_epoll
- test_largefile
- test_kqueue
- test_minidom
- test_openpty
- test_pyexpat
- test_sax
- test_sundry
- """),
- ('openunix',
- """
- test_epoll
- test_largefile
- test_kqueue
- test_minidom
- test_openpty
- test_pyexpat
- test_sax
- test_sundry
- """),
- ('sco_sv',
- """
- test_asynchat
- test_fork1
- test_epoll
- test_gettext
- test_largefile
- test_locale
- test_kqueue
- test_minidom
- test_openpty
- test_pyexpat
- test_queue
- test_sax
- test_sundry
- test_thread
- test_threaded_import
- test_threadedtempfile
- test_threading
- """),
- ('darwin',
- """
- test__locale
- test_curses
- test_devpoll
- test_epoll
- test_dbm_gnu
- test_gdb
- test_largefile
- test_locale
- test_minidom
- test_ossaudiodev
- test_poll
- """),
- ('sunos',
- """
- test_curses
- test_dbm
- test_epoll
- test_kqueue
- test_dbm_gnu
- test_gzip
- test_openpty
- test_zipfile
- test_zlib
- """),
- ('hp-ux',
- """
- test_curses
- test_epoll
- test_dbm_gnu
- test_gzip
- test_largefile
- test_locale
- test_kqueue
- test_minidom
- test_openpty
- test_pyexpat
- test_sax
- test_zipfile
- test_zlib
- """),
- ('cygwin',
- """
- test_curses
- test_dbm
- test_devpoll
- test_epoll
- test_ioctl
- test_kqueue
- test_largefile
- test_locale
- test_ossaudiodev
- test_socketserver
- """),
- ('os2emx',
- """
- test_audioop
- test_curses
- test_epoll
- test_kqueue
- test_largefile
- test_mmap
- test_openpty
- test_ossaudiodev
- test_pty
- test_resource
- test_signal
- """),
- ('freebsd',
- """
- test_devpoll
- test_epoll
- test_dbm_gnu
- test_locale
- test_ossaudiodev
- test_pep277
- test_pty
- test_socketserver
- test_tcl
- test_tk
- test_ttk_guionly
- test_ttk_textonly
- test_timeout
- test_urllibnet
- test_multiprocessing
- """),
- ('aix',
- """
- test_bz2
- test_epoll
- test_dbm_gnu
- test_gzip
- test_kqueue
- test_ossaudiodev
- test_tcl
- test_tk
- test_ttk_guionly
- test_ttk_textonly
- test_zipimport
- test_zlib
- """),
- ('openbsd',
- """
- test_ctypes
- test_devpoll
- test_epoll
- test_dbm_gnu
- test_locale
- test_normalization
- test_ossaudiodev
- test_pep277
- test_tcl
- test_tk
- test_ttk_guionly
- test_ttk_textonly
- test_multiprocessing
- """),
- ('netbsd',
- """
- test_ctypes
- test_curses
- test_devpoll
- test_epoll
- test_dbm_gnu
- test_locale
- test_ossaudiodev
- test_pep277
- test_tcl
- test_tk
- test_ttk_guionly
- test_ttk_textonly
- test_multiprocessing
- """),
-)
-
-class _ExpectedSkips:
- def __init__(self):
- import os.path
- from test import test_timeout
-
- self.valid = False
- expected = None
- for item in _expectations:
- if sys.platform.startswith(item[0]):
- expected = item[1]
- break
- if expected is not None:
- self.expected = set(expected.split())
-
- # These are broken tests, for now skipped on every platform.
- # XXX Fix these!
- self.expected.add('test_nis')
-
- # expected to be skipped on every platform, even Linux
- if not os.path.supports_unicode_filenames:
- self.expected.add('test_pep277')
-
- # doctest, profile and cProfile tests fail when the codec for the
- # fs encoding isn't built in because PyUnicode_Decode() adds two
- # calls into Python.
- encs = ("utf-8", "latin-1", "ascii", "mbcs", "utf-16", "utf-32")
- if sys.getfilesystemencoding().lower() not in encs:
- self.expected.add('test_profile')
- self.expected.add('test_cProfile')
- self.expected.add('test_doctest')
-
- if test_timeout.skip_expected:
- self.expected.add('test_timeout')
-
- if sys.platform != "win32":
- # test_sqlite is only reliable on Windows where the library
- # is distributed with Python
- WIN_ONLY = {"test_unicode_file", "test_winreg",
- "test_winsound", "test_startfile",
- "test_sqlite", "test_msilib"}
- self.expected |= WIN_ONLY
-
- if sys.platform != 'sunos5':
- self.expected.add('test_nis')
-
- if support.python_is_optimized():
- self.expected.add("test_gdb")
-
- self.valid = True
-
- def isvalid(self):
- "Return true iff _ExpectedSkips knows about the current platform."
- return self.valid
-
- def getexpected(self):
- """Return set of test names we expect to skip on current platform.
-
- self.isvalid() must be true.
- """
-
- assert self.isvalid()
- return self.expected
def _make_temp_dir_for_build(TEMPDIR):
# When tests are run from the Python build directory, it is best practice
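To illustrate the allocation-delta heuristic introduced earlier in this regrtest patch: occasional +1/-1 jitter around mostly zeros is tolerated, while a steady climb is reported as a leak. The helper below simply mirrors check_alloc_deltas from the hunk above:

    def check_alloc_deltas(deltas):
        # fail unless at least a third of the deltas are zero
        if 3 * deltas.count(0) < len(deltas):
            return True
        # fail if anything other than 1, 0 or -1 shows up
        if not set(deltas) <= {1, 0, -1}:
            return True
        return False

    print(check_alloc_deltas([0, 0, 1, 0, -1, 0]))  # False: treated as noise
    print(check_alloc_deltas([3, 3, 3, 3, 3, 3]))   # True: reported as a leak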
diff --git a/Lib/test/support.py b/Lib/test/support.py
index d0a37ea..b98f2bf 100644
--- a/Lib/test/support.py
+++ b/Lib/test/support.py
@@ -93,7 +93,8 @@ def _ignore_deprecated_imports(ignore=True):
"""Context manager to suppress package and module deprecation
warnings when importing them.
- If ignore is False, this context manager has no effect."""
+ If ignore is False, this context manager has no effect.
+ """
if ignore:
with warnings.catch_warnings():
warnings.filterwarnings("ignore", ".+ (module|package)",
@@ -103,23 +104,29 @@ def _ignore_deprecated_imports(ignore=True):
yield
-def import_module(name, deprecated=False):
+def import_module(name, deprecated=False, *, required_on=()):
"""Import and return the module to be tested, raising SkipTest if
it is not available.
If deprecated is True, any module or package deprecation messages
- will be suppressed."""
+ will be suppressed. If a module is required on a platform but optional for
+ others, set required_on to an iterable of platform prefixes which will be
+ compared against sys.platform.
+ """
with _ignore_deprecated_imports(deprecated):
try:
return importlib.import_module(name)
except ImportError as msg:
+ if sys.platform.startswith(tuple(required_on)):
+ raise
raise unittest.SkipTest(str(msg))
def _save_and_remove_module(name, orig_modules):
"""Helper function to save and remove a module from sys.modules
- Raise ImportError if the module can't be imported."""
+ Raise ImportError if the module can't be imported.
+ """
# try to import the module and raise an error if it can't be imported
if name not in sys.modules:
__import__(name)
@@ -710,6 +717,9 @@ for name in (
b'\xae\xd5'
# undecodable from UTF-8 (UNIX and Mac OS X)
b'\xed\xb2\x80', b'\xed\xb4\x80',
+ # undecodable from shift_jis, cp869, cp874, cp932, cp1250, cp1251, cp1252,
+ # cp1253, cp1254, cp1255, cp1257, cp1258
+ b'\x81\x98',
):
try:
name.decode(TESTFN_ENCODING)
@@ -1762,7 +1772,7 @@ def strip_python_stderr(stderr):
This will typically be run on the result of the communicate() method
of a subprocess.Popen object.
"""
- stderr = re.sub(br"\[\d+ refs\]\r?\n?", b"", stderr).strip()
+ stderr = re.sub(br"\[\d+ refs, \d+ blocks\]\r?\n?", b"", stderr).strip()
return stderr
def args_from_interpreter_flags():
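A brief sketch of the new required_on keyword added above (the module name is only illustrative): on the listed platform prefixes a missing module now fails the test instead of skipping it.

    from test import support

    # skipped elsewhere, but a hard failure if the import breaks on Linux
    fcntl = support.import_module('fcntl', required_on=['linux'])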
diff --git a/Lib/test/test_abc.py b/Lib/test/test_abc.py
index 653c957..3498524 100644
--- a/Lib/test/test_abc.py
+++ b/Lib/test/test_abc.py
@@ -96,6 +96,19 @@ class TestLegacyAPI(unittest.TestCase):
class TestABC(unittest.TestCase):
+ def test_ABC_helper(self):
+ # create an ABC using the helper class and perform basic checks
+ class C(abc.ABC):
+ @classmethod
+ @abc.abstractmethod
+ def foo(cls): return cls.__name__
+ self.assertEqual(type(C), abc.ABCMeta)
+ self.assertRaises(TypeError, C)
+ class D(C):
+ @classmethod
+ def foo(cls): return super().foo()
+ self.assertEqual(D.foo(), 'D')
+
def test_abstractmethod_basics(self):
@abc.abstractmethod
def foo(self): pass
diff --git a/Lib/test/test_argparse.py b/Lib/test/test_argparse.py
index c06c940..00cde2e 100644
--- a/Lib/test/test_argparse.py
+++ b/Lib/test/test_argparse.py
@@ -14,6 +14,7 @@ import argparse
from io import StringIO
from test import support
+from unittest import mock
class StdIOBuffer(StringIO):
pass
@@ -1421,6 +1422,19 @@ class TestFileTypeRepr(TestCase):
type = argparse.FileType('wb', 1)
self.assertEqual("FileType('wb', 1)", repr(type))
+ def test_r_latin(self):
+ type = argparse.FileType('r', encoding='latin_1')
+ self.assertEqual("FileType('r', encoding='latin_1')", repr(type))
+
+ def test_w_big5_ignore(self):
+ type = argparse.FileType('w', encoding='big5', errors='ignore')
+ self.assertEqual("FileType('w', encoding='big5', errors='ignore')",
+ repr(type))
+
+ def test_r_1_replace(self):
+ type = argparse.FileType('r', 1, errors='replace')
+ self.assertEqual("FileType('r', 1, errors='replace')", repr(type))
+
class RFile(object):
seen = {}
@@ -1557,6 +1571,24 @@ class TestFileTypeWB(TempDirMixin, ParserTestCase):
]
+class TestFileTypeOpenArgs(TestCase):
+ """Test that open (the builtin) is correctly called"""
+
+ def test_open_args(self):
+ FT = argparse.FileType
+ cases = [
+ (FT('rb'), ('rb', -1, None, None)),
+ (FT('w', 1), ('w', 1, None, None)),
+ (FT('w', errors='replace'), ('w', -1, None, 'replace')),
+ (FT('wb', encoding='big5'), ('wb', -1, 'big5', None)),
+ (FT('w', 0, 'l1', 'strict'), ('w', 0, 'l1', 'strict')),
+ ]
+ with mock.patch('builtins.open') as m:
+ for type, args in cases:
+ type('foo')
+ m.assert_called_with('foo', *args)
+
+
class TestTypeCallable(ParserTestCase):
"""Test some callables as option/argument types"""
diff --git a/Lib/test/test_ast.py b/Lib/test/test_ast.py
index dc24126..36907b4 100644
--- a/Lib/test/test_ast.py
+++ b/Lib/test/test_ast.py
@@ -928,6 +928,9 @@ class ASTValidatorTests(unittest.TestCase):
def test_tuple(self):
self._sequence(ast.Tuple)
+ def test_nameconstant(self):
+ self.expr(ast.NameConstant(4), "singleton must be True, False, or None")
+
def test_stdlib_validates(self):
stdlib = os.path.dirname(ast.__file__)
tests = [fn for fn in os.listdir(stdlib) if fn.endswith(".py")]
@@ -959,13 +962,13 @@ def main():
#### EVERYTHING BELOW IS GENERATED #####
exec_results = [
-('Module', [('Expr', (1, 0), ('Name', (1, 0), 'None', ('Load',)))]),
+('Module', [('Expr', (1, 0), ('NameConstant', (1, 0), None))]),
('Module', [('FunctionDef', (1, 0), 'f', ('arguments', [], None, None, [], None, None, [], []), [('Pass', (1, 9))], [], None)]),
('Module', [('FunctionDef', (1, 0), 'f', ('arguments', [('arg', 'a', None)], None, None, [], None, None, [], []), [('Pass', (1, 10))], [], None)]),
('Module', [('FunctionDef', (1, 0), 'f', ('arguments', [('arg', 'a', None)], None, None, [], None, None, [('Num', (1, 8), 0)], []), [('Pass', (1, 12))], [], None)]),
('Module', [('FunctionDef', (1, 0), 'f', ('arguments', [], 'args', None, [], None, None, [], []), [('Pass', (1, 14))], [], None)]),
('Module', [('FunctionDef', (1, 0), 'f', ('arguments', [], None, None, [], 'kwargs', None, [], []), [('Pass', (1, 17))], [], None)]),
-('Module', [('FunctionDef', (1, 0), 'f', ('arguments', [('arg', 'a', None), ('arg', 'b', None), ('arg', 'c', None), ('arg', 'd', None), ('arg', 'e', None)], 'args', None, [], 'kwargs', None, [('Num', (1, 11), 1), ('Name', (1, 16), 'None', ('Load',)), ('List', (1, 24), [], ('Load',)), ('Dict', (1, 30), [], [])], []), [('Pass', (1, 52))], [], None)]),
+('Module', [('FunctionDef', (1, 0), 'f', ('arguments', [('arg', 'a', None), ('arg', 'b', None), ('arg', 'c', None), ('arg', 'd', None), ('arg', 'e', None)], 'args', None, [], 'kwargs', None, [('Num', (1, 11), 1), ('NameConstant', (1, 16), None), ('List', (1, 24), [], ('Load',)), ('Dict', (1, 30), [], [])], []), [('Pass', (1, 52))], [], None)]),
('Module', [('ClassDef', (1, 0), 'C', [], [], None, None, [('Pass', (1, 8))], [])]),
('Module', [('ClassDef', (1, 0), 'C', [('Name', (1, 8), 'object', ('Load',))], [], None, None, [('Pass', (1, 17))], [])]),
('Module', [('FunctionDef', (1, 0), 'f', ('arguments', [], None, None, [], None, None, [], []), [('Return', (1, 8), ('Num', (1, 15), 1))], [], None)]),
@@ -1002,14 +1005,14 @@ single_results = [
('Interactive', [('Expr', (1, 0), ('BinOp', (1, 0), ('Num', (1, 0), 1), ('Add',), ('Num', (1, 2), 2)))]),
]
eval_results = [
-('Expression', ('Name', (1, 0), 'None', ('Load',))),
+('Expression', ('NameConstant', (1, 0), None)),
('Expression', ('BoolOp', (1, 0), ('And',), [('Name', (1, 0), 'a', ('Load',)), ('Name', (1, 6), 'b', ('Load',))])),
('Expression', ('BinOp', (1, 0), ('Name', (1, 0), 'a', ('Load',)), ('Add',), ('Name', (1, 4), 'b', ('Load',)))),
('Expression', ('UnaryOp', (1, 0), ('Not',), ('Name', (1, 4), 'v', ('Load',)))),
-('Expression', ('Lambda', (1, 0), ('arguments', [], None, None, [], None, None, [], []), ('Name', (1, 7), 'None', ('Load',)))),
+('Expression', ('Lambda', (1, 0), ('arguments', [], None, None, [], None, None, [], []), ('NameConstant', (1, 7), None))),
('Expression', ('Dict', (1, 0), [('Num', (1, 2), 1)], [('Num', (1, 4), 2)])),
('Expression', ('Dict', (1, 0), [], [])),
-('Expression', ('Set', (1, 0), [('Name', (1, 1), 'None', ('Load',))])),
+('Expression', ('Set', (1, 0), [('NameConstant', (1, 1), None)])),
('Expression', ('Dict', (1, 0), [('Num', (2, 6), 1)], [('Num', (4, 10), 2)])),
('Expression', ('ListComp', (1, 1), ('Name', (1, 1), 'a', ('Load',)), [('comprehension', ('Name', (1, 7), 'b', ('Store',)), ('Name', (1, 12), 'c', ('Load',)), [('Name', (1, 17), 'd', ('Load',))])])),
('Expression', ('GeneratorExp', (1, 1), ('Name', (1, 1), 'a', ('Load',)), [('comprehension', ('Name', (1, 7), 'b', ('Store',)), ('Name', (1, 12), 'c', ('Load',)), [('Name', (1, 17), 'd', ('Load',))])])),
diff --git a/Lib/test/test_builtin.py b/Lib/test/test_builtin.py
index 19d7c70..2c5201e 100644
--- a/Lib/test/test_builtin.py
+++ b/Lib/test/test_builtin.py
@@ -462,6 +462,11 @@ class BuiltinTest(unittest.TestCase):
self.assertRaises(TypeError, eval, ())
self.assertRaises(SyntaxError, eval, bom[:2] + b'a')
+ class X:
+ def __getitem__(self, key):
+ raise ValueError
+ self.assertRaises(ValueError, eval, "foo", {}, X())
+
def test_general_eval(self):
# Tests that general mappings can be used for the locals argument
diff --git a/Lib/test/test_bytes.py b/Lib/test/test_bytes.py
index fe6e939..8ae90e4 100644
--- a/Lib/test/test_bytes.py
+++ b/Lib/test/test_bytes.py
@@ -288,8 +288,22 @@ class BaseBytesTest(unittest.TestCase):
self.assertEqual(self.type2test(b"").join(lst), b"abc")
self.assertEqual(self.type2test(b"").join(tuple(lst)), b"abc")
self.assertEqual(self.type2test(b"").join(iter(lst)), b"abc")
- self.assertEqual(self.type2test(b".").join([b"ab", b"cd"]), b"ab.cd")
- # XXX more...
+ dot_join = self.type2test(b".:").join
+ self.assertEqual(dot_join([b"ab", b"cd"]), b"ab.:cd")
+ self.assertEqual(dot_join([memoryview(b"ab"), b"cd"]), b"ab.:cd")
+ self.assertEqual(dot_join([b"ab", memoryview(b"cd")]), b"ab.:cd")
+ self.assertEqual(dot_join([bytearray(b"ab"), b"cd"]), b"ab.:cd")
+ self.assertEqual(dot_join([b"ab", bytearray(b"cd")]), b"ab.:cd")
+ # Stress it with many items
+ seq = [b"abc"] * 1000
+ expected = b"abc" + b".:abc" * 999
+ self.assertEqual(dot_join(seq), expected)
+ # Error handling and cleanup when some item in the middle of the
+ # sequence has the wrong type.
+ with self.assertRaises(TypeError):
+ dot_join([bytearray(b"ab"), "cd", b"ef"])
+ with self.assertRaises(TypeError):
+ dot_join([memoryview(b"ab"), "cd", b"ef"])
def test_count(self):
b = self.type2test(b'mississippi')
@@ -1267,6 +1281,11 @@ class BytearrayPEP3137Test(unittest.TestCase,
self.assertEqual(val, newval)
self.assertTrue(val is not newval,
expr+' returned val on a mutable object')
+ sep = self.marshal(b'')
+ newval = sep.join([val])
+ self.assertEqual(val, newval)
+ self.assertIsNot(val, newval)
+
class FixedStringTest(test.string_tests.BaseTest):
diff --git a/Lib/test/test_bz2.py b/Lib/test/test_bz2.py
index f4e81db..24a1813 100644
--- a/Lib/test/test_bz2.py
+++ b/Lib/test/test_bz2.py
@@ -1,6 +1,6 @@
#!/usr/bin/env python3
from test import support
-from test.support import TESTFN, bigmemtest, _4G
+from test.support import bigmemtest, _4G
import unittest
from io import BytesIO
@@ -18,10 +18,10 @@ except ImportError:
bz2 = support.import_module('bz2')
from bz2 import BZ2File, BZ2Compressor, BZ2Decompressor
-has_cmdline_bunzip2 = sys.platform not in ("win32", "os2emx")
class BaseTest(unittest.TestCase):
"Base for other testcases."
+
TEXT_LINES = [
b'root:x:0:0:root:/root:/bin/bash\n',
b'bin:x:1:1:bin:/bin:\n',
@@ -49,13 +49,17 @@ class BaseTest(unittest.TestCase):
DATA = b'BZh91AY&SY.\xc8N\x18\x00\x01>_\x80\x00\x10@\x02\xff\xf0\x01\x07n\x00?\xe7\xff\xe00\x01\x99\xaa\x00\xc0\x03F\x86\x8c#&\x83F\x9a\x03\x06\xa6\xd0\xa6\x93M\x0fQ\xa7\xa8\x06\x804hh\x12$\x11\xa4i4\xf14S\xd2<Q\xb5\x0fH\xd3\xd4\xdd\xd5\x87\xbb\xf8\x94\r\x8f\xafI\x12\xe1\xc9\xf8/E\x00pu\x89\x12]\xc9\xbbDL\nQ\x0e\t1\x12\xdf\xa0\xc0\x97\xac2O9\x89\x13\x94\x0e\x1c7\x0ed\x95I\x0c\xaaJ\xa4\x18L\x10\x05#\x9c\xaf\xba\xbc/\x97\x8a#C\xc8\xe1\x8cW\xf9\xe2\xd0\xd6M\xa7\x8bXa<e\x84t\xcbL\xb3\xa7\xd9\xcd\xd1\xcb\x84.\xaf\xb3\xab\xab\xad`n}\xa0lh\tE,\x8eZ\x15\x17VH>\x88\xe5\xcd9gd6\x0b\n\xe9\x9b\xd5\x8a\x99\xf7\x08.K\x8ev\xfb\xf7xw\xbb\xdf\xa1\x92\xf1\xdd|/";\xa2\xba\x9f\xd5\xb1#A\xb6\xf6\xb3o\xc9\xc5y\\\xebO\xe7\x85\x9a\xbc\xb6f8\x952\xd5\xd7"%\x89>V,\xf7\xa6z\xe2\x9f\xa3\xdf\x11\x11"\xd6E)I\xa9\x13^\xca\xf3r\xd0\x03U\x922\xf26\xec\xb6\xed\x8b\xc3U\x13\x9d\xc5\x170\xa4\xfa^\x92\xacDF\x8a\x97\xd6\x19\xfe\xdd\xb8\xbd\x1a\x9a\x19\xa3\x80ankR\x8b\xe5\xd83]\xa9\xc6\x08\x82f\xf6\xb9"6l$\xb8j@\xc0\x8a\xb0l1..\xbak\x83ls\x15\xbc\xf4\xc1\x13\xbe\xf8E\xb8\x9d\r\xa8\x9dk\x84\xd3n\xfa\xacQ\x07\xb1%y\xaav\xb4\x08\xe0z\x1b\x16\xf5\x04\xe9\xcc\xb9\x08z\x1en7.G\xfc]\xc9\x14\xe1B@\xbb!8`'
def setUp(self):
- self.filename = TESTFN
+ self.filename = support.TESTFN
def tearDown(self):
if os.path.isfile(self.filename):
os.unlink(self.filename)
- if has_cmdline_bunzip2:
+ if sys.platform == "win32":
+ # bunzip2 isn't available to run on Windows.
+ def decompress(self, data):
+ return bz2.decompress(data)
+ else:
def decompress(self, data):
pop = subprocess.Popen("bunzip2", shell=True,
stdin=subprocess.PIPE,
@@ -69,31 +73,21 @@ class BaseTest(unittest.TestCase):
ret = bz2.decompress(data)
return ret
- else:
- # bunzip2 isn't available to run on Windows.
- def decompress(self, data):
- return bz2.decompress(data)
class BZ2FileTest(BaseTest):
- "Test BZ2File type miscellaneous methods."
+ "Test the BZ2File class."
def createTempFile(self, streams=1):
with open(self.filename, "wb") as f:
f.write(self.DATA * streams)
def testBadArgs(self):
- with self.assertRaises(TypeError):
- BZ2File(123.456)
- with self.assertRaises(ValueError):
- BZ2File("/dev/null", "z")
- with self.assertRaises(ValueError):
- BZ2File("/dev/null", "rx")
- with self.assertRaises(ValueError):
- BZ2File("/dev/null", "rbt")
- with self.assertRaises(ValueError):
- BZ2File("/dev/null", compresslevel=0)
- with self.assertRaises(ValueError):
- BZ2File("/dev/null", compresslevel=10)
+ self.assertRaises(TypeError, BZ2File, 123.456)
+ self.assertRaises(ValueError, BZ2File, "/dev/null", "z")
+ self.assertRaises(ValueError, BZ2File, "/dev/null", "rx")
+ self.assertRaises(ValueError, BZ2File, "/dev/null", "rbt")
+ self.assertRaises(ValueError, BZ2File, "/dev/null", compresslevel=0)
+ self.assertRaises(ValueError, BZ2File, "/dev/null", compresslevel=10)
def testRead(self):
self.createTempFile()
@@ -214,9 +208,8 @@ class BZ2FileTest(BaseTest):
self.createTempFile()
bz2f = BZ2File(self.filename)
bz2f.close()
- self.assertRaises(ValueError, bz2f.__next__)
- # This call will deadlock if the above .__next__ call failed to
- # release the lock.
+ self.assertRaises(ValueError, next, bz2f)
+ # This call will deadlock if the above call failed to release the lock.
self.assertRaises(ValueError, bz2f.readlines)
def testWrite(self):
@@ -379,7 +372,7 @@ class BZ2FileTest(BaseTest):
bz2f.close()
self.assertRaises(ValueError, bz2f.seekable)
- bz2f = BZ2File(BytesIO(), mode="w")
+ bz2f = BZ2File(BytesIO(), "w")
try:
self.assertFalse(bz2f.seekable())
finally:
@@ -405,7 +398,7 @@ class BZ2FileTest(BaseTest):
bz2f.close()
self.assertRaises(ValueError, bz2f.readable)
- bz2f = BZ2File(BytesIO(), mode="w")
+ bz2f = BZ2File(BytesIO(), "w")
try:
self.assertFalse(bz2f.readable())
finally:
@@ -422,7 +415,7 @@ class BZ2FileTest(BaseTest):
bz2f.close()
self.assertRaises(ValueError, bz2f.writable)
- bz2f = BZ2File(BytesIO(), mode="w")
+ bz2f = BZ2File(BytesIO(), "w")
try:
self.assertTrue(bz2f.writable())
finally:
@@ -476,7 +469,7 @@ class BZ2FileTest(BaseTest):
# Issue #7205: Using a BZ2File from several threads shouldn't deadlock.
data = b"1" * 2**20
nthreads = 10
- with bz2.BZ2File(self.filename, 'wb') as f:
+ with BZ2File(self.filename, 'wb') as f:
def comp():
for i in range(5):
f.write(data)
@@ -487,28 +480,27 @@ class BZ2FileTest(BaseTest):
t.join()
def testWithoutThreading(self):
- bz2 = support.import_fresh_module("bz2", blocked=("threading",))
- with bz2.BZ2File(self.filename, "wb") as f:
+ module = support.import_fresh_module("bz2", blocked=("threading",))
+ with module.BZ2File(self.filename, "wb") as f:
f.write(b"abc")
- with bz2.BZ2File(self.filename, "rb") as f:
+ with module.BZ2File(self.filename, "rb") as f:
self.assertEqual(f.read(), b"abc")
def testMixedIterationAndReads(self):
self.createTempFile()
linelen = len(self.TEXT_LINES[0])
halflen = linelen // 2
- with bz2.BZ2File(self.filename) as bz2f:
+ with BZ2File(self.filename) as bz2f:
bz2f.read(halflen)
self.assertEqual(next(bz2f), self.TEXT_LINES[0][halflen:])
self.assertEqual(bz2f.read(), self.TEXT[linelen:])
- with bz2.BZ2File(self.filename) as bz2f:
+ with BZ2File(self.filename) as bz2f:
bz2f.readline()
self.assertEqual(next(bz2f), self.TEXT_LINES[1])
self.assertEqual(bz2f.readline(), self.TEXT_LINES[2])
- with bz2.BZ2File(self.filename) as bz2f:
+ with BZ2File(self.filename) as bz2f:
bz2f.readlines()
- with self.assertRaises(StopIteration):
- next(bz2f)
+ self.assertRaises(StopIteration, next, bz2f)
self.assertEqual(bz2f.readlines(), [])
def testMultiStreamOrdering(self):
@@ -576,6 +568,7 @@ class BZ2FileTest(BaseTest):
bz2f.seek(-150, 1)
self.assertEqual(bz2f.read(), self.TEXT[500-150:])
+
class BZ2CompressorTest(BaseTest):
def testCompress(self):
bz2c = BZ2Compressor()
@@ -688,97 +681,102 @@ class CompressDecompressTest(BaseTest):
class OpenTest(BaseTest):
+ "Test the open function."
+
+ def open(self, *args, **kwargs):
+ return bz2.open(*args, **kwargs)
+
def test_binary_modes(self):
- with bz2.open(self.filename, "wb") as f:
+ with self.open(self.filename, "wb") as f:
f.write(self.TEXT)
with open(self.filename, "rb") as f:
- file_data = bz2.decompress(f.read())
+ file_data = self.decompress(f.read())
self.assertEqual(file_data, self.TEXT)
- with bz2.open(self.filename, "rb") as f:
+ with self.open(self.filename, "rb") as f:
self.assertEqual(f.read(), self.TEXT)
- with bz2.open(self.filename, "ab") as f:
+ with self.open(self.filename, "ab") as f:
f.write(self.TEXT)
with open(self.filename, "rb") as f:
- file_data = bz2.decompress(f.read())
+ file_data = self.decompress(f.read())
self.assertEqual(file_data, self.TEXT * 2)
def test_implicit_binary_modes(self):
# Test implicit binary modes (no "b" or "t" in mode string).
- with bz2.open(self.filename, "w") as f:
+ with self.open(self.filename, "w") as f:
f.write(self.TEXT)
with open(self.filename, "rb") as f:
- file_data = bz2.decompress(f.read())
+ file_data = self.decompress(f.read())
self.assertEqual(file_data, self.TEXT)
- with bz2.open(self.filename, "r") as f:
+ with self.open(self.filename, "r") as f:
self.assertEqual(f.read(), self.TEXT)
- with bz2.open(self.filename, "a") as f:
+ with self.open(self.filename, "a") as f:
f.write(self.TEXT)
with open(self.filename, "rb") as f:
- file_data = bz2.decompress(f.read())
+ file_data = self.decompress(f.read())
self.assertEqual(file_data, self.TEXT * 2)
def test_text_modes(self):
text = self.TEXT.decode("ascii")
text_native_eol = text.replace("\n", os.linesep)
- with bz2.open(self.filename, "wt") as f:
+ with self.open(self.filename, "wt") as f:
f.write(text)
with open(self.filename, "rb") as f:
- file_data = bz2.decompress(f.read()).decode("ascii")
+ file_data = self.decompress(f.read()).decode("ascii")
self.assertEqual(file_data, text_native_eol)
- with bz2.open(self.filename, "rt") as f:
+ with self.open(self.filename, "rt") as f:
self.assertEqual(f.read(), text)
- with bz2.open(self.filename, "at") as f:
+ with self.open(self.filename, "at") as f:
f.write(text)
with open(self.filename, "rb") as f:
- file_data = bz2.decompress(f.read()).decode("ascii")
+ file_data = self.decompress(f.read()).decode("ascii")
self.assertEqual(file_data, text_native_eol * 2)
def test_fileobj(self):
- with bz2.open(BytesIO(self.DATA), "r") as f:
+ with self.open(BytesIO(self.DATA), "r") as f:
self.assertEqual(f.read(), self.TEXT)
- with bz2.open(BytesIO(self.DATA), "rb") as f:
+ with self.open(BytesIO(self.DATA), "rb") as f:
self.assertEqual(f.read(), self.TEXT)
text = self.TEXT.decode("ascii")
- with bz2.open(BytesIO(self.DATA), "rt") as f:
+ with self.open(BytesIO(self.DATA), "rt") as f:
self.assertEqual(f.read(), text)
def test_bad_params(self):
# Test invalid parameter combinations.
- with self.assertRaises(ValueError):
- bz2.open(self.filename, "wbt")
- with self.assertRaises(ValueError):
- bz2.open(self.filename, "rb", encoding="utf-8")
- with self.assertRaises(ValueError):
- bz2.open(self.filename, "rb", errors="ignore")
- with self.assertRaises(ValueError):
- bz2.open(self.filename, "rb", newline="\n")
+ self.assertRaises(ValueError,
+ self.open, self.filename, "wbt")
+ self.assertRaises(ValueError,
+ self.open, self.filename, "rb", encoding="utf-8")
+ self.assertRaises(ValueError,
+ self.open, self.filename, "rb", errors="ignore")
+ self.assertRaises(ValueError,
+ self.open, self.filename, "rb", newline="\n")
def test_encoding(self):
# Test non-default encoding.
text = self.TEXT.decode("ascii")
text_native_eol = text.replace("\n", os.linesep)
- with bz2.open(self.filename, "wt", encoding="utf-16-le") as f:
+ with self.open(self.filename, "wt", encoding="utf-16-le") as f:
f.write(text)
with open(self.filename, "rb") as f:
- file_data = bz2.decompress(f.read()).decode("utf-16-le")
+ file_data = self.decompress(f.read()).decode("utf-16-le")
self.assertEqual(file_data, text_native_eol)
- with bz2.open(self.filename, "rt", encoding="utf-16-le") as f:
+ with self.open(self.filename, "rt", encoding="utf-16-le") as f:
self.assertEqual(f.read(), text)
def test_encoding_error_handler(self):
# Test with non-default encoding error handler.
- with bz2.open(self.filename, "wb") as f:
+ with self.open(self.filename, "wb") as f:
f.write(b"foo\xffbar")
- with bz2.open(self.filename, "rt", encoding="ascii", errors="ignore") \
+ with self.open(self.filename, "rt", encoding="ascii", errors="ignore") \
as f:
self.assertEqual(f.read(), "foobar")
def test_newline(self):
# Test with explicit newline (universal newline mode disabled).
text = self.TEXT.decode("ascii")
- with bz2.open(self.filename, "wt", newline="\n") as f:
+ with self.open(self.filename, "wt", newline="\n") as f:
f.write(text)
- with bz2.open(self.filename, "rt", newline="\r") as f:
+ with self.open(self.filename, "rt", newline="\r") as f:
self.assertEqual(f.readlines(), [text])
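
The point of routing every call through self.open() and self.decompress() above is that a subclass can swap in a different open function and re-run the whole OpenTest suite unchanged. A minimal sketch of that pattern (class names here are illustrative, not part of the patch):

    import bz2
    import lzma
    import unittest

    class OpenTestSketch(unittest.TestCase):
        # Tests call self.open()/self.decompress(), never bz2.open() directly.
        def open(self, *args, **kwargs):
            return bz2.open(*args, **kwargs)

        def decompress(self, data):
            return bz2.decompress(data)

    class LZMAOpenTestSketch(OpenTestSketch):
        # Same test bodies, different module under test.
        def open(self, *args, **kwargs):
            return lzma.open(*args, **kwargs)

        def decompress(self, data):
            return lzma.decompress(data)
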
diff --git a/Lib/test/test_cmd_line.py b/Lib/test/test_cmd_line.py
index a89d7e4..6684e51 100644
--- a/Lib/test/test_cmd_line.py
+++ b/Lib/test/test_cmd_line.py
@@ -213,6 +213,23 @@ class CmdLineTest(unittest.TestCase):
self.assertIn(path1.encode('ascii'), out)
self.assertIn(path2.encode('ascii'), out)
+ def test_empty_PYTHONPATH_issue16309(self):
+ # On POSIX, it is documented that setting PATH to the
+ # empty string is equivalent to not setting PATH at all,
+ # which is an exception to the rule that, in a string like
+ # "/bin::/usr/bin", an empty entry in the middle is
+ # interpreted as '.'.
+ code = """if 1:
+ import sys
+ path = ":".join(sys.path)
+ path = path.encode("ascii", "backslashreplace")
+ sys.stdout.buffer.write(path)"""
+ rc1, out1, err1 = assert_python_ok('-c', code, PYTHONPATH="")
+ rc2, out2, err2 = assert_python_ok('-c', code)
+ # According to the POSIX specification, the output should be the same
+ # for an empty and an unset PYTHONPATH.
+ self.assertEqual(out1, out2)
+
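
A small self-contained sketch of the behaviour this test asserts, using only the standard library: sys.path computed with PYTHONPATH set to the empty string should match sys.path with PYTHONPATH removed entirely.

    import os, subprocess, sys

    code = "import sys; print(sys.path)"
    env_empty = dict(os.environ, PYTHONPATH="")
    env_unset = {k: v for k, v in os.environ.items() if k != "PYTHONPATH"}

    out_empty = subprocess.check_output([sys.executable, "-c", code], env=env_empty)
    out_unset = subprocess.check_output([sys.executable, "-c", code], env=env_unset)
    assert out_empty == out_unset   # holds once issue #16309 is fixed
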
def test_displayhook_unencodable(self):
for encoding in ('ascii', 'latin-1', 'utf-8'):
env = os.environ.copy()
@@ -290,7 +307,7 @@ class CmdLineTest(unittest.TestCase):
rc, out, err = assert_python_ok('-c', code)
self.assertEqual(b'', out)
self.assertRegex(err.decode('ascii', 'ignore'),
- 'Exception OSError: .* ignored')
+ 'Exception ignored in.*\nOSError: .*')
def test_closed_stdout(self):
# Issue #13444: if stdout has been explicitly closed, we should
diff --git a/Lib/test/test_cmd_line_script.py b/Lib/test/test_cmd_line_script.py
index f066204..6051e18 100644
--- a/Lib/test/test_cmd_line_script.py
+++ b/Lib/test/test_cmd_line_script.py
@@ -367,11 +367,10 @@ class CmdLineTest(unittest.TestCase):
# Mac OS X denies the creation of a file with an invalid UTF-8 name.
# Windows allows creating a file with an arbitrary bytes name, but
# Python cannot pass an undecodable bytes argument to a subprocess.
- #if (support.TESTFN_UNDECODABLE
- #and sys.platform not in ('win32', 'darwin')):
- # name = os.fsdecode(support.TESTFN_UNDECODABLE)
- #elif support.TESTFN_NONASCII:
- if support.TESTFN_NONASCII:
+ if (support.TESTFN_UNDECODABLE
+ and sys.platform not in ('win32', 'darwin')):
+ name = os.fsdecode(support.TESTFN_UNDECODABLE)
+ elif support.TESTFN_NONASCII:
name = support.TESTFN_NONASCII
else:
self.skipTest("need support.TESTFN_NONASCII")
diff --git a/Lib/test/test_complex.py b/Lib/test/test_complex.py
index 6b34ddc..2a85bf4 100644
--- a/Lib/test/test_complex.py
+++ b/Lib/test/test_complex.py
@@ -221,6 +221,8 @@ class ComplexTest(unittest.TestCase):
self.assertRaises(TypeError, complex, OS(None))
self.assertRaises(TypeError, complex, NS(None))
self.assertRaises(TypeError, complex, {})
+ self.assertRaises(TypeError, complex, NS(1.5))
+ self.assertRaises(TypeError, complex, NS(1))
self.assertAlmostEqual(complex("1+10j"), 1+10j)
self.assertAlmostEqual(complex(10), 10+0j)
diff --git a/Lib/test/test_concurrent_futures.py b/Lib/test/test_concurrent_futures.py
index 6ae450d..4ad3309 100644
--- a/Lib/test/test_concurrent_futures.py
+++ b/Lib/test/test_concurrent_futures.py
@@ -15,6 +15,7 @@ import sys
import threading
import time
import unittest
+import weakref
from concurrent import futures
from concurrent.futures._base import (
@@ -52,6 +53,11 @@ def sleep_and_print(t, msg):
sys.stdout.flush()
+class MyObject(object):
+ def my_method(self):
+ pass
+
+
class ExecutorMixin:
worker_count = 5
@@ -396,6 +402,22 @@ class ExecutorTest(unittest.TestCase):
self.executor.map(str, [2] * (self.worker_count + 1))
self.executor.shutdown()
+ @test.support.cpython_only
+ def test_no_stale_references(self):
+ # Issue #16284: check that the executors don't unnecessarily hang onto
+ # references.
+ my_object = MyObject()
+ my_object_collected = threading.Event()
+ my_object_callback = weakref.ref(
+ my_object, lambda obj: my_object_collected.set())
+ # Deliberately discarding the future.
+ self.executor.submit(my_object.my_method)
+ del my_object
+
+ collected = my_object_collected.wait(timeout=5.0)
+ self.assertTrue(collected,
+ "Stale reference not collected within timeout.")
+
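
The test above relies on a general pattern for asserting that an object becomes unreachable: attach a weakref callback that sets an Event, drop the strong reference, and wait. A standalone sketch (names are illustrative):

    import gc, threading, weakref

    class Payload:
        pass

    collected = threading.Event()
    obj = Payload()
    ref = weakref.ref(obj, lambda _: collected.set())  # fires when obj goes away

    del obj
    gc.collect()            # on CPython, refcounting alone would already do it
    assert collected.wait(timeout=5.0)
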
class ThreadPoolExecutorTest(ThreadPoolMixin, ExecutorTest):
def test_map_submits_without_iteration(self):
diff --git a/Lib/test/test_devpoll.py b/Lib/test/test_devpoll.py
index bef4e18..ec350cd 100644
--- a/Lib/test/test_devpoll.py
+++ b/Lib/test/test_devpoll.py
@@ -8,7 +8,7 @@ from test.support import TESTFN, run_unittest
try:
select.devpoll
except AttributeError:
- raise unittest.SkipTest("select.devpoll not defined -- skipping test_devpoll")
+ raise unittest.SkipTest("select.devpoll not defined")
def find_ready_matching(ready, flag):
diff --git a/Lib/test/test_doctest.py b/Lib/test/test_doctest.py
index 8f8c7c7..3e36f19 100644
--- a/Lib/test/test_doctest.py
+++ b/Lib/test/test_doctest.py
@@ -1409,8 +1409,40 @@ However, output from `report_start` is not suppressed:
2
TestResults(failed=3, attempted=5)
-For the purposes of REPORT_ONLY_FIRST_FAILURE, unexpected exceptions
-count as failures:
+The FAIL_FAST flag causes the runner to exit after the first failing example,
+so subsequent examples are not even attempted:
+
+ >>> flags = doctest.FAIL_FAST
+ >>> doctest.DocTestRunner(verbose=False, optionflags=flags).run(test)
+ ... # doctest: +ELLIPSIS
+ **********************************************************************
+ File ..., line 5, in f
+ Failed example:
+ print(2) # first failure
+ Expected:
+ 200
+ Got:
+ 2
+ TestResults(failed=1, attempted=2)
+
+Specifying both FAIL_FAST and REPORT_ONLY_FIRST_FAILURE is equivalent to
+FAIL_FAST only:
+
+ >>> flags = doctest.FAIL_FAST | doctest.REPORT_ONLY_FIRST_FAILURE
+ >>> doctest.DocTestRunner(verbose=False, optionflags=flags).run(test)
+ ... # doctest: +ELLIPSIS
+ **********************************************************************
+ File ..., line 5, in f
+ Failed example:
+ print(2) # first failure
+ Expected:
+ 200
+ Got:
+ 2
+ TestResults(failed=1, attempted=2)
+
+For the purposes of both REPORT_ONLY_FIRST_FAILURE and FAIL_FAST, unexpected
+exceptions count as failures:
>>> def f(x):
... r'''
@@ -1437,6 +1469,17 @@ count as failures:
...
ValueError: 2
TestResults(failed=3, attempted=5)
+ >>> flags = doctest.FAIL_FAST
+ >>> doctest.DocTestRunner(verbose=False, optionflags=flags).run(test)
+ ... # doctest: +ELLIPSIS
+ **********************************************************************
+ File ..., line 5, in f
+ Failed example:
+ raise ValueError(2) # first failure
+ Exception raised:
+ ...
+ ValueError: 2
+ TestResults(failed=1, attempted=2)
New option flags can also be registered, via register_optionflag(). Here
we reach into doctest's internals a bit.
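
Outside of DocTestRunner, the same flag can be passed to the convenience entry points; a brief sketch using doctest.testmod(), assuming FAIL_FAST is exposed as in this patch:

    import doctest

    def add(a, b):
        """
        >>> add(1, 1)
        2
        >>> add(2, 2)   # a failing expectation here stops the run...
        5
        >>> add(3, 3)   # ...so this example is never attempted
        6
        """
        return a + b

    if __name__ == "__main__":
        doctest.testmod(optionflags=doctest.FAIL_FAST)
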
diff --git a/Lib/test/test_enumerate.py b/Lib/test/test_enumerate.py
index 2e904cf..a2d18d0 100644
--- a/Lib/test/test_enumerate.py
+++ b/Lib/test/test_enumerate.py
@@ -1,4 +1,5 @@
import unittest
+import operator
import sys
import pickle
@@ -168,15 +169,12 @@ class TestReversed(unittest.TestCase, PickleTest):
x = range(1)
self.assertEqual(type(reversed(x)), type(iter(x)))
- @support.cpython_only
def test_len(self):
- # This is an implementation detail, not an interface requirement
- from test.test_iterlen import len
for s in ('hello', tuple('hello'), list('hello'), range(5)):
- self.assertEqual(len(reversed(s)), len(s))
+ self.assertEqual(operator.length_hint(reversed(s)), len(s))
r = reversed(s)
list(r)
- self.assertEqual(len(r), 0)
+ self.assertEqual(operator.length_hint(r), 0)
class SeqWithWeirdLen:
called = False
def __len__(self):
@@ -187,7 +185,7 @@ class TestReversed(unittest.TestCase, PickleTest):
def __getitem__(self, index):
return index
r = reversed(SeqWithWeirdLen())
- self.assertRaises(ZeroDivisionError, len, r)
+ self.assertRaises(ZeroDivisionError, operator.length_hint, r)
def test_gc(self):
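
For context, operator.length_hint() (PEP 424) returns len(obj) when it is defined, falls back to obj.__length_hint__(), and otherwise returns the supplied default, which is why it can replace the removed CPython-only len() check. A short sketch:

    import operator

    r = reversed("hello")
    print(operator.length_hint(r))             # 5, via __length_hint__
    next(r)
    print(operator.length_hint(r))             # 4
    print(operator.length_hint(object(), 0))   # 0, the default
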
diff --git a/Lib/test/test_epoll.py b/Lib/test/test_epoll.py
index 7f9547f..b9b1492 100644
--- a/Lib/test/test_epoll.py
+++ b/Lib/test/test_epoll.py
@@ -87,6 +87,13 @@ class TestEPoll(unittest.TestCase):
self.assertRaises(TypeError, select.epoll, ['foo'])
self.assertRaises(TypeError, select.epoll, {})
+ def test_context_manager(self):
+ with select.epoll(16) as ep:
+ self.assertGreater(ep.fileno(), 0)
+ self.assertFalse(ep.closed)
+ self.assertTrue(ep.closed)
+ self.assertRaises(ValueError, ep.fileno)
+
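
A small sketch of the context-manager support this test exercises (Linux-only, and assuming the epoll changes land together with this test): the epoll object is closed when the with block exits.

    import select

    with select.epoll() as ep:
        print(ep.fileno() > 0, ep.closed)   # True False
    print(ep.closed)                        # True
    # ep.fileno() now raises ValueError because the object is closed
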
def test_add(self):
server, client = self._connected_pair()
diff --git a/Lib/test/test_exceptions.py b/Lib/test/test_exceptions.py
index 1ad7f97..6fa1f5b 100644
--- a/Lib/test/test_exceptions.py
+++ b/Lib/test/test_exceptions.py
@@ -8,8 +8,8 @@ import weakref
import errno
from test.support import (TESTFN, captured_output, check_impl_detail,
- cpython_only, gc_collect, run_unittest, no_tracing,
- unlink)
+ check_warnings, cpython_only, gc_collect, run_unittest,
+ no_tracing, unlink)
class NaiveException(Exception):
def __init__(self, x):
@@ -255,11 +255,11 @@ class ExceptionTests(unittest.TestCase):
'errno' : 'foo', 'strerror' : 'bar'}),
(IOError, ('foo', 'bar', 'baz', 'quux'),
{'args' : ('foo', 'bar', 'baz', 'quux')}),
- (EnvironmentError, ('errnoStr', 'strErrorStr', 'filenameStr'),
+ (OSError, ('errnoStr', 'strErrorStr', 'filenameStr'),
{'args' : ('errnoStr', 'strErrorStr'),
'strerror' : 'strErrorStr', 'errno' : 'errnoStr',
'filename' : 'filenameStr'}),
- (EnvironmentError, (1, 'strErrorStr', 'filenameStr'),
+ (OSError, (1, 'strErrorStr', 'filenameStr'),
{'args' : (1, 'strErrorStr'), 'errno' : 1,
'strerror' : 'strErrorStr', 'filename' : 'filenameStr'}),
(SyntaxError, (), {'msg' : None, 'text' : None,
@@ -409,7 +409,7 @@ class ExceptionTests(unittest.TestCase):
self.assertIsNone(e.__context__)
self.assertIsNone(e.__cause__)
- class MyException(EnvironmentError):
+ class MyException(OSError):
pass
e = MyException()
@@ -947,9 +947,10 @@ class ImportErrorTests(unittest.TestCase):
def test_non_str_argument(self):
# Issue #15778
- arg = b'abc'
- exc = ImportError(arg)
- self.assertEqual(str(arg), str(exc))
+ with check_warnings(('', BytesWarning), quiet=True):
+ arg = b'abc'
+ exc = ImportError(arg)
+ self.assertEqual(str(arg), str(exc))
def test_main():
diff --git a/Lib/test/test_fcntl.py b/Lib/test/test_fcntl.py
index 29af99c..6d9ee0b 100644
--- a/Lib/test/test_fcntl.py
+++ b/Lib/test/test_fcntl.py
@@ -1,7 +1,4 @@
"""Test program for the fcntl C module.
-
-OS/2+EMX doesn't support the file locking operations.
-
"""
import os
import struct
@@ -35,8 +32,6 @@ def get_lockdata():
fcntl.F_WRLCK, 0)
elif sys.platform in ['aix3', 'aix4', 'hp-uxB', 'unixware7']:
lockdata = struct.pack('hhlllii', fcntl.F_WRLCK, 0, 0, 0, 0, 0, 0)
- elif sys.platform in ['os2emx']:
- lockdata = None
else:
lockdata = struct.pack('hh'+start_len+'hh', fcntl.F_WRLCK, 0, 0, 0, 0, 0)
if lockdata:
@@ -62,18 +57,16 @@ class TestFcntl(unittest.TestCase):
rv = fcntl.fcntl(self.f.fileno(), fcntl.F_SETFL, os.O_NONBLOCK)
if verbose:
print('Status from fcntl with O_NONBLOCK: ', rv)
- if sys.platform not in ['os2emx']:
- rv = fcntl.fcntl(self.f.fileno(), fcntl.F_SETLKW, lockdata)
- if verbose:
- print('String from fcntl with F_SETLKW: ', repr(rv))
+ rv = fcntl.fcntl(self.f.fileno(), fcntl.F_SETLKW, lockdata)
+ if verbose:
+ print('String from fcntl with F_SETLKW: ', repr(rv))
self.f.close()
def test_fcntl_file_descriptor(self):
# again, but pass the file rather than numeric descriptor
self.f = open(TESTFN, 'wb')
rv = fcntl.fcntl(self.f, fcntl.F_SETFL, os.O_NONBLOCK)
- if sys.platform not in ['os2emx']:
- rv = fcntl.fcntl(self.f, fcntl.F_SETLKW, lockdata)
+ rv = fcntl.fcntl(self.f, fcntl.F_SETLKW, lockdata)
self.f.close()
def test_fcntl_64_bit(self):
diff --git a/Lib/test/test_fileio.py b/Lib/test/test_fileio.py
index 0847961..9615c3a 100644
--- a/Lib/test/test_fileio.py
+++ b/Lib/test/test_fileio.py
@@ -285,7 +285,7 @@ class OtherFileTests(unittest.TestCase):
if sys.platform != "win32":
try:
f = _FileIO("/dev/tty", "a")
- except EnvironmentError:
+ except OSError:
# When run in a cron job there just aren't any
# ttys, so skip the test. This also handles other
# OS'es that don't support /dev/tty.
diff --git a/Lib/test/test_format.py b/Lib/test/test_format.py
index b6e2540..e6b0d20 100644
--- a/Lib/test/test_format.py
+++ b/Lib/test/test_format.py
@@ -307,6 +307,22 @@ class FormatTest(unittest.TestCase):
finally:
locale.setlocale(locale.LC_ALL, oldloc)
+ @support.cpython_only
+ def test_optimisations(self):
+ text = "abcde" # 5 characters
+
+ self.assertIs("%s" % text, text)
+ self.assertIs("%.5s" % text, text)
+ self.assertIs("%.10s" % text, text)
+ self.assertIs("%1s" % text, text)
+ self.assertIs("%5s" % text, text)
+
+ self.assertIs("{0}".format(text), text)
+ self.assertIs("{0:s}".format(text), text)
+ self.assertIs("{0:.5s}".format(text), text)
+ self.assertIs("{0:.10s}".format(text), text)
+ self.assertIs("{0:1s}".format(text), text)
+ self.assertIs("{0:5s}".format(text), text)
def test_main():
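
The identity checks above cover a CPython-only optimisation: formatting a str with a bare "%s", a "%.Ns" whose precision is at least len(text), or the equivalent str.format() specs may return the original object instead of a copy. A minimal sketch of what that looks like on an interpreter with the optimisation:

    text = "abcde"
    print(("%s" % text) is text)             # True on CPython
    print(("%.10s" % text) is text)          # True: precision >= len(text)
    print("{0:.10s}".format(text) is text)   # True on CPython
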
diff --git a/Lib/test/test_fractions.py b/Lib/test/test_fractions.py
index 1fad921..3336532 100644
--- a/Lib/test/test_fractions.py
+++ b/Lib/test/test_fractions.py
@@ -146,9 +146,10 @@ class FractionTest(unittest.TestCase):
self.assertEqual((0, 1), _components(F(-0.0)))
self.assertEqual((3602879701896397, 36028797018963968),
_components(F(0.1)))
- self.assertRaises(TypeError, F, float('nan'))
- self.assertRaises(TypeError, F, float('inf'))
- self.assertRaises(TypeError, F, float('-inf'))
+ # bug 16469: error types should be consistent with float -> int
+ self.assertRaises(ValueError, F, float('nan'))
+ self.assertRaises(OverflowError, F, float('inf'))
+ self.assertRaises(OverflowError, F, float('-inf'))
def testInitFromDecimal(self):
self.assertEqual((11, 10),
@@ -157,10 +158,11 @@ class FractionTest(unittest.TestCase):
_components(F(Decimal('3.5e-2'))))
self.assertEqual((0, 1),
_components(F(Decimal('.000e20'))))
- self.assertRaises(TypeError, F, Decimal('nan'))
- self.assertRaises(TypeError, F, Decimal('snan'))
- self.assertRaises(TypeError, F, Decimal('inf'))
- self.assertRaises(TypeError, F, Decimal('-inf'))
+ # bug 16469: error types should be consistent with decimal -> int
+ self.assertRaises(ValueError, F, Decimal('nan'))
+ self.assertRaises(ValueError, F, Decimal('snan'))
+ self.assertRaises(OverflowError, F, Decimal('inf'))
+ self.assertRaises(OverflowError, F, Decimal('-inf'))
def testFromString(self):
self.assertEqual((5, 1), _components(F("5")))
@@ -248,14 +250,15 @@ class FractionTest(unittest.TestCase):
inf = 1e1000
nan = inf - inf
+ # bug 16469: error types should be consistent with float -> int
self.assertRaisesMessage(
- TypeError, "Cannot convert inf to Fraction.",
+ OverflowError, "Cannot convert inf to Fraction.",
F.from_float, inf)
self.assertRaisesMessage(
- TypeError, "Cannot convert -inf to Fraction.",
+ OverflowError, "Cannot convert -inf to Fraction.",
F.from_float, -inf)
self.assertRaisesMessage(
- TypeError, "Cannot convert nan to Fraction.",
+ ValueError, "Cannot convert nan to Fraction.",
F.from_float, nan)
def testFromDecimal(self):
@@ -268,17 +271,18 @@ class FractionTest(unittest.TestCase):
self.assertEqual(1 - F(1, 10**30),
F.from_decimal(Decimal("0." + "9" * 30)))
+ # bug 16469: error types should be consistent with decimal -> int
self.assertRaisesMessage(
- TypeError, "Cannot convert Infinity to Fraction.",
+ OverflowError, "Cannot convert Infinity to Fraction.",
F.from_decimal, Decimal("inf"))
self.assertRaisesMessage(
- TypeError, "Cannot convert -Infinity to Fraction.",
+ OverflowError, "Cannot convert -Infinity to Fraction.",
F.from_decimal, Decimal("-inf"))
self.assertRaisesMessage(
- TypeError, "Cannot convert NaN to Fraction.",
+ ValueError, "Cannot convert NaN to Fraction.",
F.from_decimal, Decimal("nan"))
self.assertRaisesMessage(
- TypeError, "Cannot convert sNaN to Fraction.",
+ ValueError, "Cannot convert sNaN to Fraction.",
F.from_decimal, Decimal("snan"))
def testLimitDenominator(self):
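
The "consistent with float -> int" comments refer to the fact that int() already raises ValueError for NaN and OverflowError for infinities; after this change Fraction reports the same exception types. A small sketch, assuming the patched fractions module:

    from fractions import Fraction

    for bad in (float("nan"), float("inf"), float("-inf")):
        for convert in (int, Fraction):
            try:
                convert(bad)
            except (ValueError, OverflowError) as exc:
                print(convert.__name__, repr(bad), type(exc).__name__)
    # Both constructors now report ValueError for nan and OverflowError for +/-inf.
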
diff --git a/Lib/test/test_functools.py b/Lib/test/test_functools.py
index c4910a7..ee1f5db 100644
--- a/Lib/test/test_functools.py
+++ b/Lib/test/test_functools.py
@@ -1,4 +1,3 @@
-import functools
import collections
import sys
import unittest
@@ -7,17 +6,31 @@ from weakref import proxy
import pickle
from random import choice
-@staticmethod
-def PythonPartial(func, *args, **keywords):
- 'Pure Python approximation of partial()'
- def newfunc(*fargs, **fkeywords):
- newkeywords = keywords.copy()
- newkeywords.update(fkeywords)
- return func(*(args + fargs), **newkeywords)
- newfunc.func = func
- newfunc.args = args
- newfunc.keywords = keywords
- return newfunc
+import functools
+
+original_functools = functools
+py_functools = support.import_fresh_module('functools', blocked=['_functools'])
+c_functools = support.import_fresh_module('functools', fresh=['_functools'])
+
+class BaseTest(unittest.TestCase):
+
+ """Base class required for testing C and Py implementations."""
+
+ def setUp(self):
+
+ # The module must be set explicitly so that the interaction
+ # between the C module and the Python module can be
+ # controlled properly.
+ self.partial = self.module.partial
+ super(BaseTest, self).setUp()
+
+class BaseTestC(BaseTest):
+ module = c_functools
+
+class BaseTestPy(BaseTest):
+ module = py_functools
+
+PythonPartial = py_functools.partial
def capture(*args, **kw):
"""capture all positional and keyword arguments"""
@@ -27,31 +40,32 @@ def signature(part):
""" return the signature of a partial object """
return (part.func, part.args, part.keywords, part.__dict__)
-class TestPartial(unittest.TestCase):
+class TestPartial(object):
- thetype = functools.partial
+ partial = functools.partial
def test_basic_examples(self):
- p = self.thetype(capture, 1, 2, a=10, b=20)
+ p = self.partial(capture, 1, 2, a=10, b=20)
+ self.assertTrue(callable(p))
self.assertEqual(p(3, 4, b=30, c=40),
((1, 2, 3, 4), dict(a=10, b=30, c=40)))
- p = self.thetype(map, lambda x: x*10)
+ p = self.partial(map, lambda x: x*10)
self.assertEqual(list(p([1,2,3,4])), [10, 20, 30, 40])
def test_attributes(self):
- p = self.thetype(capture, 1, 2, a=10, b=20)
+ p = self.partial(capture, 1, 2, a=10, b=20)
# attributes should be readable
self.assertEqual(p.func, capture)
self.assertEqual(p.args, (1, 2))
self.assertEqual(p.keywords, dict(a=10, b=20))
# attributes should not be writable
- if not isinstance(self.thetype, type):
+ if not isinstance(self.partial, type):
return
self.assertRaises(AttributeError, setattr, p, 'func', map)
self.assertRaises(AttributeError, setattr, p, 'args', (1, 2))
self.assertRaises(AttributeError, setattr, p, 'keywords', dict(a=1, b=2))
- p = self.thetype(hex)
+ p = self.partial(hex)
try:
del p.__dict__
except TypeError:
@@ -60,9 +74,9 @@ class TestPartial(unittest.TestCase):
self.fail('partial object allowed __dict__ to be deleted')
def test_argument_checking(self):
- self.assertRaises(TypeError, self.thetype) # need at least a func arg
+ self.assertRaises(TypeError, self.partial) # need at least a func arg
try:
- self.thetype(2)()
+ self.partial(2)()
except TypeError:
pass
else:
@@ -73,7 +87,7 @@ class TestPartial(unittest.TestCase):
def func(a=10, b=20):
return a
d = {'a':3}
- p = self.thetype(func, a=5)
+ p = self.partial(func, a=5)
self.assertEqual(p(**d), 3)
self.assertEqual(d, {'a':3})
p(b=7)
@@ -82,20 +96,20 @@ class TestPartial(unittest.TestCase):
def test_arg_combinations(self):
# exercise special code paths for zero args in either partial
# object or the caller
- p = self.thetype(capture)
+ p = self.partial(capture)
self.assertEqual(p(), ((), {}))
self.assertEqual(p(1,2), ((1,2), {}))
- p = self.thetype(capture, 1, 2)
+ p = self.partial(capture, 1, 2)
self.assertEqual(p(), ((1,2), {}))
self.assertEqual(p(3,4), ((1,2,3,4), {}))
def test_kw_combinations(self):
# exercise special code paths for no keyword args in
# either the partial object or the caller
- p = self.thetype(capture)
+ p = self.partial(capture)
self.assertEqual(p(), ((), {}))
self.assertEqual(p(a=1), ((), {'a':1}))
- p = self.thetype(capture, a=1)
+ p = self.partial(capture, a=1)
self.assertEqual(p(), ((), {'a':1}))
self.assertEqual(p(b=2), ((), {'a':1, 'b':2}))
# keyword args in the call override those in the partial object
@@ -104,7 +118,7 @@ class TestPartial(unittest.TestCase):
def test_positional(self):
# make sure positional arguments are captured correctly
for args in [(), (0,), (0,1), (0,1,2), (0,1,2,3)]:
- p = self.thetype(capture, *args)
+ p = self.partial(capture, *args)
expected = args + ('x',)
got, empty = p('x')
self.assertTrue(expected == got and empty == {})
@@ -112,14 +126,14 @@ class TestPartial(unittest.TestCase):
def test_keyword(self):
# make sure keyword arguments are captured correctly
for a in ['a', 0, None, 3.5]:
- p = self.thetype(capture, a=a)
+ p = self.partial(capture, a=a)
expected = {'a':a,'x':None}
empty, got = p(x=None)
self.assertTrue(expected == got and empty == ())
def test_no_side_effects(self):
# make sure there are no side effects that affect subsequent calls
- p = self.thetype(capture, 0, a=1)
+ p = self.partial(capture, 0, a=1)
args1, kw1 = p(1, b=2)
self.assertTrue(args1 == (0,1) and kw1 == {'a':1,'b':2})
args2, kw2 = p()
@@ -128,13 +142,13 @@ class TestPartial(unittest.TestCase):
def test_error_propagation(self):
def f(x, y):
x / y
- self.assertRaises(ZeroDivisionError, self.thetype(f, 1, 0))
- self.assertRaises(ZeroDivisionError, self.thetype(f, 1), 0)
- self.assertRaises(ZeroDivisionError, self.thetype(f), 1, 0)
- self.assertRaises(ZeroDivisionError, self.thetype(f, y=0), 1)
+ self.assertRaises(ZeroDivisionError, self.partial(f, 1, 0))
+ self.assertRaises(ZeroDivisionError, self.partial(f, 1), 0)
+ self.assertRaises(ZeroDivisionError, self.partial(f), 1, 0)
+ self.assertRaises(ZeroDivisionError, self.partial(f, y=0), 1)
def test_weakref(self):
- f = self.thetype(int, base=16)
+ f = self.partial(int, base=16)
p = proxy(f)
self.assertEqual(f.func, p.func)
f = None
@@ -142,9 +156,9 @@ class TestPartial(unittest.TestCase):
def test_with_bound_and_unbound_methods(self):
data = list(map(str, range(10)))
- join = self.thetype(str.join, '')
+ join = self.partial(str.join, '')
self.assertEqual(join(data), '0123456789')
- join = self.thetype(''.join)
+ join = self.partial(''.join)
self.assertEqual(join(data), '0123456789')
def test_repr(self):
@@ -152,49 +166,57 @@ class TestPartial(unittest.TestCase):
args_repr = ', '.join(repr(a) for a in args)
kwargs = {'a': object(), 'b': object()}
kwargs_repr = ', '.join("%s=%r" % (k, v) for k, v in kwargs.items())
- if self.thetype is functools.partial:
+ if self.partial is functools.partial:
name = 'functools.partial'
else:
- name = self.thetype.__name__
+ name = self.partial.__name__
- f = self.thetype(capture)
+ f = self.partial(capture)
self.assertEqual('{}({!r})'.format(name, capture),
repr(f))
- f = self.thetype(capture, *args)
+ f = self.partial(capture, *args)
self.assertEqual('{}({!r}, {})'.format(name, capture, args_repr),
repr(f))
- f = self.thetype(capture, **kwargs)
+ f = self.partial(capture, **kwargs)
self.assertEqual('{}({!r}, {})'.format(name, capture, kwargs_repr),
repr(f))
- f = self.thetype(capture, *args, **kwargs)
+ f = self.partial(capture, *args, **kwargs)
self.assertEqual('{}({!r}, {}, {})'.format(name, capture, args_repr, kwargs_repr),
repr(f))
def test_pickle(self):
- f = self.thetype(signature, 'asdf', bar=True)
+ f = self.partial(signature, 'asdf', bar=True)
f.add_something_to__dict__ = True
f_copy = pickle.loads(pickle.dumps(f))
self.assertEqual(signature(f), signature(f_copy))
-class PartialSubclass(functools.partial):
+class TestPartialC(BaseTestC, TestPartial):
pass
-class TestPartialSubclass(TestPartial):
+class TestPartialPy(BaseTestPy, TestPartial):
- thetype = PartialSubclass
+ def test_pickle(self):
+ raise unittest.SkipTest("Python implementation of partial isn't picklable")
+
+ def test_repr(self):
+ raise unittest.SkipTest("Python implementation of partial uses own repr")
+
+class TestPartialCSubclass(BaseTestC, TestPartial):
+
+ class PartialSubclass(c_functools.partial):
+ pass
-class TestPythonPartial(TestPartial):
+ partial = staticmethod(PartialSubclass)
- thetype = PythonPartial
+class TestPartialPySubclass(TestPartialPy):
- # the python version hasn't a nice repr
- def test_repr(self): pass
+ class PartialSubclass(c_functools.partial):
+ pass
- # the python version isn't picklable
- def test_pickle(self): pass
+ partial = staticmethod(PartialSubclass)
class TestUpdateWrapper(unittest.TestCase):
@@ -320,7 +342,7 @@ class TestWraps(TestUpdateWrapper):
self.assertEqual(wrapper.__qualname__, f.__qualname__)
self.assertEqual(wrapper.attr, 'This is also a test')
- @unittest.skipIf(not sys.flags.optimize <= 1,
+ @unittest.skipIf(sys.flags.optimize >= 2,
"Docstrings are omitted with -O2 and above")
def test_default_update_doc(self):
wrapper, _ = self._default_update()
@@ -441,24 +463,28 @@ class TestReduce(unittest.TestCase):
d = {"one": 1, "two": 2, "three": 3}
self.assertEqual(self.func(add, d), "".join(d.keys()))
-class TestCmpToKey(unittest.TestCase):
+class TestCmpToKey(object):
def test_cmp_to_key(self):
def cmp1(x, y):
return (x > y) - (x < y)
- key = functools.cmp_to_key(cmp1)
+ key = self.cmp_to_key(cmp1)
self.assertEqual(key(3), key(3))
self.assertGreater(key(3), key(1))
+ self.assertGreaterEqual(key(3), key(3))
+
def cmp2(x, y):
return int(x) - int(y)
- key = functools.cmp_to_key(cmp2)
+ key = self.cmp_to_key(cmp2)
self.assertEqual(key(4.0), key('4'))
self.assertLess(key(2), key('35'))
+ self.assertLessEqual(key(2), key('35'))
+ self.assertNotEqual(key(2), key('35'))
def test_cmp_to_key_arguments(self):
def cmp1(x, y):
return (x > y) - (x < y)
- key = functools.cmp_to_key(mycmp=cmp1)
+ key = self.cmp_to_key(mycmp=cmp1)
self.assertEqual(key(obj=3), key(obj=3))
self.assertGreater(key(obj=3), key(obj=1))
with self.assertRaises((TypeError, AttributeError)):
@@ -466,10 +492,10 @@ class TestCmpToKey(unittest.TestCase):
with self.assertRaises((TypeError, AttributeError)):
1 < key(3) # lhs is not a K object
with self.assertRaises(TypeError):
- key = functools.cmp_to_key() # too few args
+ key = self.cmp_to_key() # too few args
with self.assertRaises(TypeError):
- key = functools.cmp_to_key(cmp1, None) # too many args
- key = functools.cmp_to_key(cmp1)
+ key = self.module.cmp_to_key(cmp1, None) # too many args
+ key = self.cmp_to_key(cmp1)
with self.assertRaises(TypeError):
key() # too few args
with self.assertRaises(TypeError):
@@ -478,7 +504,7 @@ class TestCmpToKey(unittest.TestCase):
def test_bad_cmp(self):
def cmp1(x, y):
raise ZeroDivisionError
- key = functools.cmp_to_key(cmp1)
+ key = self.cmp_to_key(cmp1)
with self.assertRaises(ZeroDivisionError):
key(3) > key(1)
@@ -493,13 +519,13 @@ class TestCmpToKey(unittest.TestCase):
def test_obj_field(self):
def cmp1(x, y):
return (x > y) - (x < y)
- key = functools.cmp_to_key(mycmp=cmp1)
+ key = self.cmp_to_key(mycmp=cmp1)
self.assertEqual(key(50).obj, 50)
def test_sort_int(self):
def mycmp(x, y):
return y - x
- self.assertEqual(sorted(range(5), key=functools.cmp_to_key(mycmp)),
+ self.assertEqual(sorted(range(5), key=self.cmp_to_key(mycmp)),
[4, 3, 2, 1, 0])
def test_sort_int_str(self):
@@ -507,18 +533,24 @@ class TestCmpToKey(unittest.TestCase):
x, y = int(x), int(y)
return (x > y) - (x < y)
values = [5, '3', 7, 2, '0', '1', 4, '10', 1]
- values = sorted(values, key=functools.cmp_to_key(mycmp))
+ values = sorted(values, key=self.cmp_to_key(mycmp))
self.assertEqual([int(value) for value in values],
[0, 1, 1, 2, 3, 4, 5, 7, 10])
def test_hash(self):
def mycmp(x, y):
return y - x
- key = functools.cmp_to_key(mycmp)
+ key = self.cmp_to_key(mycmp)
k = key(10)
self.assertRaises(TypeError, hash, k)
self.assertNotIsInstance(k, collections.Hashable)
+class TestCmpToKeyC(BaseTestC, TestCmpToKey):
+ cmp_to_key = c_functools.cmp_to_key
+
+class TestCmpToKeyPy(BaseTestPy, TestCmpToKey):
+ cmp_to_key = staticmethod(py_functools.cmp_to_key)
+
class TestTotalOrdering(unittest.TestCase):
def test_total_ordering_lt(self):
@@ -623,7 +655,7 @@ class TestLRU(unittest.TestCase):
def test_lru(self):
def orig(x, y):
- return 3*x+y
+ return 3 * x + y
f = functools.lru_cache(maxsize=20)(orig)
hits, misses, maxsize, currsize = f.cache_info()
self.assertEqual(maxsize, 20)
@@ -728,7 +760,7 @@ class TestLRU(unittest.TestCase):
# Verify that user_function exceptions get passed through without
# creating a hard-to-read chained exception.
# http://bugs.python.org/issue13177
- for maxsize in (None, 100):
+ for maxsize in (None, 128):
@functools.lru_cache(maxsize)
def func(i):
return 'abc'[i]
@@ -741,7 +773,7 @@ class TestLRU(unittest.TestCase):
func(15)
def test_lru_with_types(self):
- for maxsize in (None, 100):
+ for maxsize in (None, 128):
@functools.lru_cache(maxsize=maxsize, typed=True)
def square(x):
return x * x
@@ -756,14 +788,46 @@ class TestLRU(unittest.TestCase):
self.assertEqual(square.cache_info().hits, 4)
self.assertEqual(square.cache_info().misses, 4)
+ def test_lru_with_keyword_args(self):
+ @functools.lru_cache()
+ def fib(n):
+ if n < 2:
+ return n
+ return fib(n=n-1) + fib(n=n-2)
+ self.assertEqual(
+ [fib(n=number) for number in range(16)],
+ [0, 1, 1, 2, 3, 5, 8, 13, 21, 34, 55, 89, 144, 233, 377, 610]
+ )
+ self.assertEqual(fib.cache_info(),
+ functools._CacheInfo(hits=28, misses=16, maxsize=128, currsize=16))
+ fib.cache_clear()
+ self.assertEqual(fib.cache_info(),
+ functools._CacheInfo(hits=0, misses=0, maxsize=128, currsize=0))
+
+ def test_lru_with_keyword_args_maxsize_none(self):
+ @functools.lru_cache(maxsize=None)
+ def fib(n):
+ if n < 2:
+ return n
+ return fib(n=n-1) + fib(n=n-2)
+ self.assertEqual([fib(n=number) for number in range(16)],
+ [0, 1, 1, 2, 3, 5, 8, 13, 21, 34, 55, 89, 144, 233, 377, 610])
+ self.assertEqual(fib.cache_info(),
+ functools._CacheInfo(hits=28, misses=16, maxsize=None, currsize=16))
+ fib.cache_clear()
+ self.assertEqual(fib.cache_info(),
+ functools._CacheInfo(hits=0, misses=0, maxsize=None, currsize=0))
+
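
A condensed sketch of what the two new tests verify: lru_cache treats keyword-argument calls as cacheable keys, and cache_info()/cache_clear() report and reset the counters accordingly.

    import functools

    @functools.lru_cache(maxsize=None)
    def fib(n):
        return n if n < 2 else fib(n=n - 1) + fib(n=n - 2)

    print([fib(n=i) for i in range(10)])
    print(fib.cache_info())   # hits/misses include the keyword-argument calls
    fib.cache_clear()
    print(fib.cache_info())   # counters back to zero
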
def test_main(verbose=None):
test_classes = (
- TestPartial,
- TestPartialSubclass,
- TestPythonPartial,
+ TestPartialC,
+ TestPartialPy,
+ TestPartialCSubclass,
+ TestPartialPySubclass,
TestUpdateWrapper,
TestTotalOrdering,
- TestCmpToKey,
+ TestCmpToKeyC,
+ TestCmpToKeyPy,
TestWraps,
TestReduce,
TestLRU,
diff --git a/Lib/test/test_gc.py b/Lib/test/test_gc.py
index c59b72e..85dbc97 100644
--- a/Lib/test/test_gc.py
+++ b/Lib/test/test_gc.py
@@ -610,6 +610,32 @@ class GCTests(unittest.TestCase):
stderr = run_command(code % "gc.DEBUG_SAVEALL")
self.assertNotIn(b"uncollectable objects at shutdown", stderr)
+ def test_get_stats(self):
+ stats = gc.get_stats()
+ self.assertEqual(len(stats), 3)
+ for st in stats:
+ self.assertIsInstance(st, dict)
+ self.assertEqual(set(st),
+ {"collected", "collections", "uncollectable"})
+ self.assertGreaterEqual(st["collected"], 0)
+ self.assertGreaterEqual(st["collections"], 0)
+ self.assertGreaterEqual(st["uncollectable"], 0)
+ # Check that collection counts are incremented correctly
+ if gc.isenabled():
+ self.addCleanup(gc.enable)
+ gc.disable()
+ old = gc.get_stats()
+ gc.collect(0)
+ new = gc.get_stats()
+ self.assertEqual(new[0]["collections"], old[0]["collections"] + 1)
+ self.assertEqual(new[1]["collections"], old[1]["collections"])
+ self.assertEqual(new[2]["collections"], old[2]["collections"])
+ gc.collect(2)
+ new = gc.get_stats()
+ self.assertEqual(new[0]["collections"], old[0]["collections"] + 1)
+ self.assertEqual(new[1]["collections"], old[1]["collections"])
+ self.assertEqual(new[2]["collections"], old[2]["collections"] + 1)
+
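
A short sketch of the gc.get_stats() API the new test covers (assuming the interface added by this patch): one dict per generation, each holding cumulative counters.

    import gc

    before = gc.get_stats()
    gc.collect()    # a full collection; only the oldest generation's counter moves
    after = gc.get_stats()

    print(sorted(after[0]))   # ['collected', 'collections', 'uncollectable']
    print(after[2]["collections"] - before[2]["collections"])   # typically 1
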
class GCCallbackTests(unittest.TestCase):
def setUp(self):
diff --git a/Lib/test/test_generators.py b/Lib/test/test_generators.py
index 06f67c2..f5c1a7d 100644
--- a/Lib/test/test_generators.py
+++ b/Lib/test/test_generators.py
@@ -1728,9 +1728,7 @@ Our ill-behaved code should be invoked during GC:
>>> g = f()
>>> next(g)
>>> del g
->>> sys.stderr.getvalue().startswith(
-... "Exception RuntimeError: 'generator ignored GeneratorExit' in "
-... )
+>>> "RuntimeError: generator ignored GeneratorExit" in sys.stderr.getvalue()
True
>>> sys.stderr = old
@@ -1840,22 +1838,23 @@ to test.
... sys.stderr = io.StringIO()
... class Leaker:
... def __del__(self):
-... raise RuntimeError
+... def invoke(message):
+... raise RuntimeError(message)
+... invoke("test")
...
... l = Leaker()
... del l
... err = sys.stderr.getvalue().strip()
-... err.startswith(
-... "Exception RuntimeError: RuntimeError() in <"
-... )
-... err.endswith("> ignored")
-... len(err.splitlines())
+... "Exception ignored in" in err
+... "RuntimeError: test" in err
+... "Traceback" in err
+... "in invoke" in err
... finally:
... sys.stderr = old
True
True
-1
-
+True
+True
These refleak tests should perhaps be in a testfile of their own,
diff --git a/Lib/test/test_genericpath.py b/Lib/test/test_genericpath.py
index 3eadd58..084dbb2 100644
--- a/Lib/test/test_genericpath.py
+++ b/Lib/test/test_genericpath.py
@@ -308,19 +308,19 @@ class CommonTest(GenericTest):
for path in ('', 'fuu', 'f\xf9\xf9', '/fuu', 'U:\\'):
self.assertIsInstance(abspath(path), str)
- @unittest.skipIf(sys.platform == 'darwin',
- "Mac OS X denies the creation of a directory with an invalid utf8 name")
def test_nonascii_abspath(self):
- name = b'\xe7w\xf0'
- if sys.platform == 'win32':
- try:
- os.fsdecode(name)
- except UnicodeDecodeError:
- self.skipTest("the filename %a is not decodable "
- "from the ANSI code page %s"
- % (name, sys.getfilesystemencoding()))
-
- # Test non-ASCII, non-UTF8 bytes in the path.
+ if (support.TESTFN_UNDECODABLE
+ # Mac OS X denies the creation of a directory with an invalid
+ # UTF-8 name. Windows allows creating a directory with an
+ # arbitrary bytes name, but fails to enter this directory
+ # (when the bytes name is used).
+ and sys.platform not in ('win32', 'darwin')):
+ name = support.TESTFN_UNDECODABLE
+ elif support.TESTFN_NONASCII:
+ name = support.TESTFN_NONASCII
+ else:
+ self.skipTest("need support.TESTFN_NONASCII")
+
with warnings.catch_warnings():
warnings.simplefilter("ignore", DeprecationWarning)
with support.temp_cwd(name):
diff --git a/Lib/test/test_hashlib.py b/Lib/test/test_hashlib.py
index 32f85e9..54201a1 100644
--- a/Lib/test/test_hashlib.py
+++ b/Lib/test/test_hashlib.py
@@ -36,7 +36,10 @@ def hexstr(s):
class HashLibTestCase(unittest.TestCase):
supported_hash_names = ( 'md5', 'MD5', 'sha1', 'SHA1',
'sha224', 'SHA224', 'sha256', 'SHA256',
- 'sha384', 'SHA384', 'sha512', 'SHA512' )
+ 'sha384', 'SHA384', 'sha512', 'SHA512',
+ 'sha3_224', 'sha3_256', 'sha3_384',
+ 'sha3_512', 'SHA3_224', 'SHA3_256',
+ 'SHA3_384', 'SHA3_512' )
# Issue #14693: fallback modules are always compiled under POSIX
_warn_on_extension_import = os.name == 'posix' or COMPILED_WITH_PYDEBUG
@@ -93,6 +96,12 @@ class HashLibTestCase(unittest.TestCase):
if _sha512:
self.constructors_to_test['sha384'].add(_sha512.sha384)
self.constructors_to_test['sha512'].add(_sha512.sha512)
+ _sha3 = self._conditional_import_module('_sha3')
+ if _sha3:
+ self.constructors_to_test['sha3_224'].add(_sha3.sha3_224)
+ self.constructors_to_test['sha3_256'].add(_sha3.sha3_256)
+ self.constructors_to_test['sha3_384'].add(_sha3.sha3_384)
+ self.constructors_to_test['sha3_512'].add(_sha3.sha3_512)
super(HashLibTestCase, self).__init__(*args, **kwargs)
@@ -158,6 +167,7 @@ class HashLibTestCase(unittest.TestCase):
self.assertEqual(m1.digest(), m2.digest())
def check(self, name, data, digest):
+ digest = digest.lower()
constructors = self.constructors_to_test[name]
# 2 is for hashlib.name(...) and hashlib.new(name, ...)
self.assertGreaterEqual(len(constructors), 2)
@@ -183,6 +193,10 @@ class HashLibTestCase(unittest.TestCase):
self.check_no_unicode('sha256')
self.check_no_unicode('sha384')
self.check_no_unicode('sha512')
+ self.check_no_unicode('sha3_224')
+ self.check_no_unicode('sha3_256')
+ self.check_no_unicode('sha3_384')
+ self.check_no_unicode('sha3_512')
def test_case_md5_0(self):
self.check('md5', b'', 'd41d8cd98f00b204e9800998ecf8427e')
@@ -318,11 +332,122 @@ class HashLibTestCase(unittest.TestCase):
"e718483d0ce769644e2e42c7bc15b4638e1f98b13b2044285632a803afa973eb"+
"de0ff244877ea60a4cb0432ce577c31beb009c5c2c49aa2e4eadb217ad8cc09b")
+ # SHA-3 family
+ def test_case_sha3_224_0(self):
+ self.check('sha3_224', b"",
+ "F71837502BA8E10837BDD8D365ADB85591895602FC552B48B7390ABD")
+
+ def test_case_sha3_224_1(self):
+ self.check('sha3_224', bytes.fromhex("CC"),
+ "A9CAB59EB40A10B246290F2D6086E32E3689FAF1D26B470C899F2802")
+
+ def test_case_sha3_224_2(self):
+ self.check('sha3_224', bytes.fromhex("41FB"),
+ "615BA367AFDC35AAC397BC7EB5D58D106A734B24986D5D978FEFD62C")
+
+ def test_case_sha3_224_3(self):
+ self.check('sha3_224', bytes.fromhex(
+ "433C5303131624C0021D868A30825475E8D0BD3052A022180398F4CA4423B9"+
+ "8214B6BEAAC21C8807A2C33F8C93BD42B092CC1B06CEDF3224D5ED1EC29784"+
+ "444F22E08A55AA58542B524B02CD3D5D5F6907AFE71C5D7462224A3F9D9E53"+
+ "E7E0846DCBB4CE"),
+ "62B10F1B6236EBC2DA72957742A8D4E48E213B5F8934604BFD4D2C3A")
+
+ @bigmemtest(size=_4G + 5, memuse=1)
+ def test_case_sha3_224_huge(self, size):
+ if size == _4G + 5:
+ try:
+ self.check('sha3_224', b'A'*size,
+ '58ef60057c9dddb6a87477e9ace5a26f0d9db01881cf9b10a9f8c224')
+ except OverflowError:
+ pass # 32-bit arch
+
+
+ def test_case_sha3_256_0(self):
+ self.check('sha3_256', b"",
+ "C5D2460186F7233C927E7DB2DCC703C0E500B653CA82273B7BFAD8045D85A470")
+
+ def test_case_sha3_256_1(self):
+ self.check('sha3_256', bytes.fromhex("CC"),
+ "EEAD6DBFC7340A56CAEDC044696A168870549A6A7F6F56961E84A54BD9970B8A")
+
+ def test_case_sha3_256_2(self):
+ self.check('sha3_256', bytes.fromhex("41FB"),
+ "A8EACEDA4D47B3281A795AD9E1EA2122B407BAF9AABCB9E18B5717B7873537D2")
+
+ def test_case_sha3_256_3(self):
+ self.check('sha3_256', bytes.fromhex(
+ "433C5303131624C0021D868A30825475E8D0BD3052A022180398F4CA4423B9"+
+ "8214B6BEAAC21C8807A2C33F8C93BD42B092CC1B06CEDF3224D5ED1EC29784"+
+ "444F22E08A55AA58542B524B02CD3D5D5F6907AFE71C5D7462224A3F9D9E53"+
+ "E7E0846DCBB4CE"),
+ "CE87A5173BFFD92399221658F801D45C294D9006EE9F3F9D419C8D427748DC41")
+
+
+ def test_case_sha3_384_0(self):
+ self.check('sha3_384', b"",
+ "2C23146A63A29ACF99E73B88F8C24EAA7DC60AA771780CCC006AFBFA8FE2479B"+
+ "2DD2B21362337441AC12B515911957FF")
+
+ def test_case_sha3_384_1(self):
+ self.check('sha3_384', bytes.fromhex("CC"),
+ "1B84E62A46E5A201861754AF5DC95C4A1A69CAF4A796AE405680161E29572641"+
+ "F5FA1E8641D7958336EE7B11C58F73E9")
+
+ def test_case_sha3_384_2(self):
+ self.check('sha3_384', bytes.fromhex("41FB"),
+ "495CCE2714CD72C8C53C3363D22C58B55960FE26BE0BF3BBC7A3316DD563AD1D"+
+ "B8410E75EEFEA655E39D4670EC0B1792")
+
+ def test_case_sha3_384_3(self):
+ self.check('sha3_384', bytes.fromhex(
+ "433C5303131624C0021D868A30825475E8D0BD3052A022180398F4CA4423B9"+
+ "8214B6BEAAC21C8807A2C33F8C93BD42B092CC1B06CEDF3224D5ED1EC29784"+
+ "444F22E08A55AA58542B524B02CD3D5D5F6907AFE71C5D7462224A3F9D9E53"+
+ "E7E0846DCBB4CE"),
+ "135114508DD63E279E709C26F7817C0482766CDE49132E3EDF2EEDD8996F4E35"+
+ "96D184100B384868249F1D8B8FDAA2C9")
+
+
+ def test_case_sha3_512_0(self):
+ self.check('sha3_512', b"",
+ "0EAB42DE4C3CEB9235FC91ACFFE746B29C29A8C366B7C60E4E67C466F36A4304"+
+ "C00FA9CAF9D87976BA469BCBE06713B435F091EF2769FB160CDAB33D3670680E")
+
+ def test_case_sha3_512_1(self):
+ self.check('sha3_512', bytes.fromhex("CC"),
+ "8630C13CBD066EA74BBE7FE468FEC1DEE10EDC1254FB4C1B7C5FD69B646E4416"+
+ "0B8CE01D05A0908CA790DFB080F4B513BC3B6225ECE7A810371441A5AC666EB9")
+
+ def test_case_sha3_512_2(self):
+ self.check('sha3_512', bytes.fromhex("41FB"),
+ "551DA6236F8B96FCE9F97F1190E901324F0B45E06DBBB5CDB8355D6ED1DC34B3"+
+ "F0EAE7DCB68622FF232FA3CECE0D4616CDEB3931F93803662A28DF1CD535B731")
+
+ def test_case_sha3_512_3(self):
+ self.check('sha3_512', bytes.fromhex(
+ "433C5303131624C0021D868A30825475E8D0BD3052A022180398F4CA4423B9"+
+ "8214B6BEAAC21C8807A2C33F8C93BD42B092CC1B06CEDF3224D5ED1EC29784"+
+ "444F22E08A55AA58542B524B02CD3D5D5F6907AFE71C5D7462224A3F9D9E53"+
+ "E7E0846DCBB4CE"),
+ "527D28E341E6B14F4684ADB4B824C496C6482E51149565D3D17226828884306B"+
+ "51D6148A72622C2B75F5D3510B799D8BDC03EAEDE453676A6EC8FE03A1AD0EAB")
+
+
def test_gil(self):
# Check things work fine with an input larger than the size required
# for multithreaded operation (which is hardwired to 2048).
gil_minsize = 2048
+ for name in self.supported_hash_names:
+ m = hashlib.new(name)
+ m.update(b'1')
+ m.update(b'#' * gil_minsize)
+ m.update(b'1')
+
+ m = hashlib.new(name, b'x' * gil_minsize)
+ m.update(b'1')
+
m = hashlib.md5()
m.update(b'1')
m.update(b'#' * gil_minsize)
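
The new SHA-3 cases exercise constructors that are only present when the build includes the _sha3 module; a small sketch of how they are reached through the normal hashlib interface (digest values are deliberately not hard-coded here):

    import hashlib

    for name in ("sha3_224", "sha3_256", "sha3_384", "sha3_512"):
        if name in hashlib.algorithms_available:
            h = hashlib.new(name, b"")
            print(name, h.digest_size, h.hexdigest())
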
diff --git a/Lib/test/test_httpservers.py b/Lib/test/test_httpservers.py
index 75133c9..c5db620 100644
--- a/Lib/test/test_httpservers.py
+++ b/Lib/test/test_httpservers.py
@@ -92,6 +92,9 @@ class BaseHTTPServerTestCase(BaseTestCase):
def do_KEYERROR(self):
self.send_error(999)
+ def do_NOTFOUND(self):
+ self.send_error(404)
+
def do_CUSTOM(self):
self.send_response(999)
self.send_header('Content-Type', 'text/html')
@@ -211,6 +214,14 @@ class BaseHTTPServerTestCase(BaseTestCase):
self.assertEqual(res.getheader('X-Special'), 'Dängerous Mind')
self.assertEqual(res.read(), 'Ärger mit Unicode'.encode('utf-8'))
+ def test_error_content_length(self):
+ # Issue #16088: standard error responses should have a content-length
+ self.con.request('NOTFOUND', '/')
+ res = self.con.getresponse()
+ self.assertEqual(res.status, 404)
+ data = res.read()
+ self.assertEqual(int(res.getheader('Content-Length')), len(data))
+
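
A self-contained sketch of the behaviour asserted above: an error response produced by send_error() carries a Content-Length header that matches the body actually sent. It uses only the standard library; the handler name is illustrative.

    import http.client
    import threading
    from http.server import BaseHTTPRequestHandler, HTTPServer

    class NotFoundHandler(BaseHTTPRequestHandler):
        def do_GET(self):
            self.send_error(404)
        def log_message(self, *args):   # silence request logging
            pass

    server = HTTPServer(("127.0.0.1", 0), NotFoundHandler)
    threading.Thread(target=server.serve_forever, daemon=True).start()

    conn = http.client.HTTPConnection("127.0.0.1", server.server_port)
    conn.request("GET", "/")
    resp = conn.getresponse()
    body = resp.read()
    assert int(resp.getheader("Content-Length")) == len(body)
    server.shutdown()
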
class SimpleHTTPServerTestCase(BaseTestCase):
class request_handler(NoLogRequestHandler, SimpleHTTPRequestHandler):
diff --git a/Lib/test/test_imp.py b/Lib/test/test_imp.py
index 65c9f25..52c4399 100644
--- a/Lib/test/test_imp.py
+++ b/Lib/test/test_imp.py
@@ -217,6 +217,20 @@ class ImportTests(unittest.TestCase):
mod = imp.load_module(example, *x)
self.assertEqual(mod.__name__, example)
+ def test_issue16421_multiple_modules_in_one_dll(self):
+ # Issue 16421: loading several modules from the same compiled file fails
+ m = '_testimportmultiple'
+ fileobj, pathname, description = imp.find_module(m)
+ fileobj.close()
+ mod0 = imp.load_dynamic(m, pathname)
+ mod1 = imp.load_dynamic('_testimportmultiple_foo', pathname)
+ mod2 = imp.load_dynamic('_testimportmultiple_bar', pathname)
+ self.assertEqual(mod0.__name__, m)
+ self.assertEqual(mod1.__name__, '_testimportmultiple_foo')
+ self.assertEqual(mod2.__name__, '_testimportmultiple_bar')
+ with self.assertRaises(ImportError):
+ imp.load_dynamic('nonexistent', pathname)
+
def test_load_dynamic_ImportError_path(self):
# Issue #1559549 added `name` and `path` attributes to ImportError
# in order to provide better detail. Issue #10854 implemented those
diff --git a/Lib/test/test_importlib/import_/test_fromlist.py b/Lib/test/test_importlib/import_/test_fromlist.py
index 27b2270..c16c337 100644
--- a/Lib/test/test_importlib/import_/test_fromlist.py
+++ b/Lib/test/test_importlib/import_/test_fromlist.py
@@ -80,7 +80,7 @@ class HandlingFromlist(unittest.TestCase):
with util.import_state(meta_path=[importer]):
with self.assertRaises(ImportError) as exc:
import_util.import_('pkg', fromlist=['mod'])
- self.assertEquals('i_do_not_exist', exc.exception.name)
+ self.assertEqual('i_do_not_exist', exc.exception.name)
def test_empty_string(self):
with util.mock_modules('pkg.__init__', 'pkg.mod') as importer:
diff --git a/Lib/test/test_importlib/source/test_abc_loader.py b/Lib/test/test_importlib/source/test_abc_loader.py
index 0d912b6..cce7b07 100644
--- a/Lib/test/test_importlib/source/test_abc_loader.py
+++ b/Lib/test/test_importlib/source/test_abc_loader.py
@@ -70,483 +70,9 @@ class SourceLoaderMock(SourceOnlyLoaderMock):
return path == self.bytecode_path
-class PyLoaderMock(abc.PyLoader):
-
- # Globals that should be defined for all modules.
- source = (b"_ = '::'.join([__name__, __file__, __package__, "
- b"repr(__loader__)])")
-
- def __init__(self, data):
- """Take a dict of 'module_name: path' pairings.
-
- Paths should have no file extension, allowing packages to be denoted by
- ending in '__init__'.
-
- """
- self.module_paths = data
- self.path_to_module = {val:key for key,val in data.items()}
-
- def get_data(self, path):
- if path not in self.path_to_module:
- raise IOError
- return self.source
-
- def is_package(self, name):
- filename = os.path.basename(self.get_filename(name))
- return os.path.splitext(filename)[0] == '__init__'
-
- def source_path(self, name):
- try:
- return self.module_paths[name]
- except KeyError:
- raise ImportError
-
- def get_filename(self, name):
- """Silence deprecation warning."""
- with warnings.catch_warnings(record=True) as w:
- warnings.simplefilter("always")
- path = super().get_filename(name)
- assert len(w) == 1
- assert issubclass(w[0].category, DeprecationWarning)
- return path
-
- def module_repr(self):
- return '<module>'
-
-
-class PyLoaderCompatMock(PyLoaderMock):
-
- """Mock that matches what is suggested to have a loader that is compatible
- from Python 3.1 onwards."""
-
- def get_filename(self, fullname):
- try:
- return self.module_paths[fullname]
- except KeyError:
- raise ImportError
-
- def source_path(self, fullname):
- try:
- return self.get_filename(fullname)
- except ImportError:
- return None
-
-
-class PyPycLoaderMock(abc.PyPycLoader, PyLoaderMock):
-
- default_mtime = 1
-
- def __init__(self, source, bc={}):
- """Initialize mock.
-
- 'bc' is a dict keyed on a module's name. The value is dict with
- possible keys of 'path', 'mtime', 'magic', and 'bc'. Except for 'path',
- each of those keys control if any part of created bytecode is to
- deviate from default values.
-
- """
- super().__init__(source)
- self.module_bytecode = {}
- self.path_to_bytecode = {}
- self.bytecode_to_path = {}
- for name, data in bc.items():
- self.path_to_bytecode[data['path']] = name
- self.bytecode_to_path[name] = data['path']
- magic = data.get('magic', imp.get_magic())
- mtime = importlib._w_long(data.get('mtime', self.default_mtime))
- source_size = importlib._w_long(len(self.source) & 0xFFFFFFFF)
- if 'bc' in data:
- bc = data['bc']
- else:
- bc = self.compile_bc(name)
- self.module_bytecode[name] = magic + mtime + source_size + bc
-
- def compile_bc(self, name):
- source_path = self.module_paths.get(name, '<test>') or '<test>'
- code = compile(self.source, source_path, 'exec')
- return marshal.dumps(code)
-
- def source_mtime(self, name):
- if name in self.module_paths:
- return self.default_mtime
- elif name in self.module_bytecode:
- return None
- else:
- raise ImportError
-
- def bytecode_path(self, name):
- try:
- return self.bytecode_to_path[name]
- except KeyError:
- if name in self.module_paths:
- return None
- else:
- raise ImportError
-
- def write_bytecode(self, name, bytecode):
- self.module_bytecode[name] = bytecode
- return True
-
- def get_data(self, path):
- if path in self.path_to_module:
- return super().get_data(path)
- elif path in self.path_to_bytecode:
- name = self.path_to_bytecode[path]
- return self.module_bytecode[name]
- else:
- raise IOError
-
- def is_package(self, name):
- try:
- return super().is_package(name)
- except TypeError:
- return '__init__' in self.bytecode_to_path[name]
-
- def get_code(self, name):
- with warnings.catch_warnings(record=True) as w:
- warnings.simplefilter("always")
- code_object = super().get_code(name)
- assert len(w) == 1
- assert issubclass(w[0].category, DeprecationWarning)
- return code_object
-
-class PyLoaderTests(testing_abc.LoaderTests):
-
- """Tests for importlib.abc.PyLoader."""
-
- mocker = PyLoaderMock
-
- def eq_attrs(self, ob, **kwargs):
- for attr, val in kwargs.items():
- found = getattr(ob, attr)
- self.assertEqual(found, val,
- "{} attribute: {} != {}".format(attr, found, val))
-
- def test_module(self):
- name = '<module>'
- path = os.path.join('', 'path', 'to', 'module')
- mock = self.mocker({name: path})
- with util.uncache(name):
- module = mock.load_module(name)
- self.assertIn(name, sys.modules)
- self.eq_attrs(module, __name__=name, __file__=path, __package__='',
- __loader__=mock)
- self.assertTrue(not hasattr(module, '__path__'))
- return mock, name
-
- def test_package(self):
- name = '<pkg>'
- path = os.path.join('path', 'to', name, '__init__')
- mock = self.mocker({name: path})
- with util.uncache(name):
- module = mock.load_module(name)
- self.assertIn(name, sys.modules)
- self.eq_attrs(module, __name__=name, __file__=path,
- __path__=[os.path.dirname(path)], __package__=name,
- __loader__=mock)
- return mock, name
-
- def test_lacking_parent(self):
- name = 'pkg.mod'
- path = os.path.join('path', 'to', 'pkg', 'mod')
- mock = self.mocker({name: path})
- with util.uncache(name):
- module = mock.load_module(name)
- self.assertIn(name, sys.modules)
- self.eq_attrs(module, __name__=name, __file__=path, __package__='pkg',
- __loader__=mock)
- self.assertFalse(hasattr(module, '__path__'))
- return mock, name
-
- def test_module_reuse(self):
- name = 'mod'
- path = os.path.join('path', 'to', 'mod')
- module = imp.new_module(name)
- mock = self.mocker({name: path})
- with util.uncache(name):
- sys.modules[name] = module
- loaded_module = mock.load_module(name)
- self.assertIs(loaded_module, module)
- self.assertIs(sys.modules[name], module)
- return mock, name
-
- def test_state_after_failure(self):
- name = "mod"
- module = imp.new_module(name)
- module.blah = None
- mock = self.mocker({name: os.path.join('path', 'to', 'mod')})
- mock.source = b"1/0"
- with util.uncache(name):
- sys.modules[name] = module
- with self.assertRaises(ZeroDivisionError):
- mock.load_module(name)
- self.assertIs(sys.modules[name], module)
- self.assertTrue(hasattr(module, 'blah'))
- return mock
-
- def test_unloadable(self):
- name = "mod"
- mock = self.mocker({name: os.path.join('path', 'to', 'mod')})
- mock.source = b"1/0"
- with util.uncache(name):
- with self.assertRaises(ZeroDivisionError):
- mock.load_module(name)
- self.assertNotIn(name, sys.modules)
- return mock
-
-
-class PyLoaderCompatTests(PyLoaderTests):
-
- """Test that the suggested code to make a loader that is compatible from
- Python 3.1 forward works."""
-
- mocker = PyLoaderCompatMock
-
-
-class PyLoaderInterfaceTests(unittest.TestCase):
-
- """Tests for importlib.abc.PyLoader to make sure that when source_path()
- doesn't return a path everything works as expected."""
-
- def test_no_source_path(self):
- # No source path should lead to ImportError.
- name = 'mod'
- mock = PyLoaderMock({})
- with util.uncache(name), self.assertRaises(ImportError):
- mock.load_module(name)
-
- def test_source_path_is_None(self):
- name = 'mod'
- mock = PyLoaderMock({name: None})
- with util.uncache(name), self.assertRaises(ImportError):
- mock.load_module(name)
-
- def test_get_filename_with_source_path(self):
- # get_filename() should return what source_path() returns.
- name = 'mod'
- path = os.path.join('path', 'to', 'source')
- mock = PyLoaderMock({name: path})
- with util.uncache(name):
- self.assertEqual(mock.get_filename(name), path)
-
- def test_get_filename_no_source_path(self):
- # get_filename() should raise ImportError if source_path returns None.
- name = 'mod'
- mock = PyLoaderMock({name: None})
- with util.uncache(name), self.assertRaises(ImportError):
- mock.get_filename(name)
-
-
-class PyPycLoaderTests(PyLoaderTests):
-
- """Tests for importlib.abc.PyPycLoader."""
-
- mocker = PyPycLoaderMock
-
- @source_util.writes_bytecode_files
- def verify_bytecode(self, mock, name):
- assert name in mock.module_paths
- self.assertIn(name, mock.module_bytecode)
- magic = mock.module_bytecode[name][:4]
- self.assertEqual(magic, imp.get_magic())
- mtime = importlib._r_long(mock.module_bytecode[name][4:8])
- self.assertEqual(mtime, 1)
- source_size = mock.module_bytecode[name][8:12]
- self.assertEqual(len(mock.source) & 0xFFFFFFFF,
- importlib._r_long(source_size))
- bc = mock.module_bytecode[name][12:]
- self.assertEqual(bc, mock.compile_bc(name))
-
- def test_module(self):
- mock, name = super().test_module()
- self.verify_bytecode(mock, name)
-
- def test_package(self):
- mock, name = super().test_package()
- self.verify_bytecode(mock, name)
-
- def test_lacking_parent(self):
- mock, name = super().test_lacking_parent()
- self.verify_bytecode(mock, name)
-
- def test_module_reuse(self):
- mock, name = super().test_module_reuse()
- self.verify_bytecode(mock, name)
-
- def test_state_after_failure(self):
- super().test_state_after_failure()
-
- def test_unloadable(self):
- super().test_unloadable()
-
-
-class PyPycLoaderInterfaceTests(unittest.TestCase):
-
- """Test for the interface of importlib.abc.PyPycLoader."""
-
- def get_filename_check(self, src_path, bc_path, expect):
- name = 'mod'
- mock = PyPycLoaderMock({name: src_path}, {name: {'path': bc_path}})
- with util.uncache(name):
- assert mock.source_path(name) == src_path
- assert mock.bytecode_path(name) == bc_path
- self.assertEqual(mock.get_filename(name), expect)
-
- def test_filename_with_source_bc(self):
- # When source and bytecode paths present, return the source path.
- self.get_filename_check('source_path', 'bc_path', 'source_path')
-
- def test_filename_with_source_no_bc(self):
- # With source but no bc, return source path.
- self.get_filename_check('source_path', None, 'source_path')
-
- def test_filename_with_no_source_bc(self):
-        # With no source but bc, return the bc path.
- self.get_filename_check(None, 'bc_path', 'bc_path')
-
- def test_filename_with_no_source_or_bc(self):
- # With no source or bc, raise ImportError.
- name = 'mod'
- mock = PyPycLoaderMock({name: None}, {name: {'path': None}})
- with util.uncache(name), self.assertRaises(ImportError):
- mock.get_filename(name)
-
-
-class SkipWritingBytecodeTests(unittest.TestCase):
-
- """Test that bytecode is properly handled based on
- sys.dont_write_bytecode."""
-
- @source_util.writes_bytecode_files
- def run_test(self, dont_write_bytecode):
- name = 'mod'
- mock = PyPycLoaderMock({name: os.path.join('path', 'to', 'mod')})
- sys.dont_write_bytecode = dont_write_bytecode
- with util.uncache(name):
- mock.load_module(name)
- self.assertIsNot(name in mock.module_bytecode, dont_write_bytecode)
-
- def test_no_bytecode_written(self):
- self.run_test(True)
-
- def test_bytecode_written(self):
- self.run_test(False)
-
-
-class RegeneratedBytecodeTests(unittest.TestCase):
-
- """Test that bytecode is regenerated as expected."""
-
- @source_util.writes_bytecode_files
- def test_different_magic(self):
- # A different magic number should lead to new bytecode.
- name = 'mod'
- bad_magic = b'\x00\x00\x00\x00'
- assert bad_magic != imp.get_magic()
- mock = PyPycLoaderMock({name: os.path.join('path', 'to', 'mod')},
- {name: {'path': os.path.join('path', 'to',
- 'mod.bytecode'),
- 'magic': bad_magic}})
- with util.uncache(name):
- mock.load_module(name)
- self.assertIn(name, mock.module_bytecode)
- magic = mock.module_bytecode[name][:4]
- self.assertEqual(magic, imp.get_magic())
-
- @source_util.writes_bytecode_files
- def test_old_mtime(self):
- # Bytecode with an older mtime should be regenerated.
- name = 'mod'
- old_mtime = PyPycLoaderMock.default_mtime - 1
- mock = PyPycLoaderMock({name: os.path.join('path', 'to', 'mod')},
- {name: {'path': 'path/to/mod.bytecode', 'mtime': old_mtime}})
- with util.uncache(name):
- mock.load_module(name)
- self.assertIn(name, mock.module_bytecode)
- mtime = importlib._r_long(mock.module_bytecode[name][4:8])
- self.assertEqual(mtime, PyPycLoaderMock.default_mtime)
-
-
-class BadBytecodeFailureTests(unittest.TestCase):
-
- """Test import failures when there is no source and parts of the bytecode
-    are bad."""
-
- def test_bad_magic(self):
- # A bad magic number should lead to an ImportError.
- name = 'mod'
- bad_magic = b'\x00\x00\x00\x00'
- bc = {name:
- {'path': os.path.join('path', 'to', 'mod'),
- 'magic': bad_magic}}
- mock = PyPycLoaderMock({name: None}, bc)
- with util.uncache(name), self.assertRaises(ImportError) as cm:
- mock.load_module(name)
- self.assertEqual(cm.exception.name, name)
-
- def test_no_bytecode(self):
- # Missing code object bytecode should lead to an EOFError.
- name = 'mod'
- bc = {name: {'path': os.path.join('path', 'to', 'mod'), 'bc': b''}}
- mock = PyPycLoaderMock({name: None}, bc)
- with util.uncache(name), self.assertRaises(EOFError):
- mock.load_module(name)
-
- def test_bad_bytecode(self):
- # Malformed code object bytecode should lead to a ValueError.
- name = 'mod'
- bc = {name: {'path': os.path.join('path', 'to', 'mod'), 'bc': b'1234'}}
- mock = PyPycLoaderMock({name: None}, bc)
- with util.uncache(name), self.assertRaises(ValueError):
- mock.load_module(name)
-
-
def raise_ImportError(*args, **kwargs):
raise ImportError
-class MissingPathsTests(unittest.TestCase):
-
- """Test what happens when a source or bytecode path does not exist (either
- from *_path returning None or raising ImportError)."""
-
- def test_source_path_None(self):
- # Bytecode should be used when source_path returns None, along with
- # __file__ being set to the bytecode path.
- name = 'mod'
- bytecode_path = 'path/to/mod'
- mock = PyPycLoaderMock({name: None}, {name: {'path': bytecode_path}})
- with util.uncache(name):
- module = mock.load_module(name)
- self.assertEqual(module.__file__, bytecode_path)
-
- # Testing for bytecode_path returning None handled by all tests where no
- # bytecode initially exists.
-
- def test_all_paths_None(self):
- # If all *_path methods return None, raise ImportError.
- name = 'mod'
- mock = PyPycLoaderMock({name: None})
- with util.uncache(name), self.assertRaises(ImportError) as cm:
- mock.load_module(name)
- self.assertEqual(cm.exception.name, name)
-
- def test_source_path_ImportError(self):
- # An ImportError from source_path should trigger an ImportError.
- name = 'mod'
- mock = PyPycLoaderMock({}, {name: {'path': os.path.join('path', 'to',
- 'mod')}})
- with util.uncache(name), self.assertRaises(ImportError):
- mock.load_module(name)
-
- def test_bytecode_path_ImportError(self):
- # An ImportError from bytecode_path should trigger an ImportError.
- name = 'mod'
- mock = PyPycLoaderMock({name: os.path.join('path', 'to', 'mod')})
- bad_meth = types.MethodType(raise_ImportError, mock)
- mock.bytecode_path = bad_meth
- with util.uncache(name), self.assertRaises(ImportError) as cm:
- mock.load_module(name)
-
class SourceLoaderTestHarness(unittest.TestCase):
@@ -622,6 +148,11 @@ class SourceOnlyLoaderTests(SourceLoaderTestHarness):
code_object = self.loader.get_code(self.name)
self.verify_code(code_object)
+ def test_source_to_code(self):
+ # Verify the compiled code object.
+ code = self.loader.source_to_code(self.loader.source, self.path)
+ self.verify_code(code)
+
def test_load_module(self):
# Loading a module should set __name__, __loader__, __package__,
# __path__ (for packages), __file__, and __cached__.
@@ -801,6 +332,7 @@ class AbstractMethodImplTests(unittest.TestCase):
class Loader(abc.Loader):
def load_module(self, fullname):
super().load_module(fullname)
+
def module_repr(self, module):
super().module_repr(module)
@@ -825,20 +357,6 @@ class AbstractMethodImplTests(unittest.TestCase):
class SourceLoader(ResourceLoader, ExecutionLoader, abc.SourceLoader):
pass
- class PyLoader(ResourceLoader, InspectLoader, abc.PyLoader):
- def source_path(self, _):
- super().source_path(_)
-
- class PyPycLoader(PyLoader, abc.PyPycLoader):
- def bytecode_path(self, _):
- super().bytecode_path(_)
-
- def source_mtime(self, _):
- super().source_mtime(_)
-
- def write_bytecode(self, _, _2):
- super().write_bytecode(_, _2)
-
def raises_NotImplementedError(self, ins, *args):
for method_name in args:
method = getattr(ins, method_name)
@@ -877,29 +395,15 @@ class AbstractMethodImplTests(unittest.TestCase):
# Required abstractmethods.
self.raises_NotImplementedError(ins, 'get_filename', 'get_data')
# Optional abstractmethods.
- self.raises_NotImplementedError(ins,'path_stats', 'set_data')
-
- def test_PyLoader(self):
- self.raises_NotImplementedError(self.PyLoader(), 'source_path',
- 'get_data', 'is_package')
-
- def test_PyPycLoader(self):
- self.raises_NotImplementedError(self.PyPycLoader(), 'source_path',
- 'source_mtime', 'bytecode_path',
- 'write_bytecode')
+ self.raises_NotImplementedError(ins, 'path_stats', 'set_data')
def test_main():
from test.support import run_unittest
- run_unittest(PyLoaderTests, PyLoaderCompatTests,
- PyLoaderInterfaceTests,
- PyPycLoaderTests, PyPycLoaderInterfaceTests,
- SkipWritingBytecodeTests, RegeneratedBytecodeTests,
- BadBytecodeFailureTests, MissingPathsTests,
- SourceOnlyLoaderTests,
- SourceLoaderBytecodeTests,
- SourceLoaderGetSourceTests,
- AbstractMethodImplTests)
+ run_unittest(SourceOnlyLoaderTests,
+ SourceLoaderBytecodeTests,
+ SourceLoaderGetSourceTests,
+ AbstractMethodImplTests)
if __name__ == '__main__':
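
The hunk above adds a test_source_to_code() check to the SourceLoader tests. A minimal sketch of what that method does, assuming only what the abstract-method tests above state (importlib.abc.SourceLoader requires just get_filename() and get_data()); the StringLoader name is invented for illustration:

    import importlib.abc

    class StringLoader(importlib.abc.SourceLoader):    # hypothetical helper
        """Serve Python source that is held in memory."""
        def __init__(self, source, path='<string>'):
            self._source = source.encode('utf-8')
            self._path = path
        def get_filename(self, fullname):
            return self._path
        def get_data(self, path):
            return self._source

    loader = StringLoader("x = 6 * 7")
    # source_to_code() compiles the raw source into a code object.
    code = loader.source_to_code(loader.get_data(None), '<demo>')
    namespace = {}
    exec(code, namespace)
    print(namespace['x'])                              # 42
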
diff --git a/Lib/test/test_importlib/test_abc.py b/Lib/test/test_importlib/test_abc.py
index c620c37..a8d8c2e 100644
--- a/Lib/test/test_importlib/test_abc.py
+++ b/Lib/test/test_importlib/test_abc.py
@@ -43,11 +43,6 @@ class PathEntryFinder(InheritanceTests, unittest.TestCase):
subclasses = [machinery.FileFinder]
-class Loader(InheritanceTests, unittest.TestCase):
-
- subclasses = [abc.PyLoader]
-
-
class ResourceLoader(InheritanceTests, unittest.TestCase):
superclasses = [abc.Loader]
@@ -56,14 +51,13 @@ class ResourceLoader(InheritanceTests, unittest.TestCase):
class InspectLoader(InheritanceTests, unittest.TestCase):
superclasses = [abc.Loader]
- subclasses = [abc.PyLoader, machinery.BuiltinImporter,
+ subclasses = [machinery.BuiltinImporter,
machinery.FrozenImporter, machinery.ExtensionFileLoader]
class ExecutionLoader(InheritanceTests, unittest.TestCase):
superclasses = [abc.InspectLoader]
- subclasses = [abc.PyLoader]
class FileLoader(InheritanceTests, unittest.TestCase):
@@ -78,16 +72,6 @@ class SourceLoader(InheritanceTests, unittest.TestCase):
subclasses = [machinery.SourceFileLoader]
-class PyLoader(InheritanceTests, unittest.TestCase):
-
- superclasses = [abc.Loader, abc.ResourceLoader, abc.ExecutionLoader]
-
-
-class PyPycLoader(InheritanceTests, unittest.TestCase):
-
- superclasses = [abc.PyLoader]
-
-
def test_main():
from test.support import run_unittest
classes = []
diff --git a/Lib/test/test_io.py b/Lib/test/test_io.py
index 555ad74..c99ba34 100644
--- a/Lib/test/test_io.py
+++ b/Lib/test/test_io.py
@@ -815,6 +815,14 @@ class SizeofTest:
bufio = self.tp(rawio, buffer_size=bufsize2)
self.assertEqual(sys.getsizeof(bufio), size + bufsize2)
+ @support.cpython_only
+    def test_buffer_freeing(self):
+ bufsize = 4096
+ rawio = self.MockRawIO()
+ bufio = self.tp(rawio, buffer_size=bufsize)
+ size = sys.getsizeof(bufio) - bufsize
+ bufio.close()
+ self.assertEqual(sys.getsizeof(bufio), size)
class BufferedReaderTest(unittest.TestCase, CommonBufferedTests):
read_mode = "rb"
@@ -2817,7 +2825,7 @@ class MiscIOTest(unittest.TestCase):
for fd in fds:
try:
os.close(fd)
- except EnvironmentError as e:
+ except OSError as e:
if e.errno != errno.EBADF:
raise
self.addCleanup(cleanup_fds)
diff --git a/Lib/test/test_iterlen.py b/Lib/test/test_iterlen.py
index 7469a31..57f7101 100644
--- a/Lib/test/test_iterlen.py
+++ b/Lib/test/test_iterlen.py
@@ -45,31 +45,21 @@ import unittest
from test import support
from itertools import repeat
from collections import deque
-from builtins import len as _len
+from operator import length_hint
n = 10
-def len(obj):
- try:
- return _len(obj)
- except TypeError:
- try:
- # note: this is an internal undocumented API,
- # don't rely on it in your own programs
- return obj.__length_hint__()
- except AttributeError:
- raise TypeError
class TestInvariantWithoutMutations(unittest.TestCase):
def test_invariant(self):
it = self.it
for i in reversed(range(1, n+1)):
- self.assertEqual(len(it), i)
+ self.assertEqual(length_hint(it), i)
next(it)
- self.assertEqual(len(it), 0)
+ self.assertEqual(length_hint(it), 0)
self.assertRaises(StopIteration, next, it)
- self.assertEqual(len(it), 0)
+ self.assertEqual(length_hint(it), 0)
class TestTemporarilyImmutable(TestInvariantWithoutMutations):
@@ -78,12 +68,12 @@ class TestTemporarilyImmutable(TestInvariantWithoutMutations):
# length immutability during iteration
it = self.it
- self.assertEqual(len(it), n)
+ self.assertEqual(length_hint(it), n)
next(it)
- self.assertEqual(len(it), n-1)
+ self.assertEqual(length_hint(it), n-1)
self.mutate()
self.assertRaises(RuntimeError, next, it)
- self.assertEqual(len(it), 0)
+ self.assertEqual(length_hint(it), 0)
## ------- Concrete Type Tests -------
@@ -92,10 +82,6 @@ class TestRepeat(TestInvariantWithoutMutations):
def setUp(self):
self.it = repeat(None, n)
- def test_no_len_for_infinite_repeat(self):
- # The repeat() object can also be infinite
- self.assertRaises(TypeError, len, repeat(None))
-
class TestXrange(TestInvariantWithoutMutations):
def setUp(self):
@@ -167,14 +153,15 @@ class TestList(TestInvariantWithoutMutations):
it = iter(d)
next(it)
next(it)
- self.assertEqual(len(it), n-2)
+ self.assertEqual(length_hint(it), n - 2)
d.append(n)
- self.assertEqual(len(it), n-1) # grow with append
+ self.assertEqual(length_hint(it), n - 1) # grow with append
d[1:] = []
- self.assertEqual(len(it), 0)
+ self.assertEqual(length_hint(it), 0)
self.assertEqual(list(it), [])
d.extend(range(20))
- self.assertEqual(len(it), 0)
+ self.assertEqual(length_hint(it), 0)
+
class TestListReversed(TestInvariantWithoutMutations):
@@ -186,32 +173,41 @@ class TestListReversed(TestInvariantWithoutMutations):
it = reversed(d)
next(it)
next(it)
- self.assertEqual(len(it), n-2)
+ self.assertEqual(length_hint(it), n - 2)
d.append(n)
- self.assertEqual(len(it), n-2) # ignore append
+ self.assertEqual(length_hint(it), n - 2) # ignore append
d[1:] = []
- self.assertEqual(len(it), 0)
+ self.assertEqual(length_hint(it), 0)
self.assertEqual(list(it), []) # confirm invariant
d.extend(range(20))
- self.assertEqual(len(it), 0)
+ self.assertEqual(length_hint(it), 0)
## -- Check to make sure exceptions are not suppressed by __length_hint__()
class BadLen(object):
- def __iter__(self): return iter(range(10))
+ def __iter__(self):
+ return iter(range(10))
+
def __len__(self):
raise RuntimeError('hello')
+
class BadLengthHint(object):
- def __iter__(self): return iter(range(10))
+ def __iter__(self):
+ return iter(range(10))
+
def __length_hint__(self):
raise RuntimeError('hello')
+
class NoneLengthHint(object):
- def __iter__(self): return iter(range(10))
+ def __iter__(self):
+ return iter(range(10))
+
def __length_hint__(self):
- return None
+ return NotImplemented
+
class TestLengthHintExceptions(unittest.TestCase):
diff --git a/Lib/test/test_itertools.py b/Lib/test/test_itertools.py
index 90e85a9..ece7f99 100644
--- a/Lib/test/test_itertools.py
+++ b/Lib/test/test_itertools.py
@@ -1723,9 +1723,8 @@ class TestVariousIteratorArgs(unittest.TestCase):
class LengthTransparency(unittest.TestCase):
def test_repeat(self):
- from test.test_iterlen import len
- self.assertEqual(len(repeat(None, 50)), 50)
- self.assertRaises(TypeError, len, repeat(None))
+ self.assertEqual(operator.length_hint(repeat(None, 50)), 50)
+ self.assertEqual(operator.length_hint(repeat(None), 12), 12)
class RegressionTests(unittest.TestCase):
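
The test_iterlen.py and test_itertools.py hunks above replace the local len() shim with operator.length_hint(). A minimal sketch of the semantics those tests rely on: len() when available, otherwise __length_hint__(), otherwise the caller-supplied default. The Hinted class is made up for the example:

    import operator
    from itertools import repeat

    print(operator.length_hint([10, 20, 30]))       # 3  -- real len() wins
    print(operator.length_hint(iter(range(5))))     # 5  -- falls back to __length_hint__()
    print(operator.length_hint(repeat(None), 12))   # 12 -- default used when no hint exists

    class Hinted:
        def __iter__(self):
            return iter(range(4))
        def __length_hint__(self):
            return 4                                # an estimate, not a guarantee

    print(operator.length_hint(Hinted()))           # 4
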
diff --git a/Lib/test/test_logging.py b/Lib/test/test_logging.py
index cb908fb..9b76055 100644
--- a/Lib/test/test_logging.py
+++ b/Lib/test/test_logging.py
@@ -26,6 +26,7 @@ import logging.handlers
import logging.config
import codecs
+import configparser
import datetime
import pickle
import io
@@ -81,7 +82,7 @@ class BaseTest(unittest.TestCase):
"""Base class for logging tests."""
log_format = "%(name)s -> %(levelname)s: %(message)s"
- expected_log_pat = r"^([\w.]+) -> ([\w]+): ([\d]+)$"
+ expected_log_pat = r"^([\w.]+) -> (\w+): (\d+)$"
message_num = 0
def setUp(self):
@@ -150,14 +151,17 @@ class BaseTest(unittest.TestCase):
finally:
logging._releaseLock()
- def assert_log_lines(self, expected_values, stream=None):
+ def assert_log_lines(self, expected_values, stream=None, pat=None):
"""Match the collected log lines against the regular expression
self.expected_log_pat, and compare the extracted group values to
the expected_values list of tuples."""
stream = stream or self.stream
- pat = re.compile(self.expected_log_pat)
+ pat = re.compile(pat or self.expected_log_pat)
try:
- stream.reset()
+ if hasattr(stream, 'reset'):
+ stream.reset()
+ elif hasattr(stream, 'seek'):
+ stream.seek(0)
actual_lines = stream.readlines()
except AttributeError:
# StringIO.StringIO lacks a reset() method.
@@ -435,7 +439,7 @@ class CustomLevelsAndFiltersTest(BaseTest):
"""Test various filtering possibilities with custom logging levels."""
# Skip the logger name group.
- expected_log_pat = r"^[\w.]+ -> ([\w]+): ([\d]+)$"
+ expected_log_pat = r"^[\w.]+ -> (\w+): (\d+)$"
def setUp(self):
BaseTest.setUp(self)
@@ -769,7 +773,7 @@ if threading:
"""
try:
asyncore.loop(poll_interval, map=self.sockmap)
- except select.error:
+ except OSError:
                # On FreeBSD 8, closing the server repeatedly
# raises this error. We swallow it if the
# server has been closed.
@@ -996,7 +1000,7 @@ class MemoryHandlerTest(BaseTest):
"""Tests for the MemoryHandler."""
# Do not bother with a logger name group.
- expected_log_pat = r"^[\w.]+ -> ([\w]+): ([\d]+)$"
+ expected_log_pat = r"^[\w.]+ -> (\w+): (\d+)$"
def setUp(self):
BaseTest.setUp(self)
@@ -1049,7 +1053,7 @@ class ConfigFileTest(BaseTest):
"""Reading logging config from a .ini-style config file."""
- expected_log_pat = r"^([\w]+) \+\+ ([\w]+)$"
+ expected_log_pat = r"^(\w+) \+\+ (\w+)$"
# config0 is a standard configuration.
config0 = """
@@ -1276,6 +1280,24 @@ class ConfigFileTest(BaseTest):
# Original logger output is empty.
self.assert_log_lines([])
+ def test_config0_using_cp_ok(self):
+ # A simple config file which overrides the default settings.
+ with captured_stdout() as output:
+ file = io.StringIO(textwrap.dedent(self.config0))
+ cp = configparser.ConfigParser()
+ cp.read_file(file)
+ logging.config.fileConfig(cp)
+ logger = logging.getLogger()
+ # Won't output anything
+ logger.info(self.next_message())
+ # Outputs a message
+ logger.error(self.next_message())
+ self.assert_log_lines([
+ ('ERROR', '2'),
+ ], stream=output)
+ # Original logger output is empty.
+ self.assert_log_lines([])
+
def test_config1_ok(self, config=config1):
# A config file defining a sub-parser as well.
with captured_stdout() as output:
@@ -1419,16 +1441,19 @@ class SocketHandlerTest(BaseTest):
self.assertEqual(self.log_output, "spam\neggs\n")
def test_noserver(self):
+ # Avoid timing-related failures due to SocketHandler's own hard-wired
+ # one-second timeout on socket.create_connection() (issue #16264).
+ self.sock_hdlr.retryStart = 2.5
# Kill the server
self.server.stop(2.0)
- #The logging call should try to connect, which should fail
+ # The logging call should try to connect, which should fail
try:
raise RuntimeError('Deliberate mistake')
except RuntimeError:
self.root_logger.exception('Never sent')
self.root_logger.error('Never sent, either')
now = time.time()
- self.assertTrue(self.sock_hdlr.retryTime > now)
+ self.assertGreater(self.sock_hdlr.retryTime, now)
time.sleep(self.sock_hdlr.retryTime - now + 0.001)
self.root_logger.error('Nor this')
@@ -1754,7 +1779,7 @@ class WarningsTest(BaseTest):
logger.removeHandler(h)
s = stream.getvalue()
h.close()
- self.assertTrue(s.find("UserWarning: I'm warning you...\n") > 0)
+ self.assertGreater(s.find("UserWarning: I'm warning you...\n"), 0)
#See if an explicit file uses the original implementation
a_file = io.StringIO()
@@ -1792,7 +1817,7 @@ class ConfigDictTest(BaseTest):
"""Reading logging config from a dictionary."""
- expected_log_pat = r"^([\w]+) \+\+ ([\w]+)$"
+ expected_log_pat = r"^(\w+) \+\+ (\w+)$"
# config0 is a standard configuration.
config0 = {
@@ -2364,6 +2389,32 @@ class ConfigDictTest(BaseTest):
},
}
+ # As config0, but with properties
+ config14 = {
+ 'version': 1,
+ 'formatters': {
+ 'form1' : {
+ 'format' : '%(levelname)s ++ %(message)s',
+ },
+ },
+ 'handlers' : {
+ 'hand1' : {
+ 'class' : 'logging.StreamHandler',
+ 'formatter' : 'form1',
+ 'level' : 'NOTSET',
+ 'stream' : 'ext://sys.stdout',
+ '.': {
+ 'foo': 'bar',
+ 'terminator': '!\n',
+ }
+ },
+ },
+ 'root' : {
+ 'level' : 'WARNING',
+ 'handlers' : ['hand1'],
+ },
+ }
+
def apply_config(self, conf):
logging.config.dictConfig(conf)
@@ -2600,11 +2651,20 @@ class ConfigDictTest(BaseTest):
def test_config13_failure(self):
self.assertRaises(Exception, self.apply_config, self.config13)
+ def test_config14_ok(self):
+ with captured_stdout() as output:
+ self.apply_config(self.config14)
+ h = logging._handlers['hand1']
+ self.assertEqual(h.foo, 'bar')
+ self.assertEqual(h.terminator, '!\n')
+ logging.warning('Exclamation')
+ self.assertTrue(output.getvalue().endswith('Exclamation!\n'))
+
@unittest.skipUnless(threading, 'listen() needs threading to work')
- def setup_via_listener(self, text):
+ def setup_via_listener(self, text, verify=None):
text = text.encode("utf-8")
# Ask for a randomly assigned port (by using port 0)
- t = logging.config.listen(0)
+ t = logging.config.listen(0, verify)
t.start()
t.ready.wait()
# Now get the port allocated
@@ -2664,6 +2724,69 @@ class ConfigDictTest(BaseTest):
# Original logger output is empty.
self.assert_log_lines([])
+ @unittest.skipUnless(threading, 'Threading required for this test.')
+ def test_listen_verify(self):
+
+ def verify_fail(stuff):
+ return None
+
+ def verify_reverse(stuff):
+ return stuff[::-1]
+
+ logger = logging.getLogger("compiler.parser")
+ to_send = textwrap.dedent(ConfigFileTest.config1)
+ # First, specify a verification function that will fail.
+ # We expect to see no output, since our configuration
+ # never took effect.
+ with captured_stdout() as output:
+ self.setup_via_listener(to_send, verify_fail)
+ # Both will output a message
+ logger.info(self.next_message())
+ logger.error(self.next_message())
+ self.assert_log_lines([], stream=output)
+ # Original logger output has the stuff we logged.
+ self.assert_log_lines([
+ ('INFO', '1'),
+ ('ERROR', '2'),
+ ], pat=r"^[\w.]+ -> (\w+): (\d+)$")
+
+ # Now, perform no verification. Our configuration
+ # should take effect.
+
+ with captured_stdout() as output:
+ self.setup_via_listener(to_send) # no verify callable specified
+ logger = logging.getLogger("compiler.parser")
+ # Both will output a message
+ logger.info(self.next_message())
+ logger.error(self.next_message())
+ self.assert_log_lines([
+ ('INFO', '3'),
+ ('ERROR', '4'),
+ ], stream=output)
+ # Original logger output still has the stuff we logged before.
+ self.assert_log_lines([
+ ('INFO', '1'),
+ ('ERROR', '2'),
+ ], pat=r"^[\w.]+ -> (\w+): (\d+)$")
+
+ # Now, perform verification which transforms the bytes.
+
+ with captured_stdout() as output:
+ self.setup_via_listener(to_send[::-1], verify_reverse)
+ logger = logging.getLogger("compiler.parser")
+ # Both will output a message
+ logger.info(self.next_message())
+ logger.error(self.next_message())
+ self.assert_log_lines([
+ ('INFO', '5'),
+ ('ERROR', '6'),
+ ], stream=output)
+ # Original logger output still has the stuff we logged before.
+ self.assert_log_lines([
+ ('INFO', '1'),
+ ('ERROR', '2'),
+ ], pat=r"^[\w.]+ -> (\w+): (\d+)$")
+
def test_baseconfig(self):
d = {
'atuple': (1, 2, 3),
@@ -2716,14 +2839,14 @@ class ChildLoggerTest(BaseTest):
l2 = logging.getLogger('def.ghi')
c1 = r.getChild('xyz')
c2 = r.getChild('uvw.xyz')
- self.assertTrue(c1 is logging.getLogger('xyz'))
- self.assertTrue(c2 is logging.getLogger('uvw.xyz'))
+ self.assertIs(c1, logging.getLogger('xyz'))
+ self.assertIs(c2, logging.getLogger('uvw.xyz'))
c1 = l1.getChild('def')
c2 = c1.getChild('ghi')
c3 = l1.getChild('def.ghi')
- self.assertTrue(c1 is logging.getLogger('abc.def'))
- self.assertTrue(c2 is logging.getLogger('abc.def.ghi'))
- self.assertTrue(c2 is c3)
+ self.assertIs(c1, logging.getLogger('abc.def'))
+ self.assertIs(c2, logging.getLogger('abc.def.ghi'))
+ self.assertIs(c2, c3)
class DerivedLogRecord(logging.LogRecord):
@@ -2766,7 +2889,7 @@ class LogRecordFactoryTest(BaseTest):
class QueueHandlerTest(BaseTest):
# Do not bother with a logger name group.
- expected_log_pat = r"^[\w.]+ -> ([\w]+): ([\d]+)$"
+ expected_log_pat = r"^[\w.]+ -> (\w+): (\d+)$"
def setUp(self):
BaseTest.setUp(self)
@@ -3804,7 +3927,7 @@ class NTEventLogHandlerTest(BaseTest):
h.handle(r)
h.close()
# Now see if the event is recorded
- self.assertTrue(num_recs < win32evtlog.GetNumberOfEventLogRecords(elh))
+ self.assertLess(num_recs, win32evtlog.GetNumberOfEventLogRecords(elh))
flags = win32evtlog.EVENTLOG_BACKWARDS_READ | \
win32evtlog.EVENTLOG_SEQUENTIAL_READ
found = False
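
Two logging.config features are exercised above: the '.' key in a dictConfig handler section (config14), which sets arbitrary attributes on the constructed handler, and the optional verify callable accepted by listen(), which may transform the received configuration bytes or reject them by returning None. A hedged sketch with made-up values:

    import logging, logging.config

    logging.config.dictConfig({
        'version': 1,
        'handlers': {
            'console': {
                'class': 'logging.StreamHandler',
                'stream': 'ext://sys.stdout',
                '.': {'terminator': '!\n'},    # becomes handler.terminator
            },
        },
        'root': {'level': 'INFO', 'handlers': ['console']},
    })
    logging.info('hello')                      # stdout gets "hello!" plus a newline

    def verify(data):
        # May transform the bytes before they are applied, or return None to
        # reject the configuration outright (as verify_fail() does above).
        return data

    # t = logging.config.listen(0, verify)     # as setup_via_listener() does above
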
diff --git a/Lib/test/test_mailbox.py b/Lib/test/test_mailbox.py
index 6b1e933..d1cc92e 100644
--- a/Lib/test/test_mailbox.py
+++ b/Lib/test/test_mailbox.py
@@ -596,7 +596,7 @@ class TestMaildir(TestMailbox, unittest.TestCase):
def setUp(self):
TestMailbox.setUp(self)
- if os.name in ('nt', 'os2') or sys.platform == 'cygwin':
+ if (os.name == 'nt') or (sys.platform == 'cygwin'):
self._box.colon = '!'
def assertMailboxEmpty(self):
diff --git a/Lib/test/test_mimetypes.py b/Lib/test/test_mimetypes.py
index 91da289..593fdb0 100644
--- a/Lib/test/test_mimetypes.py
+++ b/Lib/test/test_mimetypes.py
@@ -22,6 +22,8 @@ class MimeTypesTestCase(unittest.TestCase):
eq(self.db.guess_type("foo.tgz"), ("application/x-tar", "gzip"))
eq(self.db.guess_type("foo.tar.gz"), ("application/x-tar", "gzip"))
eq(self.db.guess_type("foo.tar.Z"), ("application/x-tar", "compress"))
+ eq(self.db.guess_type("foo.tar.bz2"), ("application/x-tar", "bzip2"))
+ eq(self.db.guess_type("foo.tar.xz"), ("application/x-tar", "xz"))
def test_data_urls(self):
eq = self.assertEqual
diff --git a/Lib/test/test_mmap.py b/Lib/test/test_mmap.py
index a3de398..e3b7909 100644
--- a/Lib/test/test_mmap.py
+++ b/Lib/test/test_mmap.py
@@ -245,7 +245,7 @@ class MmapTests(unittest.TestCase):
def test_bad_file_desc(self):
# Try opening a bad file descriptor...
- self.assertRaises(mmap.error, mmap.mmap, -2, 4096)
+ self.assertRaises(OSError, mmap.mmap, -2, 4096)
def test_tougher_find(self):
# Do a tougher .find() test. SF bug 515943 pointed out that, in 2.2,
@@ -673,7 +673,7 @@ class MmapTests(unittest.TestCase):
# parameters to _get_osfhandle.
s = socket.socket()
try:
- with self.assertRaises(mmap.error):
+ with self.assertRaises(OSError):
m = mmap.mmap(s.fileno(), 10)
finally:
s.close()
diff --git a/Lib/test/test_multiprocessing.py b/Lib/test/test_multiprocessing.py
index b2a964c..9c7a202 100644
--- a/Lib/test/test_multiprocessing.py
+++ b/Lib/test/test_multiprocessing.py
@@ -19,6 +19,7 @@ import socket
import random
import logging
import struct
+import operator
import test.support
import test.script_helper
@@ -1617,6 +1618,18 @@ def mul(x, y):
class _TestPool(BaseTestCase):
+ @classmethod
+ def setUpClass(cls):
+ super().setUpClass()
+ cls.pool = cls.Pool(4)
+
+ @classmethod
+ def tearDownClass(cls):
+ cls.pool.terminate()
+ cls.pool.join()
+ cls.pool = None
+ super().tearDownClass()
+
def test_apply(self):
papply = self.pool.apply
self.assertEqual(papply(sqr, (5,)), sqr(5))
@@ -1708,15 +1721,6 @@ class _TestPool(BaseTestCase):
p.join()
def test_terminate(self):
- if self.TYPE == 'manager':
- # On Unix a forked process increfs each shared object to
- # which its parent process held a reference. If the
- # forked process gets terminated then there is likely to
- # be a reference leak. So to prevent
- # _TestZZZNumberOfObjects from failing we skip this test
- # when using a manager.
- return
-
result = self.pool.map_async(
time.sleep, [0.1 for i in range(10000)], chunksize=1
)
@@ -1838,35 +1842,6 @@ class _TestPoolWorkerLifetime(BaseTestCase):
for (j, res) in enumerate(results):
self.assertEqual(res.get(), sqr(j))
-
-#
-# Test that manager has expected number of shared objects left
-#
-
-class _TestZZZNumberOfObjects(BaseTestCase):
- # Because test cases are sorted alphabetically, this one will get
- # run after all the other tests for the manager. It tests that
- # there have been no "reference leaks" for the manager's shared
- # objects. Note the comment in _TestPool.test_terminate().
-
- # If some other test using ManagerMixin.manager fails, then the
- # raised exception may keep alive a frame which holds a reference
- # to a managed object. This will cause test_number_of_objects to
- # also fail.
- ALLOWED_TYPES = ('manager',)
-
- def test_number_of_objects(self):
- EXPECTED_NUMBER = 1 # the pool object is still alive
- multiprocessing.active_children() # discard dead process objs
- gc.collect() # do garbage collection
- refs = self.manager._number_of_objects()
- debug_info = self.manager._debug_info()
- if refs != EXPECTED_NUMBER:
- print(self.manager._debug_info())
- print(debug_info)
-
- self.assertEqual(refs, EXPECTED_NUMBER)
-
#
# Test of creating a customized manager class
#
@@ -2903,15 +2878,6 @@ class TestInvalidHandle(unittest.TestCase):
# Functions used to create test cases from the base ones in this module
#
-def get_attributes(Source, names):
- d = {}
- for name in names:
- obj = getattr(Source, name)
- if type(obj) == type(get_attributes):
- obj = staticmethod(obj)
- d[name] = obj
- return d
-
def create_test_cases(Mixin, type):
result = {}
glob = globals()
@@ -2924,10 +2890,10 @@ def create_test_cases(Mixin, type):
assert set(base.ALLOWED_TYPES) <= ALL_TYPES, set(base.ALLOWED_TYPES)
if type in base.ALLOWED_TYPES:
newname = 'With' + Type + name[1:]
- class Temp(base, unittest.TestCase, Mixin):
+ class Temp(base, Mixin, unittest.TestCase):
pass
result[newname] = Temp
- Temp.__name__ = newname
+ Temp.__name__ = Temp.__qualname__ = newname
Temp.__module__ = Mixin.__module__
return result
@@ -2938,12 +2904,24 @@ def create_test_cases(Mixin, type):
class ProcessesMixin(object):
TYPE = 'processes'
Process = multiprocessing.Process
- locals().update(get_attributes(multiprocessing, (
- 'Queue', 'Lock', 'RLock', 'Semaphore', 'BoundedSemaphore',
- 'Condition', 'Event', 'Barrier', 'Value', 'Array', 'RawValue',
- 'RawArray', 'current_process', 'active_children', 'Pipe',
- 'connection', 'JoinableQueue', 'Pool'
- )))
+ connection = multiprocessing.connection
+ current_process = staticmethod(multiprocessing.current_process)
+ active_children = staticmethod(multiprocessing.active_children)
+ Pool = staticmethod(multiprocessing.Pool)
+ Pipe = staticmethod(multiprocessing.Pipe)
+ Queue = staticmethod(multiprocessing.Queue)
+ JoinableQueue = staticmethod(multiprocessing.JoinableQueue)
+ Lock = staticmethod(multiprocessing.Lock)
+ RLock = staticmethod(multiprocessing.RLock)
+ Semaphore = staticmethod(multiprocessing.Semaphore)
+ BoundedSemaphore = staticmethod(multiprocessing.BoundedSemaphore)
+ Condition = staticmethod(multiprocessing.Condition)
+ Event = staticmethod(multiprocessing.Event)
+ Barrier = staticmethod(multiprocessing.Barrier)
+ Value = staticmethod(multiprocessing.Value)
+ Array = staticmethod(multiprocessing.Array)
+ RawValue = staticmethod(multiprocessing.RawValue)
+ RawArray = staticmethod(multiprocessing.RawArray)
testcases_processes = create_test_cases(ProcessesMixin, type='processes')
globals().update(testcases_processes)
@@ -2952,12 +2930,42 @@ globals().update(testcases_processes)
class ManagerMixin(object):
TYPE = 'manager'
Process = multiprocessing.Process
- manager = object.__new__(multiprocessing.managers.SyncManager)
- locals().update(get_attributes(manager, (
- 'Queue', 'Lock', 'RLock', 'Semaphore', 'BoundedSemaphore',
- 'Condition', 'Event', 'Barrier', 'Value', 'Array', 'list', 'dict',
- 'Namespace', 'JoinableQueue', 'Pool'
- )))
+ Queue = property(operator.attrgetter('manager.Queue'))
+ JoinableQueue = property(operator.attrgetter('manager.JoinableQueue'))
+ Lock = property(operator.attrgetter('manager.Lock'))
+ RLock = property(operator.attrgetter('manager.RLock'))
+ Semaphore = property(operator.attrgetter('manager.Semaphore'))
+ BoundedSemaphore = property(operator.attrgetter('manager.BoundedSemaphore'))
+ Condition = property(operator.attrgetter('manager.Condition'))
+ Event = property(operator.attrgetter('manager.Event'))
+ Barrier = property(operator.attrgetter('manager.Barrier'))
+ Value = property(operator.attrgetter('manager.Value'))
+ Array = property(operator.attrgetter('manager.Array'))
+ list = property(operator.attrgetter('manager.list'))
+ dict = property(operator.attrgetter('manager.dict'))
+ Namespace = property(operator.attrgetter('manager.Namespace'))
+
+ @classmethod
+ def Pool(cls, *args, **kwds):
+ return cls.manager.Pool(*args, **kwds)
+
+ @classmethod
+ def setUpClass(cls):
+ cls.manager = multiprocessing.Manager()
+
+ @classmethod
+ def tearDownClass(cls):
+ multiprocessing.active_children() # discard dead process objs
+ gc.collect() # do garbage collection
+ if cls.manager._number_of_objects() != 0:
+ # This is not really an error since some tests do not
+ # ensure that all processes which hold a reference to a
+ # managed object have been joined.
+ print('Shared objects which still exist at manager shutdown:')
+ print(cls.manager._debug_info())
+ cls.manager.shutdown()
+ cls.manager.join()
+ cls.manager = None
testcases_manager = create_test_cases(ManagerMixin, type='manager')
globals().update(testcases_manager)
@@ -2966,16 +2974,27 @@ globals().update(testcases_manager)
class ThreadsMixin(object):
TYPE = 'threads'
Process = multiprocessing.dummy.Process
- locals().update(get_attributes(multiprocessing.dummy, (
- 'Queue', 'Lock', 'RLock', 'Semaphore', 'BoundedSemaphore',
- 'Condition', 'Event', 'Barrier', 'Value', 'Array', 'current_process',
- 'active_children', 'Pipe', 'connection', 'dict', 'list',
- 'Namespace', 'JoinableQueue', 'Pool'
- )))
+ connection = multiprocessing.dummy.connection
+ current_process = staticmethod(multiprocessing.dummy.current_process)
+ active_children = staticmethod(multiprocessing.dummy.active_children)
+ Pool = staticmethod(multiprocessing.Pool)
+ Pipe = staticmethod(multiprocessing.dummy.Pipe)
+ Queue = staticmethod(multiprocessing.dummy.Queue)
+ JoinableQueue = staticmethod(multiprocessing.dummy.JoinableQueue)
+ Lock = staticmethod(multiprocessing.dummy.Lock)
+ RLock = staticmethod(multiprocessing.dummy.RLock)
+ Semaphore = staticmethod(multiprocessing.dummy.Semaphore)
+ BoundedSemaphore = staticmethod(multiprocessing.dummy.BoundedSemaphore)
+ Condition = staticmethod(multiprocessing.dummy.Condition)
+ Event = staticmethod(multiprocessing.dummy.Event)
+ Barrier = staticmethod(multiprocessing.dummy.Barrier)
+ Value = staticmethod(multiprocessing.dummy.Value)
+ Array = staticmethod(multiprocessing.dummy.Array)
testcases_threads = create_test_cases(ThreadsMixin, type='threads')
globals().update(testcases_threads)
+
class OtherTest(unittest.TestCase):
# TODO: add more tests for deliver/answer challenge.
def test_deliver_challenge_auth_failure(self):
@@ -3407,12 +3426,6 @@ def test_main(run=None):
multiprocessing.get_logger().setLevel(LOG_LEVEL)
- ProcessesMixin.pool = multiprocessing.Pool(4)
- ThreadsMixin.pool = multiprocessing.dummy.Pool(4)
- ManagerMixin.manager.__init__()
- ManagerMixin.manager.start()
- ManagerMixin.pool = ManagerMixin.manager.Pool(4)
-
testcases = (
sorted(testcases_processes.values(), key=lambda tc:tc.__name__) +
sorted(testcases_threads.values(), key=lambda tc:tc.__name__) +
@@ -3422,18 +3435,7 @@ def test_main(run=None):
loadTestsFromTestCase = unittest.defaultTestLoader.loadTestsFromTestCase
suite = unittest.TestSuite(loadTestsFromTestCase(tc) for tc in testcases)
- try:
- run(suite)
- finally:
- ThreadsMixin.pool.terminate()
- ProcessesMixin.pool.terminate()
- ManagerMixin.pool.terminate()
- ManagerMixin.pool.join()
- ManagerMixin.manager.shutdown()
- ManagerMixin.manager.join()
- ThreadsMixin.pool.join()
- ProcessesMixin.pool.join()
- del ProcessesMixin.pool, ThreadsMixin.pool, ManagerMixin.pool
+ run(suite)
def main():
test_main(unittest.TextTestRunner(verbosity=2).run)
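
The mixin rewrite above replaces the old get_attributes() trick with explicit class attributes; ManagerMixin in particular uses property(operator.attrgetter('manager.Queue')) so each name resolves through cls.manager, which setUpClass() only creates later. A small sketch of why that works (FakeManager is invented for illustration):

    import operator

    class Mixin:
        Queue = property(operator.attrgetter('manager.Queue'))

        @classmethod
        def setUpClass(cls):
            class FakeManager:                  # stand-in for multiprocessing.Manager()
                @staticmethod
                def Queue():
                    return 'queue from the manager'
            cls.manager = FakeManager()

    Mixin.setUpClass()
    # The property getter runs attrgetter('manager.Queue') on the instance,
    # i.e. self.manager.Queue, so it picks up whatever manager setUpClass installed.
    print(Mixin().Queue())                      # 'queue from the manager'
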
diff --git a/Lib/test/test_openpty.py b/Lib/test/test_openpty.py
index e8175ff..8713d34 100644
--- a/Lib/test/test_openpty.py
+++ b/Lib/test/test_openpty.py
@@ -4,7 +4,7 @@ import os, unittest
from test.support import run_unittest
if not hasattr(os, "openpty"):
- raise unittest.SkipTest("No openpty() available.")
+ raise unittest.SkipTest("os.openpty() not available.")
class OpenptyTest(unittest.TestCase):
diff --git a/Lib/test/test_operator.py b/Lib/test/test_operator.py
index fa608b9..b445ee0 100644
--- a/Lib/test/test_operator.py
+++ b/Lib/test/test_operator.py
@@ -410,6 +410,31 @@ class OperatorTestCase(unittest.TestCase):
self.assertEqual(operator.__ixor__ (c, 5), "ixor")
self.assertEqual(operator.__iconcat__ (c, c), "iadd")
+ def test_length_hint(self):
+ class X(object):
+ def __init__(self, value):
+ self.value = value
+
+ def __length_hint__(self):
+ if type(self.value) is type:
+ raise self.value
+ else:
+ return self.value
+
+ self.assertEqual(operator.length_hint([], 2), 0)
+ self.assertEqual(operator.length_hint(iter([1, 2, 3])), 3)
+
+ self.assertEqual(operator.length_hint(X(2)), 2)
+ self.assertEqual(operator.length_hint(X(NotImplemented), 4), 4)
+ self.assertEqual(operator.length_hint(X(TypeError), 12), 12)
+ with self.assertRaises(TypeError):
+ operator.length_hint(X("abc"))
+ with self.assertRaises(ValueError):
+ operator.length_hint(X(-2))
+ with self.assertRaises(LookupError):
+ operator.length_hint(X(LookupError))
+
+
def test_main(verbose=None):
import sys
test_classes = (
diff --git a/Lib/test/test_os.py b/Lib/test/test_os.py
index 13a2b38..13f9e3a 100644
--- a/Lib/test/test_os.py
+++ b/Lib/test/test_os.py
@@ -2050,6 +2050,103 @@ class TermsizeTests(unittest.TestCase):
self.assertEqual(expected, actual)
+class OSErrorTests(unittest.TestCase):
+ def setUp(self):
+ class Str(str):
+ pass
+
+ self.bytes_filenames = []
+ self.unicode_filenames = []
+ if support.TESTFN_UNENCODABLE is not None:
+ decoded = support.TESTFN_UNENCODABLE
+ else:
+ decoded = support.TESTFN
+ self.unicode_filenames.append(decoded)
+ self.unicode_filenames.append(Str(decoded))
+ if support.TESTFN_UNDECODABLE is not None:
+ encoded = support.TESTFN_UNDECODABLE
+ else:
+ encoded = os.fsencode(support.TESTFN)
+ self.bytes_filenames.append(encoded)
+ self.bytes_filenames.append(memoryview(encoded))
+
+ self.filenames = self.bytes_filenames + self.unicode_filenames
+
+ def test_oserror_filename(self):
+ funcs = [
+ (self.filenames, os.chdir,),
+ (self.filenames, os.chmod, 0o777),
+ (self.filenames, os.lstat,),
+ (self.filenames, os.open, os.O_RDONLY),
+ (self.filenames, os.rmdir,),
+ (self.filenames, os.stat,),
+ (self.filenames, os.unlink,),
+ ]
+ if sys.platform == "win32":
+ funcs.extend((
+ (self.bytes_filenames, os.rename, b"dst"),
+ (self.bytes_filenames, os.replace, b"dst"),
+ (self.unicode_filenames, os.rename, "dst"),
+ (self.unicode_filenames, os.replace, "dst"),
+ # Issue #16414: Don't test undecodable names with listdir()
+ # because of a Windows bug.
+ #
+                # With the ANSI code page 932, os.listdir(b'\xe7') returns an
+ # empty list (instead of failing), whereas os.listdir(b'\xff')
+ # raises a FileNotFoundError. It looks like a Windows bug:
+ # b'\xe7' directory does not exist, FindFirstFileA(b'\xe7')
+ # fails with ERROR_FILE_NOT_FOUND (2), instead of
+ # ERROR_PATH_NOT_FOUND (3).
+ (self.unicode_filenames, os.listdir,),
+ ))
+ else:
+ funcs.extend((
+ (self.filenames, os.listdir,),
+ (self.filenames, os.rename, "dst"),
+ (self.filenames, os.replace, "dst"),
+ ))
+ if hasattr(os, "chown"):
+ funcs.append((self.filenames, os.chown, 0, 0))
+ if hasattr(os, "lchown"):
+ funcs.append((self.filenames, os.lchown, 0, 0))
+ if hasattr(os, "truncate"):
+ funcs.append((self.filenames, os.truncate, 0))
+ if hasattr(os, "chflags"):
+ funcs.append((self.filenames, os.chflags, 0))
+ if hasattr(os, "lchflags"):
+ funcs.append((self.filenames, os.lchflags, 0))
+ if hasattr(os, "chroot"):
+ funcs.append((self.filenames, os.chroot,))
+ if hasattr(os, "link"):
+ if sys.platform == "win32":
+ funcs.append((self.bytes_filenames, os.link, b"dst"))
+ funcs.append((self.unicode_filenames, os.link, "dst"))
+ else:
+ funcs.append((self.filenames, os.link, "dst"))
+ if hasattr(os, "listxattr"):
+ funcs.extend((
+ (self.filenames, os.listxattr,),
+ (self.filenames, os.getxattr, "user.test"),
+ (self.filenames, os.setxattr, "user.test", b'user'),
+ (self.filenames, os.removexattr, "user.test"),
+ ))
+ if hasattr(os, "lchmod"):
+ funcs.append((self.filenames, os.lchmod, 0o777))
+ if hasattr(os, "readlink"):
+ if sys.platform == "win32":
+ funcs.append((self.unicode_filenames, os.readlink,))
+ else:
+ funcs.append((self.filenames, os.readlink,))
+
+ for filenames, func, *func_args in funcs:
+ for name in filenames:
+ try:
+ func(name, *func_args)
+ except OSError as err:
+ self.assertIs(err.filename, name)
+ else:
+ self.fail("No exception thrown by {}".format(func))
+
@support.reap_threads
def test_main():
support.run_unittest(
@@ -2078,6 +2175,7 @@ def test_main():
ExtendedAttributeTests,
Win32DeprecatedBytesAPI,
TermsizeTests,
+ OSErrorTests,
)
if __name__ == "__main__":
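
The new OSErrorTests above check that failing os calls carry the offending name on the exception. A one-line sketch of the attribute being asserted (the path is a placeholder assumed not to exist):

    import os

    try:
        os.stat('hopefully-missing-file')
    except OSError as err:
        print(err.filename)                     # 'hopefully-missing-file'
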
diff --git a/Lib/test/test_pep277.py b/Lib/test/test_pep277.py
index 4b16cbb..9bae6dc 100644
--- a/Lib/test/test_pep277.py
+++ b/Lib/test/test_pep277.py
@@ -99,10 +99,6 @@ class UnicodeFileTests(unittest.TestCase):
with self.assertRaises(expected_exception) as c:
fn(filename)
exc_filename = c.exception.filename
- # listdir may append a wildcard to the filename
- if fn is os.listdir and sys.platform == 'win32':
- exc_filename, _, wildcard = exc_filename.rpartition(os.sep)
- self.assertEqual(wildcard, '*.*')
if check_filename:
self.assertEqual(exc_filename, filename, "Function '%s(%a) failed "
"with bad filename in the exception: %a" %
diff --git a/Lib/test/test_poll.py b/Lib/test/test_poll.py
index 7ab3b84..3b7926d 100644
--- a/Lib/test/test_poll.py
+++ b/Lib/test/test_poll.py
@@ -1,12 +1,12 @@
# Test case for the os.poll() function
-import os, select, random, unittest
+import os, select, random, unittest, subprocess
from test.support import TESTFN, run_unittest
try:
select.poll
except AttributeError:
- raise unittest.SkipTest("select.poll not defined -- skipping test_poll")
+ raise unittest.SkipTest("select.poll not defined")
def find_ready_matching(ready, flag):
@@ -67,13 +67,11 @@ class PollTests(unittest.TestCase):
self.assertEqual(bufs, [MSG] * NUM_PIPES)
- def poll_unit_tests(self):
+ def test_poll_unit_tests(self):
# returns NVAL for invalid file descriptor
- FD = 42
- try:
- os.close(FD)
- except OSError:
- pass
+ FD, w = os.pipe()
+ os.close(FD)
+ os.close(w)
p = select.poll()
p.register(FD)
r = p.poll()
@@ -116,7 +114,9 @@ class PollTests(unittest.TestCase):
def test_poll2(self):
cmd = 'for i in 0 1 2 3 4 5 6 7 8 9; do echo testing...; sleep 1; done'
- p = os.popen(cmd, 'r')
+ proc = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE,
+ bufsize=0)
+ p = proc.stdout
pollster = select.poll()
pollster.register( p, select.POLLIN )
for tout in (0, 1000, 2000, 4000, 8000, 16000) + (-1,)*10:
@@ -126,7 +126,7 @@ class PollTests(unittest.TestCase):
fd, flags = fdlist[0]
if flags & select.POLLHUP:
line = p.readline()
- if line != "":
+ if line != b"":
self.fail('error: pipe seems to be closed, but still returns data')
continue
@@ -134,6 +134,7 @@ class PollTests(unittest.TestCase):
line = p.readline()
if not line:
break
+ self.assertEqual(line, b'testing...\n')
continue
else:
self.fail('Unexpected return value from select.poll: %s' % fdlist)
diff --git a/Lib/test/test_poplib.py b/Lib/test/test_poplib.py
index c0929a0..e85dd2a 100644
--- a/Lib/test/test_poplib.py
+++ b/Lib/test/test_poplib.py
@@ -18,6 +18,13 @@ threading = test_support.import_module('threading')
HOST = test_support.HOST
PORT = 0
+SUPPORTS_SSL = False
+if hasattr(poplib, 'POP3_SSL'):
+ import ssl
+
+ SUPPORTS_SSL = True
+ CERTFILE = os.path.join(os.path.dirname(__file__) or os.curdir, "keycert.pem")
+
# the dummy data returned by server when LIST and RETR commands are issued
LIST_RESP = b'1 1\r\n2 2\r\n3 3\r\n4 4\r\n5 5\r\n.\r\n'
RETR_RESP = b"""From: postmaster@python.org\
@@ -33,11 +40,15 @@ line3\r\n\
class DummyPOP3Handler(asynchat.async_chat):
+ CAPAS = {'UIDL': [], 'IMPLEMENTATION': ['python-testlib-pop-server']}
+
def __init__(self, conn):
asynchat.async_chat.__init__(self, conn)
self.set_terminator(b"\r\n")
self.in_buffer = []
self.push('+OK dummy pop3 server ready. <timestamp>')
+ self.tls_active = False
+ self.tls_starting = False
def collect_incoming_data(self, data):
self.in_buffer.append(data)
@@ -112,6 +123,65 @@ class DummyPOP3Handler(asynchat.async_chat):
self.push('+OK closing.')
self.close_when_done()
+ def _get_capas(self):
+ _capas = dict(self.CAPAS)
+ if not self.tls_active and SUPPORTS_SSL:
+ _capas['STLS'] = []
+ return _capas
+
+ def cmd_capa(self, arg):
+ self.push('+OK Capability list follows')
+ if self._get_capas():
+ for cap, params in self._get_capas().items():
+ _ln = [cap]
+ if params:
+ _ln.extend(params)
+ self.push(' '.join(_ln))
+ self.push('.')
+
+ if SUPPORTS_SSL:
+
+ def cmd_stls(self, arg):
+ if self.tls_active is False:
+ self.push('+OK Begin TLS negotiation')
+ tls_sock = ssl.wrap_socket(self.socket, certfile=CERTFILE,
+ server_side=True,
+ do_handshake_on_connect=False,
+ suppress_ragged_eofs=False)
+ self.del_channel()
+ self.set_socket(tls_sock)
+ self.tls_active = True
+ self.tls_starting = True
+ self.in_buffer = []
+ self._do_tls_handshake()
+ else:
+ self.push('-ERR Command not permitted when TLS active')
+
+ def _do_tls_handshake(self):
+ try:
+ self.socket.do_handshake()
+ except ssl.SSLError as err:
+ if err.args[0] in (ssl.SSL_ERROR_WANT_READ,
+ ssl.SSL_ERROR_WANT_WRITE):
+ return
+ elif err.args[0] == ssl.SSL_ERROR_EOF:
+ return self.handle_close()
+ raise
+ except socket.error as err:
+ if err.args[0] == errno.ECONNABORTED:
+ return self.handle_close()
+ else:
+ self.tls_active = True
+ self.tls_starting = False
+
+ def handle_read(self):
+ if self.tls_starting:
+ self._do_tls_handshake()
+ else:
+ try:
+ asynchat.async_chat.handle_read(self)
+ except ssl.SSLEOFError:
+ self.handle_close()
class DummyPOP3Server(asyncore.dispatcher, threading.Thread):
@@ -232,19 +302,35 @@ class TestPOP3Class(TestCase):
self.client.uidl()
self.client.uidl('foo')
+ def test_capa(self):
+ capa = self.client.capa()
+ self.assertTrue('IMPLEMENTATION' in capa.keys())
+
def test_quit(self):
resp = self.client.quit()
self.assertTrue(resp)
self.assertIsNone(self.client.sock)
self.assertIsNone(self.client.file)
+ if SUPPORTS_SSL:
-SUPPORTS_SSL = False
-if hasattr(poplib, 'POP3_SSL'):
- import ssl
+ def test_stls_capa(self):
+ capa = self.client.capa()
+ self.assertTrue('STLS' in capa.keys())
- SUPPORTS_SSL = True
- CERTFILE = os.path.join(os.path.dirname(__file__) or os.curdir, "keycert.pem")
+ def test_stls(self):
+ expected = b'+OK Begin TLS negotiation'
+ resp = self.client.stls()
+ self.assertEqual(resp, expected)
+
+ def test_stls_context(self):
+ expected = b'+OK Begin TLS negotiation'
+ ctx = ssl.SSLContext(ssl.PROTOCOL_TLSv1)
+ resp = self.client.stls(context=ctx)
+ self.assertEqual(resp, expected)
+
+
+if SUPPORTS_SSL:
class DummyPOP3_SSLHandler(DummyPOP3Handler):
@@ -256,34 +342,13 @@ if hasattr(poplib, 'POP3_SSL'):
self.del_channel()
self.set_socket(ssl_socket)
# Must try handshake before calling push()
- self._ssl_accepting = True
- self._do_ssl_handshake()
+ self.tls_active = True
+ self.tls_starting = True
+ self._do_tls_handshake()
self.set_terminator(b"\r\n")
self.in_buffer = []
self.push('+OK dummy pop3 server ready. <timestamp>')
- def _do_ssl_handshake(self):
- try:
- self.socket.do_handshake()
- except ssl.SSLError as err:
- if err.args[0] in (ssl.SSL_ERROR_WANT_READ,
- ssl.SSL_ERROR_WANT_WRITE):
- return
- elif err.args[0] == ssl.SSL_ERROR_EOF:
- return self.handle_close()
- raise
- except socket.error as err:
- if err.args[0] == errno.ECONNABORTED:
- return self.handle_close()
- else:
- self._ssl_accepting = False
-
- def handle_read(self):
- if self._ssl_accepting:
- self._do_ssl_handshake()
- else:
- DummyPOP3Handler.handle_read(self)
-
class TestPOP3_SSLClass(TestPOP3Class):
# repeat previous tests by using poplib.POP3_SSL
@@ -314,6 +379,39 @@ if hasattr(poplib, 'POP3_SSL'):
self.assertIs(self.client.sock.context, ctx)
self.assertTrue(self.client.noop().startswith(b'+OK'))
+ def test_stls(self):
+ self.assertRaises(poplib.error_proto, self.client.stls)
+
+ test_stls_context = test_stls
+
+ def test_stls_capa(self):
+ capa = self.client.capa()
+ self.assertFalse('STLS' in capa.keys())
+
+
+ class TestPOP3_TLSClass(TestPOP3Class):
+ # repeat previous tests by using poplib.POP3.stls()
+
+ def setUp(self):
+ self.server = DummyPOP3Server((HOST, PORT))
+ self.server.start()
+ self.client = poplib.POP3(self.server.host, self.server.port, timeout=3)
+ self.client.stls()
+
+ def tearDown(self):
+ if self.client.file is not None and self.client.sock is not None:
+ self.client.quit()
+ self.server.stop()
+
+ def test_stls(self):
+ self.assertRaises(poplib.error_proto, self.client.stls)
+
+ test_stls_context = test_stls
+
+ def test_stls_capa(self):
+ capa = self.client.capa()
+ self.assertFalse(b'STLS' in capa.keys())
+
class TestTimeouts(TestCase):
@@ -373,6 +471,7 @@ def test_main():
tests = [TestPOP3Class, TestTimeouts]
if SUPPORTS_SSL:
tests.append(TestPOP3_SSLClass)
+ tests.append(TestPOP3_TLSClass)
thread_info = test_support.threading_setup()
try:
test_support.run_unittest(*tests)
diff --git a/Lib/test/test_random.py b/Lib/test/test_random.py
index b5931ba..a9aec70 100644
--- a/Lib/test/test_random.py
+++ b/Lib/test/test_random.py
@@ -46,6 +46,39 @@ class TestBasicOps(unittest.TestCase):
self.assertRaises(TypeError, self.gen.seed, 1, 2, 3, 4)
self.assertRaises(TypeError, type(self.gen), [])
+ def test_shuffle(self):
+ shuffle = self.gen.shuffle
+ lst = []
+ shuffle(lst)
+ self.assertEqual(lst, [])
+ lst = [37]
+ shuffle(lst)
+ self.assertEqual(lst, [37])
+ seqs = [list(range(n)) for n in range(10)]
+ shuffled_seqs = [list(range(n)) for n in range(10)]
+ for shuffled_seq in shuffled_seqs:
+ shuffle(shuffled_seq)
+ for (seq, shuffled_seq) in zip(seqs, shuffled_seqs):
+ self.assertEqual(len(seq), len(shuffled_seq))
+ self.assertEqual(set(seq), set(shuffled_seq))
+
+ # The above tests all would pass if the shuffle was a
+ # no-op. The following non-deterministic test covers that. It
+ # asserts that the shuffled sequence of 1000 distinct elements
+ # must be different from the original one. Although there is
+ # mathematically a non-zero probability that this could
+ # actually happen in a genuinely random shuffle, it is
+ # completely negligible, given that the number of possible
+ # permutations of 1000 objects is 1000! (factorial of 1000),
+ # which is considerably larger than the number of atoms in the
+ # universe...
+ lst = list(range(1000))
+ shuffled_lst = list(range(1000))
+ shuffle(shuffled_lst)
+ self.assertTrue(lst != shuffled_lst)
+ shuffle(lst)
+ self.assertTrue(lst != shuffled_lst)
+
def test_choice(self):
choice = self.gen.choice
with self.assertRaises(IndexError):
diff --git a/Lib/test/test_range.py b/Lib/test/test_range.py
index 2a13bfe..f088387 100644
--- a/Lib/test/test_range.py
+++ b/Lib/test/test_range.py
@@ -313,7 +313,7 @@ class RangeTest(unittest.TestCase):
self.assertRaises(TypeError, range, IN())
# Test use of user-defined classes in slice indices.
- self.assertEqual(list(range(10)[:I(5)]), list(range(5)))
+ self.assertEqual(range(10)[:I(5)], range(5))
with self.assertRaises(RuntimeError):
range(0, 10)[:IX()]
diff --git a/Lib/test/test_select.py b/Lib/test/test_select.py
index ddb9a0f..8f9a1c9 100644
--- a/Lib/test/test_select.py
+++ b/Lib/test/test_select.py
@@ -5,7 +5,7 @@ import sys
import unittest
from test import support
-@unittest.skipIf(sys.platform[:3] in ('win', 'os2', 'riscos'),
+@unittest.skipIf(sys.platform[:3] == 'win',
"can't easily test on this system")
class SelectTestCase(unittest.TestCase):
@@ -32,7 +32,7 @@ class SelectTestCase(unittest.TestCase):
fp.close()
try:
select.select([fd], [], [], 0)
- except select.error as err:
+ except OSError as err:
self.assertEqual(err.errno, errno.EBADF)
else:
self.fail("exception not raised")
diff --git a/Lib/test/test_set.py b/Lib/test/test_set.py
index 8e9e587..da62723 100644
--- a/Lib/test/test_set.py
+++ b/Lib/test/test_set.py
@@ -848,8 +848,6 @@ class TestBasicOps(unittest.TestCase):
for v in self.set:
self.assertIn(v, self.values)
setiter = iter(self.set)
- # note: __length_hint__ is an internal undocumented API,
- # don't rely on it in your own programs
self.assertEqual(setiter.__length_hint__(), len(self.set))
def test_pickling(self):
diff --git a/Lib/test/test_shelve.py b/Lib/test/test_shelve.py
index 13c1265..bd51d86 100644
--- a/Lib/test/test_shelve.py
+++ b/Lib/test/test_shelve.py
@@ -148,6 +148,19 @@ class TestCase(unittest.TestCase):
p2 = d[encodedkey]
self.assertNotEqual(p1, p2) # Write creates new object in store
+ def test_with(self):
+ d1 = {}
+ with shelve.Shelf(d1, protocol=2, writeback=False) as s:
+ s['key1'] = [1,2,3,4]
+ self.assertEqual(s['key1'], [1,2,3,4])
+ self.assertEqual(len(s), 1)
+ self.assertRaises(ValueError, len, s)
+ try:
+ s['key1']
+ except ValueError:
+ pass
+ else:
+ self.fail('Closed shelf should not find a key')
from test import mapping_tests
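
test_with() above relies on Shelf being usable as a context manager: the shelf is closed on exit, after which access raises ValueError. A minimal sketch using a plain dict as the backing store, as the test does:

    import shelve

    backing = {}
    with shelve.Shelf(backing, protocol=2) as s:
        s['key1'] = [1, 2, 3, 4]

    try:
        len(s)                                  # operations on a closed shelf fail
    except ValueError:
        print('shelf is closed')
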
diff --git a/Lib/test/test_shutil.py b/Lib/test/test_shutil.py
index 01b93fc..ad62280 100644
--- a/Lib/test/test_shutil.py
+++ b/Lib/test/test_shutil.py
@@ -18,7 +18,8 @@ from shutil import (_make_tarball, _make_zipfile, make_archive,
register_archive_format, unregister_archive_format,
get_archive_formats, Error, unpack_archive,
register_unpack_format, RegistryError,
- unregister_unpack_format, get_unpack_formats)
+ unregister_unpack_format, get_unpack_formats,
+ SameFileError)
import tarfile
import warnings
@@ -728,7 +729,7 @@ class TestShutil(unittest.TestCase):
with open(src, 'w') as f:
f.write('cheddar')
os.link(src, dst)
- self.assertRaises(shutil.Error, shutil.copyfile, src, dst)
+ self.assertRaises(shutil.SameFileError, shutil.copyfile, src, dst)
with open(src, 'r') as f:
self.assertEqual(f.read(), 'cheddar')
os.remove(dst)
@@ -748,7 +749,7 @@ class TestShutil(unittest.TestCase):
# to TESTFN/TESTFN/cheese, while it should point at
# TESTFN/cheese.
os.symlink('cheese', dst)
- self.assertRaises(shutil.Error, shutil.copyfile, src, dst)
+ self.assertRaises(shutil.SameFileError, shutil.copyfile, src, dst)
with open(src, 'r') as f:
self.assertEqual(f.read(), 'cheddar')
os.remove(dst)
@@ -1255,6 +1256,16 @@ class TestShutil(unittest.TestCase):
self.assertTrue(os.path.exists(rv))
self.assertEqual(read_file(src_file), read_file(dst_file))
+ def test_copyfile_same_file(self):
+ # copyfile() should raise SameFileError if the source and destination
+ # are the same.
+ src_dir = self.mkdtemp()
+ src_file = os.path.join(src_dir, 'foo')
+ write_file(src_file, 'foo')
+ self.assertRaises(SameFileError, shutil.copyfile, src_file, src_file)
+ # But Error should work too, to stay backward compatible.
+ self.assertRaises(Error, shutil.copyfile, src_file, src_file)
+
def test_copytree_return_value(self):
# copytree returns its destination path.
src_dir = self.mkdtemp()
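
The shutil hunks above switch the expected exception to SameFileError, while test_copyfile_same_file() confirms that catching the old Error still works, so SameFileError is presumably a subclass of Error. A short sketch (the temporary file exists only for the demonstration):

    import os, shutil, tempfile

    with tempfile.TemporaryDirectory() as d:
        path = os.path.join(d, 'foo.txt')
        with open(path, 'w') as f:
            f.write('data')
        try:
            shutil.copyfile(path, path)         # source and destination are the same file
        except shutil.SameFileError as exc:
            assert isinstance(exc, shutil.Error)   # backward-compatible catch still works
            print('caught:', exc)
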
diff --git a/Lib/test/test_signal.py b/Lib/test/test_signal.py
index 6be259b..c04e52b 100644
--- a/Lib/test/test_signal.py
+++ b/Lib/test/test_signal.py
@@ -15,9 +15,6 @@ try:
except ImportError:
threading = None
-if sys.platform in ('os2', 'riscos'):
- raise unittest.SkipTest("Can't test signal on %s" % sys.platform)
-
class HandlerBCalled(Exception):
pass
@@ -36,7 +33,7 @@ def exit_subprocess():
def ignoring_eintr(__func, *args, **kwargs):
try:
return __func(*args, **kwargs)
- except EnvironmentError as e:
+ except OSError as e:
if e.errno != errno.EINTR:
raise
return None
@@ -302,10 +299,10 @@ class WakeupSignalTests(unittest.TestCase):
# We attempt to get a signal during the select call
try:
select.select([read], [], [], TIMEOUT_FULL)
- except select.error:
+ except OSError:
pass
else:
- raise Exception("select.error not raised")
+ raise Exception("OSError not raised")
after_time = time.time()
dt = after_time - before_time
if dt >= TIMEOUT_HALF:
diff --git a/Lib/test/test_site.py b/Lib/test/test_site.py
index 29286c7..9c7840f 100644
--- a/Lib/test/test_site.py
+++ b/Lib/test/test_site.py
@@ -222,11 +222,7 @@ class HelperFunctionsTests(unittest.TestCase):
site.PREFIXES = ['xoxo']
dirs = site.getsitepackages()
- if sys.platform in ('os2emx', 'riscos'):
- self.assertEqual(len(dirs), 1)
- wanted = os.path.join('xoxo', 'Lib', 'site-packages')
- self.assertEqual(dirs[0], wanted)
- elif (sys.platform == "darwin" and
+ if (sys.platform == "darwin" and
sysconfig.get_config_var("PYTHONFRAMEWORK")):
# OS X framework builds
site.PREFIXES = ['Python.framework']
diff --git a/Lib/test/test_slice.py b/Lib/test/test_slice.py
index 2df9271..9203d5e 100644
--- a/Lib/test/test_slice.py
+++ b/Lib/test/test_slice.py
@@ -4,8 +4,70 @@ import unittest
from test import support
from pickle import loads, dumps
+import itertools
+import operator
import sys
+
+def evaluate_slice_index(arg):
+ """
+ Helper function to convert a slice argument to an integer, and raise
+ TypeError with a suitable message on failure.
+
+ """
+ if hasattr(arg, '__index__'):
+ return operator.index(arg)
+ else:
+ raise TypeError(
+ "slice indices must be integers or "
+ "None or have an __index__ method")
+
+def slice_indices(slice, length):
+ """
+ Reference implementation for the slice.indices method.
+
+ """
+ # Compute step and length as integers.
+ length = operator.index(length)
+ step = 1 if slice.step is None else evaluate_slice_index(slice.step)
+
+ # Raise ValueError for negative length or zero step.
+ if length < 0:
+ raise ValueError("length should not be negative")
+ if step == 0:
+ raise ValueError("slice step cannot be zero")
+
+ # Find lower and upper bounds for start and stop.
+ lower = -1 if step < 0 else 0
+ upper = length - 1 if step < 0 else length
+
+ # Compute start.
+ if slice.start is None:
+ start = upper if step < 0 else lower
+ else:
+ start = evaluate_slice_index(slice.start)
+ start = max(start + length, lower) if start < 0 else min(start, upper)
+
+ # Compute stop.
+ if slice.stop is None:
+ stop = lower if step < 0 else upper
+ else:
+ stop = evaluate_slice_index(slice.stop)
+ stop = max(stop + length, lower) if stop < 0 else min(stop, upper)
+
+ return start, stop, step
+
+
+# Class providing an __index__ method. Used for testing slice.indices.
+
+class MyIndexable(object):
+ def __init__(self, value):
+ self.value = value
+
+ def __index__(self):
+ return self.value
+
+
class SliceTest(unittest.TestCase):
def test_constructor(self):
@@ -75,6 +137,22 @@ class SliceTest(unittest.TestCase):
s = slice(obj)
self.assertTrue(s.stop is obj)
+ def check_indices(self, slice, length):
+ try:
+ actual = slice.indices(length)
+ except ValueError:
+ actual = "valueerror"
+ try:
+ expected = slice_indices(slice, length)
+ except ValueError:
+ expected = "valueerror"
+ self.assertEqual(actual, expected)
+
+ if length >= 0 and slice.step != 0:
+ actual = range(*slice.indices(length))
+ expected = range(length)[slice]
+ self.assertEqual(actual, expected)
+
def test_indices(self):
self.assertEqual(slice(None ).indices(10), (0, 10, 1))
self.assertEqual(slice(None, None, 2).indices(10), (0, 10, 2))
@@ -108,7 +186,41 @@ class SliceTest(unittest.TestCase):
self.assertEqual(list(range(10))[::sys.maxsize - 1], [0])
- self.assertRaises(OverflowError, slice(None).indices, 1<<100)
+ # Check a variety of start, stop, step and length values, including
+ # values exceeding sys.maxsize (see issue #14794).
+ vals = [None, -2**100, -2**30, -53, -7, -1, 0, 1, 7, 53, 2**30, 2**100]
+ lengths = [0, 1, 7, 53, 2**30, 2**100]
+ for slice_args in itertools.product(vals, repeat=3):
+ s = slice(*slice_args)
+ for length in lengths:
+ self.check_indices(s, length)
+ self.check_indices(slice(0, 10, 1), -3)
+
+ # Negative length should raise ValueError
+ with self.assertRaises(ValueError):
+ slice(None).indices(-1)
+
+ # Zero step should raise ValueError
+ with self.assertRaises(ValueError):
+ slice(0, 10, 0).indices(5)
+
+ # Using a start, stop or step or length that can't be interpreted as an
+ # integer should give a TypeError ...
+ with self.assertRaises(TypeError):
+ slice(0.0, 10, 1).indices(5)
+ with self.assertRaises(TypeError):
+ slice(0, 10.0, 1).indices(5)
+ with self.assertRaises(TypeError):
+ slice(0, 10, 1.0).indices(5)
+ with self.assertRaises(TypeError):
+ slice(0, 10, 1).indices(5.0)
+
+ # ... but it should be fine to use a custom class that provides index.
+ self.assertEqual(slice(0, 10, 1).indices(5), (0, 5, 1))
+ self.assertEqual(slice(MyIndexable(0), 10, 1).indices(5), (0, 5, 1))
+ self.assertEqual(slice(0, MyIndexable(10), 1).indices(5), (0, 5, 1))
+ self.assertEqual(slice(0, 10, MyIndexable(1)).indices(5), (0, 5, 1))
+ self.assertEqual(slice(0, 10, 1).indices(MyIndexable(5)), (0, 5, 1))
def test_setslice_without_getslice(self):
tmp = []
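
For reference, a short sketch of the invariant the new check_indices helper asserts: slice.indices(length) clamps start and stop so that the resulting range walks exactly the items the slice would select, and with this change arbitrarily large values are accepted:

    s = slice(None, None, -2)
    length = 10
    start, stop, step = s.indices(length)         # (9, -1, -2)
    assert list(range(start, stop, step)) == list(range(length))[s]
    # Lengths beyond sys.maxsize no longer raise OverflowError (issue #14794).
    assert slice(None).indices(2**100) == (0, 2**100, 1)
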
diff --git a/Lib/test/test_smtplib.py b/Lib/test/test_smtplib.py
index befc49e..5a086c8 100644
--- a/Lib/test/test_smtplib.py
+++ b/Lib/test/test_smtplib.py
@@ -524,12 +524,6 @@ class DebuggingServerTests(unittest.TestCase):
class NonConnectingTests(unittest.TestCase):
- def setUp(self):
- smtplib.socket = mock_socket
-
- def tearDown(self):
- smtplib.socket = socket
-
def testNotConnected(self):
# Test various operations on an unconnected SMTP object that
# should raise exceptions (at present the attempt in SMTP.send
@@ -542,9 +536,9 @@ class NonConnectingTests(unittest.TestCase):
def testNonnumericPort(self):
# check that non-numeric port raises socket.error
- self.assertRaises(mock_socket.error, smtplib.SMTP,
+ self.assertRaises(OSError, smtplib.SMTP,
"localhost", "bogus")
- self.assertRaises(mock_socket.error, smtplib.SMTP,
+ self.assertRaises(OSError, smtplib.SMTP,
"localhost:bogus")
diff --git a/Lib/test/test_socketserver.py b/Lib/test/test_socketserver.py
index 353f8ff..58fc5d03 100644
--- a/Lib/test/test_socketserver.py
+++ b/Lib/test/test_socketserver.py
@@ -27,7 +27,7 @@ TEST_STR = b"hello world\n"
HOST = test.support.HOST
HAVE_UNIX_SOCKETS = hasattr(socket, "AF_UNIX")
-HAVE_FORKING = hasattr(os, "fork") and os.name != "os2"
+HAVE_FORKING = hasattr(os, "fork")
def signal_alarm(n):
"""Call signal.alarm when it exists (i.e. not on Windows)."""
@@ -93,21 +93,7 @@ class SocketServerTest(unittest.TestCase):
# XXX: We need a way to tell AF_UNIX to pick its own name
# like AF_INET provides port==0.
dir = None
- if os.name == 'os2':
- dir = '\socket'
fn = tempfile.mktemp(prefix='unix_socket.', dir=dir)
- if os.name == 'os2':
- # AF_UNIX socket names on OS/2 require a specific prefix
- # which can't include a drive letter and must also use
- # backslashes as directory separators
- if fn[1] == ':':
- fn = fn[2:]
- if fn[0] in (os.sep, os.altsep):
- fn = fn[1:]
- if os.sep == '/':
- fn = fn.replace(os.sep, os.altsep)
- else:
- fn = fn.replace(os.altsep, os.sep)
self.test_files.append(fn)
return fn
diff --git a/Lib/test/test_subprocess.py b/Lib/test/test_subprocess.py
index b0cd63c..c5a513e 100644
--- a/Lib/test/test_subprocess.py
+++ b/Lib/test/test_subprocess.py
@@ -925,8 +925,7 @@ class ProcessTestCase(BaseTestCase):
# value for that limit, but Windows has 2048, so we loop
# 1024 times (each call leaked two fds).
for i in range(1024):
- # Windows raises IOError. Others raise OSError.
- with self.assertRaises(EnvironmentError) as c:
+ with self.assertRaises(OSError) as c:
subprocess.Popen(['nonexisting_i_hope'],
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
@@ -1179,7 +1178,7 @@ class POSIXProcessTestCase(BaseTestCase):
try:
p = subprocess.Popen([sys.executable, "-c", ""],
preexec_fn=raise_it)
- except RuntimeError as e:
+ except subprocess.SubprocessError as e:
self.assertTrue(
subprocess._posixsubprocess,
"Expected a ValueError from the preexec_fn")
@@ -1218,9 +1217,10 @@ class POSIXProcessTestCase(BaseTestCase):
"""Issue16140: Don't double close pipes on preexec error."""
def raise_it():
- raise RuntimeError("force the _execute_child() errpipe_data path.")
+ raise subprocess.SubprocessError(
+ "force the _execute_child() errpipe_data path.")
- with self.assertRaises(RuntimeError):
+ with self.assertRaises(subprocess.SubprocessError):
self._TestExecuteChildPopen(
self, [sys.executable, "-c", "pass"],
stdin=subprocess.PIPE, stdout=subprocess.PIPE,
@@ -1581,12 +1581,12 @@ class POSIXProcessTestCase(BaseTestCase):
# Pure Python implementations keeps the message
self.assertIsNone(subprocess._posixsubprocess)
self.assertEqual(str(err), "surrogate:\uDCff")
- except RuntimeError as err:
+ except subprocess.SubprocessError as err:
# _posixsubprocess uses a default message
self.assertIsNotNone(subprocess._posixsubprocess)
self.assertEqual(str(err), "Exception occurred in preexec_fn.")
else:
- self.fail("Expected ValueError or RuntimeError")
+ self.fail("Expected ValueError or subprocess.SubprocessError")
def test_undecodable_env(self):
for key, value in (('test', 'abc\uDCFF'), ('test\uDCFF', '42')):
@@ -1873,7 +1873,7 @@ class POSIXProcessTestCase(BaseTestCase):
# let some time for the process to exit, and create a new Popen: this
# should trigger the wait() of p
time.sleep(0.2)
- with self.assertRaises(EnvironmentError) as c:
+ with self.assertRaises(OSError) as c:
with subprocess.Popen(['nonexisting_i_hope'],
stdout=subprocess.PIPE,
stderr=subprocess.PIPE) as proc:
@@ -2154,7 +2154,7 @@ class ContextManagerTests(BaseTestCase):
self.assertEqual(proc.returncode, 1)
def test_invalid_args(self):
- with self.assertRaises(EnvironmentError) as c:
+ with self.assertRaises(OSError) as c:
with subprocess.Popen(['nonexisting_i_hope'],
stdout=subprocess.PIPE,
stderr=subprocess.PIPE) as proc:
diff --git a/Lib/test/test_sundry.py b/Lib/test/test_sundry.py
index fcccdf7..a364194 100644
--- a/Lib/test/test_sundry.py
+++ b/Lib/test/test_sundry.py
@@ -1,19 +1,26 @@
"""Do a minimal test of all the modules that aren't otherwise tested."""
-
-from test import support
+import importlib
import sys
+from test import support
import unittest
class TestUntestedModules(unittest.TestCase):
- def test_at_least_import_untested_modules(self):
+ def test_untested_modules_can_be_imported(self):
+ untested = ('bdb', 'encodings', 'formatter', 'getpass', 'imghdr',
+ 'keyword', 'macurl2path', 'nturl2path', 'tabnanny')
with support.check_warnings(quiet=True):
- import bdb
- import cgitb
+ for name in untested:
+ try:
+ support.import_module('test.test_{}'.format(name))
+ except unittest.SkipTest:
+ importlib.import_module(name)
+ else:
+ self.fail('{} has tests even though test_sundry claims '
+ 'otherwise'.format(name))
import distutils.bcppcompiler
import distutils.ccompiler
import distutils.cygwinccompiler
- import distutils.emxccompiler
import distutils.filelist
if sys.platform.startswith('win'):
import distutils.msvccompiler
@@ -39,22 +46,9 @@ class TestUntestedModules(unittest.TestCase):
import distutils.command.sdist
import distutils.command.upload
- import encodings
- import formatter
- import getpass
import html.entities
- import imghdr
- import keyword
- import macurl2path
- import mailcap
- import nturl2path
- import os2emxpath
- import pstats
- import py_compile
- import sndhdr
- import tabnanny
try:
- import tty # not available on Windows
+ import tty # Not available on Windows
except ImportError:
if support.verbose:
print("skipping tty")
diff --git a/Lib/test/test_syntax.py b/Lib/test/test_syntax.py
index 5926b69..a9d3628 100644
--- a/Lib/test/test_syntax.py
+++ b/Lib/test/test_syntax.py
@@ -33,7 +33,7 @@ SyntaxError: invalid syntax
>>> None = 1
Traceback (most recent call last):
-SyntaxError: assignment to keyword
+SyntaxError: can't assign to keyword
It's a syntax error to assign to the empty tuple. Why isn't it an
error to assign to the empty list? It will always raise some error at
@@ -233,7 +233,7 @@ Traceback (most recent call last):
SyntaxError: can't assign to generator expression
>>> None += 1
Traceback (most recent call last):
-SyntaxError: assignment to keyword
+SyntaxError: can't assign to keyword
>>> f() += 1
Traceback (most recent call last):
SyntaxError: can't assign to function call
diff --git a/Lib/test/test_sys.py b/Lib/test/test_sys.py
index e5ec85c..41f1439 100644
--- a/Lib/test/test_sys.py
+++ b/Lib/test/test_sys.py
@@ -6,6 +6,8 @@ import textwrap
import warnings
import operator
import codecs
+import gc
+import sysconfig
# count the number of test runs, used to create unique
# strings to intern in test_intern()
@@ -482,7 +484,7 @@ class SysModuleTest(unittest.TestCase):
def test_thread_info(self):
info = sys.thread_info
self.assertEqual(len(info), 3)
- self.assertIn(info.name, ('nt', 'os2', 'pthread', 'solaris', None))
+ self.assertIn(info.name, ('nt', 'pthread', 'solaris', None))
self.assertIn(info.lock, ('semaphore', 'mutex+cond', None))
def test_43581(self):
@@ -611,6 +613,33 @@ class SysModuleTest(unittest.TestCase):
ret, out, err = assert_python_ok(*args)
self.assertIn(b"free PyDictObjects", err)
+ @unittest.skipUnless(hasattr(sys, "getallocatedblocks"),
+ "sys.getallocatedblocks unavailable on this build")
+ def test_getallocatedblocks(self):
+ # Some sanity checks
+ with_pymalloc = sysconfig.get_config_var('WITH_PYMALLOC')
+ a = sys.getallocatedblocks()
+ self.assertIs(type(a), int)
+ if with_pymalloc:
+ self.assertGreater(a, 0)
+ else:
+ self.assertEqual(a, 0)
+ try:
+ # While one could construct a Python session where the number of
+ # allocated blocks exceeds the total reference count, it is unlikely
+ # to happen in a normal test run.
+ self.assertLess(a, sys.gettotalrefcount())
+ except AttributeError:
+ # gettotalrefcount() not available
+ pass
+ gc.collect()
+ b = sys.getallocatedblocks()
+ self.assertLessEqual(b, a)
+ gc.collect()
+ c = sys.getallocatedblocks()
+ self.assertIn(c, range(b - 50, b + 50))
+
+
class SizeofTest(unittest.TestCase):
def setUp(self):
diff --git a/Lib/test/test_sysconfig.py b/Lib/test/test_sysconfig.py
index 9219360..5293649 100644
--- a/Lib/test/test_sysconfig.py
+++ b/Lib/test/test_sysconfig.py
@@ -234,7 +234,7 @@ class TestSysConfig(unittest.TestCase):
self.assertTrue(os.path.isfile(config_h), config_h)
def test_get_scheme_names(self):
- wanted = ('nt', 'nt_user', 'os2', 'os2_home', 'osx_framework_user',
+ wanted = ('nt', 'nt_user', 'osx_framework_user',
'posix_home', 'posix_prefix', 'posix_user')
self.assertEqual(get_scheme_names(), wanted)
@@ -305,14 +305,13 @@ class TestSysConfig(unittest.TestCase):
if 'MACOSX_DEPLOYMENT_TARGET' in env:
del env['MACOSX_DEPLOYMENT_TARGET']
- with open('/dev/null', 'w') as devnull_fp:
- p = subprocess.Popen([
- sys.executable, '-c',
- 'import sysconfig; print(sysconfig.get_platform())',
- ],
- stdout=subprocess.PIPE,
- stderr=devnull_fp,
- env=env)
+ p = subprocess.Popen([
+ sys.executable, '-c',
+ 'import sysconfig; print(sysconfig.get_platform())',
+ ],
+ stdout=subprocess.PIPE,
+ stderr=subprocess.DEVNULL,
+ env=env)
test_platform = p.communicate()[0].strip()
test_platform = test_platform.decode('utf-8')
status = p.wait()
@@ -325,20 +324,19 @@ class TestSysConfig(unittest.TestCase):
env = os.environ.copy()
env['MACOSX_DEPLOYMENT_TARGET'] = '10.1'
- with open('/dev/null') as dev_null:
- p = subprocess.Popen([
- sys.executable, '-c',
- 'import sysconfig; print(sysconfig.get_platform())',
- ],
- stdout=subprocess.PIPE,
- stderr=dev_null,
- env=env)
- test_platform = p.communicate()[0].strip()
- test_platform = test_platform.decode('utf-8')
- status = p.wait()
-
- self.assertEqual(status, 0)
- self.assertEqual(my_platform, test_platform)
+ p = subprocess.Popen([
+ sys.executable, '-c',
+ 'import sysconfig; print(sysconfig.get_platform())',
+ ],
+ stdout=subprocess.PIPE,
+ stderr=subprocess.DEVNULL,
+ env=env)
+ test_platform = p.communicate()[0].strip()
+ test_platform = test_platform.decode('utf-8')
+ status = p.wait()
+
+ self.assertEqual(status, 0)
+ self.assertEqual(my_platform, test_platform)
def test_srcdir(self):
# See Issues #15322, #15364.
diff --git a/Lib/test/test_tarfile.py b/Lib/test/test_tarfile.py
index c7bf774..8bbf4ae 100644
--- a/Lib/test/test_tarfile.py
+++ b/Lib/test/test_tarfile.py
@@ -342,7 +342,7 @@ class MiscReadTest(CommonReadTest):
tar.extract("ustar/regtype", TEMPDIR)
try:
tar.extract("ustar/lnktype", TEMPDIR)
- except EnvironmentError as e:
+ except OSError as e:
if e.errno == errno.ENOENT:
self.fail("hardlink not extracted properly")
@@ -352,7 +352,7 @@ class MiscReadTest(CommonReadTest):
try:
tar.extract("ustar/symtype", TEMPDIR)
- except EnvironmentError as e:
+ except OSError as e:
if e.errno == errno.ENOENT:
self.fail("symlink not extracted properly")
diff --git a/Lib/test/test_tempfile.py b/Lib/test/test_tempfile.py
index d79f319..f5193dc 100644
--- a/Lib/test/test_tempfile.py
+++ b/Lib/test/test_tempfile.py
@@ -149,7 +149,7 @@ class TestRandomNameSequence(BaseTestCase):
# via any bugs above
try:
os.kill(pid, signal.SIGKILL)
- except EnvironmentError:
+ except OSError:
pass
os.close(read_fd)
os.close(write_fd)
@@ -276,7 +276,7 @@ class TestMkstempInner(BaseTestCase):
file = self.do_create()
mode = stat.S_IMODE(os.stat(file.name).st_mode)
expected = 0o600
- if sys.platform in ('win32', 'os2emx'):
+ if sys.platform == 'win32':
# There's no distinction among 'user', 'group' and 'world';
# replicate the 'user' bits.
user = expected >> 6
@@ -310,7 +310,7 @@ class TestMkstempInner(BaseTestCase):
# On Windows a spawn* /path/ with embedded spaces shouldn't be quoted,
# but an arg with embedded spaces should be decorated with double
# quotes on each end
- if sys.platform in ('win32',):
+ if sys.platform == 'win32':
decorated = '"%s"' % sys.executable
tester = '"%s"' % tester
else:
@@ -479,7 +479,7 @@ class TestMkdtemp(BaseTestCase):
mode = stat.S_IMODE(os.stat(dir).st_mode)
mode &= 0o777 # Mask off sticky bits inherited from /tmp
expected = 0o700
- if sys.platform in ('win32', 'os2emx'):
+ if sys.platform == 'win32':
# There's no distinction among 'user', 'group' and 'world';
# replicate the 'user' bits.
user = expected >> 6
diff --git a/Lib/test/test_thread.py b/Lib/test/test_thread.py
index a191e15..f9a721b 100644
--- a/Lib/test/test_thread.py
+++ b/Lib/test/test_thread.py
@@ -68,7 +68,7 @@ class ThreadRunningTests(BasicThreadTest):
thread.stack_size(0)
self.assertEqual(thread.stack_size(), 0, "stack_size not reset to default")
- if os.name not in ("nt", "os2", "posix"):
+ if os.name not in ("nt", "posix"):
return
tss_supported = True
diff --git a/Lib/test/test_threading.py b/Lib/test/test_threading.py
index bb8d9b6..13c017d 100644
--- a/Lib/test/test_threading.py
+++ b/Lib/test/test_threading.py
@@ -451,8 +451,7 @@ class ThreadJoinOnShutdown(BaseTestCase):
# #12316 and #11870), and fork() from a worker thread is known to trigger
# problems with some operating systems (issue #3863): skip problematic tests
# on platforms known to behave badly.
- platforms_to_skip = ('freebsd4', 'freebsd5', 'freebsd6', 'netbsd5',
- 'os2emx')
+ platforms_to_skip = ('freebsd4', 'freebsd5', 'freebsd6', 'netbsd5')
def _run_and_join(self, script):
script = """if 1:
@@ -754,7 +753,8 @@ class ThreadingExceptionTests(BaseTestCase):
lock = threading.Lock()
self.assertRaises(RuntimeError, lock.release)
- @unittest.skipUnless(sys.platform == 'darwin', 'test macosx problem')
+ @unittest.skipUnless(sys.platform == 'darwin' and test.support.python_is_optimized(),
+ 'test macosx problem')
def test_recursion_limit(self):
# Issue 9670
# test that excessive recursion within a non-main thread causes
diff --git a/Lib/test/test_threadsignals.py b/Lib/test/test_threadsignals.py
index f975a75..b1004e6 100644
--- a/Lib/test/test_threadsignals.py
+++ b/Lib/test/test_threadsignals.py
@@ -8,7 +8,7 @@ from test.support import run_unittest, import_module
thread = import_module('_thread')
import time
-if sys.platform[:3] in ('win', 'os2') or sys.platform=='riscos':
+if sys.platform[:3] == 'win':
raise unittest.SkipTest("Can't test signal on %s" % sys.platform)
process_pid = os.getpid()
diff --git a/Lib/test/test_unicode.py b/Lib/test/test_unicode.py
index 9aaedd3..6ac62fb 100644
--- a/Lib/test/test_unicode.py
+++ b/Lib/test/test_unicode.py
@@ -1983,9 +1983,10 @@ class UnicodeTest(string_tests.CommonTest,
# Test PyUnicode_FromFormat()
def test_from_format(self):
support.import_module('ctypes')
- from ctypes import (pythonapi, py_object,
+ from ctypes import (
+ pythonapi, py_object, sizeof,
c_int, c_long, c_longlong, c_ssize_t,
- c_uint, c_ulong, c_ulonglong, c_size_t)
+ c_uint, c_ulong, c_ulonglong, c_size_t, c_void_p)
name = "PyUnicode_FromFormat"
_PyUnicode_FromFormat = getattr(pythonapi, name)
_PyUnicode_FromFormat.restype = py_object
@@ -2036,6 +2037,31 @@ class UnicodeTest(string_tests.CommonTest,
self.assertEqual(PyUnicode_FromFormat(b'%llu', c_ulonglong(123)), '123')
self.assertEqual(PyUnicode_FromFormat(b'%zu', c_size_t(123)), '123')
+ # test long output
+ min_longlong = -(2 ** (8 * sizeof(c_longlong) - 1))
+ max_longlong = -min_longlong - 1
+ self.assertEqual(PyUnicode_FromFormat(b'%lld', c_longlong(min_longlong)), str(min_longlong))
+ self.assertEqual(PyUnicode_FromFormat(b'%lld', c_longlong(max_longlong)), str(max_longlong))
+ max_ulonglong = 2 ** (8 * sizeof(c_ulonglong)) - 1
+ self.assertEqual(PyUnicode_FromFormat(b'%llu', c_ulonglong(max_ulonglong)), str(max_ulonglong))
+ PyUnicode_FromFormat(b'%p', c_void_p(-1))
+
+ # test padding (width and/or precision)
+ self.assertEqual(PyUnicode_FromFormat(b'%010i', c_int(123)), '123'.rjust(10, '0'))
+ self.assertEqual(PyUnicode_FromFormat(b'%100i', c_int(123)), '123'.rjust(100))
+ self.assertEqual(PyUnicode_FromFormat(b'%.100i', c_int(123)), '123'.rjust(100, '0'))
+ self.assertEqual(PyUnicode_FromFormat(b'%100.80i', c_int(123)), '123'.rjust(80, '0').rjust(100))
+
+ self.assertEqual(PyUnicode_FromFormat(b'%010u', c_uint(123)), '123'.rjust(10, '0'))
+ self.assertEqual(PyUnicode_FromFormat(b'%100u', c_uint(123)), '123'.rjust(100))
+ self.assertEqual(PyUnicode_FromFormat(b'%.100u', c_uint(123)), '123'.rjust(100, '0'))
+ self.assertEqual(PyUnicode_FromFormat(b'%100.80u', c_uint(123)), '123'.rjust(80, '0').rjust(100))
+
+ self.assertEqual(PyUnicode_FromFormat(b'%010x', c_int(0x123)), '123'.rjust(10, '0'))
+ self.assertEqual(PyUnicode_FromFormat(b'%100x', c_int(0x123)), '123'.rjust(100))
+ self.assertEqual(PyUnicode_FromFormat(b'%.100x', c_int(0x123)), '123'.rjust(100, '0'))
+ self.assertEqual(PyUnicode_FromFormat(b'%100.80x', c_int(0x123)), '123'.rjust(80, '0').rjust(100))
+
# test %A
text = PyUnicode_FromFormat(b'%%A:%A', 'abc\xe9\uabcd\U0010ffff')
self.assertEqual(text, r"%A:'abc\xe9\uabcd\U0010ffff'")
diff --git a/Lib/test/test_unicodedata.py b/Lib/test/test_unicodedata.py
index 99aa003..677508b 100644
--- a/Lib/test/test_unicodedata.py
+++ b/Lib/test/test_unicodedata.py
@@ -80,7 +80,7 @@ class UnicodeDatabaseTest(unittest.TestCase):
class UnicodeFunctionsTest(UnicodeDatabaseTest):
# update this, if the database changes
- expectedchecksum = '17fe2f12b788e4fff5479b469c4404bb6ecf841f'
+ expectedchecksum = 'ebd64e81553c9cb37f424f5616254499fcd8849e'
def test_function_checksum(self):
data = []
h = hashlib.sha1()
diff --git a/Lib/test/test_urllib.py b/Lib/test/test_urllib.py
index 52e7749..1e30fa6 100644
--- a/Lib/test/test_urllib.py
+++ b/Lib/test/test_urllib.py
@@ -270,9 +270,10 @@ Content-Type: text/html; charset=iso-8859-1
def test_missing_localfile(self):
# Test for #10836
- # 3.3 - URLError is not captured, explicit IOError is raised.
- with self.assertRaises(IOError):
+ with self.assertRaises(urllib.error.URLError) as e:
urlopen('file://localhost/a/file/which/doesnot/exists.py')
+ self.assertTrue(e.exception.filename)
+ self.assertTrue(e.exception.reason)
def test_file_notexists(self):
fd, tmp_file = tempfile.mkstemp()
@@ -285,20 +286,21 @@ Content-Type: text/html; charset=iso-8859-1
os.close(fd)
os.unlink(tmp_file)
self.assertFalse(os.path.exists(tmp_file))
- # 3.3 - IOError instead of URLError
- with self.assertRaises(IOError):
+ with self.assertRaises(urllib.error.URLError):
urlopen(tmp_fileurl)
def test_ftp_nohost(self):
test_ftp_url = 'ftp:///path'
- # 3.3 - IOError instead of URLError
- with self.assertRaises(IOError):
+ with self.assertRaises(urllib.error.URLError) as e:
urlopen(test_ftp_url)
+ self.assertFalse(e.exception.filename)
+ self.assertTrue(e.exception.reason)
def test_ftp_nonexisting(self):
- # 3.3 - IOError instead of URLError
- with self.assertRaises(IOError):
+ with self.assertRaises(urllib.error.URLError) as e:
urlopen('ftp://localhost/a/file/which/doesnot/exists.py')
+ self.assertFalse(e.exception.filename)
+ self.assertTrue(e.exception.reason)
def test_userpass_inurl(self):
@@ -335,6 +337,79 @@ Content-Type: text/html; charset=iso-8859-1
with support.check_warnings(('',DeprecationWarning)):
urllib.request.URLopener()
+class urlopen_DataTests(unittest.TestCase):
+ """Test urlopen() opening a data URL."""
+
+ def setUp(self):
+ # text containing URL special- and unicode-characters
+ self.text = "test data URLs :;,%=& \u00f6 \u00c4 "
+ # 2x1 pixel RGB PNG image with one black and one white pixel
+ self.image = (
+ b'\x89PNG\r\n\x1a\n\x00\x00\x00\rIHDR\x00\x00\x00\x02\x00\x00\x00'
+ b'\x01\x08\x02\x00\x00\x00{@\xe8\xdd\x00\x00\x00\x01sRGB\x00\xae'
+ b'\xce\x1c\xe9\x00\x00\x00\x0fIDAT\x08\xd7c```\xf8\xff\xff?\x00'
+ b'\x06\x01\x02\xfe\no/\x1e\x00\x00\x00\x00IEND\xaeB`\x82')
+
+ self.text_url = (
+ "data:text/plain;charset=UTF-8,test%20data%20URLs%20%3A%3B%2C%25%3"
+ "D%26%20%C3%B6%20%C3%84%20")
+ self.text_url_base64 = (
+ "data:text/plain;charset=ISO-8859-1;base64,dGVzdCBkYXRhIFVSTHMgOjs"
+ "sJT0mIPYgxCA%3D")
+ # base64 encoded data URL that contains ignorable spaces,
+ # such as "\n", " ", "%0A", and "%20".
+ self.image_url = (
+ "data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAAIAAAABCAIAAAB7\n"
+ "QOjdAAAAAXNSR0IArs4c6QAAAA9JREFUCNdj%0AYGBg%2BP//PwAGAQL%2BCm8 "
+ "vHgAAAABJRU5ErkJggg%3D%3D%0A%20")
+
+ self.text_url_resp = urllib.request.urlopen(self.text_url)
+ self.text_url_base64_resp = urllib.request.urlopen(
+ self.text_url_base64)
+ self.image_url_resp = urllib.request.urlopen(self.image_url)
+
+ def test_interface(self):
+ # Make sure object returned by urlopen() has the specified methods
+ for attr in ("read", "readline", "readlines",
+ "close", "info", "geturl", "getcode", "__iter__"):
+ self.assertTrue(hasattr(self.text_url_resp, attr),
+ "object returned by urlopen() lacks %s attribute" %
+ attr)
+
+ def test_info(self):
+ self.assertIsInstance(self.text_url_resp.info(), email.message.Message)
+ self.assertEqual(self.text_url_base64_resp.info().get_params(),
+ [('text/plain', ''), ('charset', 'ISO-8859-1')])
+ self.assertEqual(self.image_url_resp.info()['content-length'],
+ str(len(self.image)))
+ self.assertEqual(urllib.request.urlopen("data:,").info().get_params(),
+ [('text/plain', ''), ('charset', 'US-ASCII')])
+
+ def test_geturl(self):
+ self.assertEqual(self.text_url_resp.geturl(), self.text_url)
+ self.assertEqual(self.text_url_base64_resp.geturl(),
+ self.text_url_base64)
+ self.assertEqual(self.image_url_resp.geturl(), self.image_url)
+
+ def test_read_text(self):
+ self.assertEqual(self.text_url_resp.read().decode(
+ dict(self.text_url_resp.info().get_params())['charset']), self.text)
+
+ def test_read_text_base64(self):
+ self.assertEqual(self.text_url_base64_resp.read().decode(
+ dict(self.text_url_base64_resp.info().get_params())['charset']),
+ self.text)
+
+ def test_read_image(self):
+ self.assertEqual(self.image_url_resp.read(), self.image)
+
+ def test_missing_comma(self):
+ self.assertRaises(ValueError, urllib.request.urlopen, 'data:text/plain')
+
+ def test_invalid_base64_data(self):
+ # missing padding character
+ self.assertRaises(ValueError, urllib.request.urlopen, 'data:;base64,Cg=')
+
class urlretrieve_FileTests(unittest.TestCase):
"""Test urllib.urlretrieve() on local files"""
@@ -1311,6 +1386,7 @@ def test_main():
support.run_unittest(
urlopen_FileTests,
urlopen_HttpTests,
+ urlopen_DataTests,
urlretrieve_FileTests,
urlretrieve_HttpTests,
ProxyTests,
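
A brief usage sketch of the data: URL support the urlopen_DataTests above exercise (served through the new DataHandler in urllib.request); the payload is an arbitrary example:

    import urllib.request

    resp = urllib.request.urlopen('data:text/plain;charset=UTF-8,Hello%20World')
    charset = resp.info().get_param('charset')    # 'UTF-8'
    print(resp.read().decode(charset))            # Hello World
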
diff --git a/Lib/test/test_urllib2.py b/Lib/test/test_urllib2.py
index 7ae1553..33042ed 100644
--- a/Lib/test/test_urllib2.py
+++ b/Lib/test/test_urllib2.py
@@ -124,6 +124,19 @@ def test_request_headers_methods():
>>> r.get_header("Not-there", "default")
'default'
+ Method r.remove_header should remove items both from r.headers and
+ r.unredirected_hdrs dictionaries
+
+ >>> r.remove_header("Spam-eggs")
+ >>> r.has_header("Spam-eggs")
+ False
+ >>> r.add_unredirected_header("Unredirected-spam", "Eggs")
+ >>> r.has_header("Unredirected-spam")
+ True
+ >>> r.remove_header("Unredirected-spam")
+ >>> r.has_header("Unredirected-spam")
+ False
+
"""
@@ -302,6 +315,7 @@ class MockHTTPClass:
self.req_headers = []
self.data = None
self.raise_on_endheaders = False
+ self.sock = None
self._tunnel_headers = {}
def __call__(self, host, timeout=socket._GLOBAL_DEFAULT_TIMEOUT):
@@ -1431,6 +1445,20 @@ class MiscTests(unittest.TestCase):
self.opener_has_handler(o, MyHTTPHandler)
self.opener_has_handler(o, MyOtherHTTPHandler)
+ def test_issue16464(self):
+ opener = urllib.request.build_opener()
+ request = urllib.request.Request("http://www.python.org/~jeremy/")
+ self.assertEqual(None, request.data)
+
+ opener.open(request, "1".encode("us-ascii"))
+ self.assertEqual(b"1", request.data)
+ self.assertEqual("1", request.get_header("Content-length"))
+
+ opener.open(request, "1234567890".encode("us-ascii"))
+ self.assertEqual(b"1234567890", request.data)
+ self.assertEqual("10", request.get_header("Content-length"))
+
+
def opener_has_handler(self, opener, handler_class):
self.assertTrue(any(h.__class__ == handler_class
for h in opener.handlers))
@@ -1454,6 +1482,16 @@ class RequestTests(unittest.TestCase):
self.assertTrue(self.get.data)
self.assertEqual("POST", self.get.get_method())
+ # issue 16464
+ # if we change data we need to remove the Content-length header
+ # (it was most likely calculated for the previous value)
+ def test_setting_data_should_remove_content_length(self):
+ self.assertFalse("Content-length" in self.get.unredirected_hdrs)
+ self.get.add_unredirected_header("Content-length", 42)
+ self.assertEqual(42, self.get.unredirected_hdrs["Content-length"])
+ self.get.data = "spam"
+ self.assertFalse("Content-length" in self.get.unredirected_hdrs)
+
def test_get_full_url(self):
self.assertEqual("http://www.python.org/~jeremy/",
self.get.get_full_url())
@@ -1501,11 +1539,15 @@ def test_HTTPError_interface():
interface even though HTTPError is a subclass of URLError.
>>> msg = 'something bad happened'
- >>> url = code = hdrs = fp = None
+ >>> url = code = fp = None
+ >>> hdrs = 'Content-Length: 42'
>>> err = urllib.error.HTTPError(url, code, msg, hdrs, fp)
>>> assert hasattr(err, 'reason')
>>> err.reason
'something bad happened'
+ >>> assert hasattr(err, 'headers')
+ >>> err.headers
+ 'Content-Length: 42'
"""
def test_main(verbose=None):
diff --git a/Lib/test/test_urllib2net.py b/Lib/test/test_urllib2net.py
index 7f3c93a..b3c1a68 100644
--- a/Lib/test/test_urllib2net.py
+++ b/Lib/test/test_urllib2net.py
@@ -216,7 +216,7 @@ class OtherNetworkTests(unittest.TestCase):
debug(url)
try:
f = urlopen(url, req, TIMEOUT)
- except EnvironmentError as err:
+ except OSError as err:
debug(err)
if expected_err:
msg = ("Didn't get expected error(s) %s for %s %s, got %s: %s" %
diff --git a/Lib/test/test_venv.py b/Lib/test/test_venv.py
index 9696844..2a528c9 100644
--- a/Lib/test/test_venv.py
+++ b/Lib/test/test_venv.py
@@ -113,13 +113,68 @@ class BasicTest(BaseTest):
out, err = p.communicate()
self.assertEqual(out.strip(), expected.encode())
+ if sys.platform == 'win32':
+ ENV_SUBDIRS = (
+ ('Scripts',),
+ ('Include',),
+ ('Lib',),
+ ('Lib', 'site-packages'),
+ )
+ else:
+ ENV_SUBDIRS = (
+ ('bin',),
+ ('include',),
+ ('lib',),
+ ('lib', 'python%d.%d' % sys.version_info[:2]),
+ ('lib', 'python%d.%d' % sys.version_info[:2], 'site-packages'),
+ )
+
+ def create_contents(self, paths, filename):
+ """
+ Create some files in the environment which are unrelated
+ to the virtual environment.
+ """
+ for subdirs in paths:
+ d = os.path.join(self.env_dir, *subdirs)
+ os.mkdir(d)
+ fn = os.path.join(d, filename)
+ with open(fn, 'wb') as f:
+ f.write(b'Still here?')
+
def test_overwrite_existing(self):
"""
- Test control of overwriting an existing environment directory.
+ Test creating environment in an existing directory.
"""
- self.assertRaises(ValueError, venv.create, self.env_dir)
+ self.create_contents(self.ENV_SUBDIRS, 'foo')
+ venv.create(self.env_dir)
+ for subdirs in self.ENV_SUBDIRS:
+ fn = os.path.join(self.env_dir, *(subdirs + ('foo',)))
+ self.assertTrue(os.path.exists(fn))
+ with open(fn, 'rb') as f:
+ self.assertEqual(f.read(), b'Still here?')
+
builder = venv.EnvBuilder(clear=True)
builder.create(self.env_dir)
+ for subdirs in self.ENV_SUBDIRS:
+ fn = os.path.join(self.env_dir, *(subdirs + ('foo',)))
+ self.assertFalse(os.path.exists(fn))
+
+ def clear_directory(self, path):
+ for fn in os.listdir(path):
+ fn = os.path.join(path, fn)
+ if os.path.islink(fn) or os.path.isfile(fn):
+ os.remove(fn)
+ elif os.path.isdir(fn):
+ shutil.rmtree(fn)
+
+ def test_unoverwritable_fails(self):
+ # Create a file clashing with directories in the env dir
+ for paths in self.ENV_SUBDIRS[:3]:
+ fn = os.path.join(self.env_dir, *paths)
+ with open(fn, 'wb') as f:
+ f.write(b'')
+ self.assertRaises((ValueError, OSError), venv.create, self.env_dir)
+ self.clear_directory(self.env_dir)
def test_upgrade(self):
"""
diff --git a/Lib/test/test_wait3.py b/Lib/test/test_wait3.py
index bd06c8d..f6a065d 100644
--- a/Lib/test/test_wait3.py
+++ b/Lib/test/test_wait3.py
@@ -7,15 +7,11 @@ import unittest
from test.fork_wait import ForkWait
from test.support import run_unittest, reap_children
-try:
- os.fork
-except AttributeError:
- raise unittest.SkipTest("os.fork not defined -- skipping test_wait3")
+if not hasattr(os, 'fork'):
+ raise unittest.SkipTest("os.fork not defined")
-try:
- os.wait3
-except AttributeError:
- raise unittest.SkipTest("os.wait3 not defined -- skipping test_wait3")
+if not hasattr(os, 'wait3'):
+ raise unittest.SkipTest("os.wait3 not defined")
class Wait3Test(ForkWait):
def wait_impl(self, cpid):
diff --git a/Lib/test/test_weakref.py b/Lib/test/test_weakref.py
index 571e33f..cdd26c7 100644
--- a/Lib/test/test_weakref.py
+++ b/Lib/test/test_weakref.py
@@ -47,6 +47,11 @@ class Object:
return NotImplemented
def __hash__(self):
return hash(self.arg)
+ def some_method(self):
+ return 4
+ def other_method(self):
+ return 5
+
class RefCycle:
def __init__(self):
@@ -901,6 +906,140 @@ class SubclassableWeakrefTestCase(TestBase):
self.assertEqual(self.cbcalled, 0)
+class WeakMethodTestCase(unittest.TestCase):
+
+ def _subclass(self):
+ """Return a Object subclass overriding `some_method`."""
+ class C(Object):
+ def some_method(self):
+ return 6
+ return C
+
+ def test_alive(self):
+ o = Object(1)
+ r = weakref.WeakMethod(o.some_method)
+ self.assertIsInstance(r, weakref.ReferenceType)
+ self.assertIsInstance(r(), type(o.some_method))
+ self.assertIs(r().__self__, o)
+ self.assertIs(r().__func__, o.some_method.__func__)
+ self.assertEqual(r()(), 4)
+
+ def test_object_dead(self):
+ o = Object(1)
+ r = weakref.WeakMethod(o.some_method)
+ del o
+ gc.collect()
+ self.assertIs(r(), None)
+
+ def test_method_dead(self):
+ C = self._subclass()
+ o = C(1)
+ r = weakref.WeakMethod(o.some_method)
+ del C.some_method
+ gc.collect()
+ self.assertIs(r(), None)
+
+ def test_callback_when_object_dead(self):
+ # Test callback behaviour when object dies first.
+ C = self._subclass()
+ calls = []
+ def cb(arg):
+ calls.append(arg)
+ o = C(1)
+ r = weakref.WeakMethod(o.some_method, cb)
+ del o
+ gc.collect()
+ self.assertEqual(calls, [r])
+ # Callback is only called once.
+ C.some_method = Object.some_method
+ gc.collect()
+ self.assertEqual(calls, [r])
+
+ def test_callback_when_method_dead(self):
+ # Test callback behaviour when method dies first.
+ C = self._subclass()
+ calls = []
+ def cb(arg):
+ calls.append(arg)
+ o = C(1)
+ r = weakref.WeakMethod(o.some_method, cb)
+ del C.some_method
+ gc.collect()
+ self.assertEqual(calls, [r])
+ # Callback is only called once.
+ del o
+ gc.collect()
+ self.assertEqual(calls, [r])
+
+ @support.cpython_only
+ def test_no_cycles(self):
+ # A WeakMethod doesn't create any reference cycle to itself.
+ o = Object(1)
+ def cb(_):
+ pass
+ r = weakref.WeakMethod(o.some_method, cb)
+ wr = weakref.ref(r)
+ del r
+ self.assertIs(wr(), None)
+
+ def test_equality(self):
+ def _eq(a, b):
+ self.assertTrue(a == b)
+ self.assertFalse(a != b)
+ def _ne(a, b):
+ self.assertTrue(a != b)
+ self.assertFalse(a == b)
+ x = Object(1)
+ y = Object(1)
+ a = weakref.WeakMethod(x.some_method)
+ b = weakref.WeakMethod(y.some_method)
+ c = weakref.WeakMethod(x.other_method)
+ d = weakref.WeakMethod(y.other_method)
+ # Objects equal, same method
+ _eq(a, b)
+ _eq(c, d)
+ # Objects equal, different method
+ _ne(a, c)
+ _ne(a, d)
+ _ne(b, c)
+ _ne(b, d)
+ # Objects unequal, same or different method
+ z = Object(2)
+ e = weakref.WeakMethod(z.some_method)
+ f = weakref.WeakMethod(z.other_method)
+ _ne(a, e)
+ _ne(a, f)
+ _ne(b, e)
+ _ne(b, f)
+ del x, y, z
+ gc.collect()
+ # Dead WeakMethods compare by identity
+ refs = a, b, c, d, e, f
+ for q in refs:
+ for r in refs:
+ self.assertEqual(q == r, q is r)
+ self.assertEqual(q != r, q is not r)
+
+ def test_hashing(self):
+ # Alive WeakMethods are hashable if the underlying object is
+ # hashable.
+ x = Object(1)
+ y = Object(1)
+ a = weakref.WeakMethod(x.some_method)
+ b = weakref.WeakMethod(y.some_method)
+ c = weakref.WeakMethod(y.other_method)
+ # Since WeakMethod objects are equal, the hashes should be equal.
+ self.assertEqual(hash(a), hash(b))
+ ha = hash(a)
+ # Dead WeakMethods retain their old hash value
+ del x, y
+ gc.collect()
+ self.assertEqual(hash(a), ha)
+ self.assertEqual(hash(b), ha)
+ # If it wasn't hashed when alive, a dead WeakMethod cannot be hashed.
+ self.assertRaises(TypeError, hash, c)
+
+
class MappingTestCase(TestBase):
COUNT = 10
@@ -1476,6 +1615,7 @@ __test__ = {'libreftest' : libreftest}
def test_main():
support.run_unittest(
ReferencesTestCase,
+ WeakMethodTestCase,
MappingTestCase,
WeakValueDictionaryTestCase,
WeakKeyDictionaryTestCase,
diff --git a/Lib/test/test_winreg.py b/Lib/test/test_winreg.py
index 55163c9..406c5a2 100644
--- a/Lib/test/test_winreg.py
+++ b/Lib/test/test_winreg.py
@@ -8,7 +8,7 @@ threading = support.import_module("threading")
from platform import machine
# Do this first so test will be skipped if module doesn't exist
-support.import_module('winreg')
+support.import_module('winreg', required_on=['win'])
# Now import everything
from winreg import *
@@ -97,7 +97,7 @@ class BaseWinregTests(unittest.TestCase):
QueryInfoKey(int_sub_key)
self.fail("It appears the CloseKey() function does "
"not close the actual key!")
- except EnvironmentError:
+ except OSError:
pass
# ... and close that key that way :-)
int_key = int(key)
@@ -106,7 +106,7 @@ class BaseWinregTests(unittest.TestCase):
QueryInfoKey(int_key)
self.fail("It appears the key.Close() function "
"does not close the actual key!")
- except EnvironmentError:
+ except OSError:
pass
def _read_test_data(self, root_key, subkeystr="sub_key", OpenKey=OpenKey):
@@ -123,7 +123,7 @@ class BaseWinregTests(unittest.TestCase):
while 1:
try:
data = EnumValue(sub_key, index)
- except EnvironmentError:
+ except OSError:
break
self.assertEqual(data in test_data, True,
"Didn't read back the correct test data")
@@ -144,7 +144,7 @@ class BaseWinregTests(unittest.TestCase):
try:
EnumKey(key, 1)
self.fail("Was able to get a second key when I only have one!")
- except EnvironmentError:
+ except OSError:
pass
key.Close()
@@ -168,7 +168,7 @@ class BaseWinregTests(unittest.TestCase):
# Shouldnt be able to delete it twice!
DeleteKey(key, subkeystr)
self.fail("Deleting the key twice succeeded")
- except EnvironmentError:
+ except OSError:
pass
key.Close()
DeleteKey(root_key, test_key_name)
diff --git a/Lib/tkinter/__init__.py b/Lib/tkinter/__init__.py
index 7e5b915..255f5b9 100644
--- a/Lib/tkinter/__init__.py
+++ b/Lib/tkinter/__init__.py
@@ -2152,45 +2152,6 @@ class Button(Widget):
"""
return self.tk.call(self._w, 'invoke')
-
-# Indices:
-# XXX I don't like these -- take them away
-def AtEnd():
- warnings.warn("tkinter.AtEnd will be removed in 3.4",
- DeprecationWarning, stacklevel=2)
- return 'end'
-
-
-def AtInsert(*args):
- warnings.warn("tkinter.AtInsert will be removed in 3.4",
- DeprecationWarning, stacklevel=2)
- s = 'insert'
- for a in args:
- if a: s = s + (' ' + a)
- return s
-
-
-def AtSelFirst():
- warnings.warn("tkinter.AtSelFirst will be removed in 3.4",
- DeprecationWarning, stacklevel=2)
- return 'sel.first'
-
-
-def AtSelLast():
- warnings.warn("tkinter.AtSelLast will be removed in 3.4",
- DeprecationWarning, stacklevel=2)
- return 'sel.last'
-
-
-def At(x, y=None):
- warnings.warn("tkinter.At will be removed in 3.4",
- DeprecationWarning, stacklevel=2)
- if y is None:
- return '@%r' % (x,)
- else:
- return '@%r,%r' % (x, y)
-
-
class Canvas(Widget, XView, YView):
"""Canvas widget to display graphical elements like lines or text."""
def __init__(self, master=None, cnf={}, **kw):
diff --git a/Lib/tkinter/filedialog.py b/Lib/tkinter/filedialog.py
index 3ffb252..a71afb2 100644
--- a/Lib/tkinter/filedialog.py
+++ b/Lib/tkinter/filedialog.py
@@ -166,7 +166,7 @@ class FileDialog:
dir, pat = self.get_filter()
try:
names = os.listdir(dir)
- except os.error:
+ except OSError:
self.master.bell()
return
self.directory = dir
@@ -209,7 +209,7 @@ class FileDialog:
if not os.path.isabs(dir):
try:
pwd = os.getcwd()
- except os.error:
+ except OSError:
pwd = None
if pwd:
dir = os.path.join(pwd, dir)
diff --git a/Lib/traceback.py b/Lib/traceback.py
index eeb9e73..33b86c7 100644
--- a/Lib/traceback.py
+++ b/Lib/traceback.py
@@ -132,8 +132,7 @@ def _iter_chain(exc, custom_tb=None, seen=None):
its.append([(exc, custom_tb or exc.__traceback__)])
# itertools.chain is in an extension module and may be unavailable
for it in its:
- for x in it:
- yield x
+ yield from it
def print_exception(etype, value, tb, limit=None, file=None, chain=True):
diff --git a/Lib/unittest/loader.py b/Lib/unittest/loader.py
index 541884e..a5ac737 100644
--- a/Lib/unittest/loader.py
+++ b/Lib/unittest/loader.py
@@ -291,8 +291,7 @@ class TestLoader(object):
# tests loaded from package file
yield tests
# recurse into the package
- for test in self._find_tests(full_path, pattern):
- yield test
+ yield from self._find_tests(full_path, pattern)
else:
try:
yield load_tests(self, tests, pattern)
diff --git a/Lib/urllib/error.py b/Lib/urllib/error.py
index c1dfc40..237ed6b 100644
--- a/Lib/urllib/error.py
+++ b/Lib/urllib/error.py
@@ -61,6 +61,14 @@ class HTTPError(URLError, urllib.response.addinfourl):
def reason(self):
return self.msg
+ @property
+ def headers(self):
+ return self.hdrs
+
+ @headers.setter
+ def headers(self, headers):
+ self.hdrs = headers
+
# exception raised when downloaded size does not match content-length
class ContentTooShortError(URLError):
def __init__(self, message, content):
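
A minimal sketch of the new HTTPError.headers property, mirroring the doctest-style usage elsewhere in this patch; it simply aliases the existing hdrs attribute:

    import urllib.error

    err = urllib.error.HTTPError('http://example.com/', 404, 'Not Found',
                                 'Content-Length: 0', None)
    assert err.headers == 'Content-Length: 0'     # reads through to err.hdrs
    err.headers = 'Content-Length: 42'            # the setter updates hdrs as well
    assert err.hdrs == 'Content-Length: 42'
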
diff --git a/Lib/urllib/request.py b/Lib/urllib/request.py
index 5ddec5f..d8439b3 100644
--- a/Lib/urllib/request.py
+++ b/Lib/urllib/request.py
@@ -103,7 +103,8 @@ from urllib.error import URLError, HTTPError, ContentTooShortError
from urllib.parse import (
urlparse, urlsplit, urljoin, unwrap, quote, unquote,
splittype, splithost, splitport, splituser, splitpasswd,
- splitattr, splitquery, splitvalue, splittag, to_bytes, urlunparse)
+ splitattr, splitquery, splitvalue, splittag, to_bytes,
+ unquote_to_bytes, urlunparse)
from urllib.response import addinfourl, addclosehook
# check for SSL
@@ -121,7 +122,7 @@ __all__ = [
'HTTPPasswordMgr', 'HTTPPasswordMgrWithDefaultRealm',
'AbstractBasicAuthHandler', 'HTTPBasicAuthHandler', 'ProxyBasicAuthHandler',
'AbstractDigestAuthHandler', 'HTTPDigestAuthHandler', 'ProxyDigestAuthHandler',
- 'HTTPHandler', 'FileHandler', 'FTPHandler', 'CacheFTPHandler',
+ 'HTTPHandler', 'FileHandler', 'FTPHandler', 'CacheFTPHandler', 'DataHandler',
'UnknownHandler', 'HTTPErrorProcessor',
# Functions
'urlopen', 'install_opener', 'build_opener',
@@ -231,7 +232,7 @@ def urlcleanup():
for temp_file in _url_tempfiles:
try:
os.unlink(temp_file)
- except EnvironmentError:
+ except OSError:
pass
del _url_tempfiles[:]
@@ -265,12 +266,13 @@ class Request:
# unwrap('<URL:type://host/path>') --> 'type://host/path'
self.full_url = unwrap(url)
self.full_url, self.fragment = splittag(self.full_url)
- self.data = data
self.headers = {}
+ self.unredirected_hdrs = {}
+ self._data = None
+ self.data = data
self._tunnel_host = None
for key, value in headers.items():
self.add_header(key, value)
- self.unredirected_hdrs = {}
if origin_req_host is None:
origin_req_host = request_host(self)
self.origin_req_host = origin_req_host
@@ -278,6 +280,24 @@ class Request:
self.method = method
self._parse()
+ @property
+ def data(self):
+ return self._data
+
+ @data.setter
+ def data(self, data):
+ if data != self._data:
+ self._data = data
+ # issue 16464
+ # if we change data we need to remove the Content-length header
+ # (it was most likely calculated for the previous value)
+ if self.has_header("Content-length"):
+ self.remove_header("Content-length")
+
+ @data.deleter
+ def data(self):
+ self._data = None
+
def _parse(self):
self.type, rest = splittype(self.full_url)
if self.type is None:
@@ -373,6 +393,10 @@ class Request:
header_name,
self.unredirected_hdrs.get(header_name, default))
+ def remove_header(self, header_name):
+ self.headers.pop(header_name, None)
+ self.unredirected_hdrs.pop(header_name, None)
+
def header_items(self):
hdrs = self.unredirected_hdrs.copy()
hdrs.update(self.headers)
@@ -535,7 +559,8 @@ def build_opener(*handlers):
opener = OpenerDirector()
default_classes = [ProxyHandler, UnknownHandler, HTTPHandler,
HTTPDefaultErrorHandler, HTTPRedirectHandler,
- FTPHandler, FileHandler, HTTPErrorProcessor]
+ FTPHandler, FileHandler, HTTPErrorProcessor,
+ DataHandler]
if hasattr(http.client, "HTTPSConnection"):
default_classes.append(HTTPSHandler)
skip = set()
@@ -1255,6 +1280,12 @@ class AbstractHTTPHandler(BaseHandler):
raise URLError(err)
else:
r = h.getresponse()
+ # If the server does not send us a 'Connection: close' header,
+ # HTTPConnection assumes the socket should be left open. Manually
+ # mark the socket to be closed when this response object goes away.
+ if h.sock:
+ h.sock.close()
+ h.sock = None
r.url = req.get_full_url()
# This line replaces the .msg attribute of the HTTPResponse
@@ -1535,6 +1566,36 @@ class CacheFTPHandler(FTPHandler):
self.cache.clear()
self.timeout.clear()
+class DataHandler(BaseHandler):
+ def data_open(self, req):
+ # data URLs as specified in RFC 2397.
+ #
+ # ignores POSTed data
+ #
+ # syntax:
+ # dataurl := "data:" [ mediatype ] [ ";base64" ] "," data
+ # mediatype := [ type "/" subtype ] *( ";" parameter )
+ # data := *urlchar
+ # parameter := attribute "=" value
+ url = req.full_url
+
+ scheme, data = url.split(":",1)
+ mediatype, data = data.split(",",1)
+
+ # even base64-encoded data URLs might be percent-quoted, so unquote in any case:
+ data = unquote_to_bytes(data)
+ if mediatype.endswith(";base64"):
+ data = base64.decodebytes(data)
+ mediatype = mediatype[:-7]
+
+ if not mediatype:
+ mediatype = "text/plain;charset=US-ASCII"
+
+ headers = email.message_from_string("Content-type: %s\nContent-length: %d\n" %
+ (mediatype, len(data)))
+
+ return addinfourl(io.BytesIO(data), headers, url)
+
# Code move from the old urllib module
@@ -1658,7 +1719,7 @@ class URLopener:
return getattr(self, name)(url)
else:
return getattr(self, name)(url, data)
- except HTTPError:
+ except (HTTPError, URLError):
raise
except socket.error as msg:
raise IOError('socket error', msg).with_traceback(sys.exc_info()[2])
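
A short sketch of the Request changes above: remove_header() drops a header from both header dictionaries, and assigning to req.data discards a stale Content-length header (issue 16464). The URL is an arbitrary example:

    import urllib.request

    req = urllib.request.Request('http://www.example.com/')
    req.add_unredirected_header('Content-length', '4')
    req.data = b'spam'                            # new data, so Content-length is removed
    assert not req.has_header('Content-length')
    req.remove_header('Content-length')           # safe even when the header is absent
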
diff --git a/Lib/venv/__init__.py b/Lib/venv/__init__.py
index 3920747..ecdb68e 100644
--- a/Lib/venv/__init__.py
+++ b/Lib/venv/__init__.py
@@ -92,6 +92,14 @@ class EnvBuilder:
self.setup_scripts(context)
self.post_setup(context)
+ def clear_directory(self, path):
+ for fn in os.listdir(path):
+ fn = os.path.join(path, fn)
+ if os.path.islink(fn) or os.path.isfile(fn):
+ os.remove(fn)
+ elif os.path.isdir(fn):
+ shutil.rmtree(fn)
+
def ensure_directories(self, env_dir):
"""
Create the directories for the environment.
@@ -103,11 +111,11 @@ class EnvBuilder:
def create_if_needed(d):
if not os.path.exists(d):
os.makedirs(d)
+ elif os.path.islink(d) or os.path.isfile(d):
+ raise ValueError('Unable to create directory %r' % d)
- if os.path.exists(env_dir) and not (self.clear or self.upgrade):
- raise ValueError('Directory exists: %s' % env_dir)
if os.path.exists(env_dir) and self.clear:
- shutil.rmtree(env_dir)
+ self.clear_directory(env_dir)
context = Context()
context.env_dir = env_dir
context.env_name = os.path.split(env_dir)[1]
@@ -378,11 +386,10 @@ def main(args=None):
'when symlinks are not the default for '
'the platform.')
parser.add_argument('--clear', default=False, action='store_true',
- dest='clear', help='Delete the environment '
- 'directory if it already '
- 'exists. If not specified and '
- 'the directory exists, an error'
- ' is raised.')
+ dest='clear', help='Delete the contents of the '
+ 'environment directory if it '
+ 'already exists, before '
+ 'environment creation.')
parser.add_argument('--upgrade', default=False, action='store_true',
dest='upgrade', help='Upgrade the environment '
'directory to use this version '
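
A small sketch of the changed semantics, using only the public EnvBuilder API shown above: creating into an existing directory no longer raises, and clear=True empties its contents first:

    import tempfile
    import venv

    target = tempfile.mkdtemp()                   # an existing, possibly non-empty directory
    builder = venv.EnvBuilder(clear=True)
    builder.create(target)                        # clears target's contents, then builds the venv
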
diff --git a/Lib/venv/scripts/posix/activate.csh b/Lib/venv/scripts/posix/activate.csh
new file mode 100644
index 0000000..99d79e0
--- /dev/null
+++ b/Lib/venv/scripts/posix/activate.csh
@@ -0,0 +1,37 @@
+# This file must be used with "source bin/activate.csh" *from csh*.
+# You cannot run it directly.
+# Created by Davide Di Blasi <davidedb@gmail.com>.
+# Ported to Python 3.3 venv by Andrew Svetlov <andrew.svetlov@gmail.com>
+
+alias deactivate 'test $?_OLD_VIRTUAL_PATH != 0 && setenv PATH "$_OLD_VIRTUAL_PATH" && unset _OLD_VIRTUAL_PATH; rehash; test $?_OLD_VIRTUAL_PROMPT != 0 && set prompt="$_OLD_VIRTUAL_PROMPT" && unset _OLD_VIRTUAL_PROMPT; unsetenv VIRTUAL_ENV; test "\!:*" != "nondestructive" && unalias deactivate'
+
+# Unset irrelevant variables.
+deactivate nondestructive
+
+setenv VIRTUAL_ENV "__VENV_DIR__"
+
+set _OLD_VIRTUAL_PATH="$PATH"
+setenv PATH "$VIRTUAL_ENV/__VENV_BIN_NAME__:$PATH"
+
+
+set _OLD_VIRTUAL_PROMPT="$prompt"
+
+if (! "$?VIRTUAL_ENV_DISABLE_PROMPT") then
+ if ("__VENV_NAME__" != "") then
+ set env_name = "__VENV_NAME__"
+ else
+ if (`basename "$VIRTUAL_ENV"` == "__") then
+ # special case for Aspen magic directories
+ # see http://www.zetadev.com/software/aspen/
+ set env_name = `basename \`dirname "$VIRTUAL_ENV"\``
+ else
+ set env_name = `basename "$VIRTUAL_ENV"`
+ endif
+ endif
+ set prompt = "[$env_name] $prompt"
+ unset env_name
+endif
+
+alias pydoc python -m pydoc
+
+rehash
diff --git a/Lib/venv/scripts/posix/activate.fish b/Lib/venv/scripts/posix/activate.fish
new file mode 100644
index 0000000..5ac1638
--- /dev/null
+++ b/Lib/venv/scripts/posix/activate.fish
@@ -0,0 +1,74 @@
+# This file must be used with ". bin/activate.fish" *from fish* (http://fishshell.org)
+# you cannot run it directly
+
+function deactivate -d "Exit virtualenv and return to normal shell environment"
+ # reset old environment variables
+ if test -n "$_OLD_VIRTUAL_PATH"
+ set -gx PATH $_OLD_VIRTUAL_PATH
+ set -e _OLD_VIRTUAL_PATH
+ end
+ if test -n "$_OLD_VIRTUAL_PYTHONHOME"
+ set -gx PYTHONHOME $_OLD_VIRTUAL_PYTHONHOME
+ set -e _OLD_VIRTUAL_PYTHONHOME
+ end
+
+ if test -n "$_OLD_FISH_PROMPT_OVERRIDE"
+ functions -e fish_prompt
+ set -e _OLD_FISH_PROMPT_OVERRIDE
+ . ( begin
+ printf "function fish_prompt\n\t#"
+ functions _old_fish_prompt
+ end | psub )
+ functions -e _old_fish_prompt
+ end
+
+ set -e VIRTUAL_ENV
+ if test "$argv[1]" != "nondestructive"
+ # Self destruct!
+ functions -e deactivate
+ end
+end
+
+# unset irrelevant variables
+deactivate nondestructive
+
+set -gx VIRTUAL_ENV "__VENV_DIR__"
+
+set -gx _OLD_VIRTUAL_PATH $PATH
+set -gx PATH "$VIRTUAL_ENV/__VENV_BIN_NAME__" $PATH
+
+# unset PYTHONHOME if set
+if set -q PYTHONHOME
+ set -gx _OLD_VIRTUAL_PYTHONHOME $PYTHONHOME
+ set -e PYTHONHOME
+end
+
+if test -z "$VIRTUAL_ENV_DISABLE_PROMPT"
+ # fish uses a function instead of an env var to generate the prompt.
+
+ # save the current fish_prompt function as the function _old_fish_prompt
+ . ( begin
+ printf "function _old_fish_prompt\n\t#"
+ functions fish_prompt
+ end | psub )
+
+ # with the original prompt function renamed, we can override with our own.
+ function fish_prompt
+ # Prompt override?
+ if test -n "__VENV_NAME__"
+ printf "%s%s%s" "__VENV_NAME__" (set_color normal) (_old_fish_prompt)
+ return
+ end
+ # ...Otherwise, prepend env
+ set -l _checkbase (basename "$VIRTUAL_ENV")
+ if test $_checkbase = "__"
+ # special case for Aspen magic directories
+ # see http://www.zetadev.com/software/aspen/
+ printf "%s[%s]%s %s" (set_color -b blue white) (basename (dirname "$VIRTUAL_ENV")) (set_color normal) (_old_fish_prompt)
+ else
+ printf "%s(%s)%s%s" (set_color -b blue white) (basename "$VIRTUAL_ENV") (set_color normal) (_old_fish_prompt)
+ end
+ end
+
+ set -gx _OLD_FISH_PROMPT_OVERRIDE "$VIRTUAL_ENV"
+end
diff --git a/Lib/weakref.py b/Lib/weakref.py
index fcb6b74..8f9c107 100644
--- a/Lib/weakref.py
+++ b/Lib/weakref.py
@@ -27,7 +27,61 @@ ProxyTypes = (ProxyType, CallableProxyType)
__all__ = ["ref", "proxy", "getweakrefcount", "getweakrefs",
"WeakKeyDictionary", "ReferenceType", "ProxyType",
"CallableProxyType", "ProxyTypes", "WeakValueDictionary",
- "WeakSet"]
+ "WeakSet", "WeakMethod"]
+
+
+class WeakMethod(ref):
+ """
+ A custom `weakref.ref` subclass which simulates a weak reference to
+ a bound method, working around the lifetime problem of bound methods.
+ """
+
+ __slots__ = "_func_ref", "_meth_type", "_alive", "__weakref__"
+
+ def __new__(cls, meth, callback=None):
+ try:
+ obj = meth.__self__
+ func = meth.__func__
+ except AttributeError:
+ raise TypeError("argument should be a bound method, not {}"
+ .format(type(meth))) from None
+ def _cb(arg):
+ # The self-weakref trick is needed to avoid creating a reference
+ # cycle.
+ self = self_wr()
+ if self._alive:
+ self._alive = False
+ if callback is not None:
+ callback(self)
+ self = ref.__new__(cls, obj, _cb)
+ self._func_ref = ref(func, _cb)
+ self._meth_type = type(meth)
+ self._alive = True
+ self_wr = ref(self)
+ return self
+
+ def __call__(self):
+ obj = super().__call__()
+ func = self._func_ref()
+ if obj is None or func is None:
+ return None
+ return self._meth_type(func, obj)
+
+ def __eq__(self, other):
+ if isinstance(other, WeakMethod):
+ if not self._alive or not other._alive:
+ return self is other
+ return ref.__eq__(self, other) and self._func_ref == other._func_ref
+ return False
+
+ def __ne__(self, other):
+ if isinstance(other, WeakMethod):
+ if not self._alive or not other._alive:
+ return self is not other
+ return ref.__ne__(self, other) or self._func_ref != other._func_ref
+ return True
+
+ __hash__ = ref.__hash__
class WeakValueDictionary(collections.MutableMapping):
@@ -153,8 +207,7 @@ class WeakValueDictionary(collections.MutableMapping):
"""
with _IterationGuard(self):
- for wr in self.data.values():
- yield wr
+ yield from self.data.values()
def values(self):
with _IterationGuard(self):
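
A minimal usage sketch of the WeakMethod class added above; the Greeter class is an illustrative stand-in:

    import gc
    import weakref

    class Greeter:
        def hello(self):
            return 'hello'

    g = Greeter()
    wm = weakref.WeakMethod(g.hello)
    assert wm()() == 'hello'                      # dereference, then call the bound method
    del g
    gc.collect()
    assert wm() is None                           # dead once the instance goes away
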
diff --git a/Lib/webbrowser.py b/Lib/webbrowser.py
index 94d4ad4..8ebfa3c 100644
--- a/Lib/webbrowser.py
+++ b/Lib/webbrowser.py
@@ -638,17 +638,6 @@ if sys.platform == 'darwin':
register("MacOSX", None, MacOSXOSAScript('default'), -1)
-#
-# Platform support for OS/2
-#
-
-if sys.platform[:3] == "os2" and _iscommand("netscape"):
- _tryorder = []
- _browsers = {}
- register("os2netscape", None,
- GenericBrowser(["start", "netscape", "%s"]), -1)
-
-
# OK, now that we know what the default preference orders for each
# platform are, allow user to override them with the BROWSER variable.
if "BROWSER" in os.environ:
diff --git a/Lib/xml/etree/ElementPath.py b/Lib/xml/etree/ElementPath.py
index 52e65f0..341dac0 100644
--- a/Lib/xml/etree/ElementPath.py
+++ b/Lib/xml/etree/ElementPath.py
@@ -105,14 +105,12 @@ def prepare_child(next, token):
def prepare_star(next, token):
def select(context, result):
for elem in result:
- for e in elem:
- yield e
+ yield from elem
return select
def prepare_self(next, token):
def select(context, result):
- for elem in result:
- yield elem
+ yield from result
return select
def prepare_descendant(next, token):
diff --git a/Lib/xml/etree/ElementTree.py b/Lib/xml/etree/ElementTree.py
index 9553c51..ded894d 100644
--- a/Lib/xml/etree/ElementTree.py
+++ b/Lib/xml/etree/ElementTree.py
@@ -461,8 +461,7 @@ class Element:
if tag is None or self.tag == tag:
yield self
for e in self._children:
- for e in e.iter(tag):
- yield e
+ yield from e.iter(tag)
# compatibility
def getiterator(self, tag=None):
@@ -489,8 +488,7 @@ class Element:
if self.text:
yield self.text
for e in self:
- for s in e.itertext():
- yield s
+ yield from e.itertext()
if e.tail:
yield e.tail
diff --git a/Lib/zipfile.py b/Lib/zipfile.py
index 68051c8..df4ed91 100644
--- a/Lib/zipfile.py
+++ b/Lib/zipfile.py
@@ -1376,7 +1376,6 @@ class ZipFile:
zinfo.compress_size = len(data) # Compressed size
else:
zinfo.compress_size = zinfo.file_size
- zinfo.header_offset = self.fp.tell() # Start of header data
self.fp.write(zinfo.FileHeader())
self.fp.write(data)
self.fp.flush()