Diffstat (limited to 'Lib')
-rw-r--r--  Lib/_dummy_thread.py | 6
-rw-r--r--  Lib/_pyio.py | 58
-rw-r--r--  Lib/abc.py | 8
-rw-r--r--  Lib/argparse.py | 33
-rw-r--r--  Lib/asynchat.py | 2
-rw-r--r--  Lib/asyncore.py | 2
-rw-r--r--  Lib/bz2.py | 392
-rw-r--r--  Lib/collections/__init__.py (renamed from Lib/collections.py) | 156
-rw-r--r--  Lib/collections/abc.py (renamed from Lib/_abcoll.py) | 37
-rw-r--r--  Lib/compileall.py | 10
-rw-r--r--  Lib/concurrent/futures/_base.py | 22
-rw-r--r--  Lib/concurrent/futures/process.py | 5
-rw-r--r--  Lib/configparser.py | 3
-rw-r--r--  Lib/crypt.py | 62
-rw-r--r--  Lib/ctypes/test/test_memfunctions.py | 2
-rw-r--r--  Lib/ctypes/test/test_python_api.py | 3
-rw-r--r--  Lib/ctypes/test/test_refcounts.py | 3
-rw-r--r--  Lib/ctypes/test/test_stringptr.py | 2
-rw-r--r--  Lib/distutils/__init__.py | 2
-rw-r--r--  Lib/distutils/command/bdist_wininst.py | 6
-rw-r--r--  Lib/distutils/tests/test_bdist_rpm.py | 5
-rw-r--r--  Lib/doctest.py | 2
-rw-r--r--  Lib/email/_parseaddr.py | 8
-rw-r--r--  Lib/email/parser.py | 24
-rw-r--r--  Lib/email/test/__init__.py | 0
-rw-r--r--  Lib/email/utils.py | 28
-rw-r--r--  Lib/ftplib.py | 27
-rw-r--r--  Lib/functools.py | 13
-rw-r--r--  Lib/getopt.py | 19
-rw-r--r--  Lib/getpass.py | 2
-rw-r--r--  Lib/glob.py | 1
-rw-r--r--  Lib/gzip.py | 22
-rw-r--r--  Lib/http/client.py | 4
-rw-r--r--  Lib/http/server.py | 4
-rw-r--r--  Lib/idlelib/idlever.py | 2
-rw-r--r--  Lib/importlib/_bootstrap.py | 9
-rw-r--r--  Lib/importlib/test/regrtest.py | 7
-rw-r--r--  Lib/lib2to3/__main__.py | 4
-rw-r--r--  Lib/lib2to3/patcomp.py | 3
-rw-r--r--  Lib/lib2to3/pgen2/driver.py | 11
-rw-r--r--  Lib/lib2to3/tests/test_parser.py | 10
-rw-r--r--  Lib/logging/__init__.py | 50
-rw-r--r--  Lib/logging/handlers.py | 12
-rw-r--r--  Lib/multiprocessing/connection.py | 4
-rw-r--r--  Lib/multiprocessing/process.py | 8
-rw-r--r--  Lib/nntplib.py | 14
-rw-r--r--  Lib/optparse.py | 26
-rw-r--r--  Lib/os.py | 2
-rwxr-xr-x  Lib/platform.py | 83
-rw-r--r--  Lib/poplib.py | 17
-rwxr-xr-x  Lib/pydoc.py | 9
-rw-r--r--  Lib/random.py | 10
-rw-r--r--  Lib/re.py | 8
-rw-r--r--  Lib/site.py | 10
-rwxr-xr-x  Lib/smtpd.py | 2
-rwxr-xr-x  Lib/smtplib.py | 13
-rw-r--r--  Lib/socket.py | 3
-rw-r--r--  Lib/sqlite3/test/hooks.py | 52
-rw-r--r--  Lib/sqlite3/test/types.py | 2
-rw-r--r--  Lib/sre_parse.py | 2
-rw-r--r--  Lib/string.py | 22
-rw-r--r--  Lib/subprocess.py | 449
-rw-r--r--  Lib/tarfile.py | 18
-rw-r--r--  Lib/test/crashers/README | 4
-rw-r--r--  Lib/test/crashers/compiler_recursion.py | 12
-rw-r--r--  Lib/test/future_test1.py (renamed from Lib/test/test_future1.py) | 0
-rw-r--r--  Lib/test/future_test2.py (renamed from Lib/test/test_future2.py) | 0
-rw-r--r--  Lib/test/list_tests.py | 41
-rw-r--r--  Lib/test/pickletester.py | 24
-rwxr-xr-x  Lib/test/regrtest.py | 103
-rw-r--r--  Lib/test/script_helper.py | 5
-rw-r--r--  Lib/test/support.py | 63
-rw-r--r--  Lib/test/test_abc.py | 24
-rw-r--r--  Lib/test/test_argparse.py | 33
-rw-r--r--  Lib/test/test_asyncore.py | 20
-rw-r--r--  Lib/test/test_bigmem.py | 85
-rw-r--r--  Lib/test/test_builtin.py | 19
-rw-r--r--  Lib/test/test_bytes.py | 51
-rw-r--r--  Lib/test/test_bz2.py | 178
-rw-r--r--  Lib/test/test_cmd_line.py | 10
-rw-r--r--  Lib/test/test_codecs.py | 4
-rw-r--r--  Lib/test/test_collections.py | 90
-rw-r--r--  Lib/test/test_compileall.py | 2
-rw-r--r--  Lib/test/test_concurrent_futures.py | 10
-rw-r--r--  Lib/test/test_configparser.py (renamed from Lib/test/test_cfgparser.py) | 0
-rw-r--r--  Lib/test/test_crashers.py | 38
-rw-r--r--  Lib/test/test_crypt.py | 19
-rw-r--r--  Lib/test/test_descr.py | 6
-rw-r--r--  Lib/test/test_descrtut.py | 2
-rw-r--r--  Lib/test/test_dis.py | 104
-rw-r--r--  Lib/test/test_doctest.py | 444
-rw-r--r--  Lib/test/test_dummy_thread.py | 4
-rw-r--r--  Lib/test/test_email.py | 14
-rw-r--r--  Lib/test/test_email/__init__.py | 43
-rw-r--r--  Lib/test/test_email/__main__.py | 3
-rw-r--r--  Lib/test/test_email/data/PyBanner048.gif (renamed from Lib/email/test/data/PyBanner048.gif) | bin 954 -> 954 bytes
-rw-r--r--  Lib/test/test_email/data/audiotest.au (renamed from Lib/email/test/data/audiotest.au) | bin 28144 -> 28144 bytes
-rw-r--r--  Lib/test/test_email/data/msg_01.txt (renamed from Lib/email/test/data/msg_01.txt) | 0
-rw-r--r--  Lib/test/test_email/data/msg_02.txt (renamed from Lib/email/test/data/msg_02.txt) | 0
-rw-r--r--  Lib/test/test_email/data/msg_03.txt (renamed from Lib/email/test/data/msg_03.txt) | 0
-rw-r--r--  Lib/test/test_email/data/msg_04.txt (renamed from Lib/email/test/data/msg_04.txt) | 0
-rw-r--r--  Lib/test/test_email/data/msg_05.txt (renamed from Lib/email/test/data/msg_05.txt) | 0
-rw-r--r--  Lib/test/test_email/data/msg_06.txt (renamed from Lib/email/test/data/msg_06.txt) | 0
-rw-r--r--  Lib/test/test_email/data/msg_07.txt (renamed from Lib/email/test/data/msg_07.txt) | 0
-rw-r--r--  Lib/test/test_email/data/msg_08.txt (renamed from Lib/email/test/data/msg_08.txt) | 0
-rw-r--r--  Lib/test/test_email/data/msg_09.txt (renamed from Lib/email/test/data/msg_09.txt) | 0
-rw-r--r--  Lib/test/test_email/data/msg_10.txt (renamed from Lib/email/test/data/msg_10.txt) | 0
-rw-r--r--  Lib/test/test_email/data/msg_11.txt (renamed from Lib/email/test/data/msg_11.txt) | 0
-rw-r--r--  Lib/test/test_email/data/msg_12.txt (renamed from Lib/email/test/data/msg_12.txt) | 0
-rw-r--r--  Lib/test/test_email/data/msg_12a.txt (renamed from Lib/email/test/data/msg_12a.txt) | 0
-rw-r--r--  Lib/test/test_email/data/msg_13.txt (renamed from Lib/email/test/data/msg_13.txt) | 0
-rw-r--r--  Lib/test/test_email/data/msg_14.txt (renamed from Lib/email/test/data/msg_14.txt) | 0
-rw-r--r--  Lib/test/test_email/data/msg_15.txt (renamed from Lib/email/test/data/msg_15.txt) | 0
-rw-r--r--  Lib/test/test_email/data/msg_16.txt (renamed from Lib/email/test/data/msg_16.txt) | 0
-rw-r--r--  Lib/test/test_email/data/msg_17.txt (renamed from Lib/email/test/data/msg_17.txt) | 0
-rw-r--r--  Lib/test/test_email/data/msg_18.txt (renamed from Lib/email/test/data/msg_18.txt) | 0
-rw-r--r--  Lib/test/test_email/data/msg_19.txt (renamed from Lib/email/test/data/msg_19.txt) | 0
-rw-r--r--  Lib/test/test_email/data/msg_20.txt (renamed from Lib/email/test/data/msg_20.txt) | 0
-rw-r--r--  Lib/test/test_email/data/msg_21.txt (renamed from Lib/email/test/data/msg_21.txt) | 0
-rw-r--r--  Lib/test/test_email/data/msg_22.txt (renamed from Lib/email/test/data/msg_22.txt) | 0
-rw-r--r--  Lib/test/test_email/data/msg_23.txt (renamed from Lib/email/test/data/msg_23.txt) | 0
-rw-r--r--  Lib/test/test_email/data/msg_24.txt (renamed from Lib/email/test/data/msg_24.txt) | 0
-rw-r--r--  Lib/test/test_email/data/msg_25.txt (renamed from Lib/email/test/data/msg_25.txt) | 0
-rw-r--r--  Lib/test/test_email/data/msg_26.txt (renamed from Lib/email/test/data/msg_26.txt) | 0
-rw-r--r--  Lib/test/test_email/data/msg_27.txt (renamed from Lib/email/test/data/msg_27.txt) | 0
-rw-r--r--  Lib/test/test_email/data/msg_28.txt (renamed from Lib/email/test/data/msg_28.txt) | 0
-rw-r--r--  Lib/test/test_email/data/msg_29.txt (renamed from Lib/email/test/data/msg_29.txt) | 0
-rw-r--r--  Lib/test/test_email/data/msg_30.txt (renamed from Lib/email/test/data/msg_30.txt) | 0
-rw-r--r--  Lib/test/test_email/data/msg_31.txt (renamed from Lib/email/test/data/msg_31.txt) | 0
-rw-r--r--  Lib/test/test_email/data/msg_32.txt (renamed from Lib/email/test/data/msg_32.txt) | 0
-rw-r--r--  Lib/test/test_email/data/msg_33.txt (renamed from Lib/email/test/data/msg_33.txt) | 0
-rw-r--r--  Lib/test/test_email/data/msg_34.txt (renamed from Lib/email/test/data/msg_34.txt) | 0
-rw-r--r--  Lib/test/test_email/data/msg_35.txt (renamed from Lib/email/test/data/msg_35.txt) | 0
-rw-r--r--  Lib/test/test_email/data/msg_36.txt (renamed from Lib/email/test/data/msg_36.txt) | 0
-rw-r--r--  Lib/test/test_email/data/msg_37.txt (renamed from Lib/email/test/data/msg_37.txt) | 0
-rw-r--r--  Lib/test/test_email/data/msg_38.txt (renamed from Lib/email/test/data/msg_38.txt) | 0
-rw-r--r--  Lib/test/test_email/data/msg_39.txt (renamed from Lib/email/test/data/msg_39.txt) | 0
-rw-r--r--  Lib/test/test_email/data/msg_40.txt (renamed from Lib/email/test/data/msg_40.txt) | 0
-rw-r--r--  Lib/test/test_email/data/msg_41.txt (renamed from Lib/email/test/data/msg_41.txt) | 0
-rw-r--r--  Lib/test/test_email/data/msg_42.txt (renamed from Lib/email/test/data/msg_42.txt) | 0
-rw-r--r--  Lib/test/test_email/data/msg_43.txt (renamed from Lib/email/test/data/msg_43.txt) | 0
-rw-r--r--  Lib/test/test_email/data/msg_44.txt (renamed from Lib/email/test/data/msg_44.txt) | 0
-rw-r--r--  Lib/test/test_email/data/msg_45.txt (renamed from Lib/email/test/data/msg_45.txt) | 0
-rw-r--r--  Lib/test/test_email/data/msg_46.txt (renamed from Lib/email/test/data/msg_46.txt) | 0
-rw-r--r--  Lib/test/test_email/test_asian_codecs.py (renamed from Lib/email/test/test_email_codecs.py) | 15
-rw-r--r--  Lib/test/test_email/test_email.py (renamed from Lib/email/test/test_email.py) | 116
-rw-r--r--  Lib/test/test_email/torture_test.py (renamed from Lib/email/test/test_email_torture.py) | 0
-rw-r--r--  Lib/test/test_exceptions.py | 5
-rw-r--r--  Lib/test/test_faulthandler.py | 514
-rw-r--r--  Lib/test/test_fileinput.py | 636
-rw-r--r--  Lib/test/test_float.py | 2
-rw-r--r--  Lib/test/test_ftplib.py | 24
-rw-r--r--  Lib/test/test_functools.py | 66
-rw-r--r--  Lib/test/test_future.py | 12
-rw-r--r--  Lib/test/test_gc.py | 6
-rw-r--r--  Lib/test/test_genexps.py | 8
-rw-r--r--  Lib/test/test_gzip.py | 23
-rw-r--r--  Lib/test/test_imp.py | 10
-rw-r--r--  Lib/test/test_import.py | 2
-rw-r--r--  Lib/test/test_importhooks.py | 9
-rw-r--r--  Lib/test/test_io.py | 20
-rw-r--r--  Lib/test/test_itertools.py | 12
-rw-r--r--  Lib/test/test_logging.py | 558
-rw-r--r--  Lib/test/test_mailbox.py | 6
-rw-r--r--  Lib/test/test_metaclass.py | 8
-rw-r--r--  Lib/test/test_minidom.py | 39
-rw-r--r--  Lib/test/test_multiprocessing.py | 12
-rw-r--r--  Lib/test/test_nntplib.py | 27
-rw-r--r--  Lib/test/test_optparse.py | 2
-rw-r--r--  Lib/test/test_os.py | 290
-rw-r--r--  Lib/test/test_osx_env.py | 3
-rw-r--r--  Lib/test/test_pdb.py | 3
-rw-r--r--  Lib/test/test_peepholer.py | 43
-rw-r--r--  Lib/test/test_pep292.py | 4
-rw-r--r--  Lib/test/test_pep3120.py | 4
-rw-r--r--  Lib/test/test_platform.py | 34
-rw-r--r--  Lib/test/test_poplib.py | 15
-rw-r--r--  Lib/test/test_posix.py | 361
-rw-r--r--  Lib/test/test_pulldom.py | 347
-rw-r--r--  Lib/test/test_pydoc.py | 8
-rw-r--r--  Lib/test/test_re.py | 4
-rw-r--r--  Lib/test/test_reprlib.py | 2
-rw-r--r--  Lib/test/test_richcmp.py | 1
-rw-r--r--  Lib/test/test_runpy.py | 4
-rw-r--r--  Lib/test/test_sax.py | 4
-rw-r--r--  Lib/test/test_scope.py | 19
-rw-r--r--  Lib/test/test_select.py | 1
-rw-r--r--  Lib/test/test_shelve.py | 6
-rw-r--r--  Lib/test/test_smtplib.py | 38
-rw-r--r--  Lib/test/test_socket.py | 41
-rw-r--r--  Lib/test/test_strlit.py | 2
-rw-r--r--  Lib/test/test_subprocess.py | 101
-rw-r--r--  Lib/test/test_sys.py | 3
-rw-r--r--  Lib/test/test_sys_settrace.py | 10
-rw-r--r--  Lib/test/test_tarfile.py | 6
-rw-r--r--  Lib/test/test_threading.py | 12
-rw-r--r--  Lib/test/test_threadsignals.py | 8
-rw-r--r--  Lib/test/test_trace.py | 11
-rw-r--r--  Lib/test/test_unicode.py | 47
-rw-r--r--  Lib/test/test_urllib.py | 2
-rw-r--r--  Lib/test/test_urllib2.py | 2
-rw-r--r--  Lib/test/test_uuid.py | 4
-rw-r--r--  Lib/test/test_xml_etree.py | 6
-rw-r--r--  Lib/test/test_xmlrpc_net.py | 4
-rw-r--r--  Lib/test/test_zipimport_support.py | 5
-rw-r--r--  Lib/threading.py | 21
-rw-r--r--  Lib/timeit.py | 8
-rw-r--r--  Lib/unittest/case.py | 44
-rw-r--r--  Lib/unittest/test/test_case.py | 54
-rw-r--r--  Lib/unittest/test/test_loader.py | 4
-rw-r--r--  Lib/urllib/request.py | 2
-rw-r--r--  Lib/urllib/response.py | 7
212 files changed, 5777 insertions(+), 1240 deletions(-)
diff --git a/Lib/_dummy_thread.py b/Lib/_dummy_thread.py
index ed50520..13b1f26 100644
--- a/Lib/_dummy_thread.py
+++ b/Lib/_dummy_thread.py
@@ -24,11 +24,7 @@ TIMEOUT_MAX = 2**31
# imports are done when needed on a function-by-function basis. Since threads
# are disabled, the import lock should not be an issue anyway (??).
-class error(Exception):
- """Dummy implementation of _thread.error."""
-
- def __init__(self, *args):
- self.args = args
+error = RuntimeError
def start_new_thread(function, args, kwargs={}):
"""Dummy implementation of _thread.start_new_thread().
diff --git a/Lib/_pyio.py b/Lib/_pyio.py
index 35dea41..a3b89e7 100644
--- a/Lib/_pyio.py
+++ b/Lib/_pyio.py
@@ -1511,6 +1511,7 @@ class TextIOWrapper(TextIOBase):
self._decoded_chars_used = 0 # offset into _decoded_chars for read()
self._snapshot = None # info for reconstructing decoder state
self._seekable = self._telling = self.buffer.seekable()
+ self._b2cratio = 0.0
if self._seekable and self.writable():
position = self.buffer.tell()
@@ -1678,7 +1679,12 @@ class TextIOWrapper(TextIOBase):
# Read a chunk, decode it, and put the result in self._decoded_chars.
input_chunk = self.buffer.read1(self._CHUNK_SIZE)
eof = not input_chunk
- self._set_decoded_chars(self._decoder.decode(input_chunk, eof))
+ decoded_chars = self._decoder.decode(input_chunk, eof)
+ self._set_decoded_chars(decoded_chars)
+ if decoded_chars:
+ self._b2cratio = len(input_chunk) / len(self._decoded_chars)
+ else:
+ self._b2cratio = 0.0
if self._telling:
# At the snapshot point, len(dec_buffer) bytes before the read,
@@ -1732,20 +1738,56 @@ class TextIOWrapper(TextIOBase):
# forward until it gives us enough decoded characters.
saved_state = decoder.getstate()
try:
+ # Fast search for an acceptable start point, close to our
+ # current pos.
+ # Rationale: calling decoder.decode() has a large overhead
+ # regardless of chunk size; we want the number of such calls to
+ # be O(1) in most situations (common decoders, non-crazy input).
+ # Actually, it will be exactly 1 for fixed-size codecs (all
+ # 8-bit codecs, also UTF-16 and UTF-32).
+ skip_bytes = int(self._b2cratio * chars_to_skip)
+ skip_back = 1
+ assert skip_bytes <= len(next_input)
+ while skip_bytes > 0:
+ decoder.setstate((b'', dec_flags))
+ # Decode up to tentative start point
+ n = len(decoder.decode(next_input[:skip_bytes]))
+ if n <= chars_to_skip:
+ b, d = decoder.getstate()
+ if not b:
+ # Before pos and no bytes buffered in decoder => OK
+ dec_flags = d
+ chars_to_skip -= n
+ break
+ # Skip back by buffered amount and reset heuristic
+ skip_bytes -= len(b)
+ skip_back = 1
+ else:
+ # We're too far ahead, skip back a bit
+ skip_bytes -= skip_back
+ skip_back = skip_back * 2
+ else:
+ skip_bytes = 0
+ decoder.setstate((b'', dec_flags))
+
# Note our initial start point.
- decoder.setstate((b'', dec_flags))
- start_pos = position
- start_flags, bytes_fed, chars_decoded = dec_flags, 0, 0
- need_eof = 0
+ start_pos = position + skip_bytes
+ start_flags = dec_flags
+ if chars_to_skip == 0:
+ # We haven't moved from the start point.
+ return self._pack_cookie(start_pos, start_flags)
# Feed the decoder one byte at a time. As we go, note the
# nearest "safe start point" before the current location
# (a point where the decoder has nothing buffered, so seek()
# can safely start from there and advance to this location).
- next_byte = bytearray(1)
- for next_byte[0] in next_input:
+ bytes_fed = 0
+ need_eof = 0
+ # Chars decoded since `start_pos`
+ chars_decoded = 0
+ for i in range(skip_bytes, len(next_input)):
bytes_fed += 1
- chars_decoded += len(decoder.decode(next_byte))
+ chars_decoded += len(decoder.decode(next_input[i:i+1]))
dec_buffer, dec_flags = decoder.getstate()
if not dec_buffer and chars_decoded <= chars_to_skip:
# Decoder buffer is empty, so this is a safe start point.
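
The hunk above speeds up TextIOWrapper.tell() by estimating a byte-to-character ratio, but the observable contract is unchanged: tell() returns an opaque cookie that seek() restores exactly, even for variable-width encodings. A minimal sketch of that contract (not part of the patch):

    import io

    raw = io.BytesIO("héllo wörld\nsecond line\n".encode("utf-8"))
    text = io.TextIOWrapper(raw, encoding="utf-8")
    text.readline()                      # advance past the multi-byte first line
    cookie = text.tell()                 # opaque cookie computed by the code above
    assert text.readline() == "second line\n"
    text.seek(cookie)                    # restore the saved position
    assert text.readline() == "second line\n"
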
diff --git a/Lib/abc.py b/Lib/abc.py
index a6c2dc4..40f88b9 100644
--- a/Lib/abc.py
+++ b/Lib/abc.py
@@ -133,11 +133,14 @@ class ABCMeta(type):
return cls
def register(cls, subclass):
- """Register a virtual subclass of an ABC."""
+ """Register a virtual subclass of an ABC.
+
+ Returns the subclass, to allow usage as a class decorator.
+ """
if not isinstance(subclass, type):
raise TypeError("Can only register classes")
if issubclass(subclass, cls):
- return # Already a subclass
+ return subclass # Already a subclass
# Subtle: test for cycles *after* testing for "already a subclass";
# this means we allow X.register(X) and interpret it as a no-op.
if issubclass(cls, subclass):
@@ -145,6 +148,7 @@ class ABCMeta(type):
raise RuntimeError("Refusing to create an inheritance cycle")
cls._abc_registry.add(subclass)
ABCMeta._abc_invalidation_counter += 1 # Invalidate negative cache
+ return subclass
def _dump_registry(cls, file=None):
"""Debug helper to print the ABC registry."""
diff --git a/Lib/argparse.py b/Lib/argparse.py
index 63561f7..0658472 100644
--- a/Lib/argparse.py
+++ b/Lib/argparse.py
@@ -71,6 +71,7 @@ __all__ = [
'ArgumentDefaultsHelpFormatter',
'RawDescriptionHelpFormatter',
'RawTextHelpFormatter',
+ 'MetavarTypeHelpFormatter',
'Namespace',
'Action',
'ONE_OR_MORE',
@@ -423,7 +424,8 @@ class HelpFormatter(object):
# produce all arg strings
elif not action.option_strings:
- part = self._format_args(action, action.dest)
+ default = self._get_default_metavar_for_positional(action)
+ part = self._format_args(action, default)
# if it's in a group, strip the outer []
if action in group_actions:
@@ -445,7 +447,7 @@ class HelpFormatter(object):
# if the Optional takes a value, format is:
# -s ARGS or --long ARGS
else:
- default = action.dest.upper()
+ default = self._get_default_metavar_for_optional(action)
args_string = self._format_args(action, default)
part = '%s %s' % (option_string, args_string)
@@ -531,7 +533,8 @@ class HelpFormatter(object):
def _format_action_invocation(self, action):
if not action.option_strings:
- metavar, = self._metavar_formatter(action, action.dest)(1)
+ default = self._get_default_metavar_for_positional(action)
+ metavar, = self._metavar_formatter(action, default)(1)
return metavar
else:
@@ -545,7 +548,7 @@ class HelpFormatter(object):
# if the Optional takes a value, format is:
# -s ARGS, --long ARGS
else:
- default = action.dest.upper()
+ default = self._get_default_metavar_for_optional(action)
args_string = self._format_args(action, default)
for option_string in action.option_strings:
parts.append('%s %s' % (option_string, args_string))
@@ -623,6 +626,12 @@ class HelpFormatter(object):
def _get_help_string(self, action):
return action.help
+ def _get_default_metavar_for_optional(self, action):
+ return action.dest.upper()
+
+ def _get_default_metavar_for_positional(self, action):
+ return action.dest
+
class RawDescriptionHelpFormatter(HelpFormatter):
"""Help message formatter which retains any formatting in descriptions.
@@ -663,6 +672,22 @@ class ArgumentDefaultsHelpFormatter(HelpFormatter):
return help
+class MetavarTypeHelpFormatter(HelpFormatter):
+ """Help message formatter which uses the argument 'type' as the default
+ metavar value (instead of the argument 'dest')
+
+ Only the name of this class is considered a public API. All the methods
+ provided by the class are considered an implementation detail.
+ """
+
+ def _get_default_metavar_for_optional(self, action):
+ return action.type.__name__
+
+ def _get_default_metavar_for_positional(self, action):
+ return action.type.__name__
+
+
+
# =====================
# Options and Arguments
# =====================
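
A short sketch of the new MetavarTypeHelpFormatter: every argument must be given a type, since the type's __name__ becomes the metavar (program name below is hypothetical):

    import argparse

    parser = argparse.ArgumentParser(
        prog="example",
        formatter_class=argparse.MetavarTypeHelpFormatter)
    parser.add_argument("--count", type=int)
    parser.add_argument("path", type=str)
    parser.print_help()   # shows "--count int" and "path str" instead of dest names
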
diff --git a/Lib/asynchat.py b/Lib/asynchat.py
index 6558512..2199d1b 100644
--- a/Lib/asynchat.py
+++ b/Lib/asynchat.py
@@ -75,7 +75,7 @@ class async_chat (asyncore.dispatcher):
# sign of an application bug that we don't want to pass silently
use_encoding = 0
- encoding = 'latin1'
+ encoding = 'latin-1'
def __init__ (self, sock=None, map=None):
# for string terminator matching
diff --git a/Lib/asyncore.py b/Lib/asyncore.py
index 5d7bdda..d647606 100644
--- a/Lib/asyncore.py
+++ b/Lib/asyncore.py
@@ -289,7 +289,7 @@ class dispatcher:
del map[fd]
self._fileno = None
- def create_socket(self, family, type):
+ def create_socket(self, family=socket.AF_INET, type=socket.SOCK_STREAM):
self.family_and_type = family, type
sock = socket.socket(family, type)
sock.setblocking(0)
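
With the new defaults, create_socket() can be called without arguments for the common TCP/IPv4 case; a minimal sketch (host is a placeholder):

    import asyncore

    class Client(asyncore.dispatcher):
        def __init__(self, host, port=80):
            asyncore.dispatcher.__init__(self)
            self.create_socket()        # now defaults to AF_INET / SOCK_STREAM
            self.connect((host, port))
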
diff --git a/Lib/bz2.py b/Lib/bz2.py
new file mode 100644
index 0000000..9506b4a
--- /dev/null
+++ b/Lib/bz2.py
@@ -0,0 +1,392 @@
+"""Interface to the libbzip2 compression library.
+
+This module provides a file interface, classes for incremental
+(de)compression, and functions for one-shot (de)compression.
+"""
+
+__all__ = ["BZ2File", "BZ2Compressor", "BZ2Decompressor", "compress",
+ "decompress"]
+
+__author__ = "Nadeem Vawda <nadeem.vawda@gmail.com>"
+
+import io
+import threading
+import warnings
+
+from _bz2 import BZ2Compressor, BZ2Decompressor
+
+
+_MODE_CLOSED = 0
+_MODE_READ = 1
+_MODE_READ_EOF = 2
+_MODE_WRITE = 3
+
+_BUFFER_SIZE = 8192
+
+
+class BZ2File(io.BufferedIOBase):
+
+ """A file object providing transparent bzip2 (de)compression.
+
+ A BZ2File can act as a wrapper for an existing file object, or refer
+ directly to a named file on disk.
+
+ Note that BZ2File provides a *binary* file interface - data read is
+ returned as bytes, and data to be written should be given as bytes.
+ """
+
+ def __init__(self, filename=None, mode="r", buffering=None,
+ compresslevel=9, fileobj=None):
+ """Open a bzip2-compressed file.
+
+ If filename is given, open the named file. Otherwise, operate on
+ the file object given by fileobj. Exactly one of these two
+ parameters should be provided.
+
+ mode can be 'r' for reading (default), or 'w' for writing.
+
+ buffering is ignored. Its use is deprecated.
+
+ If mode is 'w', compresslevel can be a number between 1 and 9
+ specifying the level of compression: 1 produces the least
+ compression, and 9 (default) produces the most compression.
+ """
+ # This lock must be recursive, so that BufferedIOBase's
+ # readline(), readlines() and writelines() don't deadlock.
+ self._lock = threading.RLock()
+ self._fp = None
+ self._closefp = False
+ self._mode = _MODE_CLOSED
+ self._pos = 0
+ self._size = -1
+
+ if buffering is not None:
+ warnings.warn("Use of 'buffering' argument is deprecated",
+ DeprecationWarning)
+
+ if not (1 <= compresslevel <= 9):
+ raise ValueError("compresslevel must be between 1 and 9")
+
+ if mode in ("", "r", "rb"):
+ mode = "rb"
+ mode_code = _MODE_READ
+ self._decompressor = BZ2Decompressor()
+ self._buffer = None
+ elif mode in ("w", "wb"):
+ mode = "wb"
+ mode_code = _MODE_WRITE
+ self._compressor = BZ2Compressor()
+ else:
+ raise ValueError("Invalid mode: {!r}".format(mode))
+
+ if filename is not None and fileobj is None:
+ self._fp = open(filename, mode)
+ self._closefp = True
+ self._mode = mode_code
+ elif fileobj is not None and filename is None:
+ self._fp = fileobj
+ self._mode = mode_code
+ else:
+ raise ValueError("Must give exactly one of filename and fileobj")
+
+ def close(self):
+ """Flush and close the file.
+
+ May be called more than once without error. Once the file is
+ closed, any other operation on it will raise a ValueError.
+ """
+ with self._lock:
+ if self._mode == _MODE_CLOSED:
+ return
+ try:
+ if self._mode in (_MODE_READ, _MODE_READ_EOF):
+ self._decompressor = None
+ elif self._mode == _MODE_WRITE:
+ self._fp.write(self._compressor.flush())
+ self._compressor = None
+ finally:
+ try:
+ if self._closefp:
+ self._fp.close()
+ finally:
+ self._fp = None
+ self._closefp = False
+ self._mode = _MODE_CLOSED
+ self._buffer = None
+
+ @property
+ def closed(self):
+ """True if this file is closed."""
+ return self._mode == _MODE_CLOSED
+
+ def fileno(self):
+ """Return the file descriptor for the underlying file."""
+ return self._fp.fileno()
+
+ def seekable(self):
+ """Return whether the file supports seeking."""
+ return self.readable()
+
+ def readable(self):
+ """Return whether the file was opened for reading."""
+ return self._mode in (_MODE_READ, _MODE_READ_EOF)
+
+ def writable(self):
+ """Return whether the file was opened for writing."""
+ return self._mode == _MODE_WRITE
+
+ # Mode-checking helper functions.
+
+ def _check_not_closed(self):
+ if self.closed:
+ raise ValueError("I/O operation on closed file")
+
+ def _check_can_read(self):
+ if not self.readable():
+ self._check_not_closed()
+ raise io.UnsupportedOperation("File not open for reading")
+
+ def _check_can_write(self):
+ if not self.writable():
+ self._check_not_closed()
+ raise io.UnsupportedOperation("File not open for writing")
+
+ def _check_can_seek(self):
+ if not self.seekable():
+ self._check_not_closed()
+ raise io.UnsupportedOperation("Seeking is only supported "
+ "on files opening for reading")
+
+ # Fill the readahead buffer if it is empty. Returns False on EOF.
+ def _fill_buffer(self):
+ if self._buffer:
+ return True
+ if self._decompressor.eof:
+ self._mode = _MODE_READ_EOF
+ self._size = self._pos
+ return False
+ rawblock = self._fp.read(_BUFFER_SIZE)
+ if not rawblock:
+ raise EOFError("Compressed file ended before the "
+ "end-of-stream marker was reached")
+ self._buffer = self._decompressor.decompress(rawblock)
+ return True
+
+ # Read data until EOF.
+ # If return_data is false, consume the data without returning it.
+ def _read_all(self, return_data=True):
+ blocks = []
+ while self._fill_buffer():
+ if return_data:
+ blocks.append(self._buffer)
+ self._pos += len(self._buffer)
+ self._buffer = None
+ if return_data:
+ return b"".join(blocks)
+
+ # Read a block of up to n bytes.
+ # If return_data is false, consume the data without returning it.
+ def _read_block(self, n, return_data=True):
+ blocks = []
+ while n > 0 and self._fill_buffer():
+ if n < len(self._buffer):
+ data = self._buffer[:n]
+ self._buffer = self._buffer[n:]
+ else:
+ data = self._buffer
+ self._buffer = None
+ if return_data:
+ blocks.append(data)
+ self._pos += len(data)
+ n -= len(data)
+ if return_data:
+ return b"".join(blocks)
+
+ def peek(self, n=0):
+ """Return buffered data without advancing the file position.
+
+ Always returns at least one byte of data, unless at EOF.
+ The exact number of bytes returned is unspecified.
+ """
+ with self._lock:
+ self._check_can_read()
+ if self._mode == _MODE_READ_EOF or not self._fill_buffer():
+ return b""
+ return self._buffer
+
+ def read(self, size=-1):
+ """Read up to size uncompressed bytes from the file.
+
+ If size is negative or omitted, read until EOF is reached.
+ Returns b'' if the file is already at EOF.
+ """
+ with self._lock:
+ self._check_can_read()
+ if self._mode == _MODE_READ_EOF or size == 0:
+ return b""
+ elif size < 0:
+ return self._read_all()
+ else:
+ return self._read_block(size)
+
+ def read1(self, size=-1):
+ """Read up to size uncompressed bytes with at most one read
+ from the underlying stream.
+
+ Returns b'' if the file is at EOF.
+ """
+ with self._lock:
+ self._check_can_read()
+ if (size == 0 or self._mode == _MODE_READ_EOF or
+ not self._fill_buffer()):
+ return b""
+ if 0 < size < len(self._buffer):
+ data = self._buffer[:size]
+ self._buffer = self._buffer[size:]
+ else:
+ data = self._buffer
+ self._buffer = None
+ self._pos += len(data)
+ return data
+
+ def readinto(self, b):
+ """Read up to len(b) bytes into b.
+
+ Returns the number of bytes read (0 for EOF).
+ """
+ with self._lock:
+ return io.BufferedIOBase.readinto(self, b)
+
+ def readline(self, size=-1):
+ """Read a line of uncompressed bytes from the file.
+
+ The terminating newline (if present) is retained. If size is
+ non-negative, no more than size bytes will be read (in which
+ case the line may be incomplete). Returns b'' if already at EOF.
+ """
+ if not hasattr(size, "__index__"):
+ raise TypeError("Integer argument expected")
+ size = size.__index__()
+ with self._lock:
+ return io.BufferedIOBase.readline(self, size)
+
+ def readlines(self, size=-1):
+ """Read a list of lines of uncompressed bytes from the file.
+
+ size can be specified to control the number of lines read: no
+ further lines will be read once the total size of the lines read
+ so far equals or exceeds size.
+ """
+ if not hasattr(size, "__index__"):
+ raise TypeError("Integer argument expected")
+ size = size.__index__()
+ with self._lock:
+ return io.BufferedIOBase.readlines(self, size)
+
+ def write(self, data):
+ """Write a byte string to the file.
+
+ Returns the number of uncompressed bytes written, which is
+ always len(data). Note that due to buffering, the file on disk
+ may not reflect the data written until close() is called.
+ """
+ with self._lock:
+ self._check_can_write()
+ compressed = self._compressor.compress(data)
+ self._fp.write(compressed)
+ self._pos += len(data)
+ return len(data)
+
+ def writelines(self, seq):
+ """Write a sequence of byte strings to the file.
+
+ Returns the number of uncompressed bytes written.
+ seq can be any iterable yielding byte strings.
+
+ Line separators are not added between the written byte strings.
+ """
+ with self._lock:
+ return io.BufferedIOBase.writelines(self, seq)
+
+ # Rewind the file to the beginning of the data stream.
+ def _rewind(self):
+ self._fp.seek(0, 0)
+ self._mode = _MODE_READ
+ self._pos = 0
+ self._decompressor = BZ2Decompressor()
+ self._buffer = None
+
+ def seek(self, offset, whence=0):
+ """Change the file position.
+
+ The new position is specified by offset, relative to the
+ position indicated by whence. Values for whence are:
+
+ 0: start of stream (default); offset must not be negative
+ 1: current stream position
+ 2: end of stream; offset must not be positive
+
+ Returns the new file position.
+
+ Note that seeking is emulated, so depending on the parameters,
+ this operation may be extremely slow.
+ """
+ with self._lock:
+ self._check_can_seek()
+
+ # Recalculate offset as an absolute file position.
+ if whence == 0:
+ pass
+ elif whence == 1:
+ offset = self._pos + offset
+ elif whence == 2:
+ # Seeking relative to EOF - we need to know the file's size.
+ if self._size < 0:
+ self._read_all(return_data=False)
+ offset = self._size + offset
+ else:
+ raise ValueError("Invalid value for whence: {}".format(whence))
+
+ # Make it so that offset is the number of bytes to skip forward.
+ if offset < self._pos:
+ self._rewind()
+ else:
+ offset -= self._pos
+
+ # Read and discard data until we reach the desired position.
+ if self._mode != _MODE_READ_EOF:
+ self._read_block(offset, return_data=False)
+
+ return self._pos
+
+ def tell(self):
+ """Return the current file position."""
+ with self._lock:
+ self._check_not_closed()
+ return self._pos
+
+
+def compress(data, compresslevel=9):
+ """Compress a block of data.
+
+ compresslevel, if given, must be a number between 1 and 9.
+
+ For incremental compression, use a BZ2Compressor object instead.
+ """
+ comp = BZ2Compressor(compresslevel)
+ return comp.compress(data) + comp.flush()
+
+
+def decompress(data):
+ """Decompress a block of data.
+
+ For incremental decompression, use a BZ2Decompressor object instead.
+ """
+ if len(data) == 0:
+ return b""
+ decomp = BZ2Decompressor()
+ result = decomp.decompress(data)
+ if not decomp.eof:
+ raise ValueError("Compressed data ended before the "
+ "end-of-stream marker was reached")
+ return result
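
A brief usage sketch of the new pure-Python module, covering both the one-shot functions and the file interface (the file name is illustrative):

    import bz2

    payload = b"hello bzip2" * 100
    blob = bz2.compress(payload, compresslevel=9)     # one-shot compression
    assert bz2.decompress(blob) == payload

    with bz2.BZ2File("demo.bz2", "w") as f:           # binary file interface
        f.write(payload)
    with bz2.BZ2File("demo.bz2", "r") as f:
        assert f.read() == payload
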
diff --git a/Lib/collections.py b/Lib/collections/__init__.py
index 98c4325..122dfb6 100644
--- a/Lib/collections.py
+++ b/Lib/collections/__init__.py
@@ -1,10 +1,11 @@
__all__ = ['deque', 'defaultdict', 'namedtuple', 'UserDict', 'UserList',
'UserString', 'Counter', 'OrderedDict']
-# For bootstrapping reasons, the collection ABCs are defined in _abcoll.py.
-# They should however be considered an integral part of collections.py.
-from _abcoll import *
-import _abcoll
-__all__ += _abcoll.__all__
+
+# For backwards compatibility, continue to make the collections ABCs
+# available through the collections module.
+from collections.abc import *
+import collections.abc
+__all__ += collections.abc.__all__
from _collections import deque, defaultdict
from operator import itemgetter as _itemgetter
@@ -232,10 +233,62 @@ class OrderedDict(dict):
### namedtuple
################################################################################
+_class_template = '''\
+from builtins import property as _property, tuple as _tuple
+from operator import itemgetter as _itemgetter
+from collections import OrderedDict
+
+class {typename}(tuple):
+ '{typename}({arg_list})'
+
+ __slots__ = ()
+
+ _fields = {field_names!r}
+
+ def __new__(_cls, {arg_list}):
+ 'Create new instance of {typename}({arg_list})'
+ return _tuple.__new__(_cls, ({arg_list}))
+
+ @classmethod
+ def _make(cls, iterable, new=tuple.__new__, len=len):
+ 'Make a new {typename} object from a sequence or iterable'
+ result = new(cls, iterable)
+ if len(result) != {num_fields:d}:
+ raise TypeError('Expected {num_fields:d} arguments, got %d' % len(result))
+ return result
+
+ def __repr__(self):
+ 'Return a nicely formatted representation string'
+ return self.__class__.__name__ + '({repr_fmt})' % self
+
+ def _asdict(self):
+ 'Return a new OrderedDict which maps field names to their values'
+ return OrderedDict(zip(self._fields, self))
+
+ def _replace(_self, **kwds):
+ 'Return a new {typename} object replacing specified fields with new values'
+ result = _self._make(map(kwds.pop, {field_names!r}, _self))
+ if kwds:
+ raise ValueError('Got unexpected field names: %r' % list(kwds))
+ return result
+
+ def __getnewargs__(self):
+ 'Return self as a plain tuple. Used by copy and pickle.'
+ return tuple(self)
+
+{field_defs}
+'''
+
+_repr_template = '{name}=%r'
+
+_field_template = '''\
+ {name} = _property(_itemgetter({index:d}), doc='Alias for field number {index:d}')
+'''
+
def namedtuple(typename, field_names, verbose=False, rename=False):
"""Returns a new subclass of tuple with named fields.
- >>> Point = namedtuple('Point', 'x y')
+ >>> Point = namedtuple('Point', ['x', 'y'])
>>> Point.__doc__ # docstring for the new class
'Point(x, y)'
>>> p = Point(11, y=22) # instantiate with positional args or keywords
@@ -260,79 +313,55 @@ def namedtuple(typename, field_names, verbose=False, rename=False):
# generating informative error messages and preventing template injection attacks.
if isinstance(field_names, str):
field_names = field_names.replace(',', ' ').split() # names separated by whitespace and/or commas
- field_names = tuple(map(str, field_names))
+ field_names = list(map(str, field_names))
if rename:
- names = list(field_names)
seen = set()
- for i, name in enumerate(names):
- if (not all(c.isalnum() or c=='_' for c in name) or _iskeyword(name)
- or not name or name[0].isdigit() or name.startswith('_')
+ for index, name in enumerate(field_names):
+ if (not all(c.isalnum() or c=='_' for c in name)
+ or _iskeyword(name)
+ or not name
+ or name[0].isdigit()
+ or name.startswith('_')
or name in seen):
- names[i] = '_%d' % i
+ field_names[index] = '_%d' % index
seen.add(name)
- field_names = tuple(names)
- for name in (typename,) + field_names:
+ for name in [typename] + field_names:
if not all(c.isalnum() or c=='_' for c in name):
raise ValueError('Type names and field names can only contain alphanumeric characters and underscores: %r' % name)
if _iskeyword(name):
raise ValueError('Type names and field names cannot be a keyword: %r' % name)
if name[0].isdigit():
raise ValueError('Type names and field names cannot start with a number: %r' % name)
- seen_names = set()
+ seen = set()
for name in field_names:
if name.startswith('_') and not rename:
raise ValueError('Field names cannot start with an underscore: %r' % name)
- if name in seen_names:
+ if name in seen:
raise ValueError('Encountered duplicate field name: %r' % name)
- seen_names.add(name)
-
- # Create and fill-in the class template
- numfields = len(field_names)
- argtxt = repr(field_names).replace("'", "")[1:-1] # tuple repr without parens or quotes
- reprtxt = ', '.join('%s=%%r' % name for name in field_names)
- template = '''class %(typename)s(tuple):
- '%(typename)s(%(argtxt)s)' \n
- __slots__ = () \n
- _fields = %(field_names)r \n
- def __new__(_cls, %(argtxt)s):
- 'Create new instance of %(typename)s(%(argtxt)s)'
- return _tuple.__new__(_cls, (%(argtxt)s)) \n
- @classmethod
- def _make(cls, iterable, new=tuple.__new__, len=len):
- 'Make a new %(typename)s object from a sequence or iterable'
- result = new(cls, iterable)
- if len(result) != %(numfields)d:
- raise TypeError('Expected %(numfields)d arguments, got %%d' %% len(result))
- return result \n
- def __repr__(self):
- 'Return a nicely formatted representation string'
- return self.__class__.__name__ + '(%(reprtxt)s)' %% self \n
- def _asdict(self):
- 'Return a new OrderedDict which maps field names to their values'
- return OrderedDict(zip(self._fields, self)) \n
- def _replace(_self, **kwds):
- 'Return a new %(typename)s object replacing specified fields with new values'
- result = _self._make(map(kwds.pop, %(field_names)r, _self))
- if kwds:
- raise ValueError('Got unexpected field names: %%r' %% kwds.keys())
- return result \n
- def __getnewargs__(self):
- 'Return self as a plain tuple. Used by copy and pickle.'
- return tuple(self) \n\n''' % locals()
- for i, name in enumerate(field_names):
- template += " %s = _property(_itemgetter(%d), doc='Alias for field number %d')\n" % (name, i, i)
- if verbose:
- print(template)
+ seen.add(name)
+
+ # Fill-in the class template
+ class_definition = _class_template.format(
+ typename = typename,
+ field_names = tuple(field_names),
+ num_fields = len(field_names),
+ arg_list = repr(tuple(field_names)).replace("'", "")[1:-1],
+ repr_fmt = ', '.join(_repr_template.format(name=name) for name in field_names),
+ field_defs = '\n'.join(_field_template.format(index=index, name=name)
+ for index, name in enumerate(field_names))
+ )
# Execute the template string in a temporary namespace and
# support tracing utilities by setting a value for frame.f_globals['__name__']
- namespace = dict(_itemgetter=_itemgetter, __name__='namedtuple_%s' % typename,
- OrderedDict=OrderedDict, _property=property, _tuple=tuple)
+ namespace = dict(__name__='namedtuple_%s' % typename)
try:
- exec(template, namespace)
+ exec(class_definition, namespace)
except SyntaxError as e:
- raise SyntaxError(e.msg + ':\n\n' + template)
+ raise SyntaxError(e.msg + ':\n\n' + class_definition)
result = namespace[typename]
+ result._source = class_definition
+ if verbose:
+ print(result._source)
# For pickling to work, the __module__ variable needs to be set to the frame
# where the named tuple is created. Bypass this step in environments where
@@ -632,10 +661,10 @@ class Counter(dict):
########################################################################
-### ChainMap (helper for configparser)
+### ChainMap (helper for configparser and string.Template)
########################################################################
-class _ChainMap(MutableMapping):
+class ChainMap(MutableMapping):
''' A ChainMap groups multiple dicts (or other mappings) together
to create a single, updateable view.
@@ -678,6 +707,9 @@ class _ChainMap(MutableMapping):
def __contains__(self, key):
return any(key in m for m in self.maps)
+ def __bool__(self):
+ return any(self.maps)
+
@_recursive_repr()
def __repr__(self):
return '{0.__class__.__name__}({1})'.format(
@@ -843,6 +875,8 @@ class UserList(MutableSequence):
def insert(self, i, item): self.data.insert(i, item)
def pop(self, i=-1): return self.data.pop(i)
def remove(self, item): self.data.remove(item)
+ def clear(self): self.data.clear()
+ def copy(self): return self.data.copy()
def count(self, item): return self.data.count(item)
def index(self, item, *args): return self.data.index(item, *args)
def reverse(self): self.data.reverse()
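
A short sketch of the user-visible effects of the changes above: the generated namedtuple source is now kept on the class, and ChainMap is a public name (previously the private _ChainMap helper):

    from collections import namedtuple, ChainMap

    Point = namedtuple('Point', ['x', 'y'])
    print(Point._source.splitlines()[0])        # generated class definition is retained

    defaults = {'color': 'red', 'user': 'guest'}
    overrides = {'user': 'admin'}
    settings = ChainMap(overrides, defaults)    # earlier maps take priority on lookup
    assert settings['user'] == 'admin' and settings['color'] == 'red'
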
diff --git a/Lib/_abcoll.py b/Lib/collections/abc.py
index 2417d18..7fbe84d 100644
--- a/Lib/_abcoll.py
+++ b/Lib/collections/abc.py
@@ -3,9 +3,7 @@
"""Abstract Base Classes (ABCs) for collections, according to PEP 3119.
-DON'T USE THIS MODULE DIRECTLY! The classes here should be imported
-via collections; they are defined here only to alleviate certain
-bootstrapping issues. Unit tests are in test_collections.
+Unit tests are in test_collections.
"""
from abc import ABCMeta, abstractmethod
@@ -48,6 +46,8 @@ dict_proxy = type(type.__dict__)
class Hashable(metaclass=ABCMeta):
+ __slots__ = ()
+
@abstractmethod
def __hash__(self):
return 0
@@ -65,6 +65,8 @@ class Hashable(metaclass=ABCMeta):
class Iterable(metaclass=ABCMeta):
+ __slots__ = ()
+
@abstractmethod
def __iter__(self):
while False:
@@ -80,6 +82,8 @@ class Iterable(metaclass=ABCMeta):
class Iterator(Iterable):
+ __slots__ = ()
+
@abstractmethod
def __next__(self):
raise StopIteration
@@ -111,6 +115,8 @@ Iterator.register(zip_iterator)
class Sized(metaclass=ABCMeta):
+ __slots__ = ()
+
@abstractmethod
def __len__(self):
return 0
@@ -125,6 +131,8 @@ class Sized(metaclass=ABCMeta):
class Container(metaclass=ABCMeta):
+ __slots__ = ()
+
@abstractmethod
def __contains__(self, x):
return False
@@ -139,6 +147,8 @@ class Container(metaclass=ABCMeta):
class Callable(metaclass=ABCMeta):
+ __slots__ = ()
+
@abstractmethod
def __call__(self, *args, **kwds):
return False
@@ -166,6 +176,8 @@ class Set(Sized, Iterable, Container):
then the other operations will automatically follow suit.
"""
+ __slots__ = ()
+
def __le__(self, other):
if not isinstance(other, Set):
return NotImplemented
@@ -277,6 +289,8 @@ Set.register(frozenset)
class MutableSet(Set):
+ __slots__ = ()
+
@abstractmethod
def add(self, value):
"""Add an element."""
@@ -350,6 +364,8 @@ MutableSet.register(set)
class Mapping(Sized, Iterable, Container):
+ __slots__ = ()
+
@abstractmethod
def __getitem__(self, key):
raise KeyError
@@ -453,6 +469,8 @@ ValuesView.register(dict_values)
class MutableMapping(Mapping):
+ __slots__ = ()
+
@abstractmethod
def __setitem__(self, key, value):
raise KeyError
@@ -532,6 +550,8 @@ class Sequence(Sized, Iterable, Container):
__getitem__, and __len__.
"""
+ __slots__ = ()
+
@abstractmethod
def __getitem__(self, index):
raise IndexError
@@ -577,12 +597,16 @@ class ByteString(Sequence):
XXX Should add all their methods.
"""
+ __slots__ = ()
+
ByteString.register(bytes)
ByteString.register(bytearray)
class MutableSequence(Sequence):
+ __slots__ = ()
+
@abstractmethod
def __setitem__(self, index, value):
raise IndexError
@@ -598,6 +622,13 @@ class MutableSequence(Sequence):
def append(self, value):
self.insert(len(self), value)
+ def clear(self):
+ try:
+ while True:
+ self.pop()
+ except IndexError:
+ pass
+
def reverse(self):
n = len(self)
for i in range(n//2):
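
The ABCs now live in collections.abc (still re-exported from collections for compatibility), and MutableSequence gains a clear() mixin built on pop(). A minimal concrete class to illustrate:

    from collections.abc import MutableSequence

    class Stack(MutableSequence):
        def __init__(self):
            self._items = []
        def __getitem__(self, i):
            return self._items[i]
        def __setitem__(self, i, value):
            self._items[i] = value
        def __delitem__(self, i):
            del self._items[i]
        def __len__(self):
            return len(self._items)
        def insert(self, i, value):
            self._items.insert(i, value)

    s = Stack()
    s.append(1)
    s.append(2)
    s.clear()                  # provided by the new MutableSequence.clear() mixin
    assert len(s) == 0
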
diff --git a/Lib/compileall.py b/Lib/compileall.py
index d79a1bb..743be27 100644
--- a/Lib/compileall.py
+++ b/Lib/compileall.py
@@ -35,11 +35,11 @@ def compile_dir(dir, maxlevels=10, ddir=None, force=False, rx=None,
optimize: optimization level or -1 for level of the interpreter
"""
if not quiet:
- print('Listing', dir, '...')
+ print('Listing {!r}...'.format(dir))
try:
names = os.listdir(dir)
except os.error:
- print("Can't list", dir)
+ print("Can't list {!r}".format(dir))
names = []
names.sort()
success = 1
@@ -109,13 +109,13 @@ def compile_file(fullname, ddir=None, force=False, rx=None, quiet=False,
except IOError:
pass
if not quiet:
- print('Compiling', fullname, '...')
+ print('Compiling {!r}...'.format(fullname))
try:
ok = py_compile.compile(fullname, cfile, dfile, True,
optimize=optimize)
except py_compile.PyCompileError as err:
if quiet:
- print('*** Error compiling', fullname, '...')
+ print('*** Error compiling {!r}...'.format(fullname))
else:
print('*** ', end='')
# escape non-printable characters in msg
@@ -126,7 +126,7 @@ def compile_file(fullname, ddir=None, force=False, rx=None, quiet=False,
success = 0
except (SyntaxError, UnicodeError, IOError) as e:
if quiet:
- print('*** Error compiling', fullname, '...')
+ print('*** Error compiling {!r}...'.format(fullname))
else:
print('*** ', end='')
print(e.__class__.__name__ + ':', e)
diff --git a/Lib/concurrent/futures/_base.py b/Lib/concurrent/futures/_base.py
index 79b91d4..6cfded3 100644
--- a/Lib/concurrent/futures/_base.py
+++ b/Lib/concurrent/futures/_base.py
@@ -536,15 +536,19 @@ class Executor(object):
fs = [self.submit(fn, *args) for args in zip(*iterables)]
- try:
- for future in fs:
- if timeout is None:
- yield future.result()
- else:
- yield future.result(end_time - time.time())
- finally:
- for future in fs:
- future.cancel()
+ # Yield must be hidden in closure so that the futures are submitted
+ # before the first iterator value is required.
+ def result_iterator():
+ try:
+ for future in fs:
+ if timeout is None:
+ yield future.result()
+ else:
+ yield future.result(end_time - time.time())
+ finally:
+ for future in fs:
+ future.cancel()
+ return result_iterator()
def shutdown(self, wait=True):
"""Clean-up the resources associated with the Executor.
diff --git a/Lib/concurrent/futures/process.py b/Lib/concurrent/futures/process.py
index 36cd411..1f4b242 100644
--- a/Lib/concurrent/futures/process.py
+++ b/Lib/concurrent/futures/process.py
@@ -49,6 +49,7 @@ import atexit
from concurrent.futures import _base
import queue
import multiprocessing
+from multiprocessing.queues import SimpleQueue
import threading
import weakref
@@ -204,7 +205,7 @@ def _queue_manangement_worker(executor_reference,
work_ids_queue,
call_queue)
- result_item = result_queue.get(block=True)
+ result_item = result_queue.get()
if result_item is not None:
work_item = pending_work_items[result_item.work_id]
del pending_work_items[result_item.work_id]
@@ -284,7 +285,7 @@ class ProcessPoolExecutor(_base.Executor):
# because futures in the call queue cannot be cancelled.
self._call_queue = multiprocessing.Queue(self._max_workers +
EXTRA_QUEUED_CALLS)
- self._result_queue = multiprocessing.Queue()
+ self._result_queue = SimpleQueue()
self._work_ids = queue.Queue()
self._queue_management_thread = None
self._processes = set()
diff --git a/Lib/configparser.py b/Lib/configparser.py
index 1bfdac8..15fc266 100644
--- a/Lib/configparser.py
+++ b/Lib/configparser.py
@@ -119,7 +119,8 @@ ConfigParser -- responsible for parsing a list of
between keys and values are surrounded by spaces.
"""
-from collections import MutableMapping, OrderedDict as _default_dict, _ChainMap
+from collections.abc import MutableMapping
+from collections import OrderedDict as _default_dict, ChainMap as _ChainMap
import functools
import io
import itertools
diff --git a/Lib/crypt.py b/Lib/crypt.py
new file mode 100644
index 0000000..e65b0cb
--- /dev/null
+++ b/Lib/crypt.py
@@ -0,0 +1,62 @@
+"""Wrapper to the POSIX crypt library call and associated functionality."""
+
+import _crypt
+import string
+from random import choice
+from collections import namedtuple
+
+
+_saltchars = string.ascii_letters + string.digits + './'
+
+
+class _Method(namedtuple('_Method', 'name ident salt_chars total_size')):
+
+ """Class representing a salt method per the Modular Crypt Format or the
+ legacy 2-character crypt method."""
+
+ def __repr__(self):
+ return '<crypt.METHOD_{}>'.format(self.name)
+
+
+
+def mksalt(method=None):
+ """Generate a salt for the specified method.
+
+ If not specified, the strongest available method will be used.
+
+ """
+ if method is None:
+ method = methods[0]
+ s = '${}$'.format(method.ident) if method.ident else ''
+ s += ''.join(choice(_saltchars) for _ in range(method.salt_chars))
+ return s
+
+
+def crypt(word, salt=None):
+ """Return a string representing the one-way hash of a password, with a salt
+ prepended.
+
+ If ``salt`` is not specified or is ``None``, the strongest
+ available method will be selected and a salt generated. Otherwise,
+ ``salt`` may be one of the ``crypt.METHOD_*`` values, or a string as
+ returned by ``crypt.mksalt()``.
+
+ """
+ if salt is None or isinstance(salt, _Method):
+ salt = mksalt(salt)
+ return _crypt.crypt(word, salt)
+
+
+# available salting/crypto methods
+METHOD_CRYPT = _Method('CRYPT', None, 2, 13)
+METHOD_MD5 = _Method('MD5', '1', 8, 34)
+METHOD_SHA256 = _Method('SHA256', '5', 16, 63)
+METHOD_SHA512 = _Method('SHA512', '6', 16, 106)
+
+methods = []
+for _method in (METHOD_SHA512, METHOD_SHA256, METHOD_MD5):
+ _result = crypt('', _method)
+ if _result and len(_result) == _method.total_size:
+ methods.append(_method)
+methods.append(METHOD_CRYPT)
+del _result, _method
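
A brief sketch of the new module's API on platforms that provide the POSIX crypt(3) function:

    import crypt

    hashed = crypt.crypt('s3kr3t')                  # strongest method, salt via mksalt()
    assert crypt.crypt('s3kr3t', hashed) == hashed  # verify by re-hashing with the stored hash

    salt = crypt.mksalt(crypt.METHOD_SHA512)        # or request a specific method
    hashed512 = crypt.crypt('s3kr3t', salt)
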
diff --git a/Lib/ctypes/test/test_memfunctions.py b/Lib/ctypes/test/test_memfunctions.py
index aa2113b..aec4aaa 100644
--- a/Lib/ctypes/test/test_memfunctions.py
+++ b/Lib/ctypes/test/test_memfunctions.py
@@ -1,4 +1,5 @@
import sys
+from test import support
import unittest
from ctypes import *
@@ -49,6 +50,7 @@ class MemFunctionsTest(unittest.TestCase):
self.assertEqual(cast(a, POINTER(c_byte))[:7:7],
[97])
+ @support.refcount_test
def test_string_at(self):
s = string_at(b"foo bar")
# XXX The following may be wrong, depending on how Python
diff --git a/Lib/ctypes/test/test_python_api.py b/Lib/ctypes/test/test_python_api.py
index 1f4c603..9de3980 100644
--- a/Lib/ctypes/test/test_python_api.py
+++ b/Lib/ctypes/test/test_python_api.py
@@ -1,5 +1,6 @@
from ctypes import *
import unittest, sys
+from test import support
from ctypes.test import is_resource_enabled
################################################################
@@ -25,6 +26,7 @@ class PythonAPITestCase(unittest.TestCase):
self.assertEqual(PyBytes_FromStringAndSize(b"abcdefghi", 3), b"abc")
+ @support.refcount_test
def test_PyString_FromString(self):
pythonapi.PyBytes_FromString.restype = py_object
pythonapi.PyBytes_FromString.argtypes = (c_char_p,)
@@ -56,6 +58,7 @@ class PythonAPITestCase(unittest.TestCase):
del res
self.assertEqual(grc(42), ref42)
+ @support.refcount_test
def test_PyObj_FromPtr(self):
s = "abc def ghi jkl"
ref = grc(s)
diff --git a/Lib/ctypes/test/test_refcounts.py b/Lib/ctypes/test/test_refcounts.py
index 35a81aa..5613e7a 100644
--- a/Lib/ctypes/test/test_refcounts.py
+++ b/Lib/ctypes/test/test_refcounts.py
@@ -1,4 +1,5 @@
import unittest
+from test import support
import ctypes
import gc
@@ -10,6 +11,7 @@ dll = ctypes.CDLL(_ctypes_test.__file__)
class RefcountTestCase(unittest.TestCase):
+ @support.refcount_test
def test_1(self):
from sys import getrefcount as grc
@@ -34,6 +36,7 @@ class RefcountTestCase(unittest.TestCase):
self.assertEqual(grc(callback), 2)
+ @support.refcount_test
def test_refcount(self):
from sys import getrefcount as grc
def func(*args):
diff --git a/Lib/ctypes/test/test_stringptr.py b/Lib/ctypes/test/test_stringptr.py
index 3d25fa5..95cd161 100644
--- a/Lib/ctypes/test/test_stringptr.py
+++ b/Lib/ctypes/test/test_stringptr.py
@@ -1,4 +1,5 @@
import unittest
+from test import support
from ctypes import *
import _ctypes_test
@@ -7,6 +8,7 @@ lib = CDLL(_ctypes_test.__file__)
class StringPtrTestCase(unittest.TestCase):
+ @support.refcount_test
def test__POINTER_c_char(self):
class X(Structure):
_fields_ = [("str", POINTER(c_char))]
diff --git a/Lib/distutils/__init__.py b/Lib/distutils/__init__.py
index 49b6d51..bd7d9dd 100644
--- a/Lib/distutils/__init__.py
+++ b/Lib/distutils/__init__.py
@@ -15,5 +15,5 @@ __revision__ = "$Id$"
# Updated automatically by the Python release process.
#
#--start constants--
-__version__ = "3.2"
+__version__ = "3.3a0"
#--end constants--
diff --git a/Lib/distutils/command/bdist_wininst.py b/Lib/distutils/command/bdist_wininst.py
index b2e2fc6..b886055 100644
--- a/Lib/distutils/command/bdist_wininst.py
+++ b/Lib/distutils/command/bdist_wininst.py
@@ -263,11 +263,11 @@ class bdist_wininst(Command):
cfgdata = cfgdata + b"\0"
if self.pre_install_script:
# We need to normalize newlines, so we open in text mode and
- # convert back to bytes. "latin1" simply avoids any possible
+ # convert back to bytes. "latin-1" simply avoids any possible
# failures.
with open(self.pre_install_script, "r",
- encoding="latin1") as script:
- script_data = script.read().encode("latin1")
+ encoding="latin-1") as script:
+ script_data = script.read().encode("latin-1")
cfgdata = cfgdata + script_data + b"\n\0"
else:
# empty pre-install script
diff --git a/Lib/distutils/tests/test_bdist_rpm.py b/Lib/distutils/tests/test_bdist_rpm.py
index 804fb13..030933f 100644
--- a/Lib/distutils/tests/test_bdist_rpm.py
+++ b/Lib/distutils/tests/test_bdist_rpm.py
@@ -28,6 +28,11 @@ class BuildRpmTestCase(support.TempdirManager,
unittest.TestCase):
def setUp(self):
+ try:
+ sys.executable.encode("UTF-8")
+ except UnicodeEncodeError:
+ raise unittest.SkipTest("sys.executable is not encodable to UTF-8")
+
super(BuildRpmTestCase, self).setUp()
self.old_location = os.getcwd()
self.old_sys_argv = sys.argv, sys.argv[:]
diff --git a/Lib/doctest.py b/Lib/doctest.py
index 9eba4e0..c508a7c 100644
--- a/Lib/doctest.py
+++ b/Lib/doctest.py
@@ -1373,6 +1373,7 @@ class DocTestRunner:
# Note that the interactive output will go to *our*
# save_stdout, even if that's not the real sys.stdout; this
# allows us to write test cases for the set_trace behavior.
+ save_trace = sys.gettrace()
save_set_trace = pdb.set_trace
self.debugger = _OutputRedirectingPdb(save_stdout)
self.debugger.reset()
@@ -1392,6 +1393,7 @@ class DocTestRunner:
finally:
sys.stdout = save_stdout
pdb.set_trace = save_set_trace
+ sys.settrace(save_trace)
linecache.getlines = self.save_linecache_getlines
sys.displayhook = save_displayhook
if clear_globs:
diff --git a/Lib/email/_parseaddr.py b/Lib/email/_parseaddr.py
index 41694f9..4b2f5c6 100644
--- a/Lib/email/_parseaddr.py
+++ b/Lib/email/_parseaddr.py
@@ -99,6 +99,14 @@ def parsedate_tz(data):
tss = '0'
elif len(tm) == 3:
[thh, tmm, tss] = tm
+ elif len(tm) == 1 and '.' in tm[0]:
+ # Some non-compliant MUAs use '.' to separate time elements.
+ tm = tm[0].split('.')
+ if len(tm) == 2:
+ [thh, tmm] = tm
+ tss = 0
+ elif len(tm) == 3:
+ [thh, tmm, tss] = tm
else:
return None
try:
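
With the hunk above, date strings whose time fields are separated by '.' (as sent by some non-compliant mail clients) now parse; for example:

    from email.utils import parsedate_tz

    print(parsedate_tz('Wed, 3 Apr 2002 14.58.26 +0800'))
    # -> (2002, 4, 3, 14, 58, 26, 0, 1, -1, 28800)
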
diff --git a/Lib/email/parser.py b/Lib/email/parser.py
index 6caaff5..ef051fa 100644
--- a/Lib/email/parser.py
+++ b/Lib/email/parser.py
@@ -15,7 +15,7 @@ from email.message import Message
class Parser:
- def __init__(self, *args, **kws):
+ def __init__(self, _class=Message):
"""Parser of RFC 2822 and MIME email messages.
Creates an in-memory object tree representing the email message, which
@@ -31,27 +31,7 @@ class Parser:
must be created. This class must have a constructor that can take
zero arguments. Default is Message.Message.
"""
- if len(args) >= 1:
- if '_class' in kws:
- raise TypeError("Multiple values for keyword arg '_class'")
- kws['_class'] = args[0]
- if len(args) == 2:
- if 'strict' in kws:
- raise TypeError("Multiple values for keyword arg 'strict'")
- kws['strict'] = args[1]
- if len(args) > 2:
- raise TypeError('Too many arguments')
- if '_class' in kws:
- self._class = kws['_class']
- del kws['_class']
- else:
- self._class = Message
- if 'strict' in kws:
- warnings.warn("'strict' argument is deprecated (and ignored)",
- DeprecationWarning, 2)
- del kws['strict']
- if kws:
- raise TypeError('Unexpected keyword arguments')
+ self._class = _class
def parse(self, fp, headersonly=False):
"""Create a message structure from the data in a file.
diff --git a/Lib/email/test/__init__.py b/Lib/email/test/__init__.py
deleted file mode 100644
index e69de29..0000000
--- a/Lib/email/test/__init__.py
+++ /dev/null
diff --git a/Lib/email/utils.py b/Lib/email/utils.py
index ac4da37..82f7283 100644
--- a/Lib/email/utils.py
+++ b/Lib/email/utils.py
@@ -42,6 +42,7 @@ from quopri import decodestring as _qdecode
# Intrapackage imports
from email.encoders import _bencode, _qencode
+from email.charset import Charset
COMMASPACE = ', '
EMPTYSTRING = ''
@@ -56,21 +57,36 @@ escapesre = re.compile(r'[][\\()"]')
# Helpers
-def formataddr(pair):
+def formataddr(pair, charset='utf-8'):
"""The inverse of parseaddr(), this takes a 2-tuple of the form
(realname, email_address) and returns the string value suitable
for an RFC 2822 From, To or Cc header.
If the first element of pair is false, then the second element is
returned unmodified.
+
+ Optional charset if given is the character set that is used to encode
+ realname in case realname is not ASCII safe. Can be an instance of str or
+ a Charset-like object which has a header_encode method. Default is
+ 'utf-8'.
"""
name, address = pair
+ # The address MUST (per RFC) be ascii, so throw a UnicodeError if it isn't.
+ address.encode('ascii')
if name:
- quotes = ''
- if specialsre.search(name):
- quotes = '"'
- name = escapesre.sub(r'\\\g<0>', name)
- return '%s%s%s <%s>' % (quotes, name, quotes, address)
+ try:
+ name.encode('ascii')
+ except UnicodeEncodeError:
+ if isinstance(charset, str):
+ charset = Charset(charset)
+ encoded_name = charset.header_encode(name)
+ return "%s <%s>" % (encoded_name, address)
+ else:
+ quotes = ''
+ if specialsre.search(name):
+ quotes = '"'
+ name = escapesre.sub(r'\\\g<0>', name)
+ return '%s%s%s <%s>' % (quotes, name, quotes, address)
return address
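
A short sketch of the new behaviour: ASCII real names are quoted as before, while non-ASCII names are RFC 2047-encoded with the given charset (default 'utf-8'):

    from email.utils import formataddr

    print(formataddr(('Jane Doe', 'jane@example.com')))
    # Jane Doe <jane@example.com>
    print(formataddr(('Jäne Döe', 'jane@example.com')))
    # =?utf-8?b?...?= <jane@example.com>   (encoded-word form; exact payload omitted)
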
diff --git a/Lib/ftplib.py b/Lib/ftplib.py
index 7c39887..e836b72 100644
--- a/Lib/ftplib.py
+++ b/Lib/ftplib.py
@@ -100,14 +100,15 @@ class FTP:
file = None
welcome = None
passiveserver = 1
- encoding = "latin1"
+ encoding = "latin-1"
# Initialization method (called by class instantiation).
# Initialize host to localhost, port to standard ftp port
# Optional arguments are host (for connect()),
# and user, passwd, acct (for login())
def __init__(self, host='', user='', passwd='', acct='',
- timeout=_GLOBAL_DEFAULT_TIMEOUT):
+ timeout=_GLOBAL_DEFAULT_TIMEOUT, source_address=None):
+ self.source_address = source_address
self.timeout = timeout
if host:
self.connect(host)
@@ -128,10 +129,12 @@ class FTP:
if self.sock is not None:
self.close()
- def connect(self, host='', port=0, timeout=-999):
+ def connect(self, host='', port=0, timeout=-999, source_address=None):
'''Connect to host. Arguments are:
- host: hostname to connect to (string, default previous host)
- port: port to connect to (integer, default previous port)
+ - source_address: a 2-tuple (host, port) for the socket to bind
+ to as its source address before connecting.
'''
if host != '':
self.host = host
@@ -139,7 +142,10 @@ class FTP:
self.port = port
if timeout != -999:
self.timeout = timeout
- self.sock = socket.create_connection((self.host, self.port), self.timeout)
+ if source_address is not None:
+ self.source_address = source_address
+ self.sock = socket.create_connection((self.host, self.port), self.timeout,
+ source_address=self.source_address)
self.af = self.sock.family
self.file = self.sock.makefile('r', encoding=self.encoding)
self.welcome = self.getresp()
@@ -334,7 +340,8 @@ class FTP:
size = None
if self.passiveserver:
host, port = self.makepasv()
- conn = socket.create_connection((host, port), self.timeout)
+ conn = socket.create_connection((host, port), self.timeout,
+ source_address=self.source_address)
if rest is not None:
self.sendcmd("REST %s" % rest)
resp = self.sendcmd(cmd)
@@ -589,11 +596,11 @@ class FTP:
def close(self):
'''Close the connection without assuming anything about it.'''
- if self.file:
+ if self.file is not None:
self.file.close()
+ if self.sock is not None:
self.sock.close()
- self.file = self.sock = None
-
+ self.file = self.sock = None
try:
import ssl
@@ -637,7 +644,7 @@ else:
def __init__(self, host='', user='', passwd='', acct='', keyfile=None,
certfile=None, context=None,
- timeout=_GLOBAL_DEFAULT_TIMEOUT):
+ timeout=_GLOBAL_DEFAULT_TIMEOUT, source_address=None):
if context is not None and keyfile is not None:
raise ValueError("context and keyfile arguments are mutually "
"exclusive")
@@ -648,7 +655,7 @@ else:
self.certfile = certfile
self.context = context
self._prot_p = False
- FTP.__init__(self, host, user, passwd, acct, timeout)
+ FTP.__init__(self, host, user, passwd, acct, timeout, source_address)
def login(self, user='', passwd='', acct='', secure=True):
if secure and not isinstance(self.sock, ssl.SSLSocket):
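
An illustrative sketch of the new source_address parameter (host names and
addresses here are hypothetical):

    from ftplib import FTP

    # Bind the control and data connections to a specific local address
    # before connecting; (host, 0) lets the OS pick an ephemeral port.
    ftp = FTP()
    ftp.connect('ftp.example.com', 21, source_address=('192.0.2.10', 0))
    ftp.login()
    ftp.quit()
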
diff --git a/Lib/functools.py b/Lib/functools.py
index e92a2fc..098f6b6 100644
--- a/Lib/functools.py
+++ b/Lib/functools.py
@@ -97,7 +97,7 @@ def cmp_to_key(mycmp):
"""Convert a cmp= function into a key= function"""
class K(object):
__slots__ = ['obj']
- def __init__(self, obj, *args):
+ def __init__(self, obj):
self.obj = obj
def __lt__(self, other):
return mycmp(self.obj, other.obj) < 0
@@ -115,6 +115,11 @@ def cmp_to_key(mycmp):
raise TypeError('hash not implemented')
return K
+try:
+ from _functools import cmp_to_key
+except ImportError:
+ pass
+
_CacheInfo = namedtuple("CacheInfo", "hits misses maxsize currsize")
def lru_cache(maxsize=100):
@@ -141,7 +146,7 @@ def lru_cache(maxsize=100):
tuple=tuple, sorted=sorted, len=len, KeyError=KeyError):
hits = misses = 0
- kwd_mark = object() # separates positional and keyword args
+ kwd_mark = (object(),) # separates positional and keyword args
lock = Lock() # needed because ordereddicts aren't threadsafe
if maxsize is None:
@@ -152,7 +157,7 @@ def lru_cache(maxsize=100):
nonlocal hits, misses
key = args
if kwds:
- key += (kwd_mark,) + tuple(sorted(kwds.items()))
+ key += kwd_mark + tuple(sorted(kwds.items()))
try:
result = cache[key]
hits += 1
@@ -171,7 +176,7 @@ def lru_cache(maxsize=100):
nonlocal hits, misses
key = args
if kwds:
- key += (kwd_mark,) + tuple(sorted(kwds.items()))
+ key += kwd_mark + tuple(sorted(kwds.items()))
try:
with lock:
result = cache[key]
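
The pure-Python K wrapper above remains the reference behaviour; the C version
from _functools is simply preferred when it is available. Typical usage is
unchanged, for example:

    from functools import cmp_to_key

    def reverse_numeric(x, y):
        # old-style cmp function: negative, zero or positive result
        return y - x

    sorted([5, 2, 4, 1, 3], key=cmp_to_key(reverse_numeric))
    # -> [5, 4, 3, 2, 1]
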
diff --git a/Lib/getopt.py b/Lib/getopt.py
index 980861d..3d6ecbd 100644
--- a/Lib/getopt.py
+++ b/Lib/getopt.py
@@ -19,7 +19,7 @@ option involved with the exception.
# Gerrit Holl <gerrit@nl.linux.org> moved the string-based exceptions
# to class-based exceptions.
#
-# Peter Åstrand <astrand@lysator.liu.se> added gnu_getopt().
+# Peter Åstrand <astrand@lysator.liu.se> added gnu_getopt().
#
# TODO for gnu_getopt():
#
@@ -34,6 +34,11 @@ option involved with the exception.
__all__ = ["GetoptError","error","getopt","gnu_getopt"]
import os
+try:
+ from gettext import gettext as _
+except ImportError:
+ # Bootstrapping Python: gettext's dependencies not built yet
+ def _(s): return s
class GetoptError(Exception):
opt = ''
@@ -153,10 +158,10 @@ def do_longs(opts, opt, longopts, args):
if has_arg:
if optarg is None:
if not args:
- raise GetoptError('option --%s requires argument' % opt, opt)
+ raise GetoptError(_('option --%s requires argument') % opt, opt)
optarg, args = args[0], args[1:]
elif optarg is not None:
- raise GetoptError('option --%s must not have an argument' % opt, opt)
+ raise GetoptError(_('option --%s must not have an argument') % opt, opt)
opts.append(('--' + opt, optarg or ''))
return opts, args
@@ -166,7 +171,7 @@ def do_longs(opts, opt, longopts, args):
def long_has_args(opt, longopts):
possibilities = [o for o in longopts if o.startswith(opt)]
if not possibilities:
- raise GetoptError('option --%s not recognized' % opt, opt)
+ raise GetoptError(_('option --%s not recognized') % opt, opt)
# Is there an exact match?
if opt in possibilities:
return False, opt
@@ -176,7 +181,7 @@ def long_has_args(opt, longopts):
if len(possibilities) > 1:
# XXX since possibilities contains all valid continuations, might be
# nice to work them into the error msg
- raise GetoptError('option --%s not a unique prefix' % opt, opt)
+ raise GetoptError(_('option --%s not a unique prefix') % opt, opt)
assert len(possibilities) == 1
unique_match = possibilities[0]
has_arg = unique_match.endswith('=')
@@ -190,7 +195,7 @@ def do_shorts(opts, optstring, shortopts, args):
if short_has_arg(opt, shortopts):
if optstring == '':
if not args:
- raise GetoptError('option -%s requires argument' % opt,
+ raise GetoptError(_('option -%s requires argument') % opt,
opt)
optstring, args = args[0], args[1:]
optarg, optstring = optstring, ''
@@ -203,7 +208,7 @@ def short_has_arg(opt, shortopts):
for i in range(len(shortopts)):
if opt == shortopts[i] != ':':
return shortopts.startswith(':', i+1)
- raise GetoptError('option -%s not recognized' % opt, opt)
+ raise GetoptError(_('option -%s not recognized') % opt, opt)
if __name__ == '__main__':
import sys
diff --git a/Lib/getpass.py b/Lib/getpass.py
index ce04566..dc02bd1 100644
--- a/Lib/getpass.py
+++ b/Lib/getpass.py
@@ -62,7 +62,7 @@ def unix_getpass(prompt='Password: ', stream=None):
try:
old = termios.tcgetattr(fd) # a copy to save
new = old[:]
- new[3] &= ~(termios.ECHO|termios.ISIG) # 3 == 'lflags'
+ new[3] &= ~termios.ECHO # 3 == 'lflags'
tcsetattr_flags = termios.TCSAFLUSH
if hasattr(termios, 'TCSASOFT'):
tcsetattr_flags |= termios.TCSASOFT
diff --git a/Lib/glob.py b/Lib/glob.py
index c5f5f69..36d493d 100644
--- a/Lib/glob.py
+++ b/Lib/glob.py
@@ -1,6 +1,5 @@
"""Filename globbing utility."""
-import sys
import os
import re
import fnmatch
diff --git a/Lib/gzip.py b/Lib/gzip.py
index ba2149e..f8cd2a1 100644
--- a/Lib/gzip.py
+++ b/Lib/gzip.py
@@ -348,6 +348,28 @@ class GzipFile(io.BufferedIOBase):
self.offset += size
return chunk
+ def read1(self, size=-1):
+ self._check_closed()
+ if self.mode != READ:
+ import errno
+ raise IOError(errno.EBADF, "read1() on write-only GzipFile object")
+
+ if self.extrasize <= 0 and self.fileobj is None:
+ return b''
+
+ try:
+ self._read()
+ except EOFError:
+ pass
+ if size < 0 or size > self.extrasize:
+ size = self.extrasize
+
+ offset = self.offset - self.extrastart
+ chunk = self.extrabuf[offset: offset + size]
+ self.extrasize -= size
+ self.offset += size
+ return chunk
+
def peek(self, n):
if self.mode != READ:
import errno
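
read1() gives GzipFile the incremental-read hook that io.TextIOWrapper relies
on, so a compressed file can be wrapped for line-oriented text reading. A
minimal sketch (the file name is hypothetical):

    import gzip
    import io

    with gzip.open('log.txt.gz', 'rb') as binary:
        with io.TextIOWrapper(binary, encoding='utf-8') as text:
            for line in text:
                pass  # process each decoded line
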
diff --git a/Lib/http/client.py b/Lib/http/client.py
index 604577c..2e68b3c 100644
--- a/Lib/http/client.py
+++ b/Lib/http/client.py
@@ -697,7 +697,7 @@ class HTTPConnection:
self.send(connect_bytes)
for header, value in self._tunnel_headers.items():
header_str = "%s: %s\r\n" % (header, value)
- header_bytes = header_str.encode("latin1")
+ header_bytes = header_str.encode("latin-1")
self.send(header_bytes)
self.send(b'\r\n')
@@ -937,7 +937,7 @@ class HTTPConnection:
values = list(values)
for i, one_value in enumerate(values):
if hasattr(one_value, 'encode'):
- values[i] = one_value.encode('latin1')
+ values[i] = one_value.encode('latin-1')
elif isinstance(one_value, int):
values[i] = str(one_value).encode('ascii')
value = b'\r\n\t'.join(values)
diff --git a/Lib/http/server.py b/Lib/http/server.py
index 86fa37f..6aacbbd 100644
--- a/Lib/http/server.py
+++ b/Lib/http/server.py
@@ -452,7 +452,7 @@ class BaseHTTPRequestHandler(socketserver.StreamRequestHandler):
message = ''
if self.request_version != 'HTTP/0.9':
self.wfile.write(("%s %d %s\r\n" %
- (self.protocol_version, code, message)).encode('latin1', 'strict'))
+ (self.protocol_version, code, message)).encode('latin-1', 'strict'))
def send_header(self, keyword, value):
"""Send a MIME header."""
@@ -460,7 +460,7 @@ class BaseHTTPRequestHandler(socketserver.StreamRequestHandler):
if not hasattr(self, '_headers_buffer'):
self._headers_buffer = []
self._headers_buffer.append(
- ("%s: %s\r\n" % (keyword, value)).encode('latin1', 'strict'))
+ ("%s: %s\r\n" % (keyword, value)).encode('latin-1', 'strict'))
if keyword.lower() == 'connection':
if value.lower() == 'close':
diff --git a/Lib/idlelib/idlever.py b/Lib/idlelib/idlever.py
index 5b0907e..5e9afb1 100644
--- a/Lib/idlelib/idlever.py
+++ b/Lib/idlelib/idlever.py
@@ -1 +1 @@
-IDLE_VERSION = "3.2"
+IDLE_VERSION = "3.3a0"
diff --git a/Lib/importlib/_bootstrap.py b/Lib/importlib/_bootstrap.py
index 425b8bf..18ea85c 100644
--- a/Lib/importlib/_bootstrap.py
+++ b/Lib/importlib/_bootstrap.py
@@ -240,7 +240,7 @@ class BuiltinImporter:
@classmethod
@_requires_builtin
def is_package(cls, fullname):
- """Return None as built-in module are never packages."""
+ """Return None as built-in modules are never packages."""
return False
@@ -404,6 +404,7 @@ class SourceLoader(_LoaderBasics):
else:
found = marshal.loads(bytes_data)
if isinstance(found, code_type):
+ imp._fix_co_filename(found, source_path)
return found
else:
msg = "Non-code object in {}"
@@ -758,14 +759,14 @@ class _ImportLockContext:
_IMPLICIT_META_PATH = [BuiltinImporter, FrozenImporter, _DefaultPathFinder]
-_ERR_MSG = 'No module named {}'
+_ERR_MSG = 'No module named {!r}'
def _gcd_import(name, package=None, level=0):
"""Import and return the module based on its name, the package the call is
being made from, and the level adjustment.
This function represents the greatest common denominator of functionality
- between import_module and __import__. This includes settting __package__ if
+ between import_module and __import__. This includes setting __package__ if
the loader did not.
"""
@@ -855,7 +856,7 @@ def __import__(name, globals={}, locals={}, fromlist=[], level=0):
module = _gcd_import(name)
else:
# __package__ is not guaranteed to be defined or could be set to None
- # to represent that it's proper value is unknown
+ # to represent that its proper value is unknown
package = globals.get('__package__')
if package is None:
package = globals['__name__']
diff --git a/Lib/importlib/test/regrtest.py b/Lib/importlib/test/regrtest.py
index b103ae7d..dc0eb97 100644
--- a/Lib/importlib/test/regrtest.py
+++ b/Lib/importlib/test/regrtest.py
@@ -5,13 +5,6 @@ invalidates are automatically skipped if the entire test suite is run.
Otherwise all command-line options valid for test.regrtest are also valid for
this script.
-XXX FAILING
- * test_import
- - test_incorrect_code_name
- file name differing between __file__ and co_filename (r68360 on trunk)
- - test_import_by_filename
- exception for trying to import by file name does not match
-
"""
import importlib
import sys
diff --git a/Lib/lib2to3/__main__.py b/Lib/lib2to3/__main__.py
new file mode 100644
index 0000000..80688ba
--- /dev/null
+++ b/Lib/lib2to3/__main__.py
@@ -0,0 +1,4 @@
+import sys
+from .main import main
+
+sys.exit(main("lib2to3.fixes"))
diff --git a/Lib/lib2to3/patcomp.py b/Lib/lib2to3/patcomp.py
index bb538d5..0a259e9 100644
--- a/Lib/lib2to3/patcomp.py
+++ b/Lib/lib2to3/patcomp.py
@@ -11,6 +11,7 @@ The compiler compiles a pattern to a pytree.*Pattern instance.
__author__ = "Guido van Rossum <guido@python.org>"
# Python imports
+import io
import os
# Fairly local imports
@@ -32,7 +33,7 @@ class PatternSyntaxError(Exception):
def tokenize_wrapper(input):
"""Tokenizes a string suppressing significant whitespace."""
skip = set((token.NEWLINE, token.INDENT, token.DEDENT))
- tokens = tokenize.generate_tokens(driver.generate_lines(input).__next__)
+ tokens = tokenize.generate_tokens(io.StringIO(input).readline)
for quintuple in tokens:
type, value, start, end, line_text = quintuple
if type not in skip:
diff --git a/Lib/lib2to3/pgen2/driver.py b/Lib/lib2to3/pgen2/driver.py
index ee77a13..e7828ff 100644
--- a/Lib/lib2to3/pgen2/driver.py
+++ b/Lib/lib2to3/pgen2/driver.py
@@ -17,6 +17,7 @@ __all__ = ["Driver", "load_grammar"]
# Python imports
import codecs
+import io
import os
import logging
import sys
@@ -101,18 +102,10 @@ class Driver(object):
def parse_string(self, text, debug=False):
"""Parse a string and return the syntax tree."""
- tokens = tokenize.generate_tokens(generate_lines(text).__next__)
+ tokens = tokenize.generate_tokens(io.StringIO(text).readline)
return self.parse_tokens(tokens, debug)
-def generate_lines(text):
- """Generator that behaves like readline without using StringIO."""
- for line in text.splitlines(True):
- yield line
- while True:
- yield ""
-
-
def load_grammar(gt="Grammar.txt", gp=None,
save=True, force=False, logger=None):
"""Load the grammar (maybe from a pickle)."""
diff --git a/Lib/lib2to3/tests/test_parser.py b/Lib/lib2to3/tests/test_parser.py
index ce39e41..f389795 100644
--- a/Lib/lib2to3/tests/test_parser.py
+++ b/Lib/lib2to3/tests/test_parser.py
@@ -18,6 +18,16 @@ import os
# Local imports
from lib2to3.pgen2 import tokenize
from ..pgen2.parse import ParseError
+from lib2to3.pygram import python_symbols as syms
+
+
+class TestDriver(support.TestCase):
+
+ def test_formfeed(self):
+ s = """print 1\n\x0Cprint 2\n"""
+ t = driver.parse_string(s)
+ self.assertEqual(t.children[0].children[0].type, syms.print_stmt)
+ self.assertEqual(t.children[1].children[0].type, syms.print_stmt)
class GrammarTest(support.TestCase):
diff --git a/Lib/logging/__init__.py b/Lib/logging/__init__.py
index e4b34a1..8f4fc2b 100644
--- a/Lib/logging/__init__.py
+++ b/Lib/logging/__init__.py
@@ -1,4 +1,4 @@
-# Copyright 2001-2010 by Vinay Sajip. All Rights Reserved.
+# Copyright 2001-2011 by Vinay Sajip. All Rights Reserved.
#
# Permission to use, copy, modify, and distribute this software and its
# documentation for any purpose and without fee is hereby granted,
@@ -18,7 +18,7 @@
Logging package for Python. Based on PEP 282 and comments thereto in
comp.lang.python, and influenced by Apache's log4j system.
-Copyright (C) 2001-2010 Vinay Sajip. All Rights Reserved.
+Copyright (C) 2001-2011 Vinay Sajip. All Rights Reserved.
To use, simply 'import logging' and log away!
"""
@@ -1650,6 +1650,10 @@ def basicConfig(**kwargs):
stream Use the specified stream to initialize the StreamHandler. Note
that this argument is incompatible with 'filename' - if both
are present, 'stream' is ignored.
+ handlers If specified, this should be an iterable of already created
+ handlers, which will be added to the root logger. Any handler
+ in the list which does not have a formatter assigned will be
+ assigned the formatter created in this function.
Note that you could specify a stream created using open(filename, mode)
rather than passing the filename and mode in. However, it should be
@@ -1657,27 +1661,47 @@ def basicConfig(**kwargs):
using sys.stdout or sys.stderr), whereas FileHandler closes its stream
when the handler is closed.
- .. versionchanged: 3.2
+ .. versionchanged:: 3.2
Added the ``style`` parameter.
+
+ .. versionchanged:: 3.3
+ Added the ``handlers`` parameter. A ``ValueError`` is now thrown for
+ incompatible arguments (e.g. ``handlers`` specified together with
+ ``filename``/``filemode``, or ``filename``/``filemode`` specified
+ together with ``stream``, or ``handlers`` specified together with
+ ``stream``).
"""
# Add thread safety in case someone mistakenly calls
# basicConfig() from multiple threads
_acquireLock()
try:
if len(root.handlers) == 0:
- filename = kwargs.get("filename")
- if filename:
- mode = kwargs.get("filemode", 'a')
- hdlr = FileHandler(filename, mode)
+ handlers = kwargs.get("handlers")
+ if handlers is None:
+ if "stream" in kwargs and "filename" in kwargs:
+ raise ValueError("'stream' and 'filename' should not be "
+ "specified together")
else:
- stream = kwargs.get("stream")
- hdlr = StreamHandler(stream)
+ if "stream" in kwargs or "filename" in kwargs:
+ raise ValueError("'stream' or 'filename' should not be "
+ "specified together with 'handlers'")
+ if handlers is None:
+ filename = kwargs.get("filename")
+ if filename:
+ mode = kwargs.get("filemode", 'a')
+ h = FileHandler(filename, mode)
+ else:
+ stream = kwargs.get("stream")
+ h = StreamHandler(stream)
+ handlers = [h]
fs = kwargs.get("format", BASIC_FORMAT)
dfs = kwargs.get("datefmt", None)
style = kwargs.get("style", '%')
fmt = Formatter(fs, dfs, style)
- hdlr.setFormatter(fmt)
- root.addHandler(hdlr)
+ for h in handlers:
+ if h.formatter is None:
+ h.setFormatter(fmt)
+ root.addHandler(h)
level = kwargs.get("level")
if level is not None:
root.setLevel(level)
@@ -1826,10 +1850,10 @@ class NullHandler(Handler):
package.
"""
def handle(self, record):
- pass
+ """Stub."""
def emit(self, record):
- pass
+ """Stub."""
def createLock(self):
self.lock = None
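
A hedged sketch of the new handlers argument (the file name is illustrative):

    import logging

    handlers = [logging.StreamHandler(), logging.FileHandler('app.log')]
    # Handlers without a formatter of their own receive the one built from
    # format/datefmt/style; combining handlers= with stream= or filename=
    # now raises ValueError.
    logging.basicConfig(level=logging.INFO, handlers=handlers)
    logging.info('configured via handlers=')
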
diff --git a/Lib/logging/handlers.py b/Lib/logging/handlers.py
index 96384bd..57b2057 100644
--- a/Lib/logging/handlers.py
+++ b/Lib/logging/handlers.py
@@ -1307,6 +1307,16 @@ class QueueListener(object):
except queue.Empty:
break
+ def enqueue_sentinel(self):
+ """
+ This is used to enqueue the sentinel record.
+
+ The base implementation uses put_nowait. You may want to override this
+ method if you want to use timeouts or work with custom queue
+ implementations.
+ """
+ self.queue.put_nowait(self._sentinel)
+
def stop(self):
"""
Stop the listener.
@@ -1316,6 +1326,6 @@ class QueueListener(object):
may be some records still left on the queue, which won't be processed.
"""
self._stop.set()
- self.queue.put_nowait(self._sentinel)
+ self.enqueue_sentinel()
self._thread.join()
self._thread = None
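
enqueue_sentinel() exists so that subclasses working with custom queues, or
wanting a timeout, can change how the stop marker is queued. A rough sketch:

    import logging.handlers

    class TimeoutQueueListener(logging.handlers.QueueListener):
        def enqueue_sentinel(self):
            # Use a blocking put with a timeout instead of put_nowait().
            self.queue.put(self._sentinel, timeout=5)
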
diff --git a/Lib/multiprocessing/connection.py b/Lib/multiprocessing/connection.py
index d6c23fb..d6627e5 100644
--- a/Lib/multiprocessing/connection.py
+++ b/Lib/multiprocessing/connection.py
@@ -434,10 +434,10 @@ class ConnectionWrapper(object):
return self._loads(s)
def _xml_dumps(obj):
- return xmlrpclib.dumps((obj,), None, None, None, 1).encode('utf8')
+ return xmlrpclib.dumps((obj,), None, None, None, 1).encode('utf-8')
def _xml_loads(s):
- (obj,), method = xmlrpclib.loads(s.decode('utf8'))
+ (obj,), method = xmlrpclib.loads(s.decode('utf-8'))
return obj
class XmlListener(Listener):
diff --git a/Lib/multiprocessing/process.py b/Lib/multiprocessing/process.py
index b56a061..3fb9ff6 100644
--- a/Lib/multiprocessing/process.py
+++ b/Lib/multiprocessing/process.py
@@ -91,12 +91,16 @@ class Process(object):
'''
_Popen = None
- def __init__(self, group=None, target=None, name=None, args=(), kwargs={}):
+ def __init__(self, group=None, target=None, name=None, args=(), kwargs={},
+ *, daemon=None):
assert group is None, 'group argument must be None for now'
count = next(_current_process._counter)
self._identity = _current_process._identity + (count,)
self._authkey = _current_process._authkey
- self._daemonic = _current_process._daemonic
+ if daemon is not None:
+ self._daemonic = daemon
+ else:
+ self._daemonic = _current_process._daemonic
self._tempdir = _current_process._tempdir
self._parent_pid = os.getpid()
self._popen = None
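
With the new keyword-only argument the daemon flag can be set at construction
time rather than by assigning to the daemon attribute afterwards. For example
(the target function is illustrative):

    import time
    from multiprocessing import Process

    def heartbeat():
        while True:
            time.sleep(1)

    # On platforms that spawn processes, keep this under an
    # if __name__ == '__main__' guard.
    p = Process(target=heartbeat, daemon=True)   # was: p.daemon = True
    p.start()
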
diff --git a/Lib/nntplib.py b/Lib/nntplib.py
index bf66734..3e863dc 100644
--- a/Lib/nntplib.py
+++ b/Lib/nntplib.py
@@ -346,6 +346,20 @@ class _NNTPBase:
# Log in and encryption setup order is left to subclasses.
self.authenticated = False
+ def __enter__(self):
+ return self
+
+ def __exit__(self, *args):
+ is_connected = lambda: hasattr(self, "file")
+ if is_connected():
+ try:
+ self.quit()
+ except (socket.error, EOFError):
+ pass
+ finally:
+ if is_connected():
+ self._close()
+
def getwelcome(self):
"""Get the welcome message from the server
(this is read and squirreled away by __init__()).
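
The new __enter__/__exit__ pair lets a connection be used in a with statement;
QUIT is attempted and the socket closed even if the body raises. A sketch (the
server name is hypothetical):

    from nntplib import NNTP

    with NNTP('news.example.com') as n:
        resp, count, first, last, name = n.group('comp.lang.python')
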
diff --git a/Lib/optparse.py b/Lib/optparse.py
index 3eb652a..c207713 100644
--- a/Lib/optparse.py
+++ b/Lib/optparse.py
@@ -86,10 +86,16 @@ def _repr(self):
# Id: errors.py 509 2006-04-20 00:58:24Z gward
try:
- from gettext import gettext
+ from gettext import gettext, ngettext
except ImportError:
def gettext(message):
return message
+
+ def ngettext(singular, plural, n):
+ if n == 1:
+ return singular
+ return plural
+
_ = gettext
@@ -1478,11 +1484,10 @@ class OptionParser (OptionContainer):
if option.takes_value():
nargs = option.nargs
if len(rargs) < nargs:
- if nargs == 1:
- self.error(_("%s option requires an argument") % opt)
- else:
- self.error(_("%s option requires %d arguments")
- % (opt, nargs))
+ self.error(ngettext(
+ "%(option)s option requires %(number)d argument",
+ "%(option)s option requires %(number)d arguments",
+ nargs) % {"option": opt, "number": nargs})
elif nargs == 1:
value = rargs.pop(0)
else:
@@ -1517,11 +1522,10 @@ class OptionParser (OptionContainer):
nargs = option.nargs
if len(rargs) < nargs:
- if nargs == 1:
- self.error(_("%s option requires an argument") % opt)
- else:
- self.error(_("%s option requires %d arguments")
- % (opt, nargs))
+ self.error(ngettext(
+ "%(option)s option requires %(number)d argument",
+ "%(option)s option requires %(number)d arguments",
+ nargs) % {"option": opt, "number": nargs})
elif nargs == 1:
value = rargs.pop(0)
else:
diff --git a/Lib/os.py b/Lib/os.py
index a894ee0..28979bf 100644
--- a/Lib/os.py
+++ b/Lib/os.py
@@ -434,7 +434,7 @@ def get_exec_path(env=None):
# Change environ to automatically call putenv(), unsetenv if they exist.
-from _abcoll import MutableMapping # Can't use collections (bootstrap)
+from collections.abc import MutableMapping
class _Environ(MutableMapping):
def __init__(self, data, encodekey, decodekey, encodevalue, decodevalue, putenv, unsetenv):
diff --git a/Lib/platform.py b/Lib/platform.py
index abe917a..75bc5f3 100755
--- a/Lib/platform.py
+++ b/Lib/platform.py
@@ -357,92 +357,11 @@ def dist(distname='',version='',id='',
supported_dists=supported_dists,
full_distribution_name=0)
-class _popen:
-
- """ Fairly portable (alternative) popen implementation.
-
- This is mostly needed in case os.popen() is not available, or
- doesn't work as advertised, e.g. in Win9X GUI programs like
- PythonWin or IDLE.
-
- Writing to the pipe is currently not supported.
-
- """
- tmpfile = ''
- pipe = None
- bufsize = None
- mode = 'r'
-
- def __init__(self,cmd,mode='r',bufsize=None):
-
- if mode != 'r':
- raise ValueError('popen()-emulation only supports read mode')
- import tempfile
- self.tmpfile = tmpfile = tempfile.mktemp()
- os.system(cmd + ' > %s' % tmpfile)
- self.pipe = open(tmpfile,'rb')
- self.bufsize = bufsize
- self.mode = mode
-
- def read(self):
-
- return self.pipe.read()
-
- def readlines(self):
-
- if self.bufsize is not None:
- return self.pipe.readlines()
-
- def close(self,
-
- remove=os.unlink,error=os.error):
-
- if self.pipe:
- rc = self.pipe.close()
- else:
- rc = 255
- if self.tmpfile:
- try:
- remove(self.tmpfile)
- except error:
- pass
- return rc
-
- # Alias
- __del__ = close
-
def popen(cmd, mode='r', bufsize=-1):
""" Portable popen() interface.
"""
- # Find a working popen implementation preferring win32pipe.popen
- # over os.popen over _popen
- popen = None
- if os.environ.get('OS','') == 'Windows_NT':
- # On NT win32pipe should work; on Win9x it hangs due to bugs
- # in the MS C lib (see MS KnowledgeBase article Q150956)
- try:
- import win32pipe
- except ImportError:
- pass
- else:
- popen = win32pipe.popen
- if popen is None:
- if hasattr(os,'popen'):
- popen = os.popen
- # Check whether it works... it doesn't in GUI programs
- # on Windows platforms
- if sys.platform == 'win32': # XXX Others too ?
- try:
- popen('')
- except os.error:
- popen = _popen
- else:
- popen = _popen
- if bufsize is None:
- return popen(cmd,mode)
- else:
- return popen(cmd,mode,bufsize)
+ return os.popen(cmd, mode, bufsize)
def _norm_version(version, build=''):
diff --git a/Lib/poplib.py b/Lib/poplib.py
index 84ea88d..d42d9dd 100644
--- a/Lib/poplib.py
+++ b/Lib/poplib.py
@@ -250,15 +250,18 @@ class POP3:
def quit(self):
"""Signoff: commit changes on server, unlock mailbox, close connection."""
- try:
- resp = self._shortcmd('QUIT')
- except error_proto as val:
- resp = val
- self.file.close()
- self.sock.close()
- del self.file, self.sock
+ resp = self._shortcmd('QUIT')
+ self.close()
return resp
+ def close(self):
+ """Close the connection without assuming anything about it."""
+ if self.file is not None:
+ self.file.close()
+ if self.sock is not None:
+ self.sock.close()
+ self.file = self.sock = None
+
#__del__ = quit
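
quit() now delegates its cleanup to the new close() method, which can also be
called on its own, e.g. in a finally clause (host and credentials below are
hypothetical):

    import poplib

    pop = poplib.POP3('mail.example.com')
    try:
        pop.user('bob')
        pop.pass_('secret')
        print(pop.stat())
    finally:
        pop.close()   # drop the connection without assuming QUIT succeeded
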
diff --git a/Lib/pydoc.py b/Lib/pydoc.py
index dc398e3..8e14ee7 100755
--- a/Lib/pydoc.py
+++ b/Lib/pydoc.py
@@ -168,11 +168,11 @@ def _split_list(s, predicate):
def visiblename(name, all=None, obj=None):
"""Decide whether to show documentation on a variable."""
# Certain special names are redundant.
- _hidden_names = ('__builtins__', '__doc__', '__file__', '__path__',
+ if name in {'__builtins__', '__doc__', '__file__', '__path__',
'__module__', '__name__', '__slots__', '__package__',
'__cached__', '__author__', '__credits__', '__date__',
- '__version__')
- if name in _hidden_names: return 0
+ '__version__'}:
+ return 0
# Private names are hidden, but special names are displayed.
if name.startswith('__') and name.endswith('__'): return 1
# Namedtuples have public fields and methods with a single leading underscore
@@ -952,6 +952,9 @@ class HTMLDoc(Doc):
modpkgs = []
if shadowed is None: shadowed = {}
for importer, name, ispkg in pkgutil.iter_modules([dir]):
+ if any((0xD800 <= ord(ch) <= 0xDFFF) for ch in name):
+ # ignore a module if its name contains a surrogate character
+ continue
modpkgs.append((name, '', ispkg, name in shadowed))
shadowed[name] = 1
diff --git a/Lib/random.py b/Lib/random.py
index 7e0de4f..3d2af24 100644
--- a/Lib/random.py
+++ b/Lib/random.py
@@ -42,7 +42,7 @@ from types import MethodType as _MethodType, BuiltinMethodType as _BuiltinMethod
from math import log as _log, exp as _exp, pi as _pi, e as _e, ceil as _ceil
from math import sqrt as _sqrt, acos as _acos, cos as _cos, sin as _sin
from os import urandom as _urandom
-import collections as _collections
+from collections.abc import Set as _Set, Sequence as _Sequence
from hashlib import sha512 as _sha512
__all__ = ["Random","seed","random","uniform","randint","choice","sample",
@@ -114,7 +114,7 @@ class Random(_random.Random):
if version == 2:
if isinstance(a, (str, bytes, bytearray)):
if isinstance(a, str):
- a = a.encode("utf8")
+ a = a.encode("utf-8")
a += _sha512(a).digest()
a = int.from_bytes(a, 'big')
@@ -293,10 +293,10 @@ class Random(_random.Random):
# preferred since the list takes less space than the
# set and it doesn't suffer from frequent reselections.
- if isinstance(population, _collections.Set):
+ if isinstance(population, _Set):
population = tuple(population)
- if not isinstance(population, _collections.Sequence):
- raise TypeError("Population must be a sequence or Set. For dicts, use list(d).")
+ if not isinstance(population, _Sequence):
+ raise TypeError("Population must be a sequence or set. For dicts, use list(d).")
randbelow = self._randbelow
n = len(population)
if not 0 <= k <= n:
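
The behaviour of sample() is unchanged by the import switch: sets are still
converted to a tuple internally, sequences are used as-is, and dicts are
rejected. For example:

    import random

    random.sample(['red', 'green', 'blue', 'black'], 2)   # sequence
    random.sample({'red', 'green', 'blue', 'black'}, 2)   # set -> tuple first
    # random.sample({'a': 1, 'b': 2}, 1) raises TypeError; use list(d) instead.
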
diff --git a/Lib/re.py b/Lib/re.py
index abd7ea2..cdf5976 100644
--- a/Lib/re.py
+++ b/Lib/re.py
@@ -215,12 +215,14 @@ def template(pattern, flags=0):
return _compile(pattern, flags|T)
_alphanum_str = frozenset(
- "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ01234567890")
+ "_abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ01234567890")
_alphanum_bytes = frozenset(
- b"abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ01234567890")
+ b"_abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ01234567890")
def escape(pattern):
- "Escape all non-alphanumeric characters in pattern."
+ """
+ Escape all the characters in pattern except ASCII letters, numbers and '_'.
+ """
if isinstance(pattern, str):
alphanum = _alphanum_str
s = list(pattern)
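
With '_' added to the alphanumeric sets, escape() now leaves underscores
untouched, e.g.:

    import re

    re.escape('foo_bar-baz')
    # -> 'foo_bar\\-baz' (repr form: '-' is still escaped, '_' no longer is)
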
diff --git a/Lib/site.py b/Lib/site.py
index a2c0bec..fcfdbed 100644
--- a/Lib/site.py
+++ b/Lib/site.py
@@ -508,6 +508,11 @@ def execusercustomize():
def main():
+ """Add standard site-specific directories to the module search path.
+
+ This function is called automatically when this module is imported,
+ unless the python interpreter was started with the -S flag.
+ """
global ENABLE_USER_SITE
abs_paths()
@@ -526,7 +531,10 @@ def main():
if ENABLE_USER_SITE:
execusercustomize()
-main()
+# Prevent modification of sys.path when python was started with -S and
+# site is imported later.
+if not sys.flags.no_site:
+ main()
def _script():
help = """\
diff --git a/Lib/smtpd.py b/Lib/smtpd.py
index 599e79b..32f45ae 100755
--- a/Lib/smtpd.py
+++ b/Lib/smtpd.py
@@ -275,7 +275,7 @@ class SMTPChannel(asynchat.async_chat):
return
elif limit:
self.num_bytes += len(data)
- self.received_lines.append(str(data, "utf8"))
+ self.received_lines.append(str(data, "utf-8"))
# Implementation of base class abstract method
def found_terminator(self):
diff --git a/Lib/smtplib.py b/Lib/smtplib.py
index 14e6250..213138c 100755
--- a/Lib/smtplib.py
+++ b/Lib/smtplib.py
@@ -269,6 +269,19 @@ class SMTP:
pass
self.local_hostname = '[%s]' % addr
+ def __enter__(self):
+ return self
+
+ def __exit__(self, *args):
+ try:
+ code, message = self.docmd("QUIT")
+ if code != 221:
+ raise SMTPResponseException(code, message)
+ except SMTPServerDisconnected:
+ pass
+ finally:
+ self.close()
+
def set_debuglevel(self, debuglevel):
"""Set the debug output level.
diff --git a/Lib/socket.py b/Lib/socket.py
index 1e28549..5715034 100644
--- a/Lib/socket.py
+++ b/Lib/socket.py
@@ -112,6 +112,9 @@ class socket(_socket.socket):
s[7:])
return s
+ def __getstate__(self):
+ raise TypeError("Cannot serialize socket object")
+
def dup(self):
"""dup() -> socket object
diff --git a/Lib/sqlite3/test/hooks.py b/Lib/sqlite3/test/hooks.py
index a6161fa..dad35d9 100644
--- a/Lib/sqlite3/test/hooks.py
+++ b/Lib/sqlite3/test/hooks.py
@@ -175,10 +175,60 @@ class ProgressTests(unittest.TestCase):
con.execute("select 1 union select 2 union select 3").fetchall()
self.assertEqual(action, 0, "progress handler was not cleared")
+class TraceCallbackTests(unittest.TestCase):
+ def CheckTraceCallbackUsed(self):
+ """
+ Test that the trace callback is invoked once it is set.
+ """
+ con = sqlite.connect(":memory:")
+ traced_statements = []
+ def trace(statement):
+ traced_statements.append(statement)
+ con.set_trace_callback(trace)
+ con.execute("create table foo(a, b)")
+ self.assertTrue(traced_statements)
+ self.assertTrue(any("create table foo" in stmt for stmt in traced_statements))
+
+ def CheckClearTraceCallback(self):
+ """
+ Test that setting the trace callback to None clears the previously set callback.
+ """
+ con = sqlite.connect(":memory:")
+ traced_statements = []
+ def trace(statement):
+ traced_statements.append(statement)
+ con.set_trace_callback(trace)
+ con.set_trace_callback(None)
+ con.execute("create table foo(a, b)")
+ self.assertFalse(traced_statements, "trace callback was not cleared")
+
+ def CheckUnicodeContent(self):
+ """
+ Test that the statement can contain unicode literals.
+ """
+ unicode_value = '\xf6\xe4\xfc\xd6\xc4\xdc\xdf\u20ac'
+ con = sqlite.connect(":memory:")
+ traced_statements = []
+ def trace(statement):
+ traced_statements.append(statement)
+ con.set_trace_callback(trace)
+ con.execute("create table foo(x)")
+ # Can't execute bound parameters as their values don't appear
+ # in traced statements before SQLite 3.6.21
+ # (cf. http://www.sqlite.org/draft/releaselog/3_6_21.html)
+ con.execute('insert into foo(x) values ("%s")' % unicode_value)
+ con.commit()
+ self.assertTrue(any(unicode_value in stmt for stmt in traced_statements),
+ "Unicode data %s garbled in trace callback: %s"
+ % (ascii(unicode_value), ', '.join(map(ascii, traced_statements))))
+
+
+
def suite():
collation_suite = unittest.makeSuite(CollationTests, "Check")
progress_suite = unittest.makeSuite(ProgressTests, "Check")
- return unittest.TestSuite((collation_suite, progress_suite))
+ trace_suite = unittest.makeSuite(TraceCallbackTests, "Check")
+ return unittest.TestSuite((collation_suite, progress_suite, trace_suite))
def test():
runner = unittest.TextTestRunner()
diff --git a/Lib/sqlite3/test/types.py b/Lib/sqlite3/test/types.py
index 29413e1..d214f3d 100644
--- a/Lib/sqlite3/test/types.py
+++ b/Lib/sqlite3/test/types.py
@@ -85,7 +85,7 @@ class DeclTypesTests(unittest.TestCase):
if isinstance(_val, bytes):
# sqlite3 always calls __init__ with a bytes created from a
# UTF-8 string when __conform__ was used to store the object.
- _val = _val.decode('utf8')
+ _val = _val.decode('utf-8')
self.val = _val
def __cmp__(self, other):
diff --git a/Lib/sre_parse.py b/Lib/sre_parse.py
index 13737ca..ae63c31 100644
--- a/Lib/sre_parse.py
+++ b/Lib/sre_parse.py
@@ -791,7 +791,7 @@ def parse_template(source, pattern):
else:
# The tokenizer implicitly decodes bytes objects as latin-1, we must
# therefore re-encode the final representation.
- encode = lambda x: x.encode('latin1')
+ encode = lambda x: x.encode('latin-1')
for c, s in p:
if c is MARK:
groupsappend((i, s))
diff --git a/Lib/string.py b/Lib/string.py
index ef0334c..d4f9cd9 100644
--- a/Lib/string.py
+++ b/Lib/string.py
@@ -46,23 +46,7 @@ def capwords(s, sep=None):
####################################################################
import re as _re
-
-class _multimap:
- """Helper class for combining multiple mappings.
-
- Used by .{safe_,}substitute() to combine the mapping and keyword
- arguments.
- """
- def __init__(self, primary, secondary):
- self._primary = primary
- self._secondary = secondary
-
- def __getitem__(self, key):
- try:
- return self._primary[key]
- except KeyError:
- return self._secondary[key]
-
+from collections import ChainMap
class _TemplateMetaclass(type):
pattern = r"""
@@ -116,7 +100,7 @@ class Template(metaclass=_TemplateMetaclass):
if not args:
mapping = kws
elif kws:
- mapping = _multimap(kws, args[0])
+ mapping = ChainMap(kws, args[0])
else:
mapping = args[0]
# Helper function for .sub()
@@ -142,7 +126,7 @@ class Template(metaclass=_TemplateMetaclass):
if not args:
mapping = kws
elif kws:
- mapping = _multimap(kws, args[0])
+ mapping = ChainMap(kws, args[0])
else:
mapping = args[0]
# Helper function for .sub()
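
ChainMap(kws, args[0]) preserves the old _multimap lookup order: keyword
arguments take precedence, and the positional mapping fills in the rest. For
example:

    from string import Template

    t = Template('$who likes $what')
    t.substitute({'who': 'tim', 'what': 'kung pao'}, what='spam')
    # -> 'tim likes spam'   (the keyword overrides the mapping entry)
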
diff --git a/Lib/subprocess.py b/Lib/subprocess.py
index b3fb935..e4bb78e 100644
--- a/Lib/subprocess.py
+++ b/Lib/subprocess.py
@@ -191,8 +191,10 @@ should prepare for OSErrors.
A ValueError will be raised if Popen is called with invalid arguments.
-check_call() and check_output() will raise CalledProcessError, if the
-called process returns a non-zero return code.
+Exceptions defined within this module inherit from SubprocessError.
+check_call() and check_output() will raise CalledProcessError if the
+called process returns a non-zero return code. TimeoutExpired will
+be raised if a timeout was specified and expired.
Security
@@ -340,6 +342,7 @@ mswindows = (sys.platform == "win32")
import io
import os
+import time
import traceback
import gc
import signal
@@ -348,7 +351,10 @@ import warnings
import errno
# Exception classes used by this module.
-class CalledProcessError(Exception):
+class SubprocessError(Exception): pass
+
+
+class CalledProcessError(SubprocessError):
"""This exception is raised when a process run by check_call() or
check_output() returns a non-zero exit status.
The exit status will be stored in the returncode attribute;
@@ -362,6 +368,20 @@ class CalledProcessError(Exception):
return "Command '%s' returned non-zero exit status %d" % (self.cmd, self.returncode)
+class TimeoutExpired(SubprocessError):
+ """This exception is raised when the timeout expires while waiting for a
+ child process.
+ """
+ def __init__(self, cmd, timeout, output=None):
+ self.cmd = cmd
+ self.timeout = timeout
+ self.output = output
+
+ def __str__(self):
+ return ("Command '%s' timed out after %s seconds" %
+ (self.cmd, self.timeout))
+
+
if mswindows:
import threading
import msvcrt
@@ -412,7 +432,7 @@ else:
return fds
__all__ = ["Popen", "PIPE", "STDOUT", "call", "check_call", "getstatusoutput",
- "getoutput", "check_output", "CalledProcessError"]
+ "getoutput", "check_output", "CalledProcessError", "DEVNULL"]
if mswindows:
from _subprocess import CREATE_NEW_CONSOLE, CREATE_NEW_PROCESS_GROUP
@@ -437,6 +457,7 @@ def _cleanup():
PIPE = -1
STDOUT = -2
+DEVNULL = -3
def _eintr_retry_call(func, *args):
@@ -449,15 +470,21 @@ def _eintr_retry_call(func, *args):
raise
-def call(*popenargs, **kwargs):
- """Run command with arguments. Wait for command to complete, then
- return the returncode attribute.
+def call(*popenargs, timeout=None, **kwargs):
+ """Run command with arguments. Wait for command to complete or
+ timeout, then return the returncode attribute.
The arguments are the same as for the Popen constructor. Example:
retcode = call(["ls", "-l"])
"""
- return Popen(*popenargs, **kwargs).wait()
+ p = Popen(*popenargs, **kwargs)
+ try:
+ return p.wait(timeout=timeout)
+ except TimeoutExpired:
+ p.kill()
+ p.wait()
+ raise
def check_call(*popenargs, **kwargs):
@@ -466,7 +493,7 @@ def check_call(*popenargs, **kwargs):
CalledProcessError. The CalledProcessError object will have the
return code in the returncode attribute.
- The arguments are the same as for the Popen constructor. Example:
+ The arguments are the same as for the call function. Example:
check_call(["ls", "-l"])
"""
@@ -479,7 +506,7 @@ def check_call(*popenargs, **kwargs):
return 0
-def check_output(*popenargs, **kwargs):
+def check_output(*popenargs, timeout=None, **kwargs):
r"""Run command with arguments and return its output as a byte string.
If the exit code was non-zero it raises a CalledProcessError. The
@@ -502,13 +529,15 @@ def check_output(*popenargs, **kwargs):
if 'stdout' in kwargs:
raise ValueError('stdout argument not allowed, it will be overridden.')
process = Popen(*popenargs, stdout=PIPE, **kwargs)
- output, unused_err = process.communicate()
+ try:
+ output, unused_err = process.communicate(timeout=timeout)
+ except TimeoutExpired:
+ process.kill()
+ output, unused_err = process.communicate()
+ raise TimeoutExpired(process.args, timeout, output=output)
retcode = process.poll()
if retcode:
- cmd = kwargs.get("args")
- if cmd is None:
- cmd = popenargs[0]
- raise CalledProcessError(retcode, cmd, output=output)
+ raise CalledProcessError(retcode, process.args, output=output)
return output
@@ -639,6 +668,8 @@ class Popen(object):
_cleanup()
self._child_created = False
+ self._input = None
+ self._communication_started = False
if bufsize is None:
bufsize = 0 # Restore default
if not isinstance(bufsize, int):
@@ -673,6 +704,7 @@ class Popen(object):
raise ValueError("creationflags is only supported on Windows "
"platforms")
+ self.args = args
self.stdin = None
self.stdout = None
self.stderr = None
@@ -768,8 +800,12 @@ class Popen(object):
# Child is still running, keep us alive until we can wait on it.
_active.append(self)
+ def _get_devnull(self):
+ if not hasattr(self, '_devnull'):
+ self._devnull = os.open(os.devnull, os.O_RDWR)
+ return self._devnull
- def communicate(self, input=None):
+ def communicate(self, input=None, timeout=None):
"""Interact with process: Send data to stdin. Read data from
stdout and stderr, until end-of-file is reached. Wait for
process to terminate. The optional input argument should be a
@@ -778,9 +814,14 @@ class Popen(object):
communicate() returns a tuple (stdout, stderr)."""
- # Optimization: If we are only using one pipe, or no pipe at
- # all, using select() or threads is unnecessary.
- if [self.stdin, self.stdout, self.stderr].count(None) >= 2:
+ if self._communication_started and input:
+ raise ValueError("Cannot send input after starting communication")
+
+ # Optimization: If we are not worried about timeouts, we haven't
+ # started communicating, and we have one or zero pipes, using select()
+ # or threads is unnecessary.
+ if (timeout is None and not self._communication_started and
+ [self.stdin, self.stdout, self.stderr].count(None) >= 2):
stdout = None
stderr = None
if self.stdin:
@@ -798,15 +839,42 @@ class Popen(object):
stderr = self.stderr.read()
self.stderr.close()
self.wait()
- return (stdout, stderr)
+ else:
+ if timeout is not None:
+ endtime = time.time() + timeout
+ else:
+ endtime = None
- return self._communicate(input)
+ try:
+ stdout, stderr = self._communicate(input, endtime, timeout)
+ finally:
+ self._communication_started = True
+
+ sts = self.wait(timeout=self._remaining_time(endtime))
+
+ return (stdout, stderr)
def poll(self):
return self._internal_poll()
+ def _remaining_time(self, endtime):
+ """Convenience for _communicate when computing timeouts."""
+ if endtime is None:
+ return None
+ else:
+ return endtime - time.time()
+
+
+ def _check_timeout(self, endtime, orig_timeout):
+ """Convenience for checking if a timeout has expired."""
+ if endtime is None:
+ return
+ if time.time() > endtime:
+ raise TimeoutExpired(self.args, orig_timeout)
+
+
if mswindows:
#
# Windows methods
@@ -828,6 +896,8 @@ class Popen(object):
p2cread, _ = _subprocess.CreatePipe(None, 0)
elif stdin == PIPE:
p2cread, p2cwrite = _subprocess.CreatePipe(None, 0)
+ elif stdin == DEVNULL:
+ p2cread = msvcrt.get_osfhandle(self._get_devnull())
elif isinstance(stdin, int):
p2cread = msvcrt.get_osfhandle(stdin)
else:
@@ -841,6 +911,8 @@ class Popen(object):
_, c2pwrite = _subprocess.CreatePipe(None, 0)
elif stdout == PIPE:
c2pread, c2pwrite = _subprocess.CreatePipe(None, 0)
+ elif stdout == DEVNULL:
+ c2pwrite = msvcrt.get_osfhandle(self._get_devnull())
elif isinstance(stdout, int):
c2pwrite = msvcrt.get_osfhandle(stdout)
else:
@@ -856,6 +928,8 @@ class Popen(object):
errread, errwrite = _subprocess.CreatePipe(None, 0)
elif stderr == STDOUT:
errwrite = c2pwrite
+ elif stderr == DEVNULL:
+ errwrite = msvcrt.get_osfhandle(self._get_devnull())
elif isinstance(stderr, int):
errwrite = msvcrt.get_osfhandle(stderr)
else:
@@ -965,6 +1039,8 @@ class Popen(object):
c2pwrite.Close()
if errwrite != -1:
errwrite.Close()
+ if hasattr(self, '_devnull'):
+ os.close(self._devnull)
# Retain the process handle, but close the thread handle
self._child_created = True
@@ -989,12 +1065,20 @@ class Popen(object):
return self.returncode
- def wait(self):
+ def wait(self, timeout=None, endtime=None):
"""Wait for child process to terminate. Returns returncode
attribute."""
+ if endtime is not None:
+ timeout = self._remaining_time(endtime)
+ if timeout is None:
+ timeout_millis = _subprocess.INFINITE
+ else:
+ timeout_millis = int(timeout * 1000)
if self.returncode is None:
- _subprocess.WaitForSingleObject(self._handle,
- _subprocess.INFINITE)
+ result = _subprocess.WaitForSingleObject(self._handle,
+ timeout_millis)
+ if result == _subprocess.WAIT_TIMEOUT:
+ raise TimeoutExpired(self.args, timeout)
self.returncode = _subprocess.GetExitCodeProcess(self._handle)
return self.returncode
@@ -1004,22 +1088,23 @@ class Popen(object):
fh.close()
- def _communicate(self, input):
- stdout = None # Return
- stderr = None # Return
-
- if self.stdout:
- stdout = []
- stdout_thread = threading.Thread(target=self._readerthread,
- args=(self.stdout, stdout))
- stdout_thread.daemon = True
- stdout_thread.start()
- if self.stderr:
- stderr = []
- stderr_thread = threading.Thread(target=self._readerthread,
- args=(self.stderr, stderr))
- stderr_thread.daemon = True
- stderr_thread.start()
+ def _communicate(self, input, endtime, orig_timeout):
+ # Start reader threads feeding into a list hanging off of this
+ # object, unless they've already been started.
+ if self.stdout and not hasattr(self, "_stdout_buff"):
+ self._stdout_buff = []
+ self.stdout_thread = \
+ threading.Thread(target=self._readerthread,
+ args=(self.stdout, self._stdout_buff))
+ self.stdout_thread.daemon = True
+ self.stdout_thread.start()
+ if self.stderr and not hasattr(self, "_stderr_buff"):
+ self._stderr_buff = []
+ self.stderr_thread = \
+ threading.Thread(target=self._readerthread,
+ args=(self.stderr, self._stderr_buff))
+ self.stderr_thread.daemon = True
+ self.stderr_thread.start()
if self.stdin:
if input is not None:
@@ -1030,10 +1115,28 @@ class Popen(object):
raise
self.stdin.close()
+ # Wait for the reader threads, or time out. If we time out, the
+ # threads remain reading and the fds are left open in case the user
+ # calls communicate again.
+ if self.stdout is not None:
+ self.stdout_thread.join(self._remaining_time(endtime))
+ if self.stdout_thread.isAlive():
+ raise TimeoutExpired(self.args, orig_timeout)
+ if self.stderr is not None:
+ self.stderr_thread.join(self._remaining_time(endtime))
+ if self.stderr_thread.isAlive():
+ raise TimeoutExpired(self.args, orig_timeout)
+
+ # Collect the output from and close both pipes, now that we know
+ # both have been read successfully.
+ stdout = None
+ stderr = None
if self.stdout:
- stdout_thread.join()
+ stdout = self._stdout_buff
+ self.stdout.close()
if self.stderr:
- stderr_thread.join()
+ stderr = self._stderr_buff
+ self.stderr.close()
# All data exchanged. Translate lists into strings.
if stdout is not None:
@@ -1041,7 +1144,6 @@ class Popen(object):
if stderr is not None:
stderr = stderr[0]
- self.wait()
return (stdout, stderr)
def send_signal(self, sig):
@@ -1079,6 +1181,8 @@ class Popen(object):
pass
elif stdin == PIPE:
p2cread, p2cwrite = _create_pipe()
+ elif stdin == DEVNULL:
+ p2cread = self._get_devnull()
elif isinstance(stdin, int):
p2cread = stdin
else:
@@ -1089,6 +1193,8 @@ class Popen(object):
pass
elif stdout == PIPE:
c2pread, c2pwrite = _create_pipe()
+ elif stdout == DEVNULL:
+ c2pwrite = self._get_devnull()
elif isinstance(stdout, int):
c2pwrite = stdout
else:
@@ -1101,6 +1207,8 @@ class Popen(object):
errread, errwrite = _create_pipe()
elif stderr == STDOUT:
errwrite = c2pwrite
+ elif stderr == DEVNULL:
+ errwrite = self._get_devnull()
elif isinstance(stderr, int):
errwrite = stderr
else:
@@ -1131,7 +1239,7 @@ class Popen(object):
restore_signals, start_new_session):
"""Execute program (POSIX version)"""
- if isinstance(args, str):
+ if isinstance(args, (str, bytes)):
args = [args]
else:
args = list(args)
@@ -1294,6 +1402,8 @@ class Popen(object):
os.close(c2pwrite)
if errwrite != -1 and errread != -1:
os.close(errwrite)
+ if hasattr(self, '_devnull'):
+ os.close(self._devnull)
# Wait for exec to fail or succeed; possibly raising an
# exception (limited in size)
@@ -1371,25 +1481,57 @@ class Popen(object):
return self.returncode
- def wait(self):
+ def _try_wait(self, wait_flags):
+ try:
+ (pid, sts) = _eintr_retry_call(os.waitpid, self.pid, wait_flags)
+ except OSError as e:
+ if e.errno != errno.ECHILD:
+ raise
+ # This happens if SIGCLD is set to be ignored or waiting
+ # for child processes has otherwise been disabled for our
+ # process. This child is dead, we can't get the status.
+ pid = self.pid
+ sts = 0
+ return (pid, sts)
+
+
+ def wait(self, timeout=None, endtime=None):
"""Wait for child process to terminate. Returns returncode
attribute."""
- if self.returncode is None:
- try:
- pid, sts = _eintr_retry_call(os.waitpid, self.pid, 0)
- except OSError as e:
- if e.errno != errno.ECHILD:
- raise
- # This happens if SIGCLD is set to be ignored or waiting
- # for child processes has otherwise been disabled for our
- # process. This child is dead, we can't get the status.
- sts = 0
+ if self.returncode is not None:
+ return self.returncode
+
+ # endtime is preferred to timeout. timeout is only used for
+ # printing.
+ if endtime is not None or timeout is not None:
+ if endtime is None:
+ endtime = time.time() + timeout
+ elif timeout is None:
+ timeout = self._remaining_time(endtime)
+
+ if endtime is not None:
+ # Enter a busy loop if we have a timeout. This busy loop was
+ # cribbed from Lib/threading.py in Thread.wait() at r71065.
+ delay = 0.0005 # 500 us -> initial delay of 1 ms
+ while True:
+ (pid, sts) = self._try_wait(os.WNOHANG)
+ assert pid == self.pid or pid == 0
+ if pid == self.pid:
+ self._handle_exitstatus(sts)
+ break
+ remaining = self._remaining_time(endtime)
+ if remaining <= 0:
+ raise TimeoutExpired(self.args, timeout)
+ delay = min(delay * 2, remaining, .05)
+ time.sleep(delay)
+ elif self.returncode is None:
+ (pid, sts) = self._try_wait(0)
self._handle_exitstatus(sts)
return self.returncode
- def _communicate(self, input):
- if self.stdin:
+ def _communicate(self, input, endtime, orig_timeout):
+ if self.stdin and not self._communication_started:
# Flush stdio buffer. This might block, if the user has
# been writing to .stdin in an uncontrolled fashion.
self.stdin.flush()
@@ -1397,9 +1539,13 @@ class Popen(object):
self.stdin.close()
if _has_poll:
- stdout, stderr = self._communicate_with_poll(input)
+ stdout, stderr = self._communicate_with_poll(input, endtime,
+ orig_timeout)
else:
- stdout, stderr = self._communicate_with_select(input)
+ stdout, stderr = self._communicate_with_select(input, endtime,
+ orig_timeout)
+
+ self.wait(timeout=self._remaining_time(endtime))
# All data exchanged. Translate lists into strings.
if stdout is not None:
@@ -1417,67 +1563,87 @@ class Popen(object):
stderr = self._translate_newlines(stderr,
self.stderr.encoding)
- self.wait()
return (stdout, stderr)
- def _communicate_with_poll(self, input):
+ def _communicate_with_poll(self, input, endtime, orig_timeout):
stdout = None # Return
stderr = None # Return
- fd2file = {}
- fd2output = {}
+
+ if not self._communication_started:
+ self._fd2file = {}
poller = select.poll()
def register_and_append(file_obj, eventmask):
poller.register(file_obj.fileno(), eventmask)
- fd2file[file_obj.fileno()] = file_obj
+ self._fd2file[file_obj.fileno()] = file_obj
def close_unregister_and_remove(fd):
poller.unregister(fd)
- fd2file[fd].close()
- fd2file.pop(fd)
+ self._fd2file[fd].close()
+ self._fd2file.pop(fd)
if self.stdin and input:
register_and_append(self.stdin, select.POLLOUT)
+ # Only create this mapping if we haven't already.
+ if not self._communication_started:
+ self._fd2output = {}
+ if self.stdout:
+ self._fd2output[self.stdout.fileno()] = []
+ if self.stderr:
+ self._fd2output[self.stderr.fileno()] = []
+
select_POLLIN_POLLPRI = select.POLLIN | select.POLLPRI
if self.stdout:
register_and_append(self.stdout, select_POLLIN_POLLPRI)
- fd2output[self.stdout.fileno()] = stdout = []
+ stdout = self._fd2output[self.stdout.fileno()]
if self.stderr:
register_and_append(self.stderr, select_POLLIN_POLLPRI)
- fd2output[self.stderr.fileno()] = stderr = []
-
- input_offset = 0
- while fd2file:
+ stderr = self._fd2output[self.stderr.fileno()]
+
+ # Save the input here so that if we time out while communicating,
+ # we can continue sending input if we retry.
+ if self.stdin and self._input is None:
+ self._input_offset = 0
+ self._input = input
+ if self.universal_newlines:
+ self._input = self._input.encode(self.stdin.encoding)
+
+ while self._fd2file:
+ timeout = self._remaining_time(endtime)
+ if timeout is not None and timeout < 0:
+ raise TimeoutExpired(self.args, orig_timeout)
try:
- ready = poller.poll()
+ ready = poller.poll(timeout)
except select.error as e:
if e.args[0] == errno.EINTR:
continue
raise
+ self._check_timeout(endtime, orig_timeout)
# XXX Rewrite these to use non-blocking I/O on the
# file objects; they are no longer using C stdio!
for fd, mode in ready:
if mode & select.POLLOUT:
- chunk = input[input_offset : input_offset + _PIPE_BUF]
+ chunk = self._input[self._input_offset :
+ self._input_offset + _PIPE_BUF]
try:
- input_offset += os.write(fd, chunk)
+ self._input_offset += os.write(fd, chunk)
except OSError as e:
if e.errno == errno.EPIPE:
close_unregister_and_remove(fd)
else:
raise
else:
- if input_offset >= len(input):
+ if self._input_offset >= len(self._input):
close_unregister_and_remove(fd)
elif mode & select_POLLIN_POLLPRI:
data = os.read(fd, 4096)
if not data:
close_unregister_and_remove(fd)
- fd2output[fd].append(data)
+ self._fd2output[fd].append(data)
else:
# Ignore hang up or errors.
close_unregister_and_remove(fd)
@@ -1485,61 +1651,87 @@ class Popen(object):
return (stdout, stderr)
- def _communicate_with_select(self, input):
- read_set = []
- write_set = []
+ def _communicate_with_select(self, input, endtime, orig_timeout):
+ if not self._communication_started:
+ self._read_set = []
+ self._write_set = []
+ if self.stdin and input:
+ self._write_set.append(self.stdin)
+ if self.stdout:
+ self._read_set.append(self.stdout)
+ if self.stderr:
+ self._read_set.append(self.stderr)
+
+ if self.stdin and self._input is None:
+ self._input_offset = 0
+ self._input = input
+ if self.universal_newlines:
+ self._input = self._input.encode(self.stdin.encoding)
+
stdout = None # Return
stderr = None # Return
- if self.stdin and input:
- write_set.append(self.stdin)
if self.stdout:
- read_set.append(self.stdout)
- stdout = []
+ if not self._communication_started:
+ self._stdout_buff = []
+ stdout = self._stdout_buff
if self.stderr:
- read_set.append(self.stderr)
- stderr = []
-
- input_offset = 0
- while read_set or write_set:
+ if not self._communication_started:
+ self._stderr_buff = []
+ stderr = self._stderr_buff
+
+ while self._read_set or self._write_set:
+ timeout = self._remaining_time(endtime)
+ if timeout is not None and timeout < 0:
+ raise TimeoutExpired(self.args, orig_timeout)
try:
- rlist, wlist, xlist = select.select(read_set, write_set, [])
+ (rlist, wlist, xlist) = \
+ select.select(self._read_set, self._write_set, [],
+ timeout)
except select.error as e:
if e.args[0] == errno.EINTR:
continue
raise
+ # According to the docs, returning three empty lists indicates
+ # that the timeout expired.
+ if not (rlist or wlist or xlist):
+ raise TimeoutExpired(self.args, orig_timeout)
+ # We also check what time it is ourselves for good measure.
+ self._check_timeout(endtime, orig_timeout)
+
# XXX Rewrite these to use non-blocking I/O on the
# file objects; they are no longer using C stdio!
if self.stdin in wlist:
- chunk = input[input_offset : input_offset + _PIPE_BUF]
+ chunk = self._input[self._input_offset :
+ self._input_offset + _PIPE_BUF]
try:
bytes_written = os.write(self.stdin.fileno(), chunk)
except OSError as e:
if e.errno == errno.EPIPE:
self.stdin.close()
- write_set.remove(self.stdin)
+ self._write_set.remove(self.stdin)
else:
raise
else:
- input_offset += bytes_written
- if input_offset >= len(input):
+ self._input_offset += bytes_written
+ if self._input_offset >= len(self._input):
self.stdin.close()
- write_set.remove(self.stdin)
+ self._write_set.remove(self.stdin)
if self.stdout in rlist:
data = os.read(self.stdout.fileno(), 1024)
if not data:
self.stdout.close()
- read_set.remove(self.stdout)
+ self._read_set.remove(self.stdout)
stdout.append(data)
if self.stderr in rlist:
data = os.read(self.stderr.fileno(), 1024)
if not data:
self.stderr.close()
- read_set.remove(self.stderr)
+ self._read_set.remove(self.stderr)
stderr.append(data)
return (stdout, stderr)
@@ -1559,68 +1751,3 @@ class Popen(object):
"""Kill the process with SIGKILL
"""
self.send_signal(signal.SIGKILL)
-
-
-def _demo_posix():
- #
- # Example 1: Simple redirection: Get process list
- #
- plist = Popen(["ps"], stdout=PIPE).communicate()[0]
- print("Process list:")
- print(plist)
-
- #
- # Example 2: Change uid before executing child
- #
- if os.getuid() == 0:
- p = Popen(["id"], preexec_fn=lambda: os.setuid(100))
- p.wait()
-
- #
- # Example 3: Connecting several subprocesses
- #
- print("Looking for 'hda'...")
- p1 = Popen(["dmesg"], stdout=PIPE)
- p2 = Popen(["grep", "hda"], stdin=p1.stdout, stdout=PIPE)
- print(repr(p2.communicate()[0]))
-
- #
- # Example 4: Catch execution error
- #
- print()
- print("Trying a weird file...")
- try:
- print(Popen(["/this/path/does/not/exist"]).communicate())
- except OSError as e:
- if e.errno == errno.ENOENT:
- print("The file didn't exist. I thought so...")
- print("Child traceback:")
- print(e.child_traceback)
- else:
- print("Error", e.errno)
- else:
- print("Gosh. No error.", file=sys.stderr)
-
-
-def _demo_windows():
- #
- # Example 1: Connecting several subprocesses
- #
- print("Looking for 'PROMPT' in set output...")
- p1 = Popen("set", stdout=PIPE, shell=True)
- p2 = Popen('find "PROMPT"', stdin=p1.stdout, stdout=PIPE)
- print(repr(p2.communicate()[0]))
-
- #
- # Example 2: Simple execution of program
- #
- print("Executing calc...")
- p = Popen("calc")
- p.wait()
-
-
-if __name__ == "__main__":
- if mswindows:
- _demo_windows()
- else:
- _demo_posix()
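
A hedged sketch of the new timeout and DEVNULL machinery added above (the
commands are illustrative and POSIX-flavoured):

    import subprocess

    # DEVNULL discards a stream without creating a pipe.
    subprocess.call(['ls', '-l'], stdout=subprocess.DEVNULL)

    # timeout= is accepted by call(), check_call(), check_output(),
    # Popen.wait() and Popen.communicate(); TimeoutExpired is raised when it
    # expires (call() and check_output() also kill the child first).
    try:
        subprocess.check_output(['sleep', '10'], timeout=1)
    except subprocess.TimeoutExpired as exc:
        print('timed out after', exc.timeout, 'seconds')
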
diff --git a/Lib/tarfile.py b/Lib/tarfile.py
index 0f9d1da..6b663f4 100644
--- a/Lib/tarfile.py
+++ b/Lib/tarfile.py
@@ -1084,7 +1084,7 @@ class TarInfo(object):
def create_pax_global_header(cls, pax_headers):
"""Return the object as a pax global header block sequence.
"""
- return cls._create_pax_generic_header(pax_headers, XGLTYPE, "utf8")
+ return cls._create_pax_generic_header(pax_headers, XGLTYPE, "utf-8")
def _posix_split_name(self, name):
"""Split a name longer than 100 chars into a prefix
@@ -1167,7 +1167,7 @@ class TarInfo(object):
binary = False
for keyword, value in pax_headers.items():
try:
- value.encode("utf8", "strict")
+ value.encode("utf-8", "strict")
except UnicodeEncodeError:
binary = True
break
@@ -1178,13 +1178,13 @@ class TarInfo(object):
records += b"21 hdrcharset=BINARY\n"
for keyword, value in pax_headers.items():
- keyword = keyword.encode("utf8")
+ keyword = keyword.encode("utf-8")
if binary:
# Try to restore the original byte representation of `value'.
                # Needless to say, the encoding must match the string.
value = value.encode(encoding, "surrogateescape")
else:
- value = value.encode("utf8")
+ value = value.encode("utf-8")
l = len(keyword) + len(value) + 3 # ' ' + '=' + '\n'
n = p = 0
@@ -1393,7 +1393,7 @@ class TarInfo(object):
# the translation to UTF-8 fails.
match = re.search(br"\d+ hdrcharset=([^\n]+)\n", buf)
if match is not None:
- pax_headers["hdrcharset"] = match.group(1).decode("utf8")
+ pax_headers["hdrcharset"] = match.group(1).decode("utf-8")
# For the time being, we don't care about anything other than "BINARY".
# The only other value that is currently allowed by the standard is
@@ -1402,7 +1402,7 @@ class TarInfo(object):
if hdrcharset == "BINARY":
encoding = tarfile.encoding
else:
- encoding = "utf8"
+ encoding = "utf-8"
        # Parse pax header information. A record looks like this:
# "%d %s=%s\n" % (length, keyword, value). length is the size
@@ -1419,20 +1419,20 @@ class TarInfo(object):
length = int(length)
value = buf[match.end(2) + 1:match.start(1) + length - 1]
- # Normally, we could just use "utf8" as the encoding and "strict"
+ # Normally, we could just use "utf-8" as the encoding and "strict"
# as the error handler, but we better not take the risk. For
# example, GNU tar <= 1.23 is known to store filenames it cannot
# translate to UTF-8 as raw strings (unfortunately without a
# hdrcharset=BINARY header).
# We first try the strict standard encoding, and if that fails we
# fall back on the user's encoding and error handler.
- keyword = self._decode_pax_field(keyword, "utf8", "utf8",
+ keyword = self._decode_pax_field(keyword, "utf-8", "utf-8",
tarfile.errors)
if keyword in PAX_NAME_FIELDS:
value = self._decode_pax_field(value, encoding, tarfile.encoding,
tarfile.errors)
else:
- value = self._decode_pax_field(value, "utf8", "utf8",
+ value = self._decode_pax_field(value, "utf-8", "utf-8",
tarfile.errors)
pax_headers[keyword] = value
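The tarfile changes above only replace the codec alias "utf8" with the
canonical spelling "utf-8"; both names resolve to the same codec, as a quick
check shows (sketch):

    import codecs

    # "utf8" is an alias; "utf-8" is the canonical name of the same codec.
    assert codecs.lookup("utf8").name == "utf-8"
    assert codecs.lookup("utf-8").name == "utf-8"
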
diff --git a/Lib/test/crashers/README b/Lib/test/crashers/README
index 2a73e1b..0259a06 100644
--- a/Lib/test/crashers/README
+++ b/Lib/test/crashers/README
@@ -14,3 +14,7 @@ note if the cause is system or environment dependent and what the variables are.
Once the crash is fixed, the test case should be moved into an appropriate test
(even if it was originally from the test suite). This ensures the regression
doesn't happen again. And if it does, it should be easier to track down.
+
+Also see Lib/test/test_crashers.py, which exercises the crashers in this
+directory. In particular, make sure to add any new infinite-loop crashers to
+its blacklist so it doesn't try to run them.
diff --git a/Lib/test/crashers/compiler_recursion.py b/Lib/test/crashers/compiler_recursion.py
index 4954bdd..31f28a9 100644
--- a/Lib/test/crashers/compiler_recursion.py
+++ b/Lib/test/crashers/compiler_recursion.py
@@ -1,5 +1,13 @@
"""
-The compiler (>= 2.5) recurses happily.
+The compiler (>= 2.5) recurses happily until it blows the stack.
+
+Recorded on the tracker as http://bugs.python.org/issue11383
"""
-compile('()'*9**5, '?', 'exec')
+# The variant below blows up in compiler_call, but there are assorted
+# other variations that blow up in other functions
+# e.g. '1*'*10**5+'1' will die in compiler_visit_expr
+
+# The exact limit to destroy the stack will vary by platform
+# but 10M should do the trick even with huge stack allocations
+compile('()'*10**7, '?', 'exec')
diff --git a/Lib/test/test_future1.py b/Lib/test/future_test1.py
index 297c2e0..297c2e0 100644
--- a/Lib/test/test_future1.py
+++ b/Lib/test/future_test1.py
diff --git a/Lib/test/test_future2.py b/Lib/test/future_test2.py
index 3d7fc86..3d7fc86 100644
--- a/Lib/test/test_future2.py
+++ b/Lib/test/future_test2.py
diff --git a/Lib/test/list_tests.py b/Lib/test/list_tests.py
index e3a7845..0c656fd 100644
--- a/Lib/test/list_tests.py
+++ b/Lib/test/list_tests.py
@@ -425,6 +425,47 @@ class CommonTest(seq_tests.CommonTest):
self.assertRaises(TypeError, u.reverse, 42)
+ def test_clear(self):
+ u = self.type2test([2, 3, 4])
+ u.clear()
+ self.assertEqual(u, [])
+
+ u = self.type2test([])
+ u.clear()
+ self.assertEqual(u, [])
+
+ u = self.type2test([])
+ u.append(1)
+ u.clear()
+ u.append(2)
+ self.assertEqual(u, [2])
+
+ self.assertRaises(TypeError, u.clear, None)
+
+ def test_copy(self):
+ u = self.type2test([1, 2, 3])
+ v = u.copy()
+ self.assertEqual(v, [1, 2, 3])
+
+ u = self.type2test([])
+ v = u.copy()
+ self.assertEqual(v, [])
+
+ # test that it's indeed a copy and not a reference
+ u = self.type2test(['a', 'b'])
+ v = u.copy()
+ v.append('i')
+ self.assertEqual(u, ['a', 'b'])
+ self.assertEqual(v, u + ['i'])
+
+ # test that it's a shallow, not a deep copy
+ u = self.type2test([1, 2, [3, 4], 5])
+ v = u.copy()
+ self.assertEqual(u, v)
+ self.assertIs(v[3], u[3])
+
+ self.assertRaises(TypeError, u.copy, None)
+
def test_sort(self):
u = self.type2test([1, 0])
u.sort()
diff --git a/Lib/test/pickletester.py b/Lib/test/pickletester.py
index a843486..45e8f74 100644
--- a/Lib/test/pickletester.py
+++ b/Lib/test/pickletester.py
@@ -3,9 +3,10 @@ import unittest
import pickle
import pickletools
import copyreg
+import weakref
from http.cookies import SimpleCookie
-from test.support import TestFailed, TESTFN, run_with_locale
+from test.support import TestFailed, TESTFN, run_with_locale, no_tracing
from pickle import bytes_types
@@ -842,6 +843,25 @@ class AbstractPickleTests(unittest.TestCase):
self.assertEqual(B(x), B(y), detail)
self.assertEqual(x.__dict__, y.__dict__, detail)
+ def test_newobj_proxies(self):
+ # NEWOBJ should use the __class__ rather than the raw type
+ classes = myclasses[:]
+ # Cannot create weakproxies to these classes
+ for c in (MyInt, MyTuple):
+ classes.remove(c)
+ for proto in protocols:
+ for C in classes:
+ B = C.__base__
+ x = C(C.sample)
+ x.foo = 42
+ p = weakref.proxy(x)
+ s = self.dumps(p, proto)
+ y = self.loads(s)
+ self.assertEqual(type(y), type(x)) # rather than type(p)
+ detail = (proto, C, B, x, y, type(y))
+ self.assertEqual(B(x), B(y), detail)
+ self.assertEqual(x.__dict__, y.__dict__, detail)
+
# Register a type with copyreg, with extension code extcode. Pickle
# an object of that type. Check that the resulting pickle uses opcode
# (EXT[124]) under proto 2, and not in proto 1.
@@ -1002,13 +1022,13 @@ class AbstractPickleTests(unittest.TestCase):
y = self.loads(s)
self.assertEqual(y._reduce_called, 1)
+ @no_tracing
def test_bad_getattr(self):
x = BadGetattr()
for proto in 0, 1:
self.assertRaises(RuntimeError, self.dumps, x, proto)
        # protocol 2 doesn't raise a RuntimeError.
d = self.dumps(x, 2)
- self.assertRaises(RuntimeError, self.loads, d)
def test_reduce_bad_iterator(self):
# Issue4176: crash when 4th and 5th items of __reduce__()
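The new test_newobj_proxies test above relies on the way weak proxies report
their class: type() sees the proxy type, while __class__ is forwarded to the
proxied object. A small illustration (the class name is illustrative):

    import weakref

    class Sample:
        pass

    obj = Sample()
    p = weakref.proxy(obj)
    print(type(p))       # <class 'weakproxy'>
    print(p.__class__)   # <class '__main__.Sample'>
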
diff --git a/Lib/test/regrtest.py b/Lib/test/regrtest.py
index 89acb9b..dca0b4e 100755
--- a/Lib/test/regrtest.py
+++ b/Lib/test/regrtest.py
@@ -20,6 +20,10 @@ python -E -Wd -m test [options] [test_name1 ...]
Options:
-h/--help -- print this text and exit
+--timeout TIMEOUT
+ -- dump the traceback and exit if a test takes more
+                   than TIMEOUT seconds (default: 1 hour); disable
+ the timeout if TIMEOUT is zero
Verbosity
@@ -42,6 +46,9 @@ Selecting tests
-- specify which special resource intensive tests to run
-M/--memlimit LIMIT
-- run very large memory-consuming tests
+ --testdir DIR
+ -- execute test files in the specified directory (instead
+ of the Python stdlib test suite)
Special runs
@@ -154,6 +161,7 @@ option '-uall,-gui'.
"""
import builtins
+import faulthandler
import getopt
import json
import os
@@ -212,6 +220,7 @@ ENV_CHANGED = -1
SKIPPED = -2
RESOURCE_DENIED = -3
INTERRUPTED = -4
+CHILD_ERROR = -5 # error in a child process
from test import support
@@ -254,6 +263,10 @@ def main(tests=None, testdir=None, verbose=0, quiet=False,
directly to set the values that would normally be set by flags
on the command line.
"""
+ if hasattr(faulthandler, 'dump_tracebacks_later'):
+ timeout = 60*60
+ else:
+ timeout = None
replace_stdout()
@@ -265,7 +278,7 @@ def main(tests=None, testdir=None, verbose=0, quiet=False,
'use=', 'threshold=', 'trace', 'coverdir=', 'nocoverdir',
'runleaks', 'huntrleaks=', 'memlimit=', 'randseed=',
'multiprocess=', 'coverage', 'slaveargs=', 'forever', 'debug',
- 'start=', 'nowindows', 'header'])
+ 'start=', 'nowindows', 'header', 'testdir=', 'timeout='])
except getopt.error as msg:
usage(msg)
@@ -315,7 +328,9 @@ def main(tests=None, testdir=None, verbose=0, quiet=False,
elif o in ('-T', '--coverage'):
trace = True
elif o in ('-D', '--coverdir'):
- coverdir = os.path.join(os.getcwd(), a)
+ # CWD is replaced with a temporary dir before calling main(), so we
+            # need to join it with the saved CWD so it goes where the user expects.
+ coverdir = os.path.join(support.SAVEDCWD, a)
elif o in ('-N', '--nocoverdir'):
coverdir = None
elif o in ('-R', '--huntrleaks'):
@@ -374,6 +389,13 @@ def main(tests=None, testdir=None, verbose=0, quiet=False,
forever = True
elif o in ('-j', '--multiprocess'):
use_mp = int(a)
+ if use_mp <= 0:
+ try:
+ import multiprocessing
+ # Use all cores + extras for tests that like to sleep
+ use_mp = 2 + multiprocessing.cpu_count()
+ except (ImportError, NotImplementedError):
+ use_mp = 3
elif o == '--header':
header = True
elif o == '--slaveargs':
@@ -386,6 +408,16 @@ def main(tests=None, testdir=None, verbose=0, quiet=False,
print() # Force a newline (just in case)
print(json.dumps(result))
sys.exit(0)
+ elif o == '--testdir':
+ # CWD is replaced with a temporary dir before calling main(), so we
+ # join it with the saved CWD so it ends up where the user expects.
+ testdir = os.path.join(support.SAVEDCWD, a)
+ elif o == '--timeout':
+ if not hasattr(faulthandler, 'dump_tracebacks_later'):
+ print("--timeout option requires "
+ "faulthandler.dump_tracebacks_later", file=sys.stderr)
+ sys.exit(1)
+ timeout = float(a)
else:
print(("No handler for option {}. Please report this as a bug "
"at http://bugs.python.org.").format(o), file=sys.stderr)
@@ -460,7 +492,13 @@ def main(tests=None, testdir=None, verbose=0, quiet=False,
print("== ", os.getcwd())
print("Testing with flags:", sys.flags)
- alltests = findtests(testdir, stdtests, nottests)
+    # if testdir is set, then we are not running the Python test suite, so
+ # don't add default tests to be executed or skipped (pass empty values)
+ if testdir:
+ alltests = findtests(testdir, list(), set())
+ else:
+ alltests = findtests(testdir, stdtests, nottests)
+
selected = tests or args or alltests
if single:
selected = selected[:1]
@@ -535,7 +573,7 @@ def main(tests=None, testdir=None, verbose=0, quiet=False,
args_tuple = (
(test, verbose, quiet),
dict(huntrleaks=huntrleaks, use_resources=use_resources,
- debug=debug, rerun_failed=verbose3)
+ debug=debug, rerun_failed=verbose3, timeout=timeout)
)
yield (test, args_tuple)
pending = tests_and_args()
@@ -556,10 +594,15 @@ def main(tests=None, testdir=None, verbose=0, quiet=False,
universal_newlines=True,
close_fds=(os.name != 'nt'))
stdout, stderr = popen.communicate()
+ retcode = popen.wait()
# Strip last refcount output line if it exists, since it
# comes from the shutdown of the interpreter in the subcommand.
stderr = debug_output_pat.sub("", stderr)
stdout, _, result = stdout.strip().rpartition("\n")
+ if retcode != 0:
+ result = (CHILD_ERROR, "Exit code %s" % retcode)
+ output.put((test, stdout.rstrip(), stderr.rstrip(), result))
+ return
if not result:
output.put((None, None, None, None))
return
@@ -589,6 +632,8 @@ def main(tests=None, testdir=None, verbose=0, quiet=False,
if result[0] == INTERRUPTED:
assert result[1] == 'KeyboardInterrupt'
raise KeyboardInterrupt # What else?
+ if result[0] == CHILD_ERROR:
+ raise Exception("Child error: {}".format(result[1]))
accumulate_result(test, result)
test_index += 1
except KeyboardInterrupt:
@@ -605,12 +650,12 @@ def main(tests=None, testdir=None, verbose=0, quiet=False,
if trace:
# If we're tracing code coverage, then we don't exit with status
# if on a false return value from main.
- tracer.runctx('runtest(test, verbose, quiet)',
+ tracer.runctx('runtest(test, verbose, quiet, timeout=timeout)',
globals=globals(), locals=vars())
else:
try:
result = runtest(test, verbose, quiet, huntrleaks, debug,
- rerun_failed=verbose3)
+ rerun_failed=verbose3, timeout=timeout)
accumulate_result(test, result)
except KeyboardInterrupt:
interrupted = True
@@ -681,7 +726,7 @@ def main(tests=None, testdir=None, verbose=0, quiet=False,
sys.stdout.flush()
try:
verbose = True
- ok = runtest(test, True, quiet, huntrleaks, debug)
+ ok = runtest(test, True, quiet, huntrleaks, debug, timeout=timeout)
except KeyboardInterrupt:
# print a newline separate from the ^C
print()
@@ -706,6 +751,8 @@ def main(tests=None, testdir=None, verbose=0, quiet=False,
sys.exit(len(bad) > 0 or interrupted)
+# small set of tests to determine if we have a basically functioning interpreter
+# (i.e. if any of these fail, then anything else is likely to fail as well)
STDTESTS = [
'test_grammar',
'test_opcodes',
@@ -718,10 +765,8 @@ STDTESTS = [
'test_doctest2',
]
-NOTTESTS = {
- 'test_future1',
- 'test_future2',
-}
+# set of tests that we don't want to be executed when using regrtest
+NOTTESTS = set()
def findtests(testdir=None, stdtests=STDTESTS, nottests=NOTTESTS):
"""Return a list of all applicable test modules."""
@@ -730,9 +775,9 @@ def findtests(testdir=None, stdtests=STDTESTS, nottests=NOTTESTS):
tests = []
others = set(stdtests) | nottests
for name in names:
- modname, ext = os.path.splitext(name)
- if modname[:5] == "test_" and ext == ".py" and modname not in others:
- tests.append(modname)
+ mod, ext = os.path.splitext(name)
+ if mod[:5] == "test_" and ext in (".py", "") and mod not in others:
+ tests.append(mod)
return stdtests + sorted(tests)
def replace_stdout():
@@ -757,7 +802,7 @@ def replace_stdout():
def runtest(test, verbose, quiet,
huntrleaks=False, debug=False, use_resources=None,
- rerun_failed=False):
+ rerun_failed=False, timeout=None):
"""Run a single test.
test -- the name of the test
@@ -767,6 +812,8 @@ def runtest(test, verbose, quiet,
huntrleaks -- run multiple times to test for leaks; requires a debug
build; a triple corresponding to -R's three arguments
rerun_failed -- if true, re-run in verbose mode when failed
+ timeout -- dump the traceback and exit if a test takes more than
+ timeout seconds
Returns one of the test result constants:
INTERRUPTED KeyboardInterrupt when run under -j
@@ -780,6 +827,9 @@ def runtest(test, verbose, quiet,
support.verbose = verbose # Tell tests to be moderately quiet
if use_resources is not None:
support.use_resources = use_resources
+ use_timeout = (timeout is not None and timeout > 0)
+ if use_timeout:
+ faulthandler.dump_tracebacks_later(timeout, exit=True)
try:
result = runtest_inner(test, verbose, quiet, huntrleaks, debug)
if result[0] == FAILED and rerun_failed:
@@ -787,9 +837,11 @@ def runtest(test, verbose, quiet,
sys.stdout.flush()
sys.stderr.flush()
print("Re-running test {} in verbose mode".format(test))
- runtest(test, True, quiet, huntrleaks, debug)
+ runtest(test, True, quiet, huntrleaks, debug, timeout=timeout)
return result
finally:
+ if use_timeout:
+ faulthandler.cancel_dump_tracebacks_later()
cleanup_test_droppings(test, verbose)
# Unit tests are supposed to leave the execution environment unchanged
@@ -834,7 +886,7 @@ class saved_test_environment:
resources = ('sys.argv', 'cwd', 'sys.stdin', 'sys.stdout', 'sys.stderr',
'os.environ', 'sys.path', 'sys.path_hooks', '__import__',
'warnings.filters', 'asyncore.socket_map',
- 'logging._handlers', 'logging._handlerList',
+ 'logging._handlers', 'logging._handlerList', 'sys.gettrace',
'sys.warnoptions')
def get_sys_argv(self):
@@ -882,6 +934,11 @@ class saved_test_environment:
sys.path_hooks = saved_hooks[1]
sys.path_hooks[:] = saved_hooks[2]
+ def get_sys_gettrace(self):
+ return sys.gettrace()
+ def restore_sys_gettrace(self, trace_fxn):
+ sys.settrace(trace_fxn)
+
def get___import__(self):
return builtins.__import__
def restore___import__(self, import_):
@@ -1062,7 +1119,8 @@ def dash_R(the_module, test, indirect_test, huntrleaks):
False if the test didn't leak references; True if we detected refleaks.
"""
# This code is hackish and inelegant, but it seems to do the job.
- import copyreg, _abcoll
+ import copyreg
+ import collections.abc
if not hasattr(sys, 'gettotalrefcount'):
raise Exception("Tracking reference leaks requires a debug build "
@@ -1079,7 +1137,7 @@ def dash_R(the_module, test, indirect_test, huntrleaks):
else:
zdc = zipimport._zip_directory_cache.copy()
abcs = {}
- for abc in [getattr(_abcoll, a) for a in _abcoll.__all__]:
+ for abc in [getattr(collections.abc, a) for a in collections.abc.__all__]:
if not isabstract(abc):
continue
for obj in abc.__subclasses__() + [abc]:
@@ -1125,7 +1183,7 @@ def dash_R_cleanup(fs, ps, pic, zdc, abcs):
import gc, copyreg
import _strptime, linecache
import urllib.parse, urllib.request, mimetypes, doctest
- import struct, filecmp, _abcoll
+ import struct, filecmp, collections.abc
from distutils.dir_util import _path_created
from weakref import WeakSet
@@ -1152,7 +1210,7 @@ def dash_R_cleanup(fs, ps, pic, zdc, abcs):
sys._clear_type_cache()
# Clear ABC registries, restoring previously saved ABC registries.
- for abc in [getattr(_abcoll, a) for a in _abcoll.__all__]:
+ for abc in [getattr(collections.abc, a) for a in collections.abc.__all__]:
if not isabstract(abc):
continue
for obj in abc.__subclasses__() + [abc]:
@@ -1523,6 +1581,9 @@ def _make_temp_dir_for_build(TEMPDIR):
return TEMPDIR, TESTCWD
if __name__ == '__main__':
+ # Display the Python traceback on segfault and division by zero
+ faulthandler.enable()
+
# Remove regrtest.py's own directory from the module search path. Despite
# the elimination of implicit relative imports, this is still needed to
# ensure that submodules of the test package do not inappropriately appear
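The --timeout machinery above wraps each test run in a faulthandler watchdog.
Schematically, the pattern used by runtest() is the following (a sketch based
on the calls shown above; run_one_test is a placeholder, and the faulthandler
function was still named dump_tracebacks_later at this point):

    import faulthandler

    def run_one_test():
        # Placeholder for the real work (runtest_inner(...) in the diff above).
        pass

    timeout = 60.0  # illustrative value, in seconds
    faulthandler.dump_tracebacks_later(timeout, exit=True)
    try:
        run_one_test()
    finally:
        faulthandler.cancel_dump_tracebacks_later()
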
diff --git a/Lib/test/script_helper.py b/Lib/test/script_helper.py
index 371c33d..e556eca 100644
--- a/Lib/test/script_helper.py
+++ b/Lib/test/script_helper.py
@@ -56,11 +56,12 @@ def assert_python_failure(*args, **env_vars):
"""
return _assert_python(False, *args, **env_vars)
-def spawn_python(*args):
+def spawn_python(*args, **kw):
cmd_line = [sys.executable, '-E']
cmd_line.extend(args)
return subprocess.Popen(cmd_line, stdin=subprocess.PIPE,
- stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
+ stdout=subprocess.PIPE, stderr=subprocess.STDOUT,
+ **kw)
def kill_python(p):
p.stdin.close()
diff --git a/Lib/test/support.py b/Lib/test/support.py
index 8d8e187..fbf6de4 100644
--- a/Lib/test/support.py
+++ b/Lib/test/support.py
@@ -15,7 +15,7 @@ import shutil
import warnings
import unittest
import importlib
-import collections
+import collections.abc
import re
import subprocess
import imp
@@ -33,7 +33,7 @@ __all__ = [
"verbose", "use_resources", "max_memuse", "record_original_stdout",
"get_original_stdout", "unload", "unlink", "rmtree", "forget",
"is_resource_enabled", "requires", "find_unused_port", "bind_port",
- "fcmp", "is_jython", "TESTFN", "HOST", "FUZZ", "SAVEDCWD", "temp_cwd",
+ "is_jython", "TESTFN", "HOST", "SAVEDCWD", "temp_cwd",
"findfile", "sortdict", "check_syntax_error", "open_urlresource",
"check_warnings", "CleanImport", "EnvironmentVarGuard",
"TransientResource", "captured_output", "captured_stdout",
@@ -381,24 +381,6 @@ def bind_port(sock, host=HOST):
port = sock.getsockname()[1]
return port
-FUZZ = 1e-6
-
-def fcmp(x, y): # fuzzy comparison function
- if isinstance(x, float) or isinstance(y, float):
- try:
- fuzz = (abs(x) + abs(y)) * FUZZ
- if abs(x-y) <= fuzz:
- return 0
- except:
- pass
- elif type(x) == type(y) and isinstance(x, (tuple, list)):
- for i in range(min(len(x), len(y))):
- outcome = fcmp(x[i], y[i])
- if outcome != 0:
- return outcome
- return (len(x) > len(y)) - (len(x) < len(y))
- return (x > y) - (x < y)
-
# decorator for skipping tests on non-IEEE 754 platforms
requires_IEEE_754 = unittest.skipUnless(
float.__getformat__("double").startswith("IEEE"),
@@ -714,7 +696,7 @@ class CleanImport(object):
sys.modules.update(self.original_modules)
-class EnvironmentVarGuard(collections.MutableMapping):
+class EnvironmentVarGuard(collections.abc.MutableMapping):
"""Class to help protect the environment variable properly. Can be used as
a context manager."""
@@ -1047,6 +1029,11 @@ def bigmemtest(minsize, memuse):
return decorator
def precisionbigmemtest(size, memuse):
+ """Decorator for bigmem tests that need exact sizes.
+
+ Like bigmemtest, but without the size scaling upward to fill available
+ memory.
+ """
def decorator(f):
def wrapper(self):
size = wrapper.size
@@ -1142,6 +1129,32 @@ def check_impl_detail(**guards):
return guards.get(platform.python_implementation().lower(), default)
+def no_tracing(func):
+ """Decorator to temporarily turn off tracing for the duration of a test."""
+ if not hasattr(sys, 'gettrace'):
+ return func
+ else:
+ @functools.wraps(func)
+ def wrapper(*args, **kwargs):
+ original_trace = sys.gettrace()
+ try:
+ sys.settrace(None)
+ return func(*args, **kwargs)
+ finally:
+ sys.settrace(original_trace)
+ return wrapper
+
+
+def refcount_test(test):
+ """Decorator for tests which involve reference counting.
+
+    To start, the decorator does not run the test if it is not run by CPython.
+ After that, any trace function is unset during the test to prevent
+ unexpected refcounts caused by the trace function.
+
+ """
+ return no_tracing(cpython_only(test))
+
def _run_suite(suite):
"""Run tests from a unittest.TestSuite-derived class."""
@@ -1366,7 +1379,7 @@ def strip_python_stderr(stderr):
def args_from_interpreter_flags():
"""Return a list of command-line arguments reproducing the current
- settings in sys.flags."""
+ settings in sys.flags and sys.warnoptions."""
flag_opt_map = {
'bytes_warning': 'b',
'dont_write_bytecode': 'B',
@@ -1381,6 +1394,8 @@ def args_from_interpreter_flags():
v = getattr(sys.flags, flag)
if v > 0:
args.append('-' + opt * v)
+ for opt in sys.warnoptions:
+ args.append('-W' + opt)
return args
#============================================================
@@ -1454,9 +1469,11 @@ def can_symlink():
global _can_symlink
if _can_symlink is not None:
return _can_symlink
+ symlink_path = TESTFN + "can_symlink"
try:
- os.symlink(TESTFN, TESTFN + "can_symlink")
+ os.symlink(TESTFN, symlink_path)
can = True
+ os.remove(symlink_path)
except (OSError, NotImplementedError):
can = False
_can_symlink = can
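The no_tracing and refcount_test helpers added above are ordinary decorators;
a test that is sensitive to an active trace function (for example when run
under coverage tools) simply wraps itself in one of them. A minimal sketch
(the test names are illustrative):

    import unittest
    from test import support

    class ExampleTests(unittest.TestCase):
        @support.no_tracing
        def test_deep_recursion(self):
            # Runs with sys.settrace(None); the previous trace function is
            # restored afterwards, even if the test raises.
            ...
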
diff --git a/Lib/test/test_abc.py b/Lib/test/test_abc.py
index d86f97c..1319a64 100644
--- a/Lib/test/test_abc.py
+++ b/Lib/test/test_abc.py
@@ -121,11 +121,32 @@ class TestABC(unittest.TestCase):
self.assertFalse(issubclass(B, (A,)))
self.assertNotIsInstance(b, A)
self.assertNotIsInstance(b, (A,))
- A.register(B)
+ B1 = A.register(B)
+ self.assertTrue(issubclass(B, A))
+ self.assertTrue(issubclass(B, (A,)))
+ self.assertIsInstance(b, A)
+ self.assertIsInstance(b, (A,))
+ self.assertIs(B1, B)
+ class C(B):
+ pass
+ c = C()
+ self.assertTrue(issubclass(C, A))
+ self.assertTrue(issubclass(C, (A,)))
+ self.assertIsInstance(c, A)
+ self.assertIsInstance(c, (A,))
+
+ def test_register_as_class_deco(self):
+ class A(metaclass=abc.ABCMeta):
+ pass
+ @A.register
+ class B(object):
+ pass
+ b = B()
self.assertTrue(issubclass(B, A))
self.assertTrue(issubclass(B, (A,)))
self.assertIsInstance(b, A)
self.assertIsInstance(b, (A,))
+ @A.register
class C(B):
pass
c = C()
@@ -133,6 +154,7 @@ class TestABC(unittest.TestCase):
self.assertTrue(issubclass(C, (A,)))
self.assertIsInstance(c, A)
self.assertIsInstance(c, (A,))
+ self.assertIs(C, A.register(C))
def test_isinstance_invalidation(self):
class A(metaclass=abc.ABCMeta):
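The additions above test that ABCMeta.register() now returns the class it was
given, which is what makes it usable as a class decorator. Outside the test
suite the idiom looks like this (the class names are illustrative):

    import abc

    class Serializer(metaclass=abc.ABCMeta):
        pass

    @Serializer.register        # register() returns cls, so decoration works
    class JSONSerializer:
        pass

    assert issubclass(JSONSerializer, Serializer)
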
diff --git a/Lib/test/test_argparse.py b/Lib/test/test_argparse.py
index 5ecfdc7..2836e7e 100644
--- a/Lib/test/test_argparse.py
+++ b/Lib/test/test_argparse.py
@@ -4017,6 +4017,37 @@ class TestHelpSubparsersWithHelpOrdering(HelpTestCase):
'''
+
+class TestHelpMetavarTypeFormatter(HelpTestCase):
+    """Test the MetavarTypeHelpFormatter"""
+
+ def custom_type(string):
+ return string
+
+ parser_signature = Sig(prog='PROG', description='description',
+ formatter_class=argparse.MetavarTypeHelpFormatter)
+ argument_signatures = [Sig('a', type=int),
+ Sig('-b', type=custom_type),
+ Sig('-c', type=float, metavar='SOME FLOAT')]
+ argument_group_signatures = []
+ usage = '''\
+ usage: PROG [-h] [-b custom_type] [-c SOME FLOAT] int
+ '''
+ help = usage + '''\
+
+ description
+
+ positional arguments:
+ int
+
+ optional arguments:
+ -h, --help show this help message and exit
+ -b custom_type
+ -c SOME FLOAT
+ '''
+ version = ''
+
+
# =====================================
# Optional/Positional constructor tests
# =====================================
@@ -4407,7 +4438,7 @@ class TestEncoding(TestCase):
def _test_module_encoding(self, path):
path, _ = os.path.splitext(path)
path += ".py"
- with codecs.open(path, 'r', 'utf8') as f:
+ with codecs.open(path, 'r', 'utf-8') as f:
f.read()
def test_argparse_module_encoding(self):
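TestHelpMetavarTypeFormatter above exercises argparse.MetavarTypeHelpFormatter,
which substitutes the type name for the default metavar in help output.
Roughly (a sketch; the prog name and arguments are illustrative):

    import argparse

    parser = argparse.ArgumentParser(
        prog='PROG', formatter_class=argparse.MetavarTypeHelpFormatter)
    parser.add_argument('a', type=int)
    parser.add_argument('-b', type=float)
    parser.print_help()   # usage shows "int" and "-b float" rather than
                          # the dest-based "a" and "-b B"
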
diff --git a/Lib/test/test_asyncore.py b/Lib/test/test_asyncore.py
index 53c49a8..389dc8a 100644
--- a/Lib/test/test_asyncore.py
+++ b/Lib/test/test_asyncore.py
@@ -352,7 +352,7 @@ class DispatcherWithSendTests(unittest.TestCase):
@support.reap_threads
def test_send(self):
evt = threading.Event()
- sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
+ sock = socket.socket()
sock.settimeout(3)
port = support.bind_port(sock)
@@ -367,7 +367,7 @@ class DispatcherWithSendTests(unittest.TestCase):
data = b"Suppose there isn't a 16-ton weight?"
d = dispatcherwithsend_noread()
- d.create_socket(socket.AF_INET, socket.SOCK_STREAM)
+ d.create_socket()
d.connect((HOST, port))
# give time for socket to connect
@@ -474,7 +474,7 @@ class TCPServer(asyncore.dispatcher):
def __init__(self, handler=BaseTestHandler, host=HOST, port=0):
asyncore.dispatcher.__init__(self)
- self.create_socket(socket.AF_INET, socket.SOCK_STREAM)
+ self.create_socket()
self.set_reuse_addr()
self.bind((host, port))
self.listen(5)
@@ -495,7 +495,7 @@ class BaseClient(BaseTestHandler):
def __init__(self, address):
BaseTestHandler.__init__(self)
- self.create_socket(socket.AF_INET, socket.SOCK_STREAM)
+ self.create_socket()
self.connect(address)
def handle_connect(self):
@@ -536,7 +536,7 @@ class BaseTestAPI(unittest.TestCase):
def __init__(self):
BaseTestHandler.__init__(self)
- self.create_socket(socket.AF_INET, socket.SOCK_STREAM)
+ self.create_socket()
self.bind((HOST, 0))
self.listen(5)
self.address = self.socket.getsockname()[:2]
@@ -555,7 +555,7 @@ class BaseTestAPI(unittest.TestCase):
def __init__(self):
BaseTestHandler.__init__(self)
- self.create_socket(socket.AF_INET, socket.SOCK_STREAM)
+ self.create_socket()
self.bind((HOST, 0))
self.listen(5)
self.address = self.socket.getsockname()[:2]
@@ -693,20 +693,20 @@ class BaseTestAPI(unittest.TestCase):
def test_create_socket(self):
s = asyncore.dispatcher()
- s.create_socket(socket.AF_INET, socket.SOCK_STREAM)
+ s.create_socket()
self.assertEqual(s.socket.family, socket.AF_INET)
SOCK_NONBLOCK = getattr(socket, 'SOCK_NONBLOCK', 0)
self.assertEqual(s.socket.type, socket.SOCK_STREAM | SOCK_NONBLOCK)
def test_bind(self):
s1 = asyncore.dispatcher()
- s1.create_socket(socket.AF_INET, socket.SOCK_STREAM)
+ s1.create_socket()
s1.bind((HOST, 0))
s1.listen(5)
port = s1.socket.getsockname()[1]
s2 = asyncore.dispatcher()
- s2.create_socket(socket.AF_INET, socket.SOCK_STREAM)
+ s2.create_socket()
# EADDRINUSE indicates the socket was correctly bound
self.assertRaises(socket.error, s2.bind, (HOST, port))
@@ -723,7 +723,7 @@ class BaseTestAPI(unittest.TestCase):
self.assertFalse(s.socket.getsockopt(socket.SOL_SOCKET,
socket.SO_REUSEADDR))
s.socket.close()
- s.create_socket(socket.AF_INET, socket.SOCK_STREAM)
+ s.create_socket()
s.set_reuse_addr()
self.assertTrue(s.socket.getsockopt(socket.SOL_SOCKET,
socket.SO_REUSEADDR))
diff --git a/Lib/test/test_bigmem.py b/Lib/test/test_bigmem.py
index ac6b109..91c62af 100644
--- a/Lib/test/test_bigmem.py
+++ b/Lib/test/test_bigmem.py
@@ -1,3 +1,13 @@
+"""Bigmem tests - tests for the 32-bit boundary in containers.
+
+These tests try to exercise the 32-bit boundary that is sometimes, if
+rarely, exceeded in practice, but almost never tested. They are really only
+meaningful on 64-bit builds on machines with a *lot* of memory, but the
+tests are always run, usually with very low memory limits to make sure the
+tests themselves don't suffer from bitrot. To run them for real, pass a
+high memory limit to regrtest, with the -M option.
+"""
+
from test import support
from test.support import bigmemtest, _1G, _2G, _4G, precisionbigmemtest
@@ -6,30 +16,45 @@ import operator
import sys
import functools
+# These tests all use one of the bigmemtest decorators to indicate how much
+# memory they use and how much memory they need to be even meaningful. The
+# decorators take two arguments: a 'memuse' indicator declaring
+# (approximate) bytes per size-unit the test will use (at peak usage), and a
+# 'minsize' indicator declaring a minimum *useful* size. A test that
+# allocates a bytestring to test various operations near the end will have a
+# minsize of at least 2Gb (or it wouldn't reach the 32-bit limit, so the
+# test wouldn't be very useful) and a memuse of 1 (one byte per size-unit,
+# if it allocates only one big string at a time.)
+#
+# When run with a memory limit set, both decorators skip tests that need
+# more memory than available to be meaningful. The precisionbigmemtest will
+# always pass minsize as size, even if there is much more memory available.
+# The bigmemtest decorator will scale size upward to fill available memory.
+#
# Bigmem testing houserules:
#
# - Try not to allocate too many large objects. It's okay to rely on
-# refcounting semantics, but don't forget that 's = create_largestring()'
+# refcounting semantics, and don't forget that 's = create_largestring()'
# doesn't release the old 's' (if it exists) until well after its new
# value has been created. Use 'del s' before the create_largestring call.
#
-# - Do *not* compare large objects using assertEqual or similar. It's a
-# lengthy operation and the errormessage will be utterly useless due to
-# its size. To make sure whether a result has the right contents, better
-# to use the strip or count methods, or compare meaningful slices.
+# - Do *not* compare large objects using assertEqual, assertIn or similar.
+#   It's a lengthy operation and the error message will be utterly useless
+# due to its size. To make sure whether a result has the right contents,
+# better to use the strip or count methods, or compare meaningful slices.
#
# - Don't forget to test for large indices, offsets and results and such,
-# in addition to large sizes.
+# in addition to large sizes. Anything that probes the 32-bit boundary.
#
# - When repeating an object (say, a substring, or a small list) to create
# a large object, make the subobject of a length that is not a power of
# 2. That way, int-wrapping problems are more easily detected.
#
-# - While the bigmemtest decorator speaks of 'minsize', all tests will
-# actually be called with a much smaller number too, in the normal
-# test run (5Kb currently.) This is so the tests themselves get frequent
-# testing. Consequently, always make all large allocations based on the
-# passed-in 'size', and don't rely on the size being very large. Also,
+# - While the bigmem decorators speak of 'minsize', all tests will actually
+# be called with a much smaller number too, in the normal test run (5Kb
+# currently.) This is so the tests themselves get frequent testing.
+# Consequently, always make all large allocations based on the passed-in
+# 'size', and don't rely on the size being very large. Also,
# memuse-per-size should remain sane (less than a few thousand); if your
# test uses more, adjust 'size' upward, instead.
@@ -92,7 +117,7 @@ class BaseStrTest:
_ = self.from_latin1
s = _('-') * size
tabsize = 8
- self.assertEqual(s.expandtabs(), s)
+ self.assertTrue(s.expandtabs() == s)
del s
slen, remainder = divmod(size, tabsize)
s = _(' \t') * slen
@@ -519,19 +544,19 @@ class BaseStrTest:
edge = _('-') * (size // 2)
s = _('').join([edge, SUBSTR, edge])
del edge
- self.assertIn(SUBSTR, s)
- self.assertNotIn(SUBSTR * 2, s)
- self.assertIn(_('-'), s)
- self.assertNotIn(_('a'), s)
+ self.assertTrue(SUBSTR in s)
+ self.assertFalse(SUBSTR * 2 in s)
+ self.assertTrue(_('-') in s)
+ self.assertFalse(_('a') in s)
s += _('a')
- self.assertIn(_('a'), s)
+ self.assertTrue(_('a') in s)
@bigmemtest(minsize=_2G + 10, memuse=2)
def test_compare(self, size):
_ = self.from_latin1
s1 = _('-') * size
s2 = _('-') * size
- self.assertEqual(s1, s2)
+ self.assertTrue(s1 == s2)
del s2
s2 = s1 + _('a')
self.assertFalse(s1 == s2)
@@ -552,7 +577,7 @@ class BaseStrTest:
h1 = hash(s)
del s
s = _('\x00') * (size + 1)
- self.assertFalse(h1 == hash(s))
+ self.assertNotEqual(h1, hash(s))
class StrTest(unittest.TestCase, BaseStrTest):
@@ -633,7 +658,7 @@ class StrTest(unittest.TestCase, BaseStrTest):
def test_format(self, size):
s = '-' * size
sf = '%s' % (s,)
- self.assertEqual(s, sf)
+ self.assertTrue(s == sf)
del sf
sf = '..%s..' % (s,)
self.assertEqual(len(sf), len(s) + 4)
@@ -707,7 +732,7 @@ class StrTest(unittest.TestCase, BaseStrTest):
class BytesTest(unittest.TestCase, BaseStrTest):
def from_latin1(self, s):
- return s.encode("latin1")
+ return s.encode("latin-1")
@bigmemtest(minsize=_2G + 2, memuse=1 + character_size)
def test_decode(self, size):
@@ -718,7 +743,7 @@ class BytesTest(unittest.TestCase, BaseStrTest):
class BytearrayTest(unittest.TestCase, BaseStrTest):
def from_latin1(self, s):
- return bytearray(s.encode("latin1"))
+ return bytearray(s.encode("latin-1"))
@bigmemtest(minsize=_2G + 2, memuse=1 + character_size)
def test_decode(self, size):
@@ -743,7 +768,7 @@ class TupleTest(unittest.TestCase):
def test_compare(self, size):
t1 = ('',) * size
t2 = ('',) * size
- self.assertEqual(t1, t2)
+ self.assertTrue(t1 == t2)
del t2
t2 = ('',) * (size + 1)
self.assertFalse(t1 == t2)
@@ -774,9 +799,9 @@ class TupleTest(unittest.TestCase):
def test_contains(self, size):
t = (1, 2, 3, 4, 5) * size
self.assertEqual(len(t), size * 5)
- self.assertIn(5, t)
- self.assertNotIn((1, 2, 3, 4, 5), t)
- self.assertNotIn(0, t)
+ self.assertTrue(5 in t)
+ self.assertFalse((1, 2, 3, 4, 5) in t)
+ self.assertFalse(0 in t)
@bigmemtest(minsize=_2G + 10, memuse=8)
def test_hash(self, size):
@@ -879,7 +904,7 @@ class ListTest(unittest.TestCase):
def test_compare(self, size):
l1 = [''] * size
l2 = [''] * size
- self.assertEqual(l1, l2)
+ self.assertTrue(l1 == l2)
del l2
l2 = [''] * (size + 1)
self.assertFalse(l1 == l2)
@@ -925,9 +950,9 @@ class ListTest(unittest.TestCase):
def test_contains(self, size):
l = [1, 2, 3, 4, 5] * size
self.assertEqual(len(l), size * 5)
- self.assertIn(5, l)
- self.assertNotIn([1, 2, 3, 4, 5], l)
- self.assertNotIn(0, l)
+ self.assertTrue(5 in l)
+ self.assertFalse([1, 2, 3, 4, 5] in l)
+ self.assertFalse(0 in l)
@bigmemtest(minsize=_2G + 10, memuse=8)
def test_hash(self, size):
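The houserules comment above describes the contract of the bigmem decorators:
a test declares its minimum useful size and its (approximate) memory use per
size unit, and receives the possibly scaled size as an argument. A minimal
sketch (the concrete numbers are illustrative):

    import unittest
    from test.support import bigmemtest, _2G

    class ConcatExample(unittest.TestCase):
        # memuse=2: at peak, roughly two bytes live per size unit
        # (the source bytes object plus the concatenation result).
        @bigmemtest(minsize=_2G + 10, memuse=2)
        def test_concat(self, size):
            s = b'-' * size
            s = s + s[:10]
            self.assertEqual(len(s), size + 10)
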
diff --git a/Lib/test/test_builtin.py b/Lib/test/test_builtin.py
index 1469e36..b094a65 100644
--- a/Lib/test/test_builtin.py
+++ b/Lib/test/test_builtin.py
@@ -10,7 +10,7 @@ import ast
import types
import builtins
import random
-from test.support import fcmp, TESTFN, unlink, run_unittest, check_warnings
+from test.support import TESTFN, unlink, run_unittest, check_warnings
from operator import neg
@@ -394,10 +394,13 @@ class BuiltinTest(unittest.TestCase):
self.assertEqual(divmod(-sys.maxsize-1, -1), (sys.maxsize+1, 0))
- self.assertTrue(not fcmp(divmod(3.25, 1.0), (3.0, 0.25)))
- self.assertTrue(not fcmp(divmod(-3.25, 1.0), (-4.0, 0.75)))
- self.assertTrue(not fcmp(divmod(3.25, -1.0), (-4.0, -0.75)))
- self.assertTrue(not fcmp(divmod(-3.25, -1.0), (3.0, -0.25)))
+ for num, denom, exp_result in [ (3.25, 1.0, (3.0, 0.25)),
+ (-3.25, 1.0, (-4.0, 0.75)),
+ (3.25, -1.0, (-4.0, -0.75)),
+ (-3.25, -1.0, (3.0, -0.25))]:
+ result = divmod(num, denom)
+ self.assertAlmostEqual(result[0], exp_result[0])
+ self.assertAlmostEqual(result[1], exp_result[1])
self.assertRaises(TypeError, divmod)
@@ -1276,14 +1279,14 @@ class BuiltinTest(unittest.TestCase):
# --------------------------------------------------------------------
# Issue #7994: object.__format__ with a non-empty format string is
- # pending deprecated
+ # deprecated
def test_deprecated_format_string(obj, fmt_str, should_raise_warning):
with warnings.catch_warnings(record=True) as w:
- warnings.simplefilter("always", PendingDeprecationWarning)
+ warnings.simplefilter("always", DeprecationWarning)
format(obj, fmt_str)
if should_raise_warning:
self.assertEqual(len(w), 1)
- self.assertIsInstance(w[0].message, PendingDeprecationWarning)
+ self.assertIsInstance(w[0].message, DeprecationWarning)
self.assertIn('object.__format__ with a non-empty format '
'string', str(w[0].message))
else:
diff --git a/Lib/test/test_bytes.py b/Lib/test/test_bytes.py
index 9be1008..0b70c3a 100644
--- a/Lib/test/test_bytes.py
+++ b/Lib/test/test_bytes.py
@@ -188,24 +188,26 @@ class BaseBytesTest(unittest.TestCase):
def test_encoding(self):
sample = "Hello world\n\u1234\u5678\u9abc"
- for enc in ("utf8", "utf16"):
+ for enc in ("utf-8", "utf-16"):
b = self.type2test(sample, enc)
self.assertEqual(b, self.type2test(sample.encode(enc)))
- self.assertRaises(UnicodeEncodeError, self.type2test, sample, "latin1")
- b = self.type2test(sample, "latin1", "ignore")
+ self.assertRaises(UnicodeEncodeError, self.type2test, sample, "latin-1")
+ b = self.type2test(sample, "latin-1", "ignore")
self.assertEqual(b, self.type2test(sample[:-3], "utf-8"))
def test_decode(self):
sample = "Hello world\n\u1234\u5678\u9abc\def0\def0"
- for enc in ("utf8", "utf16"):
+ for enc in ("utf-8", "utf-16"):
b = self.type2test(sample, enc)
self.assertEqual(b.decode(enc), sample)
sample = "Hello world\n\x80\x81\xfe\xff"
- b = self.type2test(sample, "latin1")
- self.assertRaises(UnicodeDecodeError, b.decode, "utf8")
- self.assertEqual(b.decode("utf8", "ignore"), "Hello world\n")
- self.assertEqual(b.decode(errors="ignore", encoding="utf8"),
+ b = self.type2test(sample, "latin-1")
+ self.assertRaises(UnicodeDecodeError, b.decode, "utf-8")
+ self.assertEqual(b.decode("utf-8", "ignore"), "Hello world\n")
+ self.assertEqual(b.decode(errors="ignore", encoding="utf-8"),
"Hello world\n")
+ # Default encoding is utf-8
+ self.assertEqual(self.type2test(b'\xe2\x98\x83').decode(), '\u2603')
def test_from_int(self):
b = self.type2test(0)
@@ -562,6 +564,39 @@ class ByteArrayTest(BaseBytesTest):
b.reverse()
self.assertFalse(b)
+ def test_clear(self):
+ b = bytearray(b'python')
+ b.clear()
+ self.assertEqual(b, b'')
+
+ b = bytearray(b'')
+ b.clear()
+ self.assertEqual(b, b'')
+
+ b = bytearray(b'')
+ b.append(ord('r'))
+ b.clear()
+ b.append(ord('p'))
+ self.assertEqual(b, b'p')
+
+ def test_copy(self):
+ b = bytearray(b'abc')
+ bb = b.copy()
+ self.assertEqual(bb, b'abc')
+
+ b = bytearray(b'')
+ bb = b.copy()
+ self.assertEqual(bb, b'')
+
+ # test that it's indeed a copy and not a reference
+ b = bytearray(b'abc')
+ bb = b.copy()
+ self.assertEqual(b, bb)
+ self.assertIsNot(b, bb)
+ bb.append(ord('d'))
+ self.assertEqual(bb, b'abcd')
+ self.assertEqual(b, b'abc')
+
def test_regexps(self):
def by(s):
return bytearray(map(ord, s))
diff --git a/Lib/test/test_bz2.py b/Lib/test/test_bz2.py
index be35580..3567b36 100644
--- a/Lib/test/test_bz2.py
+++ b/Lib/test/test_bz2.py
@@ -1,10 +1,11 @@
#!/usr/bin/env python3
from test import support
-from test.support import TESTFN
+from test.support import TESTFN, precisionbigmemtest, _4G
import unittest
from io import BytesIO
import os
+import random
import subprocess
import sys
@@ -21,7 +22,30 @@ has_cmdline_bunzip2 = sys.platform not in ("win32", "os2emx")
class BaseTest(unittest.TestCase):
"Base for other testcases."
- TEXT = b'root:x:0:0:root:/root:/bin/bash\nbin:x:1:1:bin:/bin:\ndaemon:x:2:2:daemon:/sbin:\nadm:x:3:4:adm:/var/adm:\nlp:x:4:7:lp:/var/spool/lpd:\nsync:x:5:0:sync:/sbin:/bin/sync\nshutdown:x:6:0:shutdown:/sbin:/sbin/shutdown\nhalt:x:7:0:halt:/sbin:/sbin/halt\nmail:x:8:12:mail:/var/spool/mail:\nnews:x:9:13:news:/var/spool/news:\nuucp:x:10:14:uucp:/var/spool/uucp:\noperator:x:11:0:operator:/root:\ngames:x:12:100:games:/usr/games:\ngopher:x:13:30:gopher:/usr/lib/gopher-data:\nftp:x:14:50:FTP User:/var/ftp:/bin/bash\nnobody:x:65534:65534:Nobody:/home:\npostfix:x:100:101:postfix:/var/spool/postfix:\nniemeyer:x:500:500::/home/niemeyer:/bin/bash\npostgres:x:101:102:PostgreSQL Server:/var/lib/pgsql:/bin/bash\nmysql:x:102:103:MySQL server:/var/lib/mysql:/bin/bash\nwww:x:103:104::/var/www:/bin/false\n'
+ TEXT_LINES = [
+ b'root:x:0:0:root:/root:/bin/bash\n',
+ b'bin:x:1:1:bin:/bin:\n',
+ b'daemon:x:2:2:daemon:/sbin:\n',
+ b'adm:x:3:4:adm:/var/adm:\n',
+ b'lp:x:4:7:lp:/var/spool/lpd:\n',
+ b'sync:x:5:0:sync:/sbin:/bin/sync\n',
+ b'shutdown:x:6:0:shutdown:/sbin:/sbin/shutdown\n',
+ b'halt:x:7:0:halt:/sbin:/sbin/halt\n',
+ b'mail:x:8:12:mail:/var/spool/mail:\n',
+ b'news:x:9:13:news:/var/spool/news:\n',
+ b'uucp:x:10:14:uucp:/var/spool/uucp:\n',
+ b'operator:x:11:0:operator:/root:\n',
+ b'games:x:12:100:games:/usr/games:\n',
+ b'gopher:x:13:30:gopher:/usr/lib/gopher-data:\n',
+ b'ftp:x:14:50:FTP User:/var/ftp:/bin/bash\n',
+ b'nobody:x:65534:65534:Nobody:/home:\n',
+ b'postfix:x:100:101:postfix:/var/spool/postfix:\n',
+ b'niemeyer:x:500:500::/home/niemeyer:/bin/bash\n',
+ b'postgres:x:101:102:PostgreSQL Server:/var/lib/pgsql:/bin/bash\n',
+ b'mysql:x:102:103:MySQL server:/var/lib/mysql:/bin/bash\n',
+ b'www:x:103:104::/var/www:/bin/false\n',
+ ]
+ TEXT = b''.join(TEXT_LINES)
DATA = b'BZh91AY&SY.\xc8N\x18\x00\x01>_\x80\x00\x10@\x02\xff\xf0\x01\x07n\x00?\xe7\xff\xe00\x01\x99\xaa\x00\xc0\x03F\x86\x8c#&\x83F\x9a\x03\x06\xa6\xd0\xa6\x93M\x0fQ\xa7\xa8\x06\x804hh\x12$\x11\xa4i4\xf14S\xd2<Q\xb5\x0fH\xd3\xd4\xdd\xd5\x87\xbb\xf8\x94\r\x8f\xafI\x12\xe1\xc9\xf8/E\x00pu\x89\x12]\xc9\xbbDL\nQ\x0e\t1\x12\xdf\xa0\xc0\x97\xac2O9\x89\x13\x94\x0e\x1c7\x0ed\x95I\x0c\xaaJ\xa4\x18L\x10\x05#\x9c\xaf\xba\xbc/\x97\x8a#C\xc8\xe1\x8cW\xf9\xe2\xd0\xd6M\xa7\x8bXa<e\x84t\xcbL\xb3\xa7\xd9\xcd\xd1\xcb\x84.\xaf\xb3\xab\xab\xad`n}\xa0lh\tE,\x8eZ\x15\x17VH>\x88\xe5\xcd9gd6\x0b\n\xe9\x9b\xd5\x8a\x99\xf7\x08.K\x8ev\xfb\xf7xw\xbb\xdf\xa1\x92\xf1\xdd|/";\xa2\xba\x9f\xd5\xb1#A\xb6\xf6\xb3o\xc9\xc5y\\\xebO\xe7\x85\x9a\xbc\xb6f8\x952\xd5\xd7"%\x89>V,\xf7\xa6z\xe2\x9f\xa3\xdf\x11\x11"\xd6E)I\xa9\x13^\xca\xf3r\xd0\x03U\x922\xf26\xec\xb6\xed\x8b\xc3U\x13\x9d\xc5\x170\xa4\xfa^\x92\xacDF\x8a\x97\xd6\x19\xfe\xdd\xb8\xbd\x1a\x9a\x19\xa3\x80ankR\x8b\xe5\xd83]\xa9\xc6\x08\x82f\xf6\xb9"6l$\xb8j@\xc0\x8a\xb0l1..\xbak\x83ls\x15\xbc\xf4\xc1\x13\xbe\xf8E\xb8\x9d\r\xa8\x9dk\x84\xd3n\xfa\xacQ\x07\xb1%y\xaav\xb4\x08\xe0z\x1b\x16\xf5\x04\xe9\xcc\xb9\x08z\x1en7.G\xfc]\xc9\x14\xe1B@\xbb!8`'
DATA_CRLF = b'BZh91AY&SY\xaez\xbbN\x00\x01H\xdf\x80\x00\x12@\x02\xff\xf0\x01\x07n\x00?\xe7\xff\xe0@\x01\xbc\xc6`\x86*\x8d=M\xa9\x9a\x86\xd0L@\x0fI\xa6!\xa1\x13\xc8\x88jdi\x8d@\x03@\x1a\x1a\x0c\x0c\x83 \x00\xc4h2\x19\x01\x82D\x84e\t\xe8\x99\x89\x19\x1ah\x00\r\x1a\x11\xaf\x9b\x0fG\xf5(\x1b\x1f?\t\x12\xcf\xb5\xfc\x95E\x00ps\x89\x12^\xa4\xdd\xa2&\x05(\x87\x04\x98\x89u\xe40%\xb6\x19\'\x8c\xc4\x89\xca\x07\x0e\x1b!\x91UIFU%C\x994!DI\xd2\xfa\xf0\xf1N8W\xde\x13A\xf5\x9cr%?\x9f3;I45A\xd1\x8bT\xb1<l\xba\xcb_\xc00xY\x17r\x17\x88\x08\x08@\xa0\ry@\x10\x04$)`\xf2\xce\x89z\xb0s\xec\x9b.iW\x9d\x81\xb5-+t\x9f\x1a\'\x97dB\xf5x\xb5\xbe.[.\xd7\x0e\x81\xe7\x08\x1cN`\x88\x10\xca\x87\xc3!"\x80\x92R\xa1/\xd1\xc0\xe6mf\xac\xbd\x99\xcca\xb3\x8780>\xa4\xc7\x8d\x1a\\"\xad\xa1\xabyBg\x15\xb9l\x88\x88\x91k"\x94\xa4\xd4\x89\xae*\xa6\x0b\x10\x0c\xd6\xd4m\xe86\xec\xb5j\x8a\x86j\';\xca.\x01I\xf2\xaaJ\xe8\x88\x8cU+t3\xfb\x0c\n\xa33\x13r2\r\x16\xe0\xb3(\xbf\x1d\x83r\xe7M\xf0D\x1365\xd8\x88\xd3\xa4\x92\xcb2\x06\x04\\\xc1\xb0\xea//\xbek&\xd8\xe6+t\xe5\xa1\x13\xada\x16\xder5"w]\xa2i\xb7[\x97R \xe2IT\xcd;Z\x04dk4\xad\x8a\t\xd3\x81z\x10\xf1:^`\xab\x1f\xc5\xdc\x91N\x14$+\x9e\xae\xd3\x80'
@@ -54,13 +78,15 @@ class BZ2FileTest(BaseTest):
if os.path.isfile(self.filename):
os.unlink(self.filename)
- def createTempFile(self, crlf=0):
+ def getData(self, crlf=False):
+ if crlf:
+ return self.DATA_CRLF
+ else:
+ return self.DATA
+
+ def createTempFile(self, crlf=False):
with open(self.filename, "wb") as f:
- if crlf:
- data = self.DATA_CRLF
- else:
- data = self.DATA
- f.write(data)
+ f.write(self.getData(crlf))
def testRead(self):
# "Test BZ2File.read()"
@@ -70,7 +96,7 @@ class BZ2FileTest(BaseTest):
self.assertEqual(bz2f.read(), self.TEXT)
def testRead0(self):
- # Test BBZ2File.read(0)"
+        # "Test BZ2File.read(0)"
self.createTempFile()
with BZ2File(self.filename) as bz2f:
self.assertRaises(TypeError, bz2f.read, None)
@@ -94,6 +120,28 @@ class BZ2FileTest(BaseTest):
with BZ2File(self.filename) as bz2f:
self.assertEqual(bz2f.read(100), self.TEXT[:100])
+ def testPeek(self):
+ # "Test BZ2File.peek()"
+ self.createTempFile()
+ with BZ2File(self.filename) as bz2f:
+ pdata = bz2f.peek()
+ self.assertNotEqual(len(pdata), 0)
+ self.assertTrue(self.TEXT.startswith(pdata))
+ self.assertEqual(bz2f.read(), self.TEXT)
+
+ def testReadInto(self):
+ # "Test BZ2File.readinto()"
+ self.createTempFile()
+ with BZ2File(self.filename) as bz2f:
+ n = 128
+ b = bytearray(n)
+ self.assertEqual(bz2f.readinto(b), n)
+ self.assertEqual(b, self.TEXT[:n])
+ n = len(self.TEXT) - n
+ b = bytearray(len(self.TEXT))
+ self.assertEqual(bz2f.readinto(b), n)
+ self.assertEqual(b[:n], self.TEXT[-n:])
+
def testReadLine(self):
# "Test BZ2File.readline()"
self.createTempFile()
@@ -125,7 +173,7 @@ class BZ2FileTest(BaseTest):
bz2f = BZ2File(self.filename)
bz2f.close()
self.assertRaises(ValueError, bz2f.__next__)
- # This call will deadlock of the above .__next__ call failed to
+ # This call will deadlock if the above .__next__ call failed to
# release the lock.
self.assertRaises(ValueError, bz2f.readlines)
@@ -217,6 +265,13 @@ class BZ2FileTest(BaseTest):
self.assertEqual(bz2f.tell(), 0)
self.assertEqual(bz2f.read(), self.TEXT)
+ def testFileno(self):
+ # "Test BZ2File.fileno()"
+ self.createTempFile()
+ with open(self.filename) as rawf:
+ with BZ2File(fileobj=rawf) as bz2f:
+ self.assertEqual(bz2f.fileno(), rawf.fileno())
+
def testOpenDel(self):
# "Test opening and deleting a file many times"
self.createTempFile()
@@ -278,17 +333,65 @@ class BZ2FileTest(BaseTest):
t.join()
def testMixedIterationReads(self):
- # Issue #8397: mixed iteration and reads should be forbidden.
- with bz2.BZ2File(self.filename, 'wb') as f:
- # The internal buffer size is hard-wired to 8192 bytes, we must
- # write out more than that for the test to stop half through
- # the buffer.
- f.write(self.TEXT * 100)
- with bz2.BZ2File(self.filename, 'rb') as f:
- next(f)
- self.assertRaises(ValueError, f.read)
- self.assertRaises(ValueError, f.readline)
- self.assertRaises(ValueError, f.readlines)
+ # "Test mixed iteration and reads."
+ self.createTempFile()
+ linelen = len(self.TEXT_LINES[0])
+ halflen = linelen // 2
+ with bz2.BZ2File(self.filename) as bz2f:
+ bz2f.read(halflen)
+ self.assertEqual(next(bz2f), self.TEXT_LINES[0][halflen:])
+ self.assertEqual(bz2f.read(), self.TEXT[linelen:])
+ with bz2.BZ2File(self.filename) as bz2f:
+ bz2f.readline()
+ self.assertEqual(next(bz2f), self.TEXT_LINES[1])
+ self.assertEqual(bz2f.readline(), self.TEXT_LINES[2])
+ with bz2.BZ2File(self.filename) as bz2f:
+ bz2f.readlines()
+ with self.assertRaises(StopIteration):
+ next(bz2f)
+ self.assertEqual(bz2f.readlines(), [])
+
+ def testReadBytesIO(self):
+ # "Test BZ2File.read() with BytesIO source"
+ with BytesIO(self.getData()) as bio:
+ with BZ2File(fileobj=bio) as bz2f:
+ self.assertRaises(TypeError, bz2f.read, None)
+ self.assertEqual(bz2f.read(), self.TEXT)
+ self.assertFalse(bio.closed)
+
+ def testPeekBytesIO(self):
+ # "Test BZ2File.peek() with BytesIO source"
+ with BytesIO(self.getData()) as bio:
+ with BZ2File(fileobj=bio) as bz2f:
+ pdata = bz2f.peek()
+ self.assertNotEqual(len(pdata), 0)
+ self.assertTrue(self.TEXT.startswith(pdata))
+ self.assertEqual(bz2f.read(), self.TEXT)
+
+ def testWriteBytesIO(self):
+ # "Test BZ2File.write() with BytesIO destination"
+ with BytesIO() as bio:
+ with BZ2File(fileobj=bio, mode="w") as bz2f:
+ self.assertRaises(TypeError, bz2f.write)
+ bz2f.write(self.TEXT)
+ self.assertEqual(self.decompress(bio.getvalue()), self.TEXT)
+ self.assertFalse(bio.closed)
+
+ def testSeekForwardBytesIO(self):
+ # "Test BZ2File.seek(150, 0) with BytesIO source"
+ with BytesIO(self.getData()) as bio:
+ with BZ2File(fileobj=bio) as bz2f:
+ self.assertRaises(TypeError, bz2f.seek)
+ bz2f.seek(150)
+ self.assertEqual(bz2f.read(), self.TEXT[150:])
+
+ def testSeekBackwardsBytesIO(self):
+ # "Test BZ2File.seek(-150, 1) with BytesIO source"
+ with BytesIO(self.getData()) as bio:
+ with BZ2File(fileobj=bio) as bz2f:
+ bz2f.read(500)
+ bz2f.seek(-150, 1)
+ self.assertEqual(bz2f.read(), self.TEXT[500-150:])
class BZ2CompressorTest(BaseTest):
def testCompress(self):
@@ -313,6 +416,23 @@ class BZ2CompressorTest(BaseTest):
data += bz2c.flush()
self.assertEqual(self.decompress(data), self.TEXT)
+ @precisionbigmemtest(size=_4G + 100, memuse=2)
+ def testCompress4G(self, size):
+ # "Test BZ2Compressor.compress()/flush() with >4GiB input"
+ bz2c = BZ2Compressor()
+ data = b"x" * size
+ try:
+ compressed = bz2c.compress(data)
+ compressed += bz2c.flush()
+ finally:
+ data = None # Release memory
+ data = bz2.decompress(compressed)
+ try:
+ self.assertEqual(len(data), size)
+ self.assertEqual(len(data.strip(b"x")), 0)
+ finally:
+ data = None
+
class BZ2DecompressorTest(BaseTest):
def test_Constructor(self):
self.assertRaises(TypeError, BZ2Decompressor, 42)
@@ -351,6 +471,22 @@ class BZ2DecompressorTest(BaseTest):
text = bz2d.decompress(self.DATA)
self.assertRaises(EOFError, bz2d.decompress, b"anything")
+ @precisionbigmemtest(size=_4G + 100, memuse=3)
+ def testDecompress4G(self, size):
+ # "Test BZ2Decompressor.decompress() with >4GiB input"
+ blocksize = 10 * 1024 * 1024
+ block = random.getrandbits(blocksize * 8).to_bytes(blocksize, 'little')
+ try:
+ data = block * (size // blocksize + 1)
+ compressed = bz2.compress(data)
+ bz2d = BZ2Decompressor()
+ decompressed = bz2d.decompress(compressed)
+ self.assertTrue(decompressed == data)
+ finally:
+ data = None
+ compressed = None
+ decompressed = None
+
class FuncTest(BaseTest):
"Test module functions"
diff --git a/Lib/test/test_cmd_line.py b/Lib/test/test_cmd_line.py
index c4e3adf..57508c7 100644
--- a/Lib/test/test_cmd_line.py
+++ b/Lib/test/test_cmd_line.py
@@ -31,12 +31,6 @@ class CmdLineTest(unittest.TestCase):
self.verify_valid_flag('-O')
self.verify_valid_flag('-OO')
- def test_q(self):
- self.verify_valid_flag('-Qold')
- self.verify_valid_flag('-Qnew')
- self.verify_valid_flag('-Qwarn')
- self.verify_valid_flag('-Qwarnall')
-
def test_site_flag(self):
self.verify_valid_flag('-S')
@@ -151,7 +145,7 @@ class CmdLineTest(unittest.TestCase):
@unittest.skipUnless(sys.platform == 'darwin', 'test specific to Mac OS X')
def test_osx_utf8(self):
def check_output(text):
- decoded = text.decode('utf8', 'surrogateescape')
+ decoded = text.decode('utf-8', 'surrogateescape')
expected = ascii(decoded).encode('ascii') + b'\n'
env = os.environ.copy()
@@ -223,7 +217,7 @@ class CmdLineTest(unittest.TestCase):
self.assertIn(path2.encode('ascii'), out)
def test_displayhook_unencodable(self):
- for encoding in ('ascii', 'latin1', 'utf8'):
+ for encoding in ('ascii', 'latin-1', 'utf-8'):
env = os.environ.copy()
env['PYTHONIOENCODING'] = encoding
p = subprocess.Popen(
diff --git a/Lib/test/test_codecs.py b/Lib/test/test_codecs.py
index d560d7a..67a5aed 100644
--- a/Lib/test/test_codecs.py
+++ b/Lib/test/test_codecs.py
@@ -1250,7 +1250,7 @@ class EncodedFileTest(unittest.TestCase):
self.assertEqual(ef.read(), b'\\\xd5\n\x00\x00\xae')
f = io.BytesIO()
- ef = codecs.EncodedFile(f, 'utf-8', 'latin1')
+ ef = codecs.EncodedFile(f, 'utf-8', 'latin-1')
ef.write(b'\xc3\xbc')
self.assertEqual(f.getvalue(), b'\xfc')
@@ -1611,7 +1611,7 @@ class SurrogateEscapeTest(unittest.TestCase):
def test_latin1(self):
# Issue6373
- self.assertEqual("\udce4\udceb\udcef\udcf6\udcfc".encode("latin1", "surrogateescape"),
+ self.assertEqual("\udce4\udceb\udcef\udcf6\udcfc".encode("latin-1", "surrogateescape"),
b"\xe4\xeb\xef\xf6\xfc")
diff --git a/Lib/test/test_collections.py b/Lib/test/test_collections.py
index b3e0907..d4cc4a8 100644
--- a/Lib/test/test_collections.py
+++ b/Lib/test/test_collections.py
@@ -1,6 +1,7 @@
"""Unit tests for collections.py."""
import unittest, doctest, operator
+from test.support import TESTFN, forget, unlink
import inspect
from test import support
from collections import namedtuple, Counter, OrderedDict, _count_elements
@@ -10,17 +11,18 @@ from random import randrange, shuffle
import keyword
import re
import sys
-from collections import _ChainMap as ChainMap
-from collections import Hashable, Iterable, Iterator
-from collections import Sized, Container, Callable
-from collections import Set, MutableSet
-from collections import Mapping, MutableMapping, KeysView, ItemsView, UserDict
-from collections import Sequence, MutableSequence
-from collections import ByteString
+from collections import UserDict
+from collections import ChainMap
+from collections.abc import Hashable, Iterable, Iterator
+from collections.abc import Sized, Container, Callable
+from collections.abc import Set, MutableSet
+from collections.abc import Mapping, MutableMapping, KeysView, ItemsView
+from collections.abc import Sequence, MutableSequence
+from collections.abc import ByteString
################################################################################
-### _ChainMap (helper class for configparser)
+### ChainMap (helper class for configparser and the string module)
################################################################################
class TestChainMap(unittest.TestCase):
@@ -71,17 +73,23 @@ class TestChainMap(unittest.TestCase):
for m1, m2 in zip(d.maps, e.maps):
self.assertIsNot(m1, m2, e)
- d.new_child()
- d['b'] = 5
- self.assertEqual(d.maps, [{'b': 5}, {'c':30}, {'a':1, 'b':2}])
- self.assertEqual(d.parents.maps, [{'c':30}, {'a':1, 'b':2}]) # check parents
- self.assertEqual(d['b'], 5) # find first in chain
- self.assertEqual(d.parents['b'], 2) # look beyond maps[0]
+ f = d.new_child()
+ f['b'] = 5
+ self.assertEqual(f.maps, [{'b': 5}, {'c':30}, {'a':1, 'b':2}])
+ self.assertEqual(f.parents.maps, [{'c':30}, {'a':1, 'b':2}]) # check parents
+ self.assertEqual(f['b'], 5) # find first in chain
+ self.assertEqual(f.parents['b'], 2) # look beyond maps[0]
    def test_constructor(self):
- self.assertEqual(ChainedContext().maps, [{}]) # no-args --> one new dict
+ self.assertEqual(ChainMap().maps, [{}]) # no-args --> one new dict
self.assertEqual(ChainMap({1:2}).maps, [{1:2}]) # 1 arg --> list
+ def test_bool(self):
+ self.assertFalse(ChainMap())
+ self.assertFalse(ChainMap({}, {}))
+ self.assertTrue(ChainMap({1:2}, {}))
+ self.assertTrue(ChainMap({}, {1:2}))
+
def test_missing(self):
class DefaultChainMap(ChainMap):
def __missing__(self, key):
@@ -120,6 +128,7 @@ class TestNamedTuple(unittest.TestCase):
self.assertEqual(Point.__module__, __name__)
self.assertEqual(Point.__getitem__, tuple.__getitem__)
self.assertEqual(Point._fields, ('x', 'y'))
+ self.assertIn('class Point(tuple)', Point._source)
self.assertRaises(ValueError, namedtuple, 'abc%', 'efg ghi') # type has non-alpha char
self.assertRaises(ValueError, namedtuple, 'class', 'efg ghi') # type has keyword
@@ -319,6 +328,17 @@ class TestNamedTuple(unittest.TestCase):
pass
self.assertEqual(repr(B(1)), 'B(x=1)')
+ def test_source(self):
+ # verify that _source can be run through exec()
+ tmp = namedtuple('NTColor', 'red green blue')
+ globals().pop('NTColor', None) # remove artifacts from other tests
+ exec(tmp._source, globals())
+ self.assertIn('NTColor', globals())
+ c = NTColor(10, 20, 30)
+ self.assertEqual((c.red, c.green, c.blue), (10, 20, 30))
+ self.assertEqual(NTColor._fields, ('red', 'green', 'blue'))
+ globals().pop('NTColor', None) # clean-up after this test
+
################################################################################
### Abstract Base Classes
@@ -721,6 +741,44 @@ class TestCollectionABCs(ABCTestCase):
self.validate_abstract_methods(MutableSequence, '__contains__', '__iter__',
'__len__', '__getitem__', '__setitem__', '__delitem__', 'insert')
+ def test_MutableSequence_mixins(self):
+        # Test the mixins of MutableSequence by creating a minimal concrete
+        # class that inherits from it.
+ class MutableSequenceSubclass(MutableSequence):
+ def __init__(self):
+ self.lst = []
+
+ def __setitem__(self, index, value):
+ self.lst[index] = value
+
+ def __getitem__(self, index):
+ return self.lst[index]
+
+ def __len__(self):
+ return len(self.lst)
+
+ def __delitem__(self, index):
+ del self.lst[index]
+
+ def insert(self, index, value):
+ self.lst.insert(index, value)
+
+ mss = MutableSequenceSubclass()
+ mss.append(0)
+ mss.extend((1, 2, 3, 4))
+ self.assertEqual(len(mss), 5)
+ self.assertEqual(mss[3], 3)
+ mss.reverse()
+ self.assertEqual(mss[3], 1)
+ mss.pop()
+ self.assertEqual(len(mss), 4)
+ mss.remove(3)
+ self.assertEqual(len(mss), 3)
+ mss += (10, 20, 30)
+ self.assertEqual(len(mss), 6)
+ self.assertEqual(mss[-1], 30)
+ mss.clear()
+ self.assertEqual(len(mss), 0)
################################################################################
### Counter
@@ -1181,7 +1239,7 @@ import doctest, collections
def test_main(verbose=None):
NamedTupleDocs = doctest.DocTestSuite(module=collections)
test_classes = [TestNamedTuple, NamedTupleDocs, TestOneTrickPonyABCs,
- TestCollectionABCs, TestCounter,
+ TestCollectionABCs, TestCounter, TestChainMap,
TestOrderedDict, GeneralMappingTests, SubclassMappingTests]
support.run_unittest(*test_classes)
support.run_doctest(collections, verbose)
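
The updated assertions above reflect that ChainMap.new_child() returns a fresh child map rather than mutating the receiver. A minimal usage sketch (illustrative only, not part of the patch) of the lookup order those tests rely on:

from collections import ChainMap

defaults = {'color': 'red', 'user': 'guest'}
overrides = {'user': 'admin'}

cm = ChainMap(overrides, defaults)
assert cm['user'] == 'admin'            # maps[0] wins
assert cm['color'] == 'red'             # falls through to later maps

child = cm.new_child()                  # new ChainMap; cm itself is untouched
child['color'] = 'blue'                 # writes always go to maps[0]
assert cm['color'] == 'red'
assert child.parents['color'] == 'red'  # parents skips maps[0]
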
diff --git a/Lib/test/test_compileall.py b/Lib/test/test_compileall.py
index 250d31b..a63af4c 100644
--- a/Lib/test/test_compileall.py
+++ b/Lib/test/test_compileall.py
@@ -345,7 +345,7 @@ class CommandLineTests(unittest.TestCase):
def test_invalid_arg_produces_message(self):
out = self.assertRunOK('badfilename')
- self.assertRegex(out, b"Can't list badfilename")
+ self.assertRegex(out, b"Can't list 'badfilename'")
def test_main():
diff --git a/Lib/test/test_concurrent_futures.py b/Lib/test/test_concurrent_futures.py
index 2662af7..ec84a66 100644
--- a/Lib/test/test_concurrent_futures.py
+++ b/Lib/test/test_concurrent_futures.py
@@ -369,7 +369,15 @@ class ExecutorTest(unittest.TestCase):
class ThreadPoolExecutorTest(ThreadPoolMixin, ExecutorTest):
- pass
+ def test_map_submits_without_iteration(self):
+ """Tests verifying issue 11777."""
+ finished = []
+ def record_finished(n):
+ finished.append(n)
+
+ self.executor.map(record_finished, range(10))
+ self.executor.shutdown(wait=True)
+ self.assertCountEqual(finished, range(10))
class ProcessPoolExecutorTest(ProcessPoolMixin, ExecutorTest):
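
For reference, a small sketch (not taken from the patch) of the issue 11777 behaviour the new test checks: Executor.map() submits all calls up front, so the work runs even if the returned iterator is never consumed.

from concurrent.futures import ThreadPoolExecutor

seen = []
with ThreadPoolExecutor(max_workers=2) as executor:
    executor.map(seen.append, range(5))   # result iterator deliberately discarded
# the with-block shuts down with wait=True, as the test does explicitly
assert sorted(seen) == list(range(5))
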
diff --git a/Lib/test/test_cfgparser.py b/Lib/test/test_configparser.py
index f7d9a26..f7d9a26 100644
--- a/Lib/test/test_cfgparser.py
+++ b/Lib/test/test_configparser.py
diff --git a/Lib/test/test_crashers.py b/Lib/test/test_crashers.py
new file mode 100644
index 0000000..336ccbe
--- /dev/null
+++ b/Lib/test/test_crashers.py
@@ -0,0 +1,38 @@
+# Tests that the crashers in the Lib/test/crashers directory actually
+# do crash the interpreter as expected
+#
+# If a crasher is fixed, it should be moved elsewhere in the test suite to
+# ensure it continues to work correctly.
+
+import unittest
+import glob
+import os.path
+import test.support
+from test.script_helper import assert_python_failure
+
+CRASHER_DIR = os.path.join(os.path.dirname(__file__), "crashers")
+CRASHER_FILES = os.path.join(CRASHER_DIR, "*.py")
+
+infinite_loops = ["infinite_loop_re.py", "nasty_eq_vs_dict.py"]
+
+class CrasherTest(unittest.TestCase):
+
+ @unittest.skip("these tests are too fragile")
+ @test.support.cpython_only
+ def test_crashers_crash(self):
+ for fname in glob.glob(CRASHER_FILES):
+ if os.path.basename(fname) in infinite_loops:
+ continue
+ # Some "crashers" only trigger an exception rather than a
+ # segfault. Consider that an acceptable outcome.
+ if test.support.verbose:
+ print("Checking crasher:", fname)
+ assert_python_failure(fname)
+
+
+def test_main():
+ test.support.run_unittest(CrasherTest)
+ test.support.reap_children()
+
+if __name__ == "__main__":
+ test_main()
diff --git a/Lib/test/test_crypt.py b/Lib/test/test_crypt.py
index 2adb28d..dc107d8 100644
--- a/Lib/test/test_crypt.py
+++ b/Lib/test/test_crypt.py
@@ -10,6 +10,25 @@ class CryptTestCase(unittest.TestCase):
if support.verbose:
print('Test encryption: ', c)
+ def test_salt(self):
+ self.assertEqual(len(crypt._saltchars), 64)
+ for method in crypt.methods:
+ salt = crypt.mksalt(method)
+ self.assertEqual(len(salt),
+ method.salt_chars + (3 if method.ident else 0))
+
+ def test_saltedcrypt(self):
+ for method in crypt.methods:
+ pw = crypt.crypt('assword', method)
+ self.assertEqual(len(pw), method.total_size)
+ pw = crypt.crypt('assword', crypt.mksalt(method))
+ self.assertEqual(len(pw), method.total_size)
+
+ def test_methods(self):
+        # Guarantee that METHOD_CRYPT is the last method in crypt.methods.
+ self.assertTrue(len(crypt.methods) >= 1)
+ self.assertEqual(crypt.METHOD_CRYPT, crypt.methods[-1])
+
def test_main():
support.run_unittest(CryptTestCase)
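
A brief usage sketch of the mksalt()/crypt() API the new tests exercise (Unix-only module; the password string here is just an example):

import crypt

salt = crypt.mksalt(crypt.METHOD_SHA512)       # ident prefix plus random salt chars
hashed = crypt.crypt('example password', salt)
# Verifying a password: re-hash with the stored hash as the salt argument.
assert crypt.crypt('example password', hashed) == hashed
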
diff --git a/Lib/test/test_descr.py b/Lib/test/test_descr.py
index 41dfa70..b0d531a 100644
--- a/Lib/test/test_descr.py
+++ b/Lib/test/test_descr.py
@@ -4243,6 +4243,8 @@ class DictProxyTests(unittest.TestCase):
pass
self.C = C
+ @unittest.skipIf(hasattr(sys, 'gettrace') and sys.gettrace(),
+ 'trace function introduces __local__')
def test_iter_keys(self):
# Testing dict-proxy keys...
it = self.C.__dict__.keys()
@@ -4252,6 +4254,8 @@ class DictProxyTests(unittest.TestCase):
self.assertEqual(keys, ['__dict__', '__doc__', '__module__',
'__weakref__', 'meth'])
+ @unittest.skipIf(hasattr(sys, 'gettrace') and sys.gettrace(),
+ 'trace function introduces __local__')
def test_iter_values(self):
# Testing dict-proxy values...
it = self.C.__dict__.values()
@@ -4259,6 +4263,8 @@ class DictProxyTests(unittest.TestCase):
values = list(it)
self.assertEqual(len(values), 5)
+ @unittest.skipIf(hasattr(sys, 'gettrace') and sys.gettrace(),
+ 'trace function introduces __local__')
def test_iter_items(self):
# Testing dict-proxy iteritems...
it = self.C.__dict__.items()
diff --git a/Lib/test/test_descrtut.py b/Lib/test/test_descrtut.py
index 2db3d33..c3355b9 100644
--- a/Lib/test/test_descrtut.py
+++ b/Lib/test/test_descrtut.py
@@ -199,6 +199,8 @@ You can get the information from the list type:
'__str__',
'__subclasshook__',
'append',
+ 'clear',
+ 'copy',
'count',
'extend',
'index',
diff --git a/Lib/test/test_dis.py b/Lib/test/test_dis.py
index 7a61493..643e2e6 100644
--- a/Lib/test/test_dis.py
+++ b/Lib/test/test_dis.py
@@ -1,11 +1,35 @@
# Minimal tests for dis module
from test.support import run_unittest, captured_stdout
+import difflib
import unittest
import sys
import dis
import io
+class _C:
+ def __init__(self, x):
+ self.x = x == 1
+
+dis_c_instance_method = """\
+ %-4d 0 LOAD_FAST 1 (x)
+ 3 LOAD_CONST 1 (1)
+ 6 COMPARE_OP 2 (==)
+ 9 LOAD_FAST 0 (self)
+ 12 STORE_ATTR 0 (x)
+ 15 LOAD_CONST 0 (None)
+ 18 RETURN_VALUE
+""" % (_C.__init__.__code__.co_firstlineno + 1,)
+
+dis_c_instance_method_bytes = """\
+ 0 LOAD_FAST 1 (1)
+ 3 LOAD_CONST 1 (1)
+ 6 COMPARE_OP 2 (==)
+ 9 LOAD_FAST 0 (0)
+ 12 STORE_ATTR 0 (0)
+ 15 LOAD_CONST 0 (0)
+ 18 RETURN_VALUE
+"""
def _f(a):
print(a)
@@ -23,6 +47,16 @@ dis_f = """\
_f.__code__.co_firstlineno + 2)
+dis_f_co_code = """\
+ 0 LOAD_GLOBAL 0 (0)
+ 3 LOAD_FAST 0 (0)
+ 6 CALL_FUNCTION 1
+ 9 POP_TOP
+ 10 LOAD_CONST 1 (1)
+ 13 RETURN_VALUE
+"""
+
+
def bug708901():
for res in range(1,
10):
@@ -138,18 +172,27 @@ dis_compound_stmt_str = """\
"""
class DisTests(unittest.TestCase):
- def do_disassembly_test(self, func, expected):
+
+ def get_disassembly(self, func, lasti=-1, wrapper=True):
s = io.StringIO()
save_stdout = sys.stdout
sys.stdout = s
- dis.dis(func)
- sys.stdout = save_stdout
- got = s.getvalue()
+ try:
+ if wrapper:
+ dis.dis(func)
+ else:
+ dis.disassemble(func, lasti)
+ finally:
+ sys.stdout = save_stdout
# Trim trailing blanks (if any).
- lines = got.split('\n')
- lines = [line.rstrip() for line in lines]
- expected = expected.split("\n")
- import difflib
+ return [line.rstrip() for line in s.getvalue().splitlines()]
+
+ def get_disassemble_as_string(self, func, lasti=-1):
+ return '\n'.join(self.get_disassembly(func, lasti, False))
+
+ def do_disassembly_test(self, func, expected):
+ lines = self.get_disassembly(func)
+ expected = expected.splitlines()
if expected != lines:
self.fail(
"events did not match expectation:\n" +
@@ -211,6 +254,44 @@ class DisTests(unittest.TestCase):
self.do_disassembly_test(simple_stmt_str, dis_simple_stmt_str)
self.do_disassembly_test(compound_stmt_str, dis_compound_stmt_str)
+ def test_disassemble_bytes(self):
+ self.do_disassembly_test(_f.__code__.co_code, dis_f_co_code)
+
+ def test_disassemble_method(self):
+ self.do_disassembly_test(_C(1).__init__, dis_c_instance_method)
+
+ def test_disassemble_method_bytes(self):
+ method_bytecode = _C(1).__init__.__code__.co_code
+ self.do_disassembly_test(method_bytecode, dis_c_instance_method_bytes)
+
+ def test_dis_none(self):
+ try:
+ del sys.last_traceback
+ except AttributeError:
+ pass
+ self.assertRaises(RuntimeError, dis.dis, None)
+
+ def test_dis_object(self):
+ self.assertRaises(TypeError, dis.dis, object())
+
+ def test_dis_traceback(self):
+ try:
+ del sys.last_traceback
+ except AttributeError:
+ pass
+
+ try:
+ 1/0
+ except Exception as e:
+ tb = e.__traceback__
+ sys.last_traceback = tb
+
+ tb_dis = self.get_disassemble_as_string(tb.tb_frame.f_code, tb.tb_lasti)
+ self.do_disassembly_test(None, tb_dis)
+
code_info_code_info = """\
Name: code_info
Filename: (.*)
@@ -363,6 +444,13 @@ class CodeInfoTests(unittest.TestCase):
dis.show_code(x)
self.assertRegex(output.getvalue(), expected+"\n")
+ def test_code_info_object(self):
+ self.assertRaises(TypeError, dis.code_info, object())
+
+ def test_pretty_flags_no_flags(self):
+ self.assertEqual(dis.pretty_flags(0), '0x0')
+
+
def test_main():
run_unittest(DisTests, CodeInfoTests)
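
The refactored helpers above capture dis output by temporarily swapping sys.stdout; a standalone sketch of the same pattern (illustrative only, not part of the patch):

import dis
import io
import sys

def disassembly_text(obj):
    # dis.dis() writes to sys.stdout, so redirect it around the call.
    buf, saved = io.StringIO(), sys.stdout
    sys.stdout = buf
    try:
        dis.dis(obj)
    finally:
        sys.stdout = saved
    return buf.getvalue()

print(disassembly_text(compile("x = 1", "<demo>", "exec")))
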
diff --git a/Lib/test/test_doctest.py b/Lib/test/test_doctest.py
index 13836ba..cd87179 100644
--- a/Lib/test/test_doctest.py
+++ b/Lib/test/test_doctest.py
@@ -5,6 +5,7 @@ Test script for doctest.
from test import support
import doctest
import os
+import sys
# NOTE: There are some additional tests relating to interaction with
@@ -373,7 +374,7 @@ We'll simulate a __file__ attr that ends in pyc:
>>> tests = finder.find(sample_func)
>>> print(tests) # doctest: +ELLIPSIS
- [<DocTest sample_func from ...:17 (1 example)>]
+ [<DocTest sample_func from ...:18 (1 example)>]
The exact name depends on how test_doctest was invoked, so allow for
leading path components.
@@ -1686,226 +1687,227 @@ Run the debugger on the docstring, and then restore sys.stdin.
"""
-def test_pdb_set_trace():
- """Using pdb.set_trace from a doctest.
-
- You can use pdb.set_trace from a doctest. To do so, you must
- retrieve the set_trace function from the pdb module at the time
- you use it. The doctest module changes sys.stdout so that it can
- capture program output. It also temporarily replaces pdb.set_trace
- with a version that restores stdout. This is necessary for you to
- see debugger output.
-
- >>> doc = '''
- ... >>> x = 42
- ... >>> raise Exception('clé')
- ... Traceback (most recent call last):
- ... Exception: clé
- ... >>> import pdb; pdb.set_trace()
- ... '''
- >>> parser = doctest.DocTestParser()
- >>> test = parser.get_doctest(doc, {}, "foo-bar@baz", "foo-bar@baz.py", 0)
- >>> runner = doctest.DocTestRunner(verbose=False)
-
- To demonstrate this, we'll create a fake standard input that
- captures our debugger input:
-
- >>> import tempfile
- >>> real_stdin = sys.stdin
- >>> sys.stdin = _FakeInput([
- ... 'print(x)', # print data defined by the example
- ... 'continue', # stop debugging
- ... ''])
-
- >>> try: runner.run(test)
- ... finally: sys.stdin = real_stdin
- --Return--
- > <doctest foo-bar@baz[2]>(1)<module>()->None
- -> import pdb; pdb.set_trace()
- (Pdb) print(x)
- 42
- (Pdb) continue
- TestResults(failed=0, attempted=3)
-
- You can also put pdb.set_trace in a function called from a test:
-
- >>> def calls_set_trace():
- ... y=2
- ... import pdb; pdb.set_trace()
-
- >>> doc = '''
- ... >>> x=1
- ... >>> calls_set_trace()
- ... '''
- >>> test = parser.get_doctest(doc, globals(), "foo-bar@baz", "foo-bar@baz.py", 0)
- >>> real_stdin = sys.stdin
- >>> sys.stdin = _FakeInput([
- ... 'print(y)', # print data defined in the function
- ... 'up', # out of function
- ... 'print(x)', # print data defined by the example
- ... 'continue', # stop debugging
- ... ''])
-
- >>> try:
- ... runner.run(test)
- ... finally:
- ... sys.stdin = real_stdin
- --Return--
- > <doctest test.test_doctest.test_pdb_set_trace[8]>(3)calls_set_trace()->None
- -> import pdb; pdb.set_trace()
- (Pdb) print(y)
- 2
- (Pdb) up
- > <doctest foo-bar@baz[1]>(1)<module>()
- -> calls_set_trace()
- (Pdb) print(x)
- 1
- (Pdb) continue
- TestResults(failed=0, attempted=2)
-
- During interactive debugging, source code is shown, even for
- doctest examples:
-
- >>> doc = '''
- ... >>> def f(x):
- ... ... g(x*2)
- ... >>> def g(x):
- ... ... print(x+3)
- ... ... import pdb; pdb.set_trace()
- ... >>> f(3)
- ... '''
- >>> test = parser.get_doctest(doc, globals(), "foo-bar@baz", "foo-bar@baz.py", 0)
- >>> real_stdin = sys.stdin
- >>> sys.stdin = _FakeInput([
- ... 'list', # list source from example 2
- ... 'next', # return from g()
- ... 'list', # list source from example 1
- ... 'next', # return from f()
- ... 'list', # list source from example 3
- ... 'continue', # stop debugging
- ... ''])
- >>> try: runner.run(test)
- ... finally: sys.stdin = real_stdin
- ... # doctest: +NORMALIZE_WHITESPACE
- --Return--
- > <doctest foo-bar@baz[1]>(3)g()->None
- -> import pdb; pdb.set_trace()
- (Pdb) list
- 1 def g(x):
- 2 print(x+3)
- 3 -> import pdb; pdb.set_trace()
- [EOF]
- (Pdb) next
- --Return--
- > <doctest foo-bar@baz[0]>(2)f()->None
- -> g(x*2)
- (Pdb) list
- 1 def f(x):
- 2 -> g(x*2)
- [EOF]
- (Pdb) next
- --Return--
- > <doctest foo-bar@baz[2]>(1)<module>()->None
- -> f(3)
- (Pdb) list
- 1 -> f(3)
- [EOF]
- (Pdb) continue
- **********************************************************************
- File "foo-bar@baz.py", line 7, in foo-bar@baz
- Failed example:
- f(3)
- Expected nothing
- Got:
- 9
- TestResults(failed=1, attempted=3)
- """
-
-def test_pdb_set_trace_nested():
- """This illustrates more-demanding use of set_trace with nested functions.
-
- >>> class C(object):
- ... def calls_set_trace(self):
- ... y = 1
- ... import pdb; pdb.set_trace()
- ... self.f1()
- ... y = 2
- ... def f1(self):
- ... x = 1
- ... self.f2()
- ... x = 2
- ... def f2(self):
- ... z = 1
- ... z = 2
-
- >>> calls_set_trace = C().calls_set_trace
-
- >>> doc = '''
- ... >>> a = 1
- ... >>> calls_set_trace()
- ... '''
- >>> parser = doctest.DocTestParser()
- >>> runner = doctest.DocTestRunner(verbose=False)
- >>> test = parser.get_doctest(doc, globals(), "foo-bar@baz", "foo-bar@baz.py", 0)
- >>> real_stdin = sys.stdin
- >>> sys.stdin = _FakeInput([
- ... 'print(y)', # print data defined in the function
- ... 'step', 'step', 'step', 'step', 'step', 'step', 'print(z)',
- ... 'up', 'print(x)',
- ... 'up', 'print(y)',
- ... 'up', 'print(foo)',
- ... 'continue', # stop debugging
- ... ''])
-
- >>> try:
- ... runner.run(test)
- ... finally:
- ... sys.stdin = real_stdin
- ... # doctest: +REPORT_NDIFF
- > <doctest test.test_doctest.test_pdb_set_trace_nested[0]>(5)calls_set_trace()
- -> self.f1()
- (Pdb) print(y)
- 1
- (Pdb) step
- --Call--
- > <doctest test.test_doctest.test_pdb_set_trace_nested[0]>(7)f1()
- -> def f1(self):
- (Pdb) step
- > <doctest test.test_doctest.test_pdb_set_trace_nested[0]>(8)f1()
- -> x = 1
- (Pdb) step
- > <doctest test.test_doctest.test_pdb_set_trace_nested[0]>(9)f1()
- -> self.f2()
- (Pdb) step
- --Call--
- > <doctest test.test_doctest.test_pdb_set_trace_nested[0]>(11)f2()
- -> def f2(self):
- (Pdb) step
- > <doctest test.test_doctest.test_pdb_set_trace_nested[0]>(12)f2()
- -> z = 1
- (Pdb) step
- > <doctest test.test_doctest.test_pdb_set_trace_nested[0]>(13)f2()
- -> z = 2
- (Pdb) print(z)
- 1
- (Pdb) up
- > <doctest test.test_doctest.test_pdb_set_trace_nested[0]>(9)f1()
- -> self.f2()
- (Pdb) print(x)
- 1
- (Pdb) up
- > <doctest test.test_doctest.test_pdb_set_trace_nested[0]>(5)calls_set_trace()
- -> self.f1()
- (Pdb) print(y)
- 1
- (Pdb) up
- > <doctest foo-bar@baz[1]>(1)<module>()
- -> calls_set_trace()
- (Pdb) print(foo)
- *** NameError: name 'foo' is not defined
- (Pdb) continue
- TestResults(failed=0, attempted=2)
-"""
+if not hasattr(sys, 'gettrace') or not sys.gettrace():
+ def test_pdb_set_trace():
+ """Using pdb.set_trace from a doctest.
+
+ You can use pdb.set_trace from a doctest. To do so, you must
+ retrieve the set_trace function from the pdb module at the time
+ you use it. The doctest module changes sys.stdout so that it can
+ capture program output. It also temporarily replaces pdb.set_trace
+ with a version that restores stdout. This is necessary for you to
+ see debugger output.
+
+ >>> doc = '''
+ ... >>> x = 42
+ ... >>> raise Exception('clé')
+ ... Traceback (most recent call last):
+ ... Exception: clé
+ ... >>> import pdb; pdb.set_trace()
+ ... '''
+ >>> parser = doctest.DocTestParser()
+ >>> test = parser.get_doctest(doc, {}, "foo-bar@baz", "foo-bar@baz.py", 0)
+ >>> runner = doctest.DocTestRunner(verbose=False)
+
+ To demonstrate this, we'll create a fake standard input that
+ captures our debugger input:
+
+ >>> import tempfile
+ >>> real_stdin = sys.stdin
+ >>> sys.stdin = _FakeInput([
+ ... 'print(x)', # print data defined by the example
+ ... 'continue', # stop debugging
+ ... ''])
+
+ >>> try: runner.run(test)
+ ... finally: sys.stdin = real_stdin
+ --Return--
+ > <doctest foo-bar@baz[2]>(1)<module>()->None
+ -> import pdb; pdb.set_trace()
+ (Pdb) print(x)
+ 42
+ (Pdb) continue
+ TestResults(failed=0, attempted=3)
+
+ You can also put pdb.set_trace in a function called from a test:
+
+ >>> def calls_set_trace():
+ ... y=2
+ ... import pdb; pdb.set_trace()
+
+ >>> doc = '''
+ ... >>> x=1
+ ... >>> calls_set_trace()
+ ... '''
+ >>> test = parser.get_doctest(doc, globals(), "foo-bar@baz", "foo-bar@baz.py", 0)
+ >>> real_stdin = sys.stdin
+ >>> sys.stdin = _FakeInput([
+ ... 'print(y)', # print data defined in the function
+ ... 'up', # out of function
+ ... 'print(x)', # print data defined by the example
+ ... 'continue', # stop debugging
+ ... ''])
+
+ >>> try:
+ ... runner.run(test)
+ ... finally:
+ ... sys.stdin = real_stdin
+ --Return--
+ > <doctest test.test_doctest.test_pdb_set_trace[8]>(3)calls_set_trace()->None
+ -> import pdb; pdb.set_trace()
+ (Pdb) print(y)
+ 2
+ (Pdb) up
+ > <doctest foo-bar@baz[1]>(1)<module>()
+ -> calls_set_trace()
+ (Pdb) print(x)
+ 1
+ (Pdb) continue
+ TestResults(failed=0, attempted=2)
+
+ During interactive debugging, source code is shown, even for
+ doctest examples:
+
+ >>> doc = '''
+ ... >>> def f(x):
+ ... ... g(x*2)
+ ... >>> def g(x):
+ ... ... print(x+3)
+ ... ... import pdb; pdb.set_trace()
+ ... >>> f(3)
+ ... '''
+ >>> test = parser.get_doctest(doc, globals(), "foo-bar@baz", "foo-bar@baz.py", 0)
+ >>> real_stdin = sys.stdin
+ >>> sys.stdin = _FakeInput([
+ ... 'list', # list source from example 2
+ ... 'next', # return from g()
+ ... 'list', # list source from example 1
+ ... 'next', # return from f()
+ ... 'list', # list source from example 3
+ ... 'continue', # stop debugging
+ ... ''])
+ >>> try: runner.run(test)
+ ... finally: sys.stdin = real_stdin
+ ... # doctest: +NORMALIZE_WHITESPACE
+ --Return--
+ > <doctest foo-bar@baz[1]>(3)g()->None
+ -> import pdb; pdb.set_trace()
+ (Pdb) list
+ 1 def g(x):
+ 2 print(x+3)
+ 3 -> import pdb; pdb.set_trace()
+ [EOF]
+ (Pdb) next
+ --Return--
+ > <doctest foo-bar@baz[0]>(2)f()->None
+ -> g(x*2)
+ (Pdb) list
+ 1 def f(x):
+ 2 -> g(x*2)
+ [EOF]
+ (Pdb) next
+ --Return--
+ > <doctest foo-bar@baz[2]>(1)<module>()->None
+ -> f(3)
+ (Pdb) list
+ 1 -> f(3)
+ [EOF]
+ (Pdb) continue
+ **********************************************************************
+ File "foo-bar@baz.py", line 7, in foo-bar@baz
+ Failed example:
+ f(3)
+ Expected nothing
+ Got:
+ 9
+ TestResults(failed=1, attempted=3)
+ """
+
+ def test_pdb_set_trace_nested():
+ """This illustrates more-demanding use of set_trace with nested functions.
+
+ >>> class C(object):
+ ... def calls_set_trace(self):
+ ... y = 1
+ ... import pdb; pdb.set_trace()
+ ... self.f1()
+ ... y = 2
+ ... def f1(self):
+ ... x = 1
+ ... self.f2()
+ ... x = 2
+ ... def f2(self):
+ ... z = 1
+ ... z = 2
+
+ >>> calls_set_trace = C().calls_set_trace
+
+ >>> doc = '''
+ ... >>> a = 1
+ ... >>> calls_set_trace()
+ ... '''
+ >>> parser = doctest.DocTestParser()
+ >>> runner = doctest.DocTestRunner(verbose=False)
+ >>> test = parser.get_doctest(doc, globals(), "foo-bar@baz", "foo-bar@baz.py", 0)
+ >>> real_stdin = sys.stdin
+ >>> sys.stdin = _FakeInput([
+ ... 'print(y)', # print data defined in the function
+ ... 'step', 'step', 'step', 'step', 'step', 'step', 'print(z)',
+ ... 'up', 'print(x)',
+ ... 'up', 'print(y)',
+ ... 'up', 'print(foo)',
+ ... 'continue', # stop debugging
+ ... ''])
+
+ >>> try:
+ ... runner.run(test)
+ ... finally:
+ ... sys.stdin = real_stdin
+ ... # doctest: +REPORT_NDIFF
+ > <doctest test.test_doctest.test_pdb_set_trace_nested[0]>(5)calls_set_trace()
+ -> self.f1()
+ (Pdb) print(y)
+ 1
+ (Pdb) step
+ --Call--
+ > <doctest test.test_doctest.test_pdb_set_trace_nested[0]>(7)f1()
+ -> def f1(self):
+ (Pdb) step
+ > <doctest test.test_doctest.test_pdb_set_trace_nested[0]>(8)f1()
+ -> x = 1
+ (Pdb) step
+ > <doctest test.test_doctest.test_pdb_set_trace_nested[0]>(9)f1()
+ -> self.f2()
+ (Pdb) step
+ --Call--
+ > <doctest test.test_doctest.test_pdb_set_trace_nested[0]>(11)f2()
+ -> def f2(self):
+ (Pdb) step
+ > <doctest test.test_doctest.test_pdb_set_trace_nested[0]>(12)f2()
+ -> z = 1
+ (Pdb) step
+ > <doctest test.test_doctest.test_pdb_set_trace_nested[0]>(13)f2()
+ -> z = 2
+ (Pdb) print(z)
+ 1
+ (Pdb) up
+ > <doctest test.test_doctest.test_pdb_set_trace_nested[0]>(9)f1()
+ -> self.f2()
+ (Pdb) print(x)
+ 1
+ (Pdb) up
+ > <doctest test.test_doctest.test_pdb_set_trace_nested[0]>(5)calls_set_trace()
+ -> self.f1()
+ (Pdb) print(y)
+ 1
+ (Pdb) up
+ > <doctest foo-bar@baz[1]>(1)<module>()
+ -> calls_set_trace()
+ (Pdb) print(foo)
+ *** NameError: name 'foo' is not defined
+ (Pdb) continue
+ TestResults(failed=0, attempted=2)
+ """
def test_DocTestSuite():
"""DocTestSuite creates a unittest test suite from a doctest.
diff --git a/Lib/test/test_dummy_thread.py b/Lib/test/test_dummy_thread.py
index c61078d..2fafe1d 100644
--- a/Lib/test/test_dummy_thread.py
+++ b/Lib/test/test_dummy_thread.py
@@ -35,8 +35,8 @@ class LockTests(unittest.TestCase):
"Lock object did not release properly.")
def test_improper_release(self):
- #Make sure release of an unlocked thread raises _thread.error
- self.assertRaises(_thread.error, self.lock.release)
+ #Make sure release of an unlocked thread raises RuntimeError
+ self.assertRaises(RuntimeError, self.lock.release)
def test_cond_acquire_success(self):
#Make sure the conditional acquiring of the lock works.
diff --git a/Lib/test/test_email.py b/Lib/test/test_email.py
deleted file mode 100644
index 5eebba5..0000000
--- a/Lib/test/test_email.py
+++ /dev/null
@@ -1,14 +0,0 @@
-# Copyright (C) 2001-2007 Python Software Foundation
-# email package unit tests
-
-# The specific tests now live in Lib/email/test
-from email.test.test_email import suite
-from email.test.test_email_codecs import suite as codecs_suite
-from test import support
-
-def test_main():
- support.run_unittest(suite())
- support.run_unittest(codecs_suite())
-
-if __name__ == '__main__':
- test_main()
diff --git a/Lib/test/test_email/__init__.py b/Lib/test/test_email/__init__.py
new file mode 100644
index 0000000..69be678
--- /dev/null
+++ b/Lib/test/test_email/__init__.py
@@ -0,0 +1,43 @@
+import os
+import sys
+import unittest
+import test.support
+import email
+from test.test_email import __file__ as landmark
+
+# used by regrtest and __main__.
+def test_main():
+ here = os.path.dirname(__file__)
+ # Unittest mucks with the path, so we have to save and restore
+ # it to keep regrtest happy.
+ savepath = sys.path[:]
+ test.support._run_suite(unittest.defaultTestLoader.discover(here))
+ sys.path[:] = savepath
+
+
+# helper code used by a number of test modules.
+
+def openfile(filename, *args, **kws):
+ path = os.path.join(os.path.dirname(landmark), 'data', filename)
+ return open(path, *args, **kws)
+
+
+# Base test class
+class TestEmailBase(unittest.TestCase):
+
+ def __init__(self, *args, **kw):
+ super().__init__(*args, **kw)
+ self.addTypeEqualityFunc(bytes, self.assertBytesEqual)
+
+ ndiffAssertEqual = unittest.TestCase.assertEqual
+
+ def _msgobj(self, filename):
+ with openfile(filename) as fp:
+ return email.message_from_file(fp)
+
+ def _bytes_repr(self, b):
+ return [repr(x) for x in b.splitlines(True)]
+
+ def assertBytesEqual(self, first, second, msg):
+ """Our byte strings are really encoded strings; improve diff output"""
+ self.assertEqual(self._bytes_repr(first), self._bytes_repr(second))
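
The new TestEmailBase registers a bytes-specific comparison through unittest's addTypeEqualityFunc() hook; a self-contained sketch of that hook (class and method names here are illustrative, not from the patch):

import unittest

class BytesDiffExample(unittest.TestCase):
    def setUp(self):
        # assertEqual() now routes bytes/bytes comparisons through the helper
        self.addTypeEqualityFunc(bytes, self.assertBytesLineByLine)

    def assertBytesLineByLine(self, first, second, msg=None):
        # Compare line by line so failures produce a readable diff.
        self.assertEqual(first.splitlines(True), second.splitlines(True), msg)

    def test_equal_payloads(self):
        self.assertEqual(b"line one\nline two\n", b"line one\nline two\n")

if __name__ == '__main__':
    unittest.main()
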
diff --git a/Lib/test/test_email/__main__.py b/Lib/test/test_email/__main__.py
new file mode 100644
index 0000000..98af9ec
--- /dev/null
+++ b/Lib/test/test_email/__main__.py
@@ -0,0 +1,3 @@
+from test.test_email import test_main
+
+test_main()
diff --git a/Lib/email/test/data/PyBanner048.gif b/Lib/test/test_email/data/PyBanner048.gif
index 1a5c87f..1a5c87f 100644
--- a/Lib/email/test/data/PyBanner048.gif
+++ b/Lib/test/test_email/data/PyBanner048.gif
Binary files differ
diff --git a/Lib/email/test/data/audiotest.au b/Lib/test/test_email/data/audiotest.au
index f76b050..f76b050 100644
--- a/Lib/email/test/data/audiotest.au
+++ b/Lib/test/test_email/data/audiotest.au
Binary files differ
diff --git a/Lib/email/test/data/msg_01.txt b/Lib/test/test_email/data/msg_01.txt
index 7e33bcf..7e33bcf 100644
--- a/Lib/email/test/data/msg_01.txt
+++ b/Lib/test/test_email/data/msg_01.txt
diff --git a/Lib/email/test/data/msg_02.txt b/Lib/test/test_email/data/msg_02.txt
index 43f2480..43f2480 100644
--- a/Lib/email/test/data/msg_02.txt
+++ b/Lib/test/test_email/data/msg_02.txt
diff --git a/Lib/email/test/data/msg_03.txt b/Lib/test/test_email/data/msg_03.txt
index c748ebf..c748ebf 100644
--- a/Lib/email/test/data/msg_03.txt
+++ b/Lib/test/test_email/data/msg_03.txt
diff --git a/Lib/email/test/data/msg_04.txt b/Lib/test/test_email/data/msg_04.txt
index 1f633c4..1f633c4 100644
--- a/Lib/email/test/data/msg_04.txt
+++ b/Lib/test/test_email/data/msg_04.txt
diff --git a/Lib/email/test/data/msg_05.txt b/Lib/test/test_email/data/msg_05.txt
index 87d5e9c..87d5e9c 100644
--- a/Lib/email/test/data/msg_05.txt
+++ b/Lib/test/test_email/data/msg_05.txt
diff --git a/Lib/email/test/data/msg_06.txt b/Lib/test/test_email/data/msg_06.txt
index 69f3a47..69f3a47 100644
--- a/Lib/email/test/data/msg_06.txt
+++ b/Lib/test/test_email/data/msg_06.txt
diff --git a/Lib/email/test/data/msg_07.txt b/Lib/test/test_email/data/msg_07.txt
index 721f3a0..721f3a0 100644
--- a/Lib/email/test/data/msg_07.txt
+++ b/Lib/test/test_email/data/msg_07.txt
diff --git a/Lib/email/test/data/msg_08.txt b/Lib/test/test_email/data/msg_08.txt
index b563083..b563083 100644
--- a/Lib/email/test/data/msg_08.txt
+++ b/Lib/test/test_email/data/msg_08.txt
diff --git a/Lib/email/test/data/msg_09.txt b/Lib/test/test_email/data/msg_09.txt
index 575c4c2..575c4c2 100644
--- a/Lib/email/test/data/msg_09.txt
+++ b/Lib/test/test_email/data/msg_09.txt
diff --git a/Lib/email/test/data/msg_10.txt b/Lib/test/test_email/data/msg_10.txt
index 0790396..0790396 100644
--- a/Lib/email/test/data/msg_10.txt
+++ b/Lib/test/test_email/data/msg_10.txt
diff --git a/Lib/email/test/data/msg_11.txt b/Lib/test/test_email/data/msg_11.txt
index 8f7f199..8f7f199 100644
--- a/Lib/email/test/data/msg_11.txt
+++ b/Lib/test/test_email/data/msg_11.txt
diff --git a/Lib/email/test/data/msg_12.txt b/Lib/test/test_email/data/msg_12.txt
index 4bec8d9..4bec8d9 100644
--- a/Lib/email/test/data/msg_12.txt
+++ b/Lib/test/test_email/data/msg_12.txt
diff --git a/Lib/email/test/data/msg_12a.txt b/Lib/test/test_email/data/msg_12a.txt
index e94224e..e94224e 100644
--- a/Lib/email/test/data/msg_12a.txt
+++ b/Lib/test/test_email/data/msg_12a.txt
diff --git a/Lib/email/test/data/msg_13.txt b/Lib/test/test_email/data/msg_13.txt
index 8e6d52d..8e6d52d 100644
--- a/Lib/email/test/data/msg_13.txt
+++ b/Lib/test/test_email/data/msg_13.txt
diff --git a/Lib/email/test/data/msg_14.txt b/Lib/test/test_email/data/msg_14.txt
index 5d98d2f..5d98d2f 100644
--- a/Lib/email/test/data/msg_14.txt
+++ b/Lib/test/test_email/data/msg_14.txt
diff --git a/Lib/email/test/data/msg_15.txt b/Lib/test/test_email/data/msg_15.txt
index 0025624..0025624 100644
--- a/Lib/email/test/data/msg_15.txt
+++ b/Lib/test/test_email/data/msg_15.txt
diff --git a/Lib/email/test/data/msg_16.txt b/Lib/test/test_email/data/msg_16.txt
index 56167e9..56167e9 100644
--- a/Lib/email/test/data/msg_16.txt
+++ b/Lib/test/test_email/data/msg_16.txt
diff --git a/Lib/email/test/data/msg_17.txt b/Lib/test/test_email/data/msg_17.txt
index 8d86e41..8d86e41 100644
--- a/Lib/email/test/data/msg_17.txt
+++ b/Lib/test/test_email/data/msg_17.txt
diff --git a/Lib/email/test/data/msg_18.txt b/Lib/test/test_email/data/msg_18.txt
index f9f4904..f9f4904 100644
--- a/Lib/email/test/data/msg_18.txt
+++ b/Lib/test/test_email/data/msg_18.txt
diff --git a/Lib/email/test/data/msg_19.txt b/Lib/test/test_email/data/msg_19.txt
index 49bf7fc..49bf7fc 100644
--- a/Lib/email/test/data/msg_19.txt
+++ b/Lib/test/test_email/data/msg_19.txt
diff --git a/Lib/email/test/data/msg_20.txt b/Lib/test/test_email/data/msg_20.txt
index 1a6a887..1a6a887 100644
--- a/Lib/email/test/data/msg_20.txt
+++ b/Lib/test/test_email/data/msg_20.txt
diff --git a/Lib/email/test/data/msg_21.txt b/Lib/test/test_email/data/msg_21.txt
index 23590b2..23590b2 100644
--- a/Lib/email/test/data/msg_21.txt
+++ b/Lib/test/test_email/data/msg_21.txt
diff --git a/Lib/email/test/data/msg_22.txt b/Lib/test/test_email/data/msg_22.txt
index af9de5f..af9de5f 100644
--- a/Lib/email/test/data/msg_22.txt
+++ b/Lib/test/test_email/data/msg_22.txt
diff --git a/Lib/email/test/data/msg_23.txt b/Lib/test/test_email/data/msg_23.txt
index bb2e8ec..bb2e8ec 100644
--- a/Lib/email/test/data/msg_23.txt
+++ b/Lib/test/test_email/data/msg_23.txt
diff --git a/Lib/email/test/data/msg_24.txt b/Lib/test/test_email/data/msg_24.txt
index 4e52339..4e52339 100644
--- a/Lib/email/test/data/msg_24.txt
+++ b/Lib/test/test_email/data/msg_24.txt
diff --git a/Lib/email/test/data/msg_25.txt b/Lib/test/test_email/data/msg_25.txt
index 9e35275..9e35275 100644
--- a/Lib/email/test/data/msg_25.txt
+++ b/Lib/test/test_email/data/msg_25.txt
diff --git a/Lib/email/test/data/msg_26.txt b/Lib/test/test_email/data/msg_26.txt
index 58efaa9..58efaa9 100644
--- a/Lib/email/test/data/msg_26.txt
+++ b/Lib/test/test_email/data/msg_26.txt
diff --git a/Lib/email/test/data/msg_27.txt b/Lib/test/test_email/data/msg_27.txt
index d019176..d019176 100644
--- a/Lib/email/test/data/msg_27.txt
+++ b/Lib/test/test_email/data/msg_27.txt
diff --git a/Lib/email/test/data/msg_28.txt b/Lib/test/test_email/data/msg_28.txt
index 1e4824c..1e4824c 100644
--- a/Lib/email/test/data/msg_28.txt
+++ b/Lib/test/test_email/data/msg_28.txt
diff --git a/Lib/email/test/data/msg_29.txt b/Lib/test/test_email/data/msg_29.txt
index 1fab561..1fab561 100644
--- a/Lib/email/test/data/msg_29.txt
+++ b/Lib/test/test_email/data/msg_29.txt
diff --git a/Lib/email/test/data/msg_30.txt b/Lib/test/test_email/data/msg_30.txt
index 4334bb6..4334bb6 100644
--- a/Lib/email/test/data/msg_30.txt
+++ b/Lib/test/test_email/data/msg_30.txt
diff --git a/Lib/email/test/data/msg_31.txt b/Lib/test/test_email/data/msg_31.txt
index 1e58e56..1e58e56 100644
--- a/Lib/email/test/data/msg_31.txt
+++ b/Lib/test/test_email/data/msg_31.txt
diff --git a/Lib/email/test/data/msg_32.txt b/Lib/test/test_email/data/msg_32.txt
index 07ec5af..07ec5af 100644
--- a/Lib/email/test/data/msg_32.txt
+++ b/Lib/test/test_email/data/msg_32.txt
diff --git a/Lib/email/test/data/msg_33.txt b/Lib/test/test_email/data/msg_33.txt
index 042787a..042787a 100644
--- a/Lib/email/test/data/msg_33.txt
+++ b/Lib/test/test_email/data/msg_33.txt
diff --git a/Lib/email/test/data/msg_34.txt b/Lib/test/test_email/data/msg_34.txt
index 055dfea..055dfea 100644
--- a/Lib/email/test/data/msg_34.txt
+++ b/Lib/test/test_email/data/msg_34.txt
diff --git a/Lib/email/test/data/msg_35.txt b/Lib/test/test_email/data/msg_35.txt
index be7d5a2..be7d5a2 100644
--- a/Lib/email/test/data/msg_35.txt
+++ b/Lib/test/test_email/data/msg_35.txt
diff --git a/Lib/email/test/data/msg_36.txt b/Lib/test/test_email/data/msg_36.txt
index 5632c30..5632c30 100644
--- a/Lib/email/test/data/msg_36.txt
+++ b/Lib/test/test_email/data/msg_36.txt
diff --git a/Lib/email/test/data/msg_37.txt b/Lib/test/test_email/data/msg_37.txt
index 038d34a..038d34a 100644
--- a/Lib/email/test/data/msg_37.txt
+++ b/Lib/test/test_email/data/msg_37.txt
diff --git a/Lib/email/test/data/msg_38.txt b/Lib/test/test_email/data/msg_38.txt
index 006df81..006df81 100644
--- a/Lib/email/test/data/msg_38.txt
+++ b/Lib/test/test_email/data/msg_38.txt
diff --git a/Lib/email/test/data/msg_39.txt b/Lib/test/test_email/data/msg_39.txt
index 124b269..124b269 100644
--- a/Lib/email/test/data/msg_39.txt
+++ b/Lib/test/test_email/data/msg_39.txt
diff --git a/Lib/email/test/data/msg_40.txt b/Lib/test/test_email/data/msg_40.txt
index 1435fa1..1435fa1 100644
--- a/Lib/email/test/data/msg_40.txt
+++ b/Lib/test/test_email/data/msg_40.txt
diff --git a/Lib/email/test/data/msg_41.txt b/Lib/test/test_email/data/msg_41.txt
index 76cdd1c..76cdd1c 100644
--- a/Lib/email/test/data/msg_41.txt
+++ b/Lib/test/test_email/data/msg_41.txt
diff --git a/Lib/email/test/data/msg_42.txt b/Lib/test/test_email/data/msg_42.txt
index a75f8f4..a75f8f4 100644
--- a/Lib/email/test/data/msg_42.txt
+++ b/Lib/test/test_email/data/msg_42.txt
diff --git a/Lib/email/test/data/msg_43.txt b/Lib/test/test_email/data/msg_43.txt
index 797d12c..797d12c 100644
--- a/Lib/email/test/data/msg_43.txt
+++ b/Lib/test/test_email/data/msg_43.txt
diff --git a/Lib/email/test/data/msg_44.txt b/Lib/test/test_email/data/msg_44.txt
index 15a2252..15a2252 100644
--- a/Lib/email/test/data/msg_44.txt
+++ b/Lib/test/test_email/data/msg_44.txt
diff --git a/Lib/email/test/data/msg_45.txt b/Lib/test/test_email/data/msg_45.txt
index 58fde95..58fde95 100644
--- a/Lib/email/test/data/msg_45.txt
+++ b/Lib/test/test_email/data/msg_45.txt
diff --git a/Lib/email/test/data/msg_46.txt b/Lib/test/test_email/data/msg_46.txt
index 1e22c4f..1e22c4f 100644
--- a/Lib/email/test/data/msg_46.txt
+++ b/Lib/test/test_email/data/msg_46.txt
diff --git a/Lib/email/test/test_email_codecs.py b/Lib/test/test_email/test_asian_codecs.py
index ca85f57..a4dd9a9 100644
--- a/Lib/email/test/test_email_codecs.py
+++ b/Lib/test/test_email/test_asian_codecs.py
@@ -5,7 +5,7 @@
import unittest
from test.support import run_unittest
-from email.test.test_email import TestEmailBase
+from test.test_email.test_email import TestEmailBase
from email.charset import Charset
from email.header import Header, decode_header
from email.message import Message
@@ -78,16 +78,5 @@ Hello World! =?iso-2022-jp?b?GyRCJU8lbSE8JW8hPCVrJUkhKhsoQg==?=
-def suite():
- suite = unittest.TestSuite()
- suite.addTest(unittest.makeSuite(TestEmailAsianCodecs))
- return suite
-
-
-def test_main():
- run_unittest(TestEmailAsianCodecs)
-
-
-
if __name__ == '__main__':
- unittest.main(defaultTest='suite')
+ unittest.main()
diff --git a/Lib/email/test/test_email.py b/Lib/test/test_email/test_email.py
index 605c1be..404282b 100644
--- a/Lib/email/test/test_email.py
+++ b/Lib/test/test_email/test_email.py
@@ -36,40 +36,14 @@ from email import iterators
from email import base64mime
from email import quoprimime
-from test.support import findfile, run_unittest, unlink
-from email.test import __file__ as landmark
-
+from test.support import run_unittest, unlink
+from test.test_email import openfile, TestEmailBase
NL = '\n'
EMPTYSTRING = ''
SPACE = ' '
-
-def openfile(filename, *args, **kws):
- path = os.path.join(os.path.dirname(landmark), 'data', filename)
- return open(path, *args, **kws)
-
-
-
-# Base test class
-class TestEmailBase(unittest.TestCase):
- def ndiffAssertEqual(self, first, second):
- """Like assertEqual except use ndiff for readable output."""
- if first != second:
- sfirst = str(first)
- ssecond = str(second)
- rfirst = [repr(line) for line in sfirst.splitlines()]
- rsecond = [repr(line) for line in ssecond.splitlines()]
- diff = difflib.ndiff(rfirst, rsecond)
- raise self.failureException(NL + NL.join(diff))
-
- def _msgobj(self, filename):
- with openfile(findfile(filename)) as fp:
- return email.message_from_file(fp)
-
-
-
# Test various aspects of the Message class's API
class TestMessageAPI(TestEmailBase):
def test_get_all(self):
@@ -194,7 +168,7 @@ class TestMessageAPI(TestEmailBase):
def test_message_rfc822_only(self):
# Issue 7970: message/rfc822 not in multipart parsed by
# HeaderParser caused an exception when flattened.
- with openfile(findfile('msg_46.txt')) as fp:
+ with openfile('msg_46.txt') as fp:
msgdata = fp.read()
parser = HeaderParser()
msg = parser.parsestr(msgdata)
@@ -1108,13 +1082,7 @@ Blah blah blah
# Test the basic MIMEAudio class
class TestMIMEAudio(unittest.TestCase):
def setUp(self):
- # Make sure we pick up the audiotest.au that lives in email/test/data.
- # In Python, there's an audiotest.au living in Lib/test but that isn't
- # included in some binary distros that don't include the test
- # package. The trailing empty string on the .join() is significant
- # since findfile() will do a dirname().
- datadir = os.path.join(os.path.dirname(landmark), 'data', '')
- with open(findfile('audiotest.au', datadir), 'rb') as fp:
+ with openfile('audiotest.au', 'rb') as fp:
self._audiodata = fp.read()
self._au = MIMEAudio(self._audiodata)
@@ -2405,6 +2373,13 @@ class TestMiscellaneous(TestEmailBase):
(2002, 4, 3, 14, 58, 26, 0, 1, -1, -28800))
+ def test_parsedate_accepts_time_with_dots(self):
+ eq = self.assertEqual
+ eq(utils.parsedate_tz('5 Feb 2003 13.47.26 -0800'),
+ (2003, 2, 5, 13, 47, 26, 0, 1, -1, -28800))
+ eq(utils.parsedate_tz('5 Feb 2003 13.47 -0800'),
+ (2003, 2, 5, 13, 47, 0, 0, 1, -1, -28800))
+
def test_parsedate_acceptable_to_time_functions(self):
eq = self.assertEqual
timetup = utils.parsedate('5 Feb 2003 13:47:26 -0800')
@@ -2454,6 +2429,46 @@ class TestMiscellaneous(TestEmailBase):
b = 'person@dom.ain'
self.assertEqual(utils.parseaddr(utils.formataddr((a, b))), (a, b))
+ def test_quotes_unicode_names(self):
+ # issue 1690608. email.utils.formataddr() should be rfc2047 aware.
+ name = "H\u00e4ns W\u00fcrst"
+ addr = 'person@dom.ain'
+ utf8_base64 = "=?utf-8?b?SMOkbnMgV8O8cnN0?= <person@dom.ain>"
+ latin1_quopri = "=?iso-8859-1?q?H=E4ns_W=FCrst?= <person@dom.ain>"
+ self.assertEqual(utils.formataddr((name, addr)), utf8_base64)
+ self.assertEqual(utils.formataddr((name, addr), 'iso-8859-1'),
+ latin1_quopri)
+
+ def test_accepts_any_charset_like_object(self):
+ # issue 1690608. email.utils.formataddr() should be rfc2047 aware.
+ name = "H\u00e4ns W\u00fcrst"
+ addr = 'person@dom.ain'
+ utf8_base64 = "=?utf-8?b?SMOkbnMgV8O8cnN0?= <person@dom.ain>"
+ foobar = "FOOBAR"
+ class CharsetMock:
+ def header_encode(self, string):
+ return foobar
+ mock = CharsetMock()
+ mock_expected = "%s <%s>" % (foobar, addr)
+ self.assertEqual(utils.formataddr((name, addr), mock), mock_expected)
+ self.assertEqual(utils.formataddr((name, addr), Charset('utf-8')),
+ utf8_base64)
+
+ def test_invalid_charset_like_object_raises_error(self):
+ # issue 1690608. email.utils.formataddr() should be rfc2047 aware.
+ name = "H\u00e4ns W\u00fcrst"
+ addr = 'person@dom.ain'
+        # An object without a header_encode method:
+ bad_charset = object()
+ self.assertRaises(AttributeError, utils.formataddr, (name, addr),
+ bad_charset)
+
+ def test_unicode_address_raises_error(self):
+ # issue 1690608. email.utils.formataddr() should be rfc2047 aware.
+ addr = 'pers\u00f6n@dom.in'
+ self.assertRaises(UnicodeError, utils.formataddr, (None, addr))
+ self.assertRaises(UnicodeError, utils.formataddr, ("Name", addr))
+
def test_name_with_dot(self):
x = 'John X. Doe <jxd@example.com>'
y = '"John X. Doe" <jxd@example.com>'
@@ -3286,12 +3301,7 @@ class BaseTestBytesGeneratorIdempotent:
b = BytesIO()
g = email.generator.BytesGenerator(b, maxheaderlen=0)
g.flatten(msg, unixfrom=unixfrom, linesep=self.linesep)
- self.assertByteStringsEqual(data, b.getvalue())
-
- def assertByteStringsEqual(self, str1, str2):
- # Not using self.blinesep here is intentional. This way the output
- # is more useful when the failure results in mixed line endings.
- self.assertListEqual(str1.split(b'\n'), str2.split(b'\n'))
+ self.assertEqual(data, b.getvalue())
class TestBytesGeneratorIdempotentNL(BaseTestBytesGeneratorIdempotent,
@@ -4413,7 +4423,7 @@ Content-Type: application/x-foo;
class TestSigned(TestEmailBase):
def _msg_and_obj(self, filename):
- with openfile(findfile(filename)) as fp:
+ with openfile(filename) as fp:
original = fp.read()
msg = email.message_from_string(original)
return original, msg
@@ -4445,23 +4455,5 @@ class TestSigned(TestEmailBase):
-def _testclasses():
- mod = sys.modules[__name__]
- return [getattr(mod, name) for name in dir(mod) if name.startswith('Test')]
-
-
-def suite():
- suite = unittest.TestSuite()
- for testclass in _testclasses():
- suite.addTest(unittest.makeSuite(testclass))
- return suite
-
-
-def test_main():
- for testclass in _testclasses():
- run_unittest(testclass)
-
-
-
if __name__ == '__main__':
- unittest.main(defaultTest='suite')
+ unittest.main()
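
The formataddr() assertions above boil down to the following behaviour (expected strings copied from the tests; the optional charset argument is what this patch series adds):

from email import utils

name, addr = "H\u00e4ns W\u00fcrst", "person@dom.ain"

print(utils.formataddr((name, addr)))
# =?utf-8?b?SMOkbnMgV8O8cnN0?= <person@dom.ain>

print(utils.formataddr((name, addr), 'iso-8859-1'))
# =?iso-8859-1?q?H=E4ns_W=FCrst?= <person@dom.ain>
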
diff --git a/Lib/email/test/test_email_torture.py b/Lib/test/test_email/torture_test.py
index 544b1bb..544b1bb 100644
--- a/Lib/email/test/test_email_torture.py
+++ b/Lib/test/test_email/torture_test.py
diff --git a/Lib/test/test_exceptions.py b/Lib/test/test_exceptions.py
index 76f4249..592c765 100644
--- a/Lib/test/test_exceptions.py
+++ b/Lib/test/test_exceptions.py
@@ -7,7 +7,7 @@ import pickle
import weakref
from test.support import (TESTFN, unlink, run_unittest, captured_output,
- gc_collect, cpython_only)
+ gc_collect, cpython_only, no_tracing)
# XXX This is not really enough, each *operation* should be tested!
@@ -388,6 +388,7 @@ class ExceptionTests(unittest.TestCase):
x = DerivedException(fancy_arg=42)
self.assertEqual(x.fancy_arg, 42)
+ @no_tracing
def testInfiniteRecursion(self):
def f():
return f()
@@ -631,6 +632,7 @@ class ExceptionTests(unittest.TestCase):
u.start = 1000
self.assertEqual(str(u), "can't translate characters in position 1000-4: 965230951443685724997")
+ @no_tracing
def test_badisinstance(self):
# Bug #2542: if issubclass(e, MyException) raises an exception,
# it should be ignored
@@ -741,6 +743,7 @@ class ExceptionTests(unittest.TestCase):
self.fail("MemoryError not raised")
self.assertEqual(wr(), None)
+ @no_tracing
def test_recursion_error_cleanup(self):
# Same test as above, but with "recursion exceeded" errors
class C:
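
test.support's no_tracing decorator is imported above but its implementation is not shown in this diff; as an assumption, a decorator of roughly this shape would match how the tests use it (the real helper may differ):

import functools
import sys

def no_tracing(func):
    """Run the decorated test with any active trace function disabled."""
    if not hasattr(sys, 'gettrace'):
        return func
    @functools.wraps(func)
    def wrapper(*args, **kwargs):
        original = sys.gettrace()
        sys.settrace(None)
        try:
            return func(*args, **kwargs)
        finally:
            sys.settrace(original)
    return wrapper
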
diff --git a/Lib/test/test_faulthandler.py b/Lib/test/test_faulthandler.py
new file mode 100644
index 0000000..d08347d
--- /dev/null
+++ b/Lib/test/test_faulthandler.py
@@ -0,0 +1,514 @@
+from contextlib import contextmanager
+import datetime
+import faulthandler
+import re
+import signal
+import subprocess
+import sys
+from test import support, script_helper
+import tempfile
+import unittest
+
+try:
+ import threading
+ HAVE_THREADS = True
+except ImportError:
+ HAVE_THREADS = False
+
+TIMEOUT = 0.5
+
+try:
+ from resource import setrlimit, RLIMIT_CORE, error as resource_error
+except ImportError:
+ prepare_subprocess = None
+else:
+ def prepare_subprocess():
+ # don't create core file
+ try:
+ setrlimit(RLIMIT_CORE, (0, 0))
+ except (ValueError, resource_error):
+ pass
+
+def expected_traceback(lineno1, lineno2, header, count=1):
+ regex = header
+ regex += ' File "<string>", line %s in func\n' % lineno1
+ regex += ' File "<string>", line %s in <module>' % lineno2
+ if count != 1:
+ regex = (regex + '\n') * (count - 1) + regex
+ return '^' + regex + '$'
+
+@contextmanager
+def temporary_filename():
+ filename = tempfile.mktemp()
+ try:
+ yield filename
+ finally:
+ support.unlink(filename)
+
+class FaultHandlerTests(unittest.TestCase):
+ def get_output(self, code, filename=None):
+ """
+ Run the specified code in Python (in a new child process) and read the
+ output from the standard error or from a file (if filename is set).
+ Return the output lines as a list.
+
+        Strip the reference count from the standard error for a Python debug
+        build, and replace "Current thread 0x00007f8d8fbd9700" with "Current
+        thread XXX".
+ """
+ options = {}
+ if prepare_subprocess:
+ options['preexec_fn'] = prepare_subprocess
+ process = script_helper.spawn_python('-c', code, **options)
+ stdout, stderr = process.communicate()
+ exitcode = process.wait()
+ output = support.strip_python_stderr(stdout)
+ output = output.decode('ascii', 'backslashreplace')
+ if filename:
+ self.assertEqual(output, '')
+ with open(filename, "rb") as fp:
+ output = fp.read()
+ output = output.decode('ascii', 'backslashreplace')
+ output = re.sub('Current thread 0x[0-9a-f]+',
+ 'Current thread XXX',
+ output)
+ return output.splitlines(), exitcode
+
+ def check_fatal_error(self, code, line_number, name_regex,
+ filename=None, all_threads=False, other_regex=None):
+ """
+ Check that the fault handler for fatal errors is enabled and check the
+ traceback from the child process output.
+
+ Raise an error if the output doesn't match the expected format.
+ """
+ if all_threads:
+ header = 'Current thread XXX'
+ else:
+ header = 'Traceback (most recent call first)'
+ regex = """
+^Fatal Python error: {name}
+
+{header}:
+ File "<string>", line {lineno} in <module>$
+""".strip()
+ regex = regex.format(
+ lineno=line_number,
+ name=name_regex,
+ header=re.escape(header))
+ if other_regex:
+ regex += '|' + other_regex
+ output, exitcode = self.get_output(code, filename)
+ output = '\n'.join(output)
+ self.assertRegex(output, regex)
+ self.assertNotEqual(exitcode, 0)
+
+ def test_read_null(self):
+ self.check_fatal_error("""
+import faulthandler
+faulthandler.enable()
+faulthandler._read_null()
+""".strip(),
+ 3,
+ '(?:Segmentation fault|Bus error)')
+
+ def test_sigsegv(self):
+ self.check_fatal_error("""
+import faulthandler
+faulthandler.enable()
+faulthandler._sigsegv()
+""".strip(),
+ 3,
+ 'Segmentation fault')
+
+ def test_sigabrt(self):
+ self.check_fatal_error("""
+import faulthandler
+faulthandler.enable()
+faulthandler._sigabrt()
+""".strip(),
+ 3,
+ 'Aborted')
+
+ @unittest.skipIf(sys.platform == 'win32',
+ "SIGFPE cannot be caught on Windows")
+ def test_sigfpe(self):
+ self.check_fatal_error("""
+import faulthandler
+faulthandler.enable()
+faulthandler._sigfpe()
+""".strip(),
+ 3,
+ 'Floating point exception')
+
+ @unittest.skipIf(not hasattr(faulthandler, '_sigbus'),
+ "need faulthandler._sigbus()")
+ def test_sigbus(self):
+ self.check_fatal_error("""
+import faulthandler
+faulthandler.enable()
+faulthandler._sigbus()
+""".strip(),
+ 3,
+ 'Bus error')
+
+ @unittest.skipIf(not hasattr(faulthandler, '_sigill'),
+ "need faulthandler._sigill()")
+ def test_sigill(self):
+ self.check_fatal_error("""
+import faulthandler
+faulthandler.enable()
+faulthandler._sigill()
+""".strip(),
+ 3,
+ 'Illegal instruction')
+
+ def test_fatal_error(self):
+ self.check_fatal_error("""
+import faulthandler
+faulthandler._fatal_error(b'xyz')
+""".strip(),
+ 2,
+ 'xyz')
+
+ @unittest.skipIf(not hasattr(faulthandler, '_stack_overflow'),
+ 'need faulthandler._stack_overflow()')
+ def test_stack_overflow(self):
+ self.check_fatal_error("""
+import faulthandler
+faulthandler.enable()
+faulthandler._stack_overflow()
+""".strip(),
+ 3,
+ '(?:Segmentation fault|Bus error)',
+ other_regex='unable to raise a stack overflow')
+
+ def test_gil_released(self):
+ self.check_fatal_error("""
+import faulthandler
+faulthandler.enable()
+faulthandler._read_null(True)
+""".strip(),
+ 3,
+ '(?:Segmentation fault|Bus error)')
+
+ def test_enable_file(self):
+ with temporary_filename() as filename:
+ self.check_fatal_error("""
+import faulthandler
+output = open({filename}, 'wb')
+faulthandler.enable(output)
+faulthandler._read_null()
+""".strip().format(filename=repr(filename)),
+ 4,
+ '(?:Segmentation fault|Bus error)',
+ filename=filename)
+
+ def test_enable_threads(self):
+ self.check_fatal_error("""
+import faulthandler
+faulthandler.enable(all_threads=True)
+faulthandler._read_null()
+""".strip(),
+ 3,
+ '(?:Segmentation fault|Bus error)',
+ all_threads=True)
+
+ def test_disable(self):
+ code = """
+import faulthandler
+faulthandler.enable()
+faulthandler.disable()
+faulthandler._read_null()
+""".strip()
+ not_expected = 'Fatal Python error'
+ stderr, exitcode = self.get_output(code)
+        stderr = '\n'.join(stderr)
+ self.assertTrue(not_expected not in stderr,
+ "%r is present in %r" % (not_expected, stderr))
+ self.assertNotEqual(exitcode, 0)
+
+ def test_is_enabled(self):
+ was_enabled = faulthandler.is_enabled()
+ try:
+ faulthandler.enable()
+ self.assertTrue(faulthandler.is_enabled())
+ faulthandler.disable()
+ self.assertFalse(faulthandler.is_enabled())
+ finally:
+ if was_enabled:
+ faulthandler.enable()
+ else:
+ faulthandler.disable()
+
+ def check_dump_traceback(self, filename):
+ """
+        Explicitly call the dump_traceback() function and check its output.
+ Raise an error if the output doesn't match the expected format.
+ """
+ code = """
+import faulthandler
+
+def funcB():
+ if {has_filename}:
+ with open({filename}, "wb") as fp:
+ faulthandler.dump_traceback(fp)
+ else:
+ faulthandler.dump_traceback()
+
+def funcA():
+ funcB()
+
+funcA()
+""".strip()
+ code = code.format(
+ filename=repr(filename),
+ has_filename=bool(filename),
+ )
+ if filename:
+ lineno = 6
+ else:
+ lineno = 8
+ expected = [
+ 'Traceback (most recent call first):',
+ ' File "<string>", line %s in funcB' % lineno,
+ ' File "<string>", line 11 in funcA',
+ ' File "<string>", line 13 in <module>'
+ ]
+ trace, exitcode = self.get_output(code, filename)
+ self.assertEqual(trace, expected)
+ self.assertEqual(exitcode, 0)
+
+ def test_dump_traceback(self):
+ self.check_dump_traceback(None)
+
+ def test_dump_traceback_file(self):
+ with temporary_filename() as filename:
+ self.check_dump_traceback(filename)
+
+ @unittest.skipIf(not HAVE_THREADS, 'need threads')
+ def check_dump_traceback_threads(self, filename):
+ """
+        Explicitly call dump_traceback(all_threads=True) and check the output.
+ Raise an error if the output doesn't match the expected format.
+ """
+ code = """
+import faulthandler
+from threading import Thread, Event
+import time
+
+def dump():
+ if {filename}:
+ with open({filename}, "wb") as fp:
+ faulthandler.dump_traceback(fp, all_threads=True)
+ else:
+ faulthandler.dump_traceback(all_threads=True)
+
+class Waiter(Thread):
+ # avoid blocking if the main thread raises an exception.
+ daemon = True
+
+ def __init__(self):
+ Thread.__init__(self)
+ self.running = Event()
+ self.stop = Event()
+
+ def run(self):
+ self.running.set()
+ self.stop.wait()
+
+waiter = Waiter()
+waiter.start()
+waiter.running.wait()
+dump()
+waiter.stop.set()
+waiter.join()
+""".strip()
+ code = code.format(filename=repr(filename))
+ output, exitcode = self.get_output(code, filename)
+ output = '\n'.join(output)
+ if filename:
+ lineno = 8
+ else:
+ lineno = 10
+ regex = """
+^Thread 0x[0-9a-f]+:
+(?: File ".*threading.py", line [0-9]+ in [_a-z]+
+){{1,3}} File "<string>", line 23 in run
+ File ".*threading.py", line [0-9]+ in _bootstrap_inner
+ File ".*threading.py", line [0-9]+ in _bootstrap
+
+Current thread XXX:
+ File "<string>", line {lineno} in dump
+ File "<string>", line 28 in <module>$
+""".strip()
+ regex = regex.format(lineno=lineno)
+ self.assertRegex(output, regex)
+ self.assertEqual(exitcode, 0)
+
+ def test_dump_traceback_threads(self):
+ self.check_dump_traceback_threads(None)
+
+ def test_dump_traceback_threads_file(self):
+ with temporary_filename() as filename:
+ self.check_dump_traceback_threads(filename)
+
+ def _check_dump_tracebacks_later(self, repeat, cancel, filename, loops):
+ """
+ Check how many times the traceback is written in timeout x 2.5 seconds,
+ or timeout x 3.5 seconds if cancel is True: 1, 2 or 3 times depending
+ on repeat and cancel options.
+
+        Raise an error if the output doesn't match the expected format.
+ """
+ timeout_str = str(datetime.timedelta(seconds=TIMEOUT))
+ code = """
+import faulthandler
+import time
+
+def func(timeout, repeat, cancel, file, loops):
+ for loop in range(loops):
+ faulthandler.dump_tracebacks_later(timeout, repeat=repeat, file=file)
+ if cancel:
+ faulthandler.cancel_dump_tracebacks_later()
+ time.sleep(timeout * 2.5)
+ faulthandler.cancel_dump_tracebacks_later()
+
+timeout = {timeout}
+repeat = {repeat}
+cancel = {cancel}
+loops = {loops}
+if {has_filename}:
+ file = open({filename}, "wb")
+else:
+ file = None
+func(timeout, repeat, cancel, file, loops)
+if file is not None:
+ file.close()
+""".strip()
+ code = code.format(
+ timeout=TIMEOUT,
+ repeat=repeat,
+ cancel=cancel,
+ loops=loops,
+ has_filename=bool(filename),
+ filename=repr(filename),
+ )
+ trace, exitcode = self.get_output(code, filename)
+ trace = '\n'.join(trace)
+
+ if not cancel:
+ count = loops
+ if repeat:
+ count *= 2
+ header = r'Timeout \(%s\)!\nThread 0x[0-9a-f]+:\n' % timeout_str
+ regex = expected_traceback(9, 20, header, count=count)
+ self.assertRegex(trace, regex)
+ else:
+ self.assertEqual(trace, '')
+ self.assertEqual(exitcode, 0)
+
+ @unittest.skipIf(not hasattr(faulthandler, 'dump_tracebacks_later'),
+ 'need faulthandler.dump_tracebacks_later()')
+ def check_dump_tracebacks_later(self, repeat=False, cancel=False,
+ file=False, twice=False):
+ if twice:
+ loops = 2
+ else:
+ loops = 1
+ if file:
+ with temporary_filename() as filename:
+ self._check_dump_tracebacks_later(repeat, cancel,
+ filename, loops)
+ else:
+ self._check_dump_tracebacks_later(repeat, cancel, None, loops)
+
+ def test_dump_tracebacks_later(self):
+ self.check_dump_tracebacks_later()
+
+ def test_dump_tracebacks_later_repeat(self):
+ self.check_dump_tracebacks_later(repeat=True)
+
+ def test_dump_tracebacks_later_cancel(self):
+ self.check_dump_tracebacks_later(cancel=True)
+
+ def test_dump_tracebacks_later_file(self):
+ self.check_dump_tracebacks_later(file=True)
+
+ def test_dump_tracebacks_later_twice(self):
+ self.check_dump_tracebacks_later(twice=True)
+
+ @unittest.skipIf(not hasattr(faulthandler, "register"),
+ "need faulthandler.register")
+ def check_register(self, filename=False, all_threads=False,
+ unregister=False):
+ """
+ Register a handler displaying the traceback on a user signal. Raise the
+ signal and check the written traceback.
+
+ Raise an error if the output doesn't match the expected format.
+ """
+ signum = signal.SIGUSR1
+ code = """
+import faulthandler
+import os
+import signal
+
+def func(signum):
+ os.kill(os.getpid(), signum)
+
+signum = {signum}
+unregister = {unregister}
+if {has_filename}:
+ file = open({filename}, "wb")
+else:
+ file = None
+faulthandler.register(signum, file=file, all_threads={all_threads})
+if unregister:
+ faulthandler.unregister(signum)
+func(signum)
+if file is not None:
+ file.close()
+""".strip()
+ code = code.format(
+ filename=repr(filename),
+ has_filename=bool(filename),
+ all_threads=all_threads,
+ signum=signum,
+ unregister=unregister,
+ )
+ trace, exitcode = self.get_output(code, filename)
+ trace = '\n'.join(trace)
+ if not unregister:
+ if all_threads:
+ regex = 'Current thread XXX:\n'
+ else:
+ regex = 'Traceback \(most recent call first\):\n'
+ regex = expected_traceback(6, 17, regex)
+ self.assertRegex(trace, regex)
+ else:
+ self.assertEqual(trace, '')
+ if unregister:
+ self.assertNotEqual(exitcode, 0)
+ else:
+ self.assertEqual(exitcode, 0)
+
+ def test_register(self):
+ self.check_register()
+
+ def test_unregister(self):
+ self.check_register(unregister=True)
+
+ def test_register_file(self):
+ with temporary_filename() as filename:
+ self.check_register(filename=filename)
+
+ def test_register_threads(self):
+ self.check_register(all_threads=True)
+
+
+def test_main():
+ support.run_unittest(FaultHandlerTests)
+
+if __name__ == "__main__":
+ test_main()
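
A minimal sketch (not part of the patch) of the public faulthandler calls the new test file drives through child processes:

import faulthandler
import sys

faulthandler.enable(file=sys.stderr, all_threads=True)  # dump tracebacks on fatal signals
faulthandler.dump_traceback(file=sys.stderr)            # dump the current tracebacks immediately
print(faulthandler.is_enabled())                        # True
faulthandler.disable()
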
diff --git a/Lib/test/test_fileinput.py b/Lib/test/test_fileinput.py
index f312882..76e4d16 100644
--- a/Lib/test/test_fileinput.py
+++ b/Lib/test/test_fileinput.py
@@ -2,18 +2,31 @@
Tests for fileinput module.
Nick Mathewson
'''
-
+import os
+import sys
+import re
+import fileinput
+import collections
+import gzip
+import types
+import codecs
import unittest
-from test.support import verbose, TESTFN, run_unittest
-from test.support import unlink as safe_unlink
-import sys, re
+
+try:
+ import bz2
+except ImportError:
+ bz2 = None
+
from io import StringIO
from fileinput import FileInput, hook_encoded
+from test.support import verbose, TESTFN, run_unittest
+from test.support import unlink as safe_unlink
+
+
# The fileinput module has 2 interfaces: the FileInput class which does
# all the work, and a few functions (input, etc.) that use a global _state
-# variable. We only test the FileInput class, since the other functions
-# only provide a thin facade over FileInput.
+# variable.
# Write lines (a list of lines) to temp file number i, and return the
# temp file's name.
@@ -121,7 +134,16 @@ class BufferSizesTests(unittest.TestCase):
self.assertEqual(int(m.group(1)), fi.filelineno())
fi.close()
+class UnconditionallyRaise:
+ def __init__(self, exception_type):
+ self.exception_type = exception_type
+ self.invoked = False
+ def __call__(self, *args, **kwargs):
+ self.invoked = True
+ raise self.exception_type()
+
class FileInputTests(unittest.TestCase):
+
def test_zero_byte_files(self):
t1 = t2 = t3 = t4 = None
try:
@@ -219,17 +241,20 @@ class FileInputTests(unittest.TestCase):
self.fail("FileInput should check openhook for being callable")
except ValueError:
pass
- # XXX The rot13 codec was removed.
- # So this test needs to be changed to use something else.
- # (Or perhaps the API needs to change so we can just pass
- # an encoding rather than using a hook?)
-## try:
-## t1 = writeTmp(1, ["A\nB"], mode="wb")
-## fi = FileInput(files=t1, openhook=hook_encoded("rot13"))
-## lines = list(fi)
-## self.assertEqual(lines, ["N\n", "O"])
-## finally:
-## remove_tempfiles(t1)
+
+ class CustomOpenHook:
+ def __init__(self):
+ self.invoked = False
+ def __call__(self, *args):
+ self.invoked = True
+ return open(*args)
+
+ t = writeTmp(1, ["\n"])
+ self.addCleanup(remove_tempfiles, t)
+ custom_open_hook = CustomOpenHook()
+ with FileInput([t], openhook=custom_open_hook) as fi:
+ fi.readline()
+ self.assertTrue(custom_open_hook.invoked, "openhook not invoked")
def test_context_manager(self):
try:
@@ -254,9 +279,584 @@ class FileInputTests(unittest.TestCase):
finally:
remove_tempfiles(t1)
+ def test_empty_files_list_specified_to_constructor(self):
+ with FileInput(files=[]) as fi:
+ self.assertEqual(fi._files, ('-',))
+
+ def test__getitem__(self):
+ """Tests invoking FileInput.__getitem__() with the current
+ line number"""
+ t = writeTmp(1, ["line1\n", "line2\n"])
+ self.addCleanup(remove_tempfiles, t)
+ with FileInput(files=[t]) as fi:
+ retval1 = fi[0]
+ self.assertEqual(retval1, "line1\n")
+ retval2 = fi[1]
+ self.assertEqual(retval2, "line2\n")
+
+ def test__getitem__invalid_key(self):
+ """Tests invoking FileInput.__getitem__() with an index unequal to
+ the line number"""
+ t = writeTmp(1, ["line1\n", "line2\n"])
+ self.addCleanup(remove_tempfiles, t)
+ with FileInput(files=[t]) as fi:
+ with self.assertRaises(RuntimeError) as cm:
+ fi[1]
+ self.assertEqual(cm.exception.args, ("accessing lines out of order",))
+
+ def test__getitem__eof(self):
+ """Tests invoking FileInput.__getitem__() with the line number but at
+ end-of-input"""
+ t = writeTmp(1, [])
+ self.addCleanup(remove_tempfiles, t)
+ with FileInput(files=[t]) as fi:
+ with self.assertRaises(IndexError) as cm:
+ fi[0]
+ self.assertEqual(cm.exception.args, ("end of input reached",))
+
+ def test_nextfile_oserror_deleting_backup(self):
+ """Tests invoking FileInput.nextfile() when the attempt to delete
+ the backup file would raise OSError. This error is expected to be
+ silently ignored"""
+
+ os_unlink_orig = os.unlink
+ os_unlink_replacement = UnconditionallyRaise(OSError)
+ try:
+ t = writeTmp(1, ["\n"])
+ self.addCleanup(remove_tempfiles, t)
+ with FileInput(files=[t], inplace=True) as fi:
+ next(fi) # make sure the file is opened
+ os.unlink = os_unlink_replacement
+ fi.nextfile()
+ finally:
+ os.unlink = os_unlink_orig
+
+ # sanity check to make sure that our test scenario was actually hit
+ self.assertTrue(os_unlink_replacement.invoked,
+ "os.unlink() was not invoked")
+
+ def test_readline_os_fstat_raises_OSError(self):
+ """Tests invoking FileInput.readline() when os.fstat() raises OSError.
+ This exception should be silently discarded."""
+
+ os_fstat_orig = os.fstat
+ os_fstat_replacement = UnconditionallyRaise(OSError)
+ try:
+ t = writeTmp(1, ["\n"])
+ self.addCleanup(remove_tempfiles, t)
+ with FileInput(files=[t], inplace=True) as fi:
+ os.fstat = os_fstat_replacement
+ fi.readline()
+ finally:
+ os.fstat = os_fstat_orig
+
+ # sanity check to make sure that our test scenario was actually hit
+ self.assertTrue(os_fstat_replacement.invoked,
+ "os.fstat() was not invoked")
+
+ @unittest.skipIf(not hasattr(os, "chmod"), "os.chmod does not exist")
+ def test_readline_os_chmod_raises_OSError(self):
+ """Tests invoking FileInput.readline() when os.chmod() raises OSError.
+ This exception should be silently discarded."""
+
+ os_chmod_orig = os.chmod
+ os_chmod_replacement = UnconditionallyRaise(OSError)
+ try:
+ t = writeTmp(1, ["\n"])
+ self.addCleanup(remove_tempfiles, t)
+ with FileInput(files=[t], inplace=True) as fi:
+ os.chmod = os_chmod_replacement
+ fi.readline()
+ finally:
+ os.chmod = os_chmod_orig
+
+ # sanity check to make sure that our test scenario was actually hit
+ self.assertTrue(os_chmod_replacement.invoked,
+ "os.fstat() was not invoked")
+
+ def test_fileno_when_ValueError_raised(self):
+ class FilenoRaisesValueError(UnconditionallyRaise):
+ def __init__(self):
+ UnconditionallyRaise.__init__(self, ValueError)
+ def fileno(self):
+ self.__call__()
+
+ unconditionally_raise_ValueError = FilenoRaisesValueError()
+ t = writeTmp(1, ["\n"])
+ self.addCleanup(remove_tempfiles, t)
+ with FileInput(files=[t]) as fi:
+ file_backup = fi._file
+ try:
+ fi._file = unconditionally_raise_ValueError
+ result = fi.fileno()
+ finally:
+ fi._file = file_backup # make sure the file gets cleaned up
+
+ # sanity check to make sure that our test scenario was actually hit
+ self.assertTrue(unconditionally_raise_ValueError.invoked,
+ "_file.fileno() was not invoked")
+
+ self.assertEqual(result, -1, "fileno() should return -1")
+
+class MockFileInput:
+ """A class that mocks out fileinput.FileInput for use during unit tests"""
+
+ def __init__(self, files=None, inplace=False, backup="", bufsize=0,
+ mode="r", openhook=None):
+ self.files = files
+ self.inplace = inplace
+ self.backup = backup
+ self.bufsize = bufsize
+ self.mode = mode
+ self.openhook = openhook
+ self._file = None
+ self.invocation_counts = collections.defaultdict(lambda: 0)
+ self.return_values = {}
+
+ def close(self):
+ self.invocation_counts["close"] += 1
+
+ def nextfile(self):
+ self.invocation_counts["nextfile"] += 1
+ return self.return_values["nextfile"]
+
+ def filename(self):
+ self.invocation_counts["filename"] += 1
+ return self.return_values["filename"]
+
+ def lineno(self):
+ self.invocation_counts["lineno"] += 1
+ return self.return_values["lineno"]
+
+ def filelineno(self):
+ self.invocation_counts["filelineno"] += 1
+ return self.return_values["filelineno"]
+
+ def fileno(self):
+ self.invocation_counts["fileno"] += 1
+ return self.return_values["fileno"]
+
+ def isfirstline(self):
+ self.invocation_counts["isfirstline"] += 1
+ return self.return_values["isfirstline"]
+
+ def isstdin(self):
+ self.invocation_counts["isstdin"] += 1
+ return self.return_values["isstdin"]
+
+class BaseFileInputGlobalMethodsTest(unittest.TestCase):
+ """Base class for unit tests for the global function of
+ the fileinput module."""
+
+ def setUp(self):
+ self._orig_state = fileinput._state
+ self._orig_FileInput = fileinput.FileInput
+ fileinput.FileInput = MockFileInput
+
+ def tearDown(self):
+ fileinput.FileInput = self._orig_FileInput
+ fileinput._state = self._orig_state
+
+ def assertExactlyOneInvocation(self, mock_file_input, method_name):
+ # assert that the method with the given name was invoked once
+ actual_count = mock_file_input.invocation_counts[method_name]
+ self.assertEqual(actual_count, 1, method_name)
+ # assert that no other unexpected methods were invoked
+ actual_total_count = len(mock_file_input.invocation_counts)
+ self.assertEqual(actual_total_count, 1)
+
+class Test_fileinput_input(BaseFileInputGlobalMethodsTest):
+ """Unit tests for fileinput.input()"""
+
+ def test_state_is_not_None_and_state_file_is_not_None(self):
+ """Tests invoking fileinput.input() when fileinput._state is not None
+ and its _file attribute is also not None. Expect RuntimeError to
+ be raised with a meaningful error message and for fileinput._state
+ to *not* be modified."""
+ instance = MockFileInput()
+ instance._file = object()
+ fileinput._state = instance
+ with self.assertRaises(RuntimeError) as cm:
+ fileinput.input()
+ self.assertEqual(("input() already active",), cm.exception.args)
+ self.assertIs(instance, fileinput._state, "fileinput._state")
+
+ def test_state_is_not_None_and_state_file_is_None(self):
+ """Tests invoking fileinput.input() when fileinput._state is not None
+ but its _file attribute *is* None. Expect it to create and return
+ a new fileinput.FileInput object with all method parameters passed
+ explicitly to the __init__() method; also ensure that
+ fileinput._state is set to the returned instance."""
+ instance = MockFileInput()
+ instance._file = None
+ fileinput._state = instance
+ self.do_test_call_input()
+
+ def test_state_is_None(self):
+ """Tests invoking fileinput.input() when fileinput._state is None
+ Expect it to create and return a new fileinput.FileInput object
+ with all method parameters passed explicitly to the __init__()
+ method; also ensure that fileinput._state is set to the returned
+ instance."""
+ fileinput._state = None
+ self.do_test_call_input()
+
+ def do_test_call_input(self):
+ """Tests that fileinput.input() creates a new fileinput.FileInput
+ object, passing the given parameters unmodified to
+ fileinput.FileInput.__init__(). Note that this test depends on the
+ monkey patching of fileinput.FileInput done by setUp()."""
+ files = object()
+ inplace = object()
+ backup = object()
+ bufsize = object()
+ mode = object()
+ openhook = object()
+
+ # call fileinput.input() with different values for each argument
+ result = fileinput.input(files=files, inplace=inplace, backup=backup,
+ bufsize=bufsize,
+ mode=mode, openhook=openhook)
+
+ # ensure fileinput._state was set to the returned object
+ self.assertIs(result, fileinput._state, "fileinput._state")
+
+ # ensure the parameters to fileinput.input() were passed directly
+ # to FileInput.__init__()
+ self.assertIs(files, result.files, "files")
+ self.assertIs(inplace, result.inplace, "inplace")
+ self.assertIs(backup, result.backup, "backup")
+ self.assertIs(bufsize, result.bufsize, "bufsize")
+ self.assertIs(mode, result.mode, "mode")
+ self.assertIs(openhook, result.openhook, "openhook")
+
+class Test_fileinput_close(BaseFileInputGlobalMethodsTest):
+ """Unit tests for fileinput.close()"""
+
+ def test_state_is_None(self):
+ """Tests that fileinput.close() does nothing if fileinput._state
+ is None"""
+ fileinput._state = None
+ fileinput.close()
+ self.assertIsNone(fileinput._state)
+
+ def test_state_is_not_None(self):
+ """Tests that fileinput.close() invokes close() on fileinput._state
+ and sets _state=None"""
+ instance = MockFileInput()
+ fileinput._state = instance
+ fileinput.close()
+ self.assertExactlyOneInvocation(instance, "close")
+ self.assertIsNone(fileinput._state)
+
+class Test_fileinput_nextfile(BaseFileInputGlobalMethodsTest):
+ """Unit tests for fileinput.nextfile()"""
+
+ def test_state_is_None(self):
+ """Tests fileinput.nextfile() when fileinput._state is None.
+ Ensure that it raises RuntimeError with a meaningful error message
+ and does not modify fileinput._state"""
+ fileinput._state = None
+ with self.assertRaises(RuntimeError) as cm:
+ fileinput.nextfile()
+ self.assertEqual(("no active input()",), cm.exception.args)
+ self.assertIsNone(fileinput._state)
+
+ def test_state_is_not_None(self):
+ """Tests fileinput.nextfile() when fileinput._state is not None.
+ Ensure that it invokes fileinput._state.nextfile() exactly once,
+ returns whatever it returns, and does not modify fileinput._state
+ to point to a different object."""
+ nextfile_retval = object()
+ instance = MockFileInput()
+ instance.return_values["nextfile"] = nextfile_retval
+ fileinput._state = instance
+ retval = fileinput.nextfile()
+ self.assertExactlyOneInvocation(instance, "nextfile")
+ self.assertIs(retval, nextfile_retval)
+ self.assertIs(fileinput._state, instance)
+
+class Test_fileinput_filename(BaseFileInputGlobalMethodsTest):
+ """Unit tests for fileinput.filename()"""
+
+ def test_state_is_None(self):
+ """Tests fileinput.filename() when fileinput._state is None.
+ Ensure that it raises RuntimeError with a meaningful error message
+ and does not modify fileinput._state"""
+ fileinput._state = None
+ with self.assertRaises(RuntimeError) as cm:
+ fileinput.filename()
+ self.assertEqual(("no active input()",), cm.exception.args)
+ self.assertIsNone(fileinput._state)
+
+ def test_state_is_not_None(self):
+ """Tests fileinput.filename() when fileinput._state is not None.
+ Ensure that it invokes fileinput._state.filename() exactly once,
+ returns whatever it returns, and does not modify fileinput._state
+ to point to a different object."""
+ filename_retval = object()
+ instance = MockFileInput()
+ instance.return_values["filename"] = filename_retval
+ fileinput._state = instance
+ retval = fileinput.filename()
+ self.assertExactlyOneInvocation(instance, "filename")
+ self.assertIs(retval, filename_retval)
+ self.assertIs(fileinput._state, instance)
+
+class Test_fileinput_lineno(BaseFileInputGlobalMethodsTest):
+ """Unit tests for fileinput.lineno()"""
+
+ def test_state_is_None(self):
+ """Tests fileinput.lineno() when fileinput._state is None.
+ Ensure that it raises RuntimeError with a meaningful error message
+ and does not modify fileinput._state"""
+ fileinput._state = None
+ with self.assertRaises(RuntimeError) as cm:
+ fileinput.lineno()
+ self.assertEqual(("no active input()",), cm.exception.args)
+ self.assertIsNone(fileinput._state)
+
+ def test_state_is_not_None(self):
+ """Tests fileinput.lineno() when fileinput._state is not None.
+ Ensure that it invokes fileinput._state.lineno() exactly once,
+ returns whatever it returns, and does not modify fileinput._state
+ to point to a different object."""
+ lineno_retval = object()
+ instance = MockFileInput()
+ instance.return_values["lineno"] = lineno_retval
+ fileinput._state = instance
+ retval = fileinput.lineno()
+ self.assertExactlyOneInvocation(instance, "lineno")
+ self.assertIs(retval, lineno_retval)
+ self.assertIs(fileinput._state, instance)
+
+class Test_fileinput_filelineno(BaseFileInputGlobalMethodsTest):
+ """Unit tests for fileinput.filelineno()"""
+
+ def test_state_is_None(self):
+ """Tests fileinput.filelineno() when fileinput._state is None.
+ Ensure that it raises RuntimeError with a meaningful error message
+ and does not modify fileinput._state"""
+ fileinput._state = None
+ with self.assertRaises(RuntimeError) as cm:
+ fileinput.filelineno()
+ self.assertEqual(("no active input()",), cm.exception.args)
+ self.assertIsNone(fileinput._state)
+
+ def test_state_is_not_None(self):
+ """Tests fileinput.filelineno() when fileinput._state is not None.
+ Ensure that it invokes fileinput._state.filelineno() exactly once,
+ returns whatever it returns, and does not modify fileinput._state
+ to point to a different object."""
+ filelineno_retval = object()
+ instance = MockFileInput()
+ instance.return_values["filelineno"] = filelineno_retval
+ fileinput._state = instance
+ retval = fileinput.filelineno()
+ self.assertExactlyOneInvocation(instance, "filelineno")
+ self.assertIs(retval, filelineno_retval)
+ self.assertIs(fileinput._state, instance)
+
+class Test_fileinput_fileno(BaseFileInputGlobalMethodsTest):
+ """Unit tests for fileinput.fileno()"""
+
+ def test_state_is_None(self):
+ """Tests fileinput.fileno() when fileinput._state is None.
+ Ensure that it raises RuntimeError with a meaningful error message
+ and does not modify fileinput._state"""
+ fileinput._state = None
+ with self.assertRaises(RuntimeError) as cm:
+ fileinput.fileno()
+ self.assertEqual(("no active input()",), cm.exception.args)
+ self.assertIsNone(fileinput._state)
+
+ def test_state_is_not_None(self):
+ """Tests fileinput.fileno() when fileinput._state is not None.
+ Ensure that it invokes fileinput._state.fileno() exactly once,
+ returns whatever it returns, and does not modify fileinput._state
+ to point to a different object."""
+ fileno_retval = object()
+ instance = MockFileInput()
+ instance.return_values["fileno"] = fileno_retval
+ instance.fileno_retval = fileno_retval
+ fileinput._state = instance
+ retval = fileinput.fileno()
+ self.assertExactlyOneInvocation(instance, "fileno")
+ self.assertIs(retval, fileno_retval)
+ self.assertIs(fileinput._state, instance)
+
+class Test_fileinput_isfirstline(BaseFileInputGlobalMethodsTest):
+ """Unit tests for fileinput.isfirstline()"""
+
+ def test_state_is_None(self):
+ """Tests fileinput.isfirstline() when fileinput._state is None.
+ Ensure that it raises RuntimeError with a meaningful error message
+ and does not modify fileinput._state"""
+ fileinput._state = None
+ with self.assertRaises(RuntimeError) as cm:
+ fileinput.isfirstline()
+ self.assertEqual(("no active input()",), cm.exception.args)
+ self.assertIsNone(fileinput._state)
+
+ def test_state_is_not_None(self):
+ """Tests fileinput.isfirstline() when fileinput._state is not None.
+ Ensure that it invokes fileinput._state.isfirstline() exactly once,
+ returns whatever it returns, and does not modify fileinput._state
+ to point to a different object."""
+ isfirstline_retval = object()
+ instance = MockFileInput()
+ instance.return_values["isfirstline"] = isfirstline_retval
+ fileinput._state = instance
+ retval = fileinput.isfirstline()
+ self.assertExactlyOneInvocation(instance, "isfirstline")
+ self.assertIs(retval, isfirstline_retval)
+ self.assertIs(fileinput._state, instance)
+
+class Test_fileinput_isstdin(BaseFileInputGlobalMethodsTest):
+ """Unit tests for fileinput.isstdin()"""
+
+ def test_state_is_None(self):
+ """Tests fileinput.isstdin() when fileinput._state is None.
+ Ensure that it raises RuntimeError with a meaningful error message
+ and does not modify fileinput._state"""
+ fileinput._state = None
+ with self.assertRaises(RuntimeError) as cm:
+ fileinput.isstdin()
+ self.assertEqual(("no active input()",), cm.exception.args)
+ self.assertIsNone(fileinput._state)
+
+ def test_state_is_not_None(self):
+ """Tests fileinput.isstdin() when fileinput._state is not None.
+ Ensure that it invokes fileinput._state.isstdin() exactly once,
+ returns whatever it returns, and does not modify fileinput._state
+ to point to a different object."""
+ isstdin_retval = object()
+ instance = MockFileInput()
+ instance.return_values["isstdin"] = isstdin_retval
+ fileinput._state = instance
+ retval = fileinput.isstdin()
+ self.assertExactlyOneInvocation(instance, "isstdin")
+ self.assertIs(retval, isstdin_retval)
+ self.assertIs(fileinput._state, instance)
+
+class InvocationRecorder:
+ def __init__(self):
+ self.invocation_count = 0
+ def __call__(self, *args, **kwargs):
+ self.invocation_count += 1
+ self.last_invocation = (args, kwargs)
+
+class Test_hook_compressed(unittest.TestCase):
+ """Unit tests for fileinput.hook_compressed()"""
+
+ def setUp(self):
+ self.fake_open = InvocationRecorder()
+
+ def test_empty_string(self):
+ self.do_test_use_builtin_open("", 1)
+
+ def test_no_ext(self):
+ self.do_test_use_builtin_open("abcd", 2)
+
+ def test_gz_ext_fake(self):
+ original_open = gzip.open
+ gzip.open = self.fake_open
+ try:
+ result = fileinput.hook_compressed("test.gz", 3)
+ finally:
+ gzip.open = original_open
+
+ self.assertEqual(self.fake_open.invocation_count, 1)
+ self.assertEqual(self.fake_open.last_invocation, (("test.gz", 3), {}))
+
+ @unittest.skipUnless(bz2, "Requires bz2")
+ def test_bz2_ext_fake(self):
+ original_open = bz2.BZ2File
+ bz2.BZ2File = self.fake_open
+ try:
+ result = fileinput.hook_compressed("test.bz2", 4)
+ finally:
+ bz2.BZ2File = original_open
+
+ self.assertEqual(self.fake_open.invocation_count, 1)
+ self.assertEqual(self.fake_open.last_invocation, (("test.bz2", 4), {}))
+
+ def test_blah_ext(self):
+ self.do_test_use_builtin_open("abcd.blah", 5)
+
+ def test_gz_ext_builtin(self):
+ self.do_test_use_builtin_open("abcd.Gz", 6)
+
+ def test_bz2_ext_builtin(self):
+ self.do_test_use_builtin_open("abcd.Bz2", 7)
+
+ def do_test_use_builtin_open(self, filename, mode):
+ original_open = self.replace_builtin_open(self.fake_open)
+ try:
+ result = fileinput.hook_compressed(filename, mode)
+ finally:
+ self.replace_builtin_open(original_open)
+
+ self.assertEqual(self.fake_open.invocation_count, 1)
+ self.assertEqual(self.fake_open.last_invocation,
+ ((filename, mode), {}))
+
+ @staticmethod
+ def replace_builtin_open(new_open_func):
+ builtins_type = type(__builtins__)
+ if builtins_type is dict:
+ original_open = __builtins__["open"]
+ __builtins__["open"] = new_open_func
+ elif builtins_type is types.ModuleType:
+ original_open = __builtins__.open
+ __builtins__.open = new_open_func
+ else:
+ raise RuntimeError(
+ "unknown __builtins__ type: %r (unable to replace open)" %
+ builtins_type)
+
+ return original_open
+
+class Test_hook_encoded(unittest.TestCase):
+ """Unit tests for fileinput.hook_encoded()"""
+
+ def test(self):
+ encoding = object()
+ result = fileinput.hook_encoded(encoding)
+
+ fake_open = InvocationRecorder()
+ original_open = codecs.open
+ codecs.open = fake_open
+ try:
+ filename = object()
+ mode = object()
+ open_result = result(filename, mode)
+ finally:
+ codecs.open = original_open
+
+ self.assertEqual(fake_open.invocation_count, 1)
+
+ args = fake_open.last_invocation[0]
+ self.assertIs(args[0], filename)
+ self.assertIs(args[1], mode)
+ self.assertIs(args[2], encoding)
def test_main():
- run_unittest(BufferSizesTests, FileInputTests)
+ run_unittest(
+ BufferSizesTests,
+ FileInputTests,
+ Test_fileinput_input,
+ Test_fileinput_close,
+ Test_fileinput_nextfile,
+ Test_fileinput_filename,
+ Test_fileinput_lineno,
+ Test_fileinput_filelineno,
+ Test_fileinput_fileno,
+ Test_fileinput_isfirstline,
+ Test_fileinput_isstdin,
+ Test_hook_compressed,
+ Test_hook_encoded,
+ )
if __name__ == "__main__":
test_main()
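As background, the new Test_fileinput_* classes above cover the module-level half of fileinput (input(), filename(), lineno(), etc.), which simply delegates to a global FileInput instance kept in fileinput._state. A minimal usage sketch of that interface; 'a.txt' and 'b.txt' are placeholder file names, not files from this patch:

    import fileinput

    # Iterate over the lines of several files as a single stream.
    for line in fileinput.input(['a.txt', 'b.txt']):
        # The module-level helpers report on the line that was just read.
        print(fileinput.filename(), fileinput.filelineno(), line, end='')
    fileinput.close()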
diff --git a/Lib/test/test_float.py b/Lib/test/test_float.py
index 1968b8a..4e6f854 100644
--- a/Lib/test/test_float.py
+++ b/Lib/test/test_float.py
@@ -88,7 +88,7 @@ class GeneralFloatCases(unittest.TestCase):
self.assertRaises(ValueError, float, " -0x3.p-1 ")
self.assertRaises(ValueError, float, " +0x3.p-1 ")
self.assertEqual(float(" 25.e-1 "), 2.5)
- self.assertEqual(support.fcmp(float(" .25e-1 "), .025), 0)
+ self.assertAlmostEqual(float(" .25e-1 "), .025)
def test_floatconversion(self):
# Make sure that calls to __float__() work properly
diff --git a/Lib/test/test_ftplib.py b/Lib/test/test_ftplib.py
index 9d2eab7..9edb197 100644
--- a/Lib/test/test_ftplib.py
+++ b/Lib/test/test_ftplib.py
@@ -608,6 +608,30 @@ class TestFTPClass(TestCase):
self.assertEqual(self.server.handler_instance.last_received_cmd, 'quit')
self.assertFalse(is_client_connected())
+ def test_source_address(self):
+ self.client.quit()
+ port = support.find_unused_port()
+ try:
+ self.client.connect(self.server.host, self.server.port,
+ source_address=(HOST, port))
+ self.assertEqual(self.client.sock.getsockname()[1], port)
+ self.client.quit()
+ except IOError as e:
+ if e.errno == errno.EADDRINUSE:
+ self.skipTest("couldn't bind to port %d" % port)
+ raise
+
+ def test_source_address_passive_connection(self):
+ port = support.find_unused_port()
+ self.client.source_address = (HOST, port)
+ try:
+ with self.client.transfercmd('list') as sock:
+ self.assertEqual(sock.getsockname()[1], port)
+ except IOError as e:
+ if e.errno == errno.EADDRINUSE:
+ self.skipTest("couldn't bind to port %d" % port)
+ raise
+
def test_parse257(self):
self.assertEqual(ftplib.parse257('257 "/foo/bar"'), '/foo/bar')
self.assertEqual(ftplib.parse257('257 "/foo/bar" created'), '/foo/bar')
diff --git a/Lib/test/test_functools.py b/Lib/test/test_functools.py
index 73a77d6..c50336e 100644
--- a/Lib/test/test_functools.py
+++ b/Lib/test/test_functools.py
@@ -435,18 +435,81 @@ class TestReduce(unittest.TestCase):
self.assertEqual(self.func(add, d), "".join(d.keys()))
class TestCmpToKey(unittest.TestCase):
+
def test_cmp_to_key(self):
+ def cmp1(x, y):
+ return (x > y) - (x < y)
+ key = functools.cmp_to_key(cmp1)
+ self.assertEqual(key(3), key(3))
+ self.assertGreater(key(3), key(1))
+ def cmp2(x, y):
+ return int(x) - int(y)
+ key = functools.cmp_to_key(cmp2)
+ self.assertEqual(key(4.0), key('4'))
+ self.assertLess(key(2), key('35'))
+
+ def test_cmp_to_key_arguments(self):
+ def cmp1(x, y):
+ return (x > y) - (x < y)
+ key = functools.cmp_to_key(mycmp=cmp1)
+ self.assertEqual(key(obj=3), key(obj=3))
+ self.assertGreater(key(obj=3), key(obj=1))
+ with self.assertRaises((TypeError, AttributeError)):
+ key(3) > 1 # rhs is not a K object
+ with self.assertRaises((TypeError, AttributeError)):
+ 1 < key(3) # lhs is not a K object
+ with self.assertRaises(TypeError):
+ key = functools.cmp_to_key() # too few args
+ with self.assertRaises(TypeError):
+ key = functools.cmp_to_key(cmp1, None) # too many args
+ key = functools.cmp_to_key(cmp1)
+ with self.assertRaises(TypeError):
+ key() # too few args
+ with self.assertRaises(TypeError):
+ key(None, None) # too many args
+
+ def test_bad_cmp(self):
+ def cmp1(x, y):
+ raise ZeroDivisionError
+ key = functools.cmp_to_key(cmp1)
+ with self.assertRaises(ZeroDivisionError):
+ key(3) > key(1)
+
+ class BadCmp:
+ def __lt__(self, other):
+ raise ZeroDivisionError
+ def cmp1(x, y):
+ return BadCmp()
+ key = functools.cmp_to_key(cmp1)
+ with self.assertRaises(ZeroDivisionError):
+ key(3) > key(1)
+
+ def test_obj_field(self):
+ def cmp1(x, y):
+ return (x > y) - (x < y)
+ key = functools.cmp_to_key(mycmp=cmp1)
+ self.assertEqual(key(50).obj, 50)
+
+ def test_sort_int(self):
def mycmp(x, y):
return y - x
self.assertEqual(sorted(range(5), key=functools.cmp_to_key(mycmp)),
[4, 3, 2, 1, 0])
+ def test_sort_int_str(self):
+ def mycmp(x, y):
+ x, y = int(x), int(y)
+ return (x > y) - (x < y)
+ values = [5, '3', 7, 2, '0', '1', 4, '10', 1]
+ values = sorted(values, key=functools.cmp_to_key(mycmp))
+ self.assertEqual([int(value) for value in values],
+ [0, 1, 1, 2, 3, 4, 5, 7, 10])
+
def test_hash(self):
def mycmp(x, y):
return y - x
key = functools.cmp_to_key(mycmp)
k = key(10)
- self.assertRaises(TypeError, hash(k))
+ self.assertRaises(TypeError, hash, k)
class TestTotalOrdering(unittest.TestCase):
@@ -655,6 +718,7 @@ class TestLRU(unittest.TestCase):
def test_main(verbose=None):
test_classes = (
+ TestCmpToKey,
TestPartial,
TestPartialSubclass,
TestPythonPartial,
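For context, the expanded TestCmpToKey cases above all revolve around functools.cmp_to_key(), which wraps an old-style comparison function in a key object. A minimal usage sketch:

    import functools

    def reverse_numeric(x, y):
        # Old-style cmp function: negative, zero or positive result.
        return y - x

    # cmp_to_key() turns it into a key= callable for sorted(), min(), max(), ...
    print(sorted([5, 2, 4, 1, 3], key=functools.cmp_to_key(reverse_numeric)))
    # -> [5, 4, 3, 2, 1]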
diff --git a/Lib/test/test_future.py b/Lib/test/test_future.py
index c6689a1..3a25eb1 100644
--- a/Lib/test/test_future.py
+++ b/Lib/test/test_future.py
@@ -13,14 +13,14 @@ def get_error_location(msg):
class FutureTest(unittest.TestCase):
def test_future1(self):
- support.unload('test_future1')
- from test import test_future1
- self.assertEqual(test_future1.result, 6)
+ support.unload('future_test1')
+ from test import future_test1
+ self.assertEqual(future_test1.result, 6)
def test_future2(self):
- support.unload('test_future2')
- from test import test_future2
- self.assertEqual(test_future2.result, 6)
+ support.unload('future_test2')
+ from test import future_test2
+ self.assertEqual(future_test2.result, 6)
def test_future3(self):
support.unload('test_future3')
diff --git a/Lib/test/test_gc.py b/Lib/test/test_gc.py
index 100c767..19313db 100644
--- a/Lib/test/test_gc.py
+++ b/Lib/test/test_gc.py
@@ -1,5 +1,6 @@
import unittest
-from test.support import verbose, run_unittest, strip_python_stderr
+from test.support import (verbose, refcount_test, run_unittest,
+ strip_python_stderr)
import sys
import gc
import weakref
@@ -175,6 +176,7 @@ class GCTests(unittest.TestCase):
del d
self.assertEqual(gc.collect(), 2)
+ @refcount_test
def test_frame(self):
def f():
frame = sys._getframe()
@@ -242,6 +244,7 @@ class GCTests(unittest.TestCase):
# For example, disposed tuples are not freed, but reused.
# To minimize variations, though, we first store the get_count() results
# and check them at the end.
+ @refcount_test
def test_get_count(self):
gc.collect()
a, b, c = gc.get_count()
@@ -255,6 +258,7 @@ class GCTests(unittest.TestCase):
# created (the list).
self.assertGreater(d, a)
+ @refcount_test
def test_collect_generations(self):
gc.collect()
# This object will "trickle" into generation N + 1 after
diff --git a/Lib/test/test_genexps.py b/Lib/test/test_genexps.py
index 1f46af1..413043c 100644
--- a/Lib/test/test_genexps.py
+++ b/Lib/test/test_genexps.py
@@ -257,11 +257,15 @@ Verify that genexps are weakly referencable
"""
+import sys
-__test__ = {'doctests' : doctests}
+# Trace function can throw off the tuple reuse test.
+if hasattr(sys, 'gettrace') and sys.gettrace():
+ __test__ = {}
+else:
+ __test__ = {'doctests' : doctests}
def test_main(verbose=None):
- import sys
from test import support
from test import test_genexps
support.run_doctest(test_genexps, verbose)
diff --git a/Lib/test/test_gzip.py b/Lib/test/test_gzip.py
index 2b0ac36..3ea5c41 100644
--- a/Lib/test/test_gzip.py
+++ b/Lib/test/test_gzip.py
@@ -64,6 +64,21 @@ class TestGzip(unittest.TestCase):
d = f.read()
self.assertEqual(d, data1*50)
+ def test_read1(self):
+ self.test_write()
+ blocks = []
+ nread = 0
+ with gzip.GzipFile(self.filename, 'r') as f:
+ while True:
+ d = f.read1()
+ if not d:
+ break
+ blocks.append(d)
+ nread += len(d)
+ # Check that position was updated correctly (see issue10791).
+ self.assertEqual(f.tell(), nread)
+ self.assertEqual(b''.join(blocks), data1 * 50)
+
def test_io_on_closed_object(self):
# Test that I/O operations on closed GzipFile objects raise a
# ValueError, just like the corresponding functions on file objects.
@@ -323,6 +338,14 @@ class TestGzip(unittest.TestCase):
self.assertEqual(f.read(100), b'')
self.assertEqual(nread, len(uncompressed))
+ def test_textio_readlines(self):
+ # Issue #10791: TextIOWrapper.readlines() fails when wrapping GzipFile.
+ lines = (data1 * 50).decode("ascii").splitlines(True)
+ self.test_write()
+ with gzip.GzipFile(self.filename, 'r') as f:
+ with io.TextIOWrapper(f, encoding="ascii") as t:
+ self.assertEqual(t.readlines(), lines)
+
# Testing compress/decompress shortcut functions
def test_compress(self):
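As background, test_read1() and test_textio_readlines() above target issue #10791, where TextIOWrapper.readlines() failed when wrapping a GzipFile. A minimal sketch of the pattern under test; 'data.gz' is a placeholder path:

    import gzip
    import io

    # Write some compressed text, then read it back line by line through a
    # TextIOWrapper layered on top of the GzipFile.
    with gzip.GzipFile('data.gz', 'wb') as f:
        f.write(b'first line\nsecond line\n')

    with gzip.GzipFile('data.gz', 'rb') as f:
        with io.TextIOWrapper(f, encoding='ascii') as t:
            print(t.readlines())   # ['first line\n', 'second line\n']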
diff --git a/Lib/test/test_imp.py b/Lib/test/test_imp.py
index d745ae9..88d2a3e 100644
--- a/Lib/test/test_imp.py
+++ b/Lib/test/test_imp.py
@@ -58,6 +58,12 @@ class ImportTests(unittest.TestCase):
with imp.find_module('module_' + mod, self.test_path)[0] as fd:
self.assertEqual(fd.encoding, encoding)
+ path = [os.path.dirname(__file__)]
+ self.assertRaisesRegex(SyntaxError,
+ r"Non-UTF-8 code starting with '\\xf6'"
+ r" in file .*badsyntax_pep3120.py",
+ imp.find_module, 'badsyntax_pep3120', path)
+
def test_issue1267(self):
for mod, encoding, _ in self.test_strings:
fp, filename, info = imp.find_module('module_' + mod,
@@ -210,6 +216,10 @@ class PEP3147Tests(unittest.TestCase):
self.assertEqual(
imp.cache_from_source('/foo/bar/baz/qux.py', True),
'/foo/bar/baz/__pycache__/qux.{}.pyc'.format(self.tag))
+ # Directory with a dot, filename without dot
+ self.assertEqual(
+ imp.cache_from_source('/foo.bar/file', True),
+ '/foo.bar/__pycache__/file{}.pyc'.format(self.tag))
def test_cache_from_source_optimized(self):
# Given the path to a .py file, return the path to its PEP 3147
diff --git a/Lib/test/test_import.py b/Lib/test/test_import.py
index 95a5f48..0332fdd 100644
--- a/Lib/test/test_import.py
+++ b/Lib/test/test_import.py
@@ -286,8 +286,6 @@ class ImportTests(unittest.TestCase):
self.skipTest('path is not encodable to {}'.format(encoding))
with self.assertRaises(ImportError) as c:
__import__(path)
- self.assertEqual("Import by filename is not supported.",
- c.exception.args[0])
def test_import_in_del_does_not_crash(self):
# Issue 4236
diff --git a/Lib/test/test_importhooks.py b/Lib/test/test_importhooks.py
index ec6730e..7a25657 100644
--- a/Lib/test/test_importhooks.py
+++ b/Lib/test/test_importhooks.py
@@ -51,7 +51,7 @@ class TestImporter:
def __init__(self, path=test_path):
if path != test_path:
- # if out class is on sys.path_hooks, we must raise
+ # if our class is on sys.path_hooks, we must raise
# ImportError for any path item that we can't handle.
raise ImportError
self.path = path
@@ -229,7 +229,9 @@ class ImportHooksTestCase(ImportHooksBaseTestCase):
i = ImpWrapper()
sys.meta_path.append(i)
sys.path_hooks.append(ImpWrapper)
- mnames = ("colorsys", "urllib.parse", "distutils.core")
+ mnames = (
+ "colorsys", "urllib.parse", "distutils.core", "sys",
+ )
for mname in mnames:
parent = mname.split(".")[0]
for n in list(sys.modules):
@@ -237,7 +239,8 @@ class ImportHooksTestCase(ImportHooksBaseTestCase):
del sys.modules[n]
for mname in mnames:
m = __import__(mname, globals(), locals(), ["__dummy__"])
- m.__loader__ # to make sure we actually handled the import
+ # to make sure we actually handled the import
+ self.assertTrue(hasattr(m, "__loader__"))
def test_main():
diff --git a/Lib/test/test_io.py b/Lib/test/test_io.py
index b665a6c..1821492 100644
--- a/Lib/test/test_io.py
+++ b/Lib/test/test_io.py
@@ -46,7 +46,7 @@ except ImportError:
def _default_chunk_size():
"""Get the default TextIOWrapper chunk size"""
- with open(__file__, "r", encoding="latin1") as f:
+ with open(__file__, "r", encoding="latin-1") as f:
return f._CHUNK_SIZE
@@ -1684,11 +1684,11 @@ class TextIOWrapperTest(unittest.TestCase):
r = self.BytesIO(b"\xc3\xa9\n\n")
b = self.BufferedReader(r, 1000)
t = self.TextIOWrapper(b)
- t.__init__(b, encoding="latin1", newline="\r\n")
- self.assertEqual(t.encoding, "latin1")
+ t.__init__(b, encoding="latin-1", newline="\r\n")
+ self.assertEqual(t.encoding, "latin-1")
self.assertEqual(t.line_buffering, False)
- t.__init__(b, encoding="utf8", line_buffering=True)
- self.assertEqual(t.encoding, "utf8")
+ t.__init__(b, encoding="utf-8", line_buffering=True)
+ self.assertEqual(t.encoding, "utf-8")
self.assertEqual(t.line_buffering, True)
self.assertEqual("\xe9\n", t.readline())
self.assertRaises(TypeError, t.__init__, b, newline=42)
@@ -1738,8 +1738,8 @@ class TextIOWrapperTest(unittest.TestCase):
def test_encoding(self):
# Check the encoding attribute is always set, and valid
b = self.BytesIO()
- t = self.TextIOWrapper(b, encoding="utf8")
- self.assertEqual(t.encoding, "utf8")
+ t = self.TextIOWrapper(b, encoding="utf-8")
+ self.assertEqual(t.encoding, "utf-8")
t = self.TextIOWrapper(b)
self.assertTrue(t.encoding is not None)
codecs.lookup(t.encoding)
@@ -1918,7 +1918,7 @@ class TextIOWrapperTest(unittest.TestCase):
def test_basic_io(self):
for chunksize in (1, 2, 3, 4, 5, 15, 16, 17, 31, 32, 33, 63, 64, 65):
- for enc in "ascii", "latin1", "utf8" :# , "utf-16-be", "utf-16-le":
+ for enc in "ascii", "latin-1", "utf-8" :# , "utf-16-be", "utf-16-le":
f = self.open(support.TESTFN, "w+", encoding=enc)
f._CHUNK_SIZE = chunksize
self.assertEqual(f.write("abc"), 3)
@@ -1968,7 +1968,7 @@ class TextIOWrapperTest(unittest.TestCase):
self.assertEqual(rlines, wlines)
def test_telling(self):
- f = self.open(support.TESTFN, "w+", encoding="utf8")
+ f = self.open(support.TESTFN, "w+", encoding="utf-8")
p0 = f.tell()
f.write("\xff\n")
p1 = f.tell()
@@ -2214,6 +2214,7 @@ class TextIOWrapperTest(unittest.TestCase):
with self.open(support.TESTFN, "w", errors="replace") as f:
self.assertEqual(f.errors, "replace")
+ @support.no_tracing
@unittest.skipUnless(threading, 'Threading required for this test.')
def test_threads_write(self):
# Issue6750: concurrent writes could duplicate data
@@ -2670,6 +2671,7 @@ class SignalsTest(unittest.TestCase):
def test_interrupted_write_text(self):
self.check_interrupted_write("xy", b"xy", mode="w", encoding="ascii")
+ @support.no_tracing
def check_reentrant_write(self, data, **fdopen_kwargs):
def on_alarm(*args):
# Will be called reentrantly from the same thread
diff --git a/Lib/test/test_itertools.py b/Lib/test/test_itertools.py
index 8cdc597..f4928ca 100644
--- a/Lib/test/test_itertools.py
+++ b/Lib/test/test_itertools.py
@@ -69,11 +69,21 @@ class TestBasicOps(unittest.TestCase):
self.assertEqual(list(accumulate('abc')), ['a', 'ab', 'abc']) # works with non-numeric
self.assertEqual(list(accumulate([])), []) # empty iterable
self.assertEqual(list(accumulate([7])), [7]) # iterable of length one
- self.assertRaises(TypeError, accumulate, range(10), 5) # too many args
+ self.assertRaises(TypeError, accumulate, range(10), 5, 6) # too many args
self.assertRaises(TypeError, accumulate) # too few args
self.assertRaises(TypeError, accumulate, x=range(10)) # unexpected kwd arg
self.assertRaises(TypeError, list, accumulate([1, []])) # args that don't add
+ s = [2, 8, 9, 5, 7, 0, 3, 4, 1, 6]
+ self.assertEqual(list(accumulate(s, min)),
+ [2, 2, 2, 2, 2, 0, 0, 0, 0, 0])
+ self.assertEqual(list(accumulate(s, max)),
+ [2, 8, 9, 9, 9, 9, 9, 9, 9, 9])
+ self.assertEqual(list(accumulate(s, operator.mul)),
+ [2, 16, 144, 720, 5040, 0, 0, 0, 0, 0])
+ with self.assertRaises(TypeError):
+ list(accumulate(s, chr)) # unary-operation
+
def test_chain(self):
def chain2(*iterables):
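For context, the new assertions above exercise the optional binary-function argument of accumulate() that these tests now cover. A minimal sketch:

    import itertools
    import operator

    s = [2, 8, 9, 5, 7]
    print(list(itertools.accumulate(s)))                 # running sums: [2, 10, 19, 24, 31]
    print(list(itertools.accumulate(s, operator.mul)))   # running products: [2, 16, 144, 720, 5040]
    print(list(itertools.accumulate(s, min)))            # running minima: [2, 2, 2, 2, 2]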
diff --git a/Lib/test/test_logging.py b/Lib/test/test_logging.py
index b62545c..88f0ebc 100644
--- a/Lib/test/test_logging.py
+++ b/Lib/test/test_logging.py
@@ -40,7 +40,7 @@ from socketserver import ThreadingTCPServer, StreamRequestHandler
import struct
import sys
import tempfile
-from test.support import captured_stdout, run_with_locale, run_unittest
+from test.support import captured_stdout, run_with_locale, run_unittest, patch
import textwrap
import unittest
import warnings
@@ -1082,28 +1082,39 @@ class WarningsTest(BaseTest):
def test_warnings(self):
with warnings.catch_warnings():
logging.captureWarnings(True)
- try:
- warnings.filterwarnings("always", category=UserWarning)
- file = io.StringIO()
- h = logging.StreamHandler(file)
- logger = logging.getLogger("py.warnings")
- logger.addHandler(h)
- warnings.warn("I'm warning you...")
- logger.removeHandler(h)
- s = file.getvalue()
- h.close()
- self.assertTrue(s.find("UserWarning: I'm warning you...\n") > 0)
-
- #See if an explicit file uses the original implementation
- file = io.StringIO()
- warnings.showwarning("Explicit", UserWarning, "dummy.py", 42,
- file, "Dummy line")
- s = file.getvalue()
- file.close()
- self.assertEqual(s,
- "dummy.py:42: UserWarning: Explicit\n Dummy line\n")
- finally:
- logging.captureWarnings(False)
+ self.addCleanup(lambda: logging.captureWarnings(False))
+ warnings.filterwarnings("always", category=UserWarning)
+ stream = io.StringIO()
+ h = logging.StreamHandler(stream)
+ logger = logging.getLogger("py.warnings")
+ logger.addHandler(h)
+ warnings.warn("I'm warning you...")
+ logger.removeHandler(h)
+ s = stream.getvalue()
+ h.close()
+ self.assertTrue(s.find("UserWarning: I'm warning you...\n") > 0)
+
+ #See if an explicit file uses the original implementation
+ a_file = io.StringIO()
+ warnings.showwarning("Explicit", UserWarning, "dummy.py", 42,
+ a_file, "Dummy line")
+ s = a_file.getvalue()
+ a_file.close()
+ self.assertEqual(s,
+ "dummy.py:42: UserWarning: Explicit\n Dummy line\n")
+
+ def test_warnings_no_handlers(self):
+ with warnings.catch_warnings():
+ logging.captureWarnings(True)
+ self.addCleanup(lambda: logging.captureWarnings(False))
+
+ # confirm our assumption: no loggers are set
+ logger = logging.getLogger("py.warnings")
+ assert logger.handlers == []
+
+ warnings.showwarning("Explicit", UserWarning, "dummy.py", 42)
+ self.assertTrue(len(logger.handlers) == 1)
+ self.assertIsInstance(logger.handlers[0], logging.NullHandler)
def formatFunc(format, datefmt=None):
@@ -2007,6 +2018,11 @@ class ManagerTest(BaseTest):
self.assertEqual(logged, ['should appear in logged'])
+ def test_set_log_record_factory(self):
+ man = logging.Manager(None)
+ expected = object()
+ man.setLogRecordFactory(expected)
+ self.assertEqual(man.logRecordFactory, expected)
class ChildLoggerTest(BaseTest):
def test_child_loggers(self):
@@ -2198,6 +2214,500 @@ class LastResortTest(BaseTest):
logging.raiseExceptions = old_raise_exceptions
+class FakeHandler:
+
+ def __init__(self, identifier, called):
+ for method in ('acquire', 'flush', 'close', 'release'):
+ setattr(self, method, self.record_call(identifier, method, called))
+
+ def record_call(self, identifier, method_name, called):
+ def inner():
+ called.append('{} - {}'.format(identifier, method_name))
+ return inner
+
+
+class RecordingHandler(logging.NullHandler):
+
+ def __init__(self, *args, **kwargs):
+ super(RecordingHandler, self).__init__(*args, **kwargs)
+ self.records = []
+
+ def handle(self, record):
+ """Keep track of all the emitted records."""
+ self.records.append(record)
+
+
+class ShutdownTest(BaseTest):
+
+ """Tets suite for the shutdown method."""
+
+ def setUp(self):
+ super(ShutdownTest, self).setUp()
+ self.called = []
+
+ raise_exceptions = logging.raiseExceptions
+ self.addCleanup(lambda: setattr(logging, 'raiseExceptions', raise_exceptions))
+
+ def raise_error(self, error):
+ def inner():
+ raise error()
+ return inner
+
+ def test_no_failure(self):
+ # create some fake handlers
+ handler0 = FakeHandler(0, self.called)
+ handler1 = FakeHandler(1, self.called)
+ handler2 = FakeHandler(2, self.called)
+
+ # create live weakref to those handlers
+ handlers = map(logging.weakref.ref, [handler0, handler1, handler2])
+
+ logging.shutdown(handlerList=list(handlers))
+
+ expected = ['2 - acquire', '2 - flush', '2 - close', '2 - release',
+ '1 - acquire', '1 - flush', '1 - close', '1 - release',
+ '0 - acquire', '0 - flush', '0 - close', '0 - release']
+ self.assertEqual(expected, self.called)
+
+ def _test_with_failure_in_method(self, method, error):
+ handler = FakeHandler(0, self.called)
+ setattr(handler, method, self.raise_error(error))
+ handlers = [logging.weakref.ref(handler)]
+
+ logging.shutdown(handlerList=list(handlers))
+
+ self.assertEqual('0 - release', self.called[-1])
+
+ def test_with_ioerror_in_acquire(self):
+ self._test_with_failure_in_method('acquire', IOError)
+
+ def test_with_ioerror_in_flush(self):
+ self._test_with_failure_in_method('flush', IOError)
+
+ def test_with_ioerror_in_close(self):
+ self._test_with_failure_in_method('close', IOError)
+
+ def test_with_valueerror_in_acquire(self):
+ self._test_with_failure_in_method('acquire', ValueError)
+
+ def test_with_valueerror_in_flush(self):
+ self._test_with_failure_in_method('flush', ValueError)
+
+ def test_with_valueerror_in_close(self):
+ self._test_with_failure_in_method('close', ValueError)
+
+ def test_with_other_error_in_acquire_without_raise(self):
+ logging.raiseExceptions = False
+ self._test_with_failure_in_method('acquire', IndexError)
+
+ def test_with_other_error_in_flush_without_raise(self):
+ logging.raiseExceptions = False
+ self._test_with_failure_in_method('flush', IndexError)
+
+ def test_with_other_error_in_close_without_raise(self):
+ logging.raiseExceptions = False
+ self._test_with_failure_in_method('close', IndexError)
+
+ def test_with_other_error_in_acquire_with_raise(self):
+ logging.raiseExceptions = True
+ self.assertRaises(IndexError, self._test_with_failure_in_method,
+ 'acquire', IndexError)
+
+ def test_with_other_error_in_flush_with_raise(self):
+ logging.raiseExceptions = True
+ self.assertRaises(IndexError, self._test_with_failure_in_method,
+ 'flush', IndexError)
+
+ def test_with_other_error_in_close_with_raise(self):
+ logging.raiseExceptions = True
+ self.assertRaises(IndexError, self._test_with_failure_in_method,
+ 'close', IndexError)
+
+
+class ModuleLevelMiscTest(BaseTest):
+
+ """Tets suite for some module level methods."""
+
+ def test_disable(self):
+ old_disable = logging.root.manager.disable
+ # confirm our assumptions are correct
+ assert old_disable == 0
+ self.addCleanup(lambda: logging.disable(old_disable))
+
+ logging.disable(83)
+ self.assertEqual(logging.root.manager.disable, 83)
+
+ def _test_log(self, method, level=None):
+ called = []
+ patch(self, logging, 'basicConfig',
+ lambda *a, **kw: called.append((a, kw)))
+
+ recording = RecordingHandler()
+ logging.root.addHandler(recording)
+
+ log_method = getattr(logging, method)
+ if level is not None:
+ log_method(level, "test me: %r", recording)
+ else:
+ log_method("test me: %r", recording)
+
+ self.assertEqual(len(recording.records), 1)
+ record = recording.records[0]
+ self.assertEqual(record.getMessage(), "test me: %r" % recording)
+
+ expected_level = level if level is not None else getattr(logging, method.upper())
+ self.assertEqual(record.levelno, expected_level)
+
+ # basicConfig was not called!
+ self.assertEqual(called, [])
+
+ def test_log(self):
+ self._test_log('log', logging.ERROR)
+
+ def test_debug(self):
+ self._test_log('debug')
+
+ def test_info(self):
+ self._test_log('info')
+
+ def test_warning(self):
+ self._test_log('warning')
+
+ def test_error(self):
+ self._test_log('error')
+
+ def test_critical(self):
+ self._test_log('critical')
+
+ def test_set_logger_class(self):
+ self.assertRaises(TypeError, logging.setLoggerClass, object)
+
+ class MyLogger(logging.Logger):
+ pass
+
+ logging.setLoggerClass(MyLogger)
+ self.assertEqual(logging.getLoggerClass(), MyLogger)
+
+ logging.setLoggerClass(logging.Logger)
+ self.assertEqual(logging.getLoggerClass(), logging.Logger)
+
+
+class BasicConfigTest(unittest.TestCase):
+
+ """Tets suite for logging.basicConfig."""
+
+ def setUp(self):
+ super(BasicConfigTest, self).setUp()
+ handlers = logging.root.handlers
+ self.addCleanup(lambda: setattr(logging.root, 'handlers', handlers))
+ logging.root.handlers = []
+
+ def tearDown(self):
+ logging.shutdown()
+ super(BasicConfigTest, self).tearDown()
+
+ @unittest.skipIf(True, "test disabled, issue #11557")
+ def test_no_kwargs(self):
+ logging.basicConfig()
+
+ # handler defaults to a StreamHandler to sys.stderr
+ self.assertEqual(len(logging.root.handlers), 1)
+ handler = logging.root.handlers[0]
+ self.assertIsInstance(handler, logging.StreamHandler)
+ self.assertEqual(handler.stream, sys.stderr)
+
+ formatter = handler.formatter
+ # format defaults to logging.BASIC_FORMAT
+ self.assertEqual(formatter._style._fmt, logging.BASIC_FORMAT)
+ # datefmt defaults to None
+ self.assertIsNone(formatter.datefmt)
+ # style defaults to %
+ self.assertIsInstance(formatter._style, logging.PercentStyle)
+
+ # level is not explicitly set
+ self.assertEqual(logging.root.level, logging.WARNING)
+
+ def test_filename(self):
+ logging.basicConfig(filename='test.log')
+
+ self.assertEqual(len(logging.root.handlers), 1)
+ handler = logging.root.handlers[0]
+ self.assertIsInstance(handler, logging.FileHandler)
+
+ expected = logging.FileHandler('test.log', 'a')
+ self.addCleanup(expected.close)
+ self.assertEqual(handler.stream.mode, expected.stream.mode)
+ self.assertEqual(handler.stream.name, expected.stream.name)
+
+ def test_filemode(self):
+ logging.basicConfig(filename='test.log', filemode='wb')
+
+ handler = logging.root.handlers[0]
+ expected = logging.FileHandler('test.log', 'wb')
+ self.addCleanup(expected.close)
+ self.assertEqual(handler.stream.mode, expected.stream.mode)
+
+ def test_stream(self):
+ stream = io.StringIO()
+ self.addCleanup(stream.close)
+ logging.basicConfig(stream=stream)
+
+ self.assertEqual(len(logging.root.handlers), 1)
+ handler = logging.root.handlers[0]
+ self.assertIsInstance(handler, logging.StreamHandler)
+ self.assertEqual(handler.stream, stream)
+
+ def test_format(self):
+ logging.basicConfig(format='foo')
+
+ formatter = logging.root.handlers[0].formatter
+ self.assertEqual(formatter._style._fmt, 'foo')
+
+ def test_datefmt(self):
+ logging.basicConfig(datefmt='bar')
+
+ formatter = logging.root.handlers[0].formatter
+ self.assertEqual(formatter.datefmt, 'bar')
+
+ def test_style(self):
+ logging.basicConfig(style='$')
+
+ formatter = logging.root.handlers[0].formatter
+ self.assertIsInstance(formatter._style, logging.StringTemplateStyle)
+
+ def test_level(self):
+ old_level = logging.root.level
+ self.addCleanup(lambda: logging.root.setLevel(old_level))
+
+ logging.basicConfig(level=57)
+ self.assertEqual(logging.root.level, 57)
+
+ def test_incompatible(self):
+ assertRaises = self.assertRaises
+ handlers = [logging.StreamHandler()]
+ stream = sys.stderr
+ assertRaises(ValueError, logging.basicConfig, filename='test.log',
+ stream=stream)
+ assertRaises(ValueError, logging.basicConfig, filename='test.log',
+ handlers=handlers)
+ assertRaises(ValueError, logging.basicConfig, stream=stream,
+ handlers=handlers)
+
+ def test_handlers(self):
+ handlers = [logging.StreamHandler(), logging.StreamHandler(sys.stdout)]
+ logging.basicConfig(handlers=handlers)
+ self.assertIs(handlers[0], logging.root.handlers[0])
+ self.assertIs(handlers[1], logging.root.handlers[1])
+ self.assertIsNotNone(handlers[0].formatter)
+ self.assertIsNotNone(handlers[1].formatter)
+ self.assertIs(handlers[0].formatter, handlers[1].formatter)
+
+ def _test_log(self, method, level=None):
+ # logging.root has no handlers so basicConfig should be called
+ called = []
+
+ old_basic_config = logging.basicConfig
+ def my_basic_config(*a, **kw):
+ old_basic_config()
+ old_level = logging.root.level
+ logging.root.setLevel(100) # avoid having messages in stderr
+ self.addCleanup(lambda: logging.root.setLevel(old_level))
+ called.append((a, kw))
+
+ patch(self, logging, 'basicConfig', my_basic_config)
+
+ log_method = getattr(logging, method)
+ if level is not None:
+ log_method(level, "test me")
+ else:
+ log_method("test me")
+
+ # basicConfig was called with no arguments
+ self.assertEqual(called, [((), {})])
+
+ def test_log(self):
+ self._test_log('log', logging.WARNING)
+
+ def test_debug(self):
+ self._test_log('debug')
+
+ def test_info(self):
+ self._test_log('info')
+
+ def test_warning(self):
+ self._test_log('warning')
+
+ def test_error(self):
+ self._test_log('error')
+
+ def test_critical(self):
+ self._test_log('critical')
+
+
+class LoggerAdapterTest(unittest.TestCase):
+
+ def setUp(self):
+ super(LoggerAdapterTest, self).setUp()
+ old_handler_list = logging._handlerList[:]
+
+ self.recording = RecordingHandler()
+ self.logger = logging.root
+ self.logger.addHandler(self.recording)
+ self.addCleanup(lambda: self.logger.removeHandler(self.recording))
+ self.addCleanup(self.recording.close)
+
+ def cleanup():
+ logging._handlerList[:] = old_handler_list
+
+ self.addCleanup(cleanup)
+ self.addCleanup(logging.shutdown)
+ self.adapter = logging.LoggerAdapter(logger=self.logger, extra=None)
+
+ def test_exception(self):
+ msg = 'testing exception: %r'
+ exc = None
+ try:
+ assert False
+ except AssertionError as e:
+ exc = e
+ self.adapter.exception(msg, self.recording)
+
+ self.assertEqual(len(self.recording.records), 1)
+ record = self.recording.records[0]
+ self.assertEqual(record.levelno, logging.ERROR)
+ self.assertEqual(record.msg, msg)
+ self.assertEqual(record.args, (self.recording,))
+ self.assertEqual(record.exc_info,
+ (exc.__class__, exc, exc.__traceback__))
+
+ def test_critical(self):
+ msg = 'critical test! %r'
+ self.adapter.critical(msg, self.recording)
+
+ self.assertEqual(len(self.recording.records), 1)
+ record = self.recording.records[0]
+ self.assertEqual(record.levelno, logging.CRITICAL)
+ self.assertEqual(record.msg, msg)
+ self.assertEqual(record.args, (self.recording,))
+
+ def test_is_enabled_for(self):
+ old_disable = self.adapter.logger.manager.disable
+ self.adapter.logger.manager.disable = 33
+ self.addCleanup(lambda: setattr(self.adapter.logger.manager,
+ 'disable', old_disable))
+ self.assertFalse(self.adapter.isEnabledFor(32))
+
+ def test_has_handlers(self):
+ self.assertTrue(self.adapter.hasHandlers())
+
+ for handler in self.logger.handlers:
+ self.logger.removeHandler(handler)
+ assert not self.logger.hasHandlers()
+
+ self.assertFalse(self.adapter.hasHandlers())
+
+
+class LoggerTest(BaseTest):
+
+ def setUp(self):
+ super(LoggerTest, self).setUp()
+ self.recording = RecordingHandler()
+ self.logger = logging.Logger(name='blah')
+ self.logger.addHandler(self.recording)
+ self.addCleanup(lambda: self.logger.removeHandler(self.recording))
+ self.addCleanup(self.recording.close)
+ self.addCleanup(logging.shutdown)
+
+ def test_set_invalid_level(self):
+ self.assertRaises(TypeError, self.logger.setLevel, object())
+
+ def test_exception(self):
+ msg = 'testing exception: %r'
+ exc = None
+ try:
+ assert False
+ except AssertionError as e:
+ exc = e
+ self.logger.exception(msg, self.recording)
+
+ self.assertEqual(len(self.recording.records), 1)
+ record = self.recording.records[0]
+ self.assertEqual(record.levelno, logging.ERROR)
+ self.assertEqual(record.msg, msg)
+ self.assertEqual(record.args, (self.recording,))
+ self.assertEqual(record.exc_info,
+ (exc.__class__, exc, exc.__traceback__))
+
+ def test_log_invalid_level_with_raise(self):
+ old_raise = logging.raiseExceptions
+ self.addCleanup(lambda: setattr(logging, 'raiseExceptions', old_raise))
+
+ logging.raiseExceptions = True
+ self.assertRaises(TypeError, self.logger.log, '10', 'test message')
+
+ def test_log_invalid_level_no_raise(self):
+ old_raise = logging.raiseExceptions
+ self.addCleanup(lambda: setattr(logging, 'raiseExceptions', old_raise))
+
+ logging.raiseExceptions = False
+ self.logger.log('10', 'test message') # no exception happens
+
+ def test_find_caller_with_stack_info(self):
+ called = []
+ patch(self, logging.traceback, 'print_stack',
+ lambda f, file: called.append(file.getvalue()))
+
+ self.logger.findCaller(stack_info=True)
+
+ self.assertEqual(len(called), 1)
+ self.assertEqual('Stack (most recent call last):\n', called[0])
+
+ def test_make_record_with_extra_overwrite(self):
+ name = 'my record'
+ level = 13
+ fn = lno = msg = args = exc_info = func = sinfo = None
+ rv = logging._logRecordFactory(name, level, fn, lno, msg, args,
+ exc_info, func, sinfo)
+
+ for key in ('message', 'asctime') + tuple(rv.__dict__.keys()):
+ extra = {key: 'some value'}
+ self.assertRaises(KeyError, self.logger.makeRecord, name, level,
+ fn, lno, msg, args, exc_info,
+ extra=extra, sinfo=sinfo)
+
+ def test_make_record_with_extra_no_overwrite(self):
+ name = 'my record'
+ level = 13
+ fn = lno = msg = args = exc_info = func = sinfo = None
+ extra = {'valid_key': 'some value'}
+ result = self.logger.makeRecord(name, level, fn, lno, msg, args,
+ exc_info, extra=extra, sinfo=sinfo)
+ self.assertIn('valid_key', result.__dict__)
+
+ def test_has_handlers(self):
+ self.assertTrue(self.logger.hasHandlers())
+
+ for handler in self.logger.handlers:
+ self.logger.removeHandler(handler)
+ assert not self.logger.hasHandlers()
+
+ self.assertFalse(self.logger.hasHandlers())
+
+ def test_has_handlers_no_propagate(self):
+ child_logger = logging.getLogger('blah.child')
+ child_logger.propagate = False
+ assert child_logger.handlers == []
+
+ self.assertFalse(child_logger.hasHandlers())
+
+ def test_is_enabled_for(self):
+ old_disable = self.logger.manager.disable
+ self.logger.manager.disable = 23
+ self.addCleanup(lambda: setattr(self.logger.manager,
+ 'disable', old_disable))
+ self.assertFalse(self.logger.isEnabledFor(22))
+
+
class BaseFileTest(BaseTest):
"Base class for handler tests that write log files"
@@ -2319,6 +2829,8 @@ def test_main():
EncodingTest, WarningsTest, ConfigDictTest, ManagerTest,
FormatterTest,
LogRecordFactoryTest, ChildLoggerTest, QueueHandlerTest,
+ ShutdownTest, ModuleLevelMiscTest, BasicConfigTest,
+ LoggerAdapterTest, LoggerTest,
RotatingFileHandlerTest,
LastResortTest,
TimedRotatingFileHandlerTest
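For reference, BasicConfigTest above drives the keyword arguments of logging.basicConfig() one at a time (filename, stream, format, datefmt, style, level, handlers). A minimal sketch of the common case, assuming a fresh interpreter where the root logger has no handlers yet:

    import io
    import logging

    stream = io.StringIO()
    # One-shot configuration of the root logger: a single StreamHandler
    # writing to the StringIO above, with a custom format string.
    logging.basicConfig(stream=stream, level=logging.INFO,
                        format='%(levelname)s:%(name)s:%(message)s')
    logging.info('hello')
    print(stream.getvalue())   # INFO:root:hello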
diff --git a/Lib/test/test_mailbox.py b/Lib/test/test_mailbox.py
index 45abcc8..9b4613e 100644
--- a/Lib/test/test_mailbox.py
+++ b/Lib/test/test_mailbox.py
@@ -95,14 +95,14 @@ class TestMailbox(TestBase):
""")
def test_add_invalid_8bit_bytes_header(self):
- key = self._box.add(self._nonascii_msg.encode('latin1'))
+ key = self._box.add(self._nonascii_msg.encode('latin-1'))
self.assertEqual(len(self._box), 1)
self.assertEqual(self._box.get_bytes(key),
- self._nonascii_msg.encode('latin1'))
+ self._nonascii_msg.encode('latin-1'))
def test_invalid_nonascii_header_as_string(self):
subj = self._nonascii_msg.splitlines()[1]
- key = self._box.add(subj.encode('latin1'))
+ key = self._box.add(subj.encode('latin-1'))
self.assertEqual(self._box.get_string(key),
'Subject: =?unknown-8bit?b?RmFsaW5hcHThciBo4Xpob3pzeuFsbO104XNz'
'YWwuIE3hciByZW5kZWx06Ww/?=\n\n')
diff --git a/Lib/test/test_metaclass.py b/Lib/test/test_metaclass.py
index 219ab99..6862900 100644
--- a/Lib/test/test_metaclass.py
+++ b/Lib/test/test_metaclass.py
@@ -246,7 +246,13 @@ Test failures in looking up the __prepare__ method work.
"""
-__test__ = {'doctests' : doctests}
+import sys
+
+# Trace function introduces __locals__ which causes various tests to fail.
+if hasattr(sys, 'gettrace') and sys.gettrace():
+ __test__ = {}
+else:
+ __test__ = {'doctests' : doctests}
def test_main(verbose=False):
from test import support
diff --git a/Lib/test/test_minidom.py b/Lib/test/test_minidom.py
index 4c2b34a..acc4819 100644
--- a/Lib/test/test_minidom.py
+++ b/Lib/test/test_minidom.py
@@ -4,9 +4,7 @@ import pickle
from test.support import verbose, run_unittest, findfile
import unittest
-import xml.dom
import xml.dom.minidom
-import xml.parsers.expat
from xml.dom.minidom import parse, Node, Document, parseString
from xml.dom.minidom import getDOMImplementation
@@ -14,7 +12,6 @@ from xml.dom.minidom import getDOMImplementation
tstfile = findfile("test.xml", subdir="xmltestdata")
-
# The tests of DocumentType importing use these helpers to construct
# the documents to work with, since not all DOM builders actually
# create the DocumentType nodes.
@@ -1009,41 +1006,6 @@ class MinidomTest(unittest.TestCase):
"test NodeList.item()")
doc.unlink()
- def testSAX2DOM(self):
- from xml.dom import pulldom
-
- sax2dom = pulldom.SAX2DOM()
- sax2dom.startDocument()
- sax2dom.startElement("doc", {})
- sax2dom.characters("text")
- sax2dom.startElement("subelm", {})
- sax2dom.characters("text")
- sax2dom.endElement("subelm")
- sax2dom.characters("text")
- sax2dom.endElement("doc")
- sax2dom.endDocument()
-
- doc = sax2dom.document
- root = doc.documentElement
- (text1, elm1, text2) = root.childNodes
- text3 = elm1.childNodes[0]
-
- self.confirm(text1.previousSibling is None and
- text1.nextSibling is elm1 and
- elm1.previousSibling is text1 and
- elm1.nextSibling is text2 and
- text2.previousSibling is elm1 and
- text2.nextSibling is None and
- text3.previousSibling is None and
- text3.nextSibling is None, "testSAX2DOM - siblings")
-
- self.confirm(root.parentNode is doc and
- text1.parentNode is root and
- elm1.parentNode is root and
- text2.parentNode is root and
- text3.parentNode is elm1, "testSAX2DOM - parents")
- doc.unlink()
-
def testEncodings(self):
doc = parseString('<foo>&#x20ac;</foo>')
self.assertEqual(doc.toxml(),
@@ -1490,6 +1452,7 @@ class MinidomTest(unittest.TestCase):
doc.appendChild(doc.createComment("foo--bar"))
self.assertRaises(ValueError, doc.toxml)
+
def testEmptyXMLNSValue(self):
doc = parseString("<element xmlns=''>\n"
"<foo/>\n</element>")
diff --git a/Lib/test/test_multiprocessing.py b/Lib/test/test_multiprocessing.py
index 768119c..d53a0f8 100644
--- a/Lib/test/test_multiprocessing.py
+++ b/Lib/test/test_multiprocessing.py
@@ -164,6 +164,18 @@ class _TestProcess(BaseTestCase):
self.assertEqual(current.ident, os.getpid())
self.assertEqual(current.exitcode, None)
+ def test_daemon_argument(self):
+ if self.TYPE == "threads":
+ return
+
+ # By default uses the current process's daemon flag.
+ proc0 = self.Process(target=self._test)
+ self.assertEqual(proc0.daemon, self.current_process().daemon)
+ proc1 = self.Process(target=self._test, daemon=True)
+ self.assertTrue(proc1.daemon)
+ proc2 = self.Process(target=self._test, daemon=False)
+ self.assertFalse(proc2.daemon)
+
@classmethod
def _test(cls, q, *args, **kwds):
current = cls.current_process()
diff --git a/Lib/test/test_nntplib.py b/Lib/test/test_nntplib.py
index a387f61..ec790ad 100644
--- a/Lib/test/test_nntplib.py
+++ b/Lib/test/test_nntplib.py
@@ -1,10 +1,11 @@
import io
+import socket
import datetime
import textwrap
import unittest
import functools
import contextlib
-import collections
+import collections.abc
from test import support
from nntplib import NNTP, GroupInfo, _have_ssl
import nntplib
@@ -246,12 +247,32 @@ class NetworkedNNTPTestsMixin:
if not name.startswith('test_'):
continue
meth = getattr(cls, name)
- if not isinstance(meth, collections.Callable):
+ if not isinstance(meth, collections.abc.Callable):
continue
# Need to use a closure so that meth remains bound to its current
# value
setattr(cls, name, wrap_meth(meth))
+ def test_with_statement(self):
+ def is_connected():
+ if not hasattr(server, 'file'):
+ return False
+ try:
+ server.help()
+ except (socket.error, EOFError):
+ return False
+ return True
+
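+ # Leaving the 'with' block should quit and close the connection,
+ # even if quit() was already called inside it.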
+ with self.NNTP_CLASS(self.NNTP_HOST, timeout=TIMEOUT, usenetrc=False) as server:
+ self.assertTrue(is_connected())
+ self.assertTrue(server.help())
+ self.assertFalse(is_connected())
+
+ with self.NNTP_CLASS(self.NNTP_HOST, timeout=TIMEOUT, usenetrc=False) as server:
+ server.quit()
+ self.assertFalse(is_connected())
+
+
NetworkedNNTPTestsMixin.wrap_methods()
@@ -813,7 +834,7 @@ class NNTPv1v2TestsMixin:
def _check_article_body(self, lines):
self.assertEqual(len(lines), 4)
- self.assertEqual(lines[-1].decode('utf8'), "-- Signed by André.")
+ self.assertEqual(lines[-1].decode('utf-8'), "-- Signed by André.")
self.assertEqual(lines[-2], b"")
self.assertEqual(lines[-3], b".Here is a dot-starting line.")
self.assertEqual(lines[-4], b"This is just a test article.")
diff --git a/Lib/test/test_optparse.py b/Lib/test/test_optparse.py
index 7b95612..61f44ee 100644
--- a/Lib/test/test_optparse.py
+++ b/Lib/test/test_optparse.py
@@ -631,7 +631,7 @@ class TestStandard(BaseTest):
option_list=options)
def test_required_value(self):
- self.assertParseFail(["-a"], "-a option requires an argument")
+ self.assertParseFail(["-a"], "-a option requires 1 argument")
def test_invalid_integer(self):
self.assertParseFail(["-b", "5x"],
diff --git a/Lib/test/test_os.py b/Lib/test/test_os.py
index 56be375..35aa7fa 100644
--- a/Lib/test/test_os.py
+++ b/Lib/test/test_os.py
@@ -15,6 +15,13 @@ from test import support
import contextlib
import mmap
import uuid
+import asyncore
+import asynchat
+import socket
+try:
+ import threading
+except ImportError:
+ threading = None
# Detect whether we're on a Linux system that uses the (now outdated
# and unmaintained) linuxthreads threading library. There's an issue
@@ -1280,6 +1287,287 @@ class LoginTests(unittest.TestCase):
self.assertNotEqual(len(user_name), 0)
+@unittest.skipUnless(hasattr(os, 'getpriority') and hasattr(os, 'setpriority'),
+ "needs os.getpriority and os.setpriority")
+class ProgramPriorityTests(unittest.TestCase):
+ """Tests for os.getpriority() and os.setpriority()."""
+
+ def test_set_get_priority(self):
+
+ base = os.getpriority(os.PRIO_PROCESS, os.getpid())
+ os.setpriority(os.PRIO_PROCESS, os.getpid(), base + 1)
+ try:
+ new_prio = os.getpriority(os.PRIO_PROCESS, os.getpid())
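+ # nice values are clamped at 19, so incrementing from the ceiling may be a no-op; skip rather than fail in that case.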
+ if base >= 19 and new_prio <= 19:
+ raise unittest.SkipTest(
+ "unable to reliably test setpriority at current nice level of %s" % base)
+ else:
+ self.assertEqual(new_prio, base + 1)
+ finally:
+ try:
+ os.setpriority(os.PRIO_PROCESS, os.getpid(), base)
+ except OSError as err:
+ if err.errno != errno.EACCES:
+ raise
+
+
+class SendfileTestServer(asyncore.dispatcher, threading.Thread):
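+ # Minimal asyncore/threading server that accepts one connection and records
+ # everything the client sends, so the sendfile() tests can verify the received bytes.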
+
+ class Handler(asynchat.async_chat):
+
+ def __init__(self, conn):
+ asynchat.async_chat.__init__(self, conn)
+ self.in_buffer = []
+ self.closed = False
+ self.push(b"220 ready\r\n")
+
+ def handle_read(self):
+ data = self.recv(4096)
+ self.in_buffer.append(data)
+
+ def get_data(self):
+ return b''.join(self.in_buffer)
+
+ def handle_close(self):
+ self.close()
+ self.closed = True
+
+ def handle_error(self):
+ raise
+
+ def __init__(self, address):
+ threading.Thread.__init__(self)
+ asyncore.dispatcher.__init__(self)
+ self.create_socket(socket.AF_INET, socket.SOCK_STREAM)
+ self.bind(address)
+ self.listen(5)
+ self.host, self.port = self.socket.getsockname()[:2]
+ self.handler_instance = None
+ self._active = False
+ self._active_lock = threading.Lock()
+
+ # --- public API
+
+ @property
+ def running(self):
+ return self._active
+
+ def start(self):
+ assert not self.running
+ self.__flag = threading.Event()
+ threading.Thread.start(self)
+ self.__flag.wait()
+
+ def stop(self):
+ assert self.running
+ self._active = False
+ self.join()
+
+ def wait(self):
+ # wait for handler connection to be closed, then stop the server
+ while not getattr(self.handler_instance, "closed", False):
+ time.sleep(0.001)
+ self.stop()
+
+ # --- internals
+
+ def run(self):
+ self._active = True
+ self.__flag.set()
+ while self._active and asyncore.socket_map:
+ self._active_lock.acquire()
+ asyncore.loop(timeout=0.001, count=1)
+ self._active_lock.release()
+ asyncore.close_all()
+
+ def handle_accept(self):
+ conn, addr = self.accept()
+ self.handler_instance = self.Handler(conn)
+
+ def handle_connect(self):
+ self.close()
+ handle_read = handle_connect
+
+ def writable(self):
+ return 0
+
+ def handle_error(self):
+ raise
+
+
+@unittest.skipUnless(threading is not None, "test needs threading module")
+@unittest.skipUnless(hasattr(os, 'sendfile'), "test needs os.sendfile()")
+class TestSendfile(unittest.TestCase):
+
+ DATA = b"12345abcde" * 16 * 1024 # 160 KB
+ SUPPORT_HEADERS_TRAILERS = not sys.platform.startswith("linux") and \
+ not sys.platform.startswith("solaris") and \
+ not sys.platform.startswith("sunos")
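+ # headers/trailers are a BSD extension to sendfile(); Linux and Solaris only transfer the file data.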
+
+ @classmethod
+ def setUpClass(cls):
+ with open(support.TESTFN, "wb") as f:
+ f.write(cls.DATA)
+
+ @classmethod
+ def tearDownClass(cls):
+ support.unlink(support.TESTFN)
+
+ def setUp(self):
+ self.server = SendfileTestServer((support.HOST, 0))
+ self.server.start()
+ self.client = socket.socket()
+ self.client.connect((self.server.host, self.server.port))
+ self.client.settimeout(1)
+ # synchronize by waiting for "220 ready" response
+ self.client.recv(1024)
+ self.sockno = self.client.fileno()
+ self.file = open(support.TESTFN, 'rb')
+ self.fileno = self.file.fileno()
+
+ def tearDown(self):
+ self.file.close()
+ self.client.close()
+ if self.server.running:
+ self.server.stop()
+
+ def sendfile_wrapper(self, sock, file, offset, nbytes, headers=[], trailers=[]):
+ """A higher level wrapper representing how an application is
+ supposed to use sendfile().
+ """
+ while 1:
+ try:
+ if self.SUPPORT_HEADERS_TRAILERS:
+ return os.sendfile(sock, file, offset, nbytes, headers,
+ trailers)
+ else:
+ return os.sendfile(sock, file, offset, nbytes)
+ except OSError as err:
+ if err.errno == errno.ECONNRESET:
+ # disconnected
+ raise
+ elif err.errno in (errno.EAGAIN, errno.EBUSY):
+ # we have to retry sending the data
+ continue
+ else:
+ raise
+
+ def test_send_whole_file(self):
+ # normal send
+ total_sent = 0
+ offset = 0
+ nbytes = 4096
+ while total_sent < len(self.DATA):
+ sent = self.sendfile_wrapper(self.sockno, self.fileno, offset, nbytes)
+ if sent == 0:
+ break
+ offset += sent
+ total_sent += sent
+ self.assertTrue(sent <= nbytes)
+ self.assertEqual(offset, total_sent)
+
+ self.assertEqual(total_sent, len(self.DATA))
+ self.client.shutdown(socket.SHUT_RDWR)
+ self.client.close()
+ self.server.wait()
+ data = self.server.handler_instance.get_data()
+ self.assertEqual(len(data), len(self.DATA))
+ self.assertEqual(data, self.DATA)
+
+ def test_send_at_certain_offset(self):
+ # start sending a file at a certain offset
+ total_sent = 0
+ offset = len(self.DATA) // 2
+ must_send = len(self.DATA) - offset
+ nbytes = 4096
+ while total_sent < must_send:
+ sent = self.sendfile_wrapper(self.sockno, self.fileno, offset, nbytes)
+ if sent == 0:
+ break
+ offset += sent
+ total_sent += sent
+ self.assertTrue(sent <= nbytes)
+
+ self.client.shutdown(socket.SHUT_RDWR)
+ self.client.close()
+ self.server.wait()
+ data = self.server.handler_instance.get_data()
+ expected = self.DATA[len(self.DATA) // 2:]
+ self.assertEqual(total_sent, len(expected))
+ self.assertEqual(len(data), len(expected))
+ self.assertEqual(data, expected)
+
+ def test_offset_overflow(self):
+ # specify an offset > file size
+ offset = len(self.DATA) + 4096
+ try:
+ sent = os.sendfile(self.sockno, self.fileno, offset, 4096)
+ except OSError as e:
+ # Solaris can raise EINVAL if offset >= file length, ignore.
+ if e.errno != errno.EINVAL:
+ raise
+ else:
+ self.assertEqual(sent, 0)
+ self.client.shutdown(socket.SHUT_RDWR)
+ self.client.close()
+ self.server.wait()
+ data = self.server.handler_instance.get_data()
+ self.assertEqual(data, b'')
+
+ def test_invalid_offset(self):
+ with self.assertRaises(OSError) as cm:
+ os.sendfile(self.sockno, self.fileno, -1, 4096)
+ self.assertEqual(cm.exception.errno, errno.EINVAL)
+
+ # --- headers / trailers tests
+
+ if SUPPORT_HEADERS_TRAILERS:
+
+ def test_headers(self):
+ total_sent = 0
+ sent = os.sendfile(self.sockno, self.fileno, 0, 4096,
+ headers=[b"x" * 512])
+ total_sent += sent
+ offset = 4096
+ nbytes = 4096
+ while 1:
+ sent = self.sendfile_wrapper(self.sockno, self.fileno,
+ offset, nbytes)
+ if sent == 0:
+ break
+ total_sent += sent
+ offset += sent
+
+ expected_data = b"x" * 512 + self.DATA
+ self.assertEqual(total_sent, len(expected_data))
+ self.client.close()
+ self.server.wait()
+ data = self.server.handler_instance.get_data()
+ self.assertEqual(hash(data), hash(expected_data))
+
+ def test_trailers(self):
+ TESTFN2 = support.TESTFN + "2"
+ with open(TESTFN2, 'wb') as f:
+ f.write(b"abcde")
+ with open(TESTFN2, 'rb') as f:
+ self.addCleanup(os.remove, TESTFN2)
+ os.sendfile(self.sockno, f.fileno(), 0, 4096,
+ trailers=[b"12345"])
+ self.client.close()
+ self.server.wait()
+ data = self.server.handler_instance.get_data()
+ self.assertEqual(data, b"abcde12345")
+
+ if hasattr(os, "SF_NODISKIO"):
+ def test_flags(self):
+ try:
+ os.sendfile(self.sockno, self.fileno, 0, 4096,
+ flags=os.SF_NODISKIO)
+ except OSError as err:
+ if err.errno not in (errno.EBUSY, errno.EAGAIN):
+ raise
+
+
def test_main():
support.run_unittest(
FileTests,
@@ -1300,6 +1588,8 @@ def test_main():
PidTests,
LoginTests,
LinkTests,
+ TestSendfile,
+ ProgramPriorityTests,
)
if __name__ == "__main__":
diff --git a/Lib/test/test_osx_env.py b/Lib/test/test_osx_env.py
index 8b3df37..24ec2b4 100644
--- a/Lib/test/test_osx_env.py
+++ b/Lib/test/test_osx_env.py
@@ -5,6 +5,7 @@ Test suite for OS X interpreter environment variables.
from test.support import EnvironmentVarGuard, run_unittest
import subprocess
import sys
+import sysconfig
import unittest
class OSXEnvironmentVariableTestCase(unittest.TestCase):
@@ -27,8 +28,6 @@ class OSXEnvironmentVariableTestCase(unittest.TestCase):
self._check_sys('PYTHONEXECUTABLE', '==', 'sys.executable')
def test_main():
- from distutils import sysconfig
-
if sys.platform == 'darwin' and sysconfig.get_config_var('WITH_NEXT_FRAMEWORK'):
run_unittest(OSXEnvironmentVariableTestCase)
diff --git a/Lib/test/test_pdb.py b/Lib/test/test_pdb.py
index d861df5..c197aff 100644
--- a/Lib/test/test_pdb.py
+++ b/Lib/test/test_pdb.py
@@ -20,9 +20,12 @@ class PdbTestInput(object):
def __enter__(self):
self.real_stdin = sys.stdin
sys.stdin = _FakeInput(self.input)
+ self.orig_trace = sys.gettrace() if hasattr(sys, 'gettrace') else None
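+ # Remember any active trace function (e.g. from regrtest's tracing) so it can be restored after pdb replaces it.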
def __exit__(self, *exc):
sys.stdin = self.real_stdin
+ if self.orig_trace:
+ sys.settrace(self.orig_trace)
def test_pdb_displayhook():
diff --git a/Lib/test/test_peepholer.py b/Lib/test/test_peepholer.py
index b7d446f..78de909 100644
--- a/Lib/test/test_peepholer.py
+++ b/Lib/test/test_peepholer.py
@@ -3,13 +3,16 @@ import re
import sys
from io import StringIO
import unittest
+from math import copysign
def disassemble(func):
f = StringIO()
tmp = sys.stdout
sys.stdout = f
- dis.dis(func)
- sys.stdout = tmp
+ try:
+ dis.dis(func)
+ finally:
+ sys.stdout = tmp
result = f.getvalue()
f.close()
return result
@@ -17,6 +20,7 @@ def disassemble(func):
def dis_single(line):
return disassemble(compile(line, '', 'single'))
+
class TestTranforms(unittest.TestCase):
def test_unot(self):
@@ -99,6 +103,12 @@ class TestTranforms(unittest.TestCase):
self.assertIn(elem, asm)
self.assertNotIn('BUILD_TUPLE', asm)
+ # Long tuples should be folded too.
+ asm = dis_single(repr(tuple(range(10000))))
+ # One LOAD_CONST for the tuple, one for the None return value
+ self.assertEqual(asm.count('LOAD_CONST'), 2)
+ self.assertNotIn('BUILD_TUPLE', asm)
+
# Bug 1053819: Tuple of constants misidentified when presented with:
# . . . opcode_with_arg 100 unary_opcode BUILD_TUPLE 1 . . .
# The following would segfault upon compilation
@@ -198,6 +208,9 @@ class TestTranforms(unittest.TestCase):
def test_folding_of_unaryops_on_constants(self):
for line, elem in (
('-0.5', '(-0.5)'), # unary negative
+ ('-0.0', '(-0.0)'), # -0.0
+ ('-(1.0-1.0)', '(-0.0)'), # -0.0 after folding
+ ('-0', '(0)'), # -0
('~-2', '(1)'), # unary invert
('+1', '(1)'), # unary positive
):
@@ -205,6 +218,13 @@ class TestTranforms(unittest.TestCase):
self.assertIn(elem, asm, asm)
self.assertNotIn('UNARY_', asm)
+ # Check that -0.0 works after marshaling
+ def negzero():
+ return -(1.0-1.0)
+
+ self.assertNotIn('UNARY_', disassemble(negzero))
+ self.assertTrue(copysign(1.0, negzero()) < 0)
+
# Verify that unfoldables are skipped
for line, elem in (
('-"abc"', "('abc')"), # unary negative
@@ -267,6 +287,25 @@ class TestTranforms(unittest.TestCase):
asm = disassemble(f)
self.assertNotIn('BINARY_ADD', asm)
+ def test_constant_folding(self):
+ # Issue #11244: aggressive constant folding.
+ exprs = [
+ "3 * -5",
+ "-3 * 5",
+ "2 * (3 * 4)",
+ "(2 * 3) * 4",
+ "(-1, 2, 3)",
+ "(1, -2, 3)",
+ "(1, 2, -3)",
+ "(1, 2, -3) * 6",
+ "lambda x: x in {(3 * -5) + (-1 - 6), (1, -2, 3) * 2, None}",
+ ]
+ for e in exprs:
+ asm = dis_single(e)
+ self.assertNotIn('UNARY_', asm, e)
+ self.assertNotIn('BINARY_', asm, e)
+ self.assertNotIn('BUILD_', asm, e)
+
class TestBuglets(unittest.TestCase):
def test_bug_11510(self):
diff --git a/Lib/test/test_pep292.py b/Lib/test/test_pep292.py
index 119c7ea..a1e52e9 100644
--- a/Lib/test/test_pep292.py
+++ b/Lib/test/test_pep292.py
@@ -49,11 +49,11 @@ class TestTemplate(unittest.TestCase):
(?P<invalid>) |
(?P<escaped>%(delim)s) |
@(?P<named>%(id)s) |
- @{(?P<braced>%(id)s)}
+ @{(?P<braced>%(id)s)}
)
"""
s = MyPattern('$')
- self.assertRaises(ValueError, s.substitute, dict())
+ self.assertRaises(ValueError, s.substitute, dict())
def test_percents(self):
eq = self.assertEqual
diff --git a/Lib/test/test_pep3120.py b/Lib/test/test_pep3120.py
index 09fedf0..496f8da 100644
--- a/Lib/test/test_pep3120.py
+++ b/Lib/test/test_pep3120.py
@@ -19,8 +19,8 @@ class PEP3120Test(unittest.TestCase):
try:
import test.badsyntax_pep3120
except SyntaxError as msg:
- msg = str(msg)
- self.assertTrue('UTF-8' in msg or 'utf8' in msg)
+ msg = str(msg).lower()
+ self.assertTrue('utf-8' in msg or 'utf8' in msg)
else:
self.fail("expected exception didn't occur")
diff --git a/Lib/test/test_platform.py b/Lib/test/test_platform.py
index ec134a1..b59f6e6 100644
--- a/Lib/test/test_platform.py
+++ b/Lib/test/test_platform.py
@@ -56,13 +56,11 @@ class PlatformTest(unittest.TestCase):
def setUp(self):
self.save_version = sys.version
- self.save_subversion = sys.subversion
self.save_mercurial = sys._mercurial
self.save_platform = sys.platform
def tearDown(self):
sys.version = self.save_version
- sys.subversion = self.save_subversion
sys._mercurial = self.save_mercurial
sys.platform = self.save_platform
@@ -77,7 +75,7 @@ class PlatformTest(unittest.TestCase):
('IronPython', '1.0.0', '', '', '', '', '.NET 2.0.50727.42')),
):
# branch and revision are not "parsed", but fetched
- # from sys.subversion. Ignore them
+ # from sys._mercurial. Ignore them
(name, version, branch, revision, buildno, builddate, compiler) \
= platform._sys_version(input)
self.assertEqual(
@@ -113,8 +111,6 @@ class PlatformTest(unittest.TestCase):
if subversion is None:
if hasattr(sys, "_mercurial"):
del sys._mercurial
- if hasattr(sys, "subversion"):
- del sys.subversion
else:
sys._mercurial = subversion
if sys_platform is not None:
@@ -247,6 +243,34 @@ class PlatformTest(unittest.TestCase):
):
self.assertEqual(platform._parse_release_file(input), output)
+ def test_popen(self):
+ mswindows = (sys.platform == "win32")
+
+ if mswindows:
+ command = '"{}" -c "print(\'Hello\')"'.format(sys.executable)
+ else:
+ command = "'{}' -c 'print(\"Hello\")'".format(sys.executable)
+ with platform.popen(command) as stdout:
+ hello = stdout.read().strip()
+ stdout.close()
+ self.assertEqual(hello, "Hello")
+
+ data = 'plop'
+ if mswindows:
+ command = '"{}" -c "import sys; data=sys.stdin.read(); exit(len(data))"'
+ else:
+ command = "'{}' -c 'import sys; data=sys.stdin.read(); exit(len(data))'"
+ command = command.format(sys.executable)
+ with platform.popen(command, 'w') as stdin:
+ stdin.write(data)
+ ret = stdin.close()
+ self.assertIsNotNone(ret)
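+ # close() returns the child's status: the plain exit code on Windows,
+ # a raw wait status on POSIX (exit code in the high byte).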
+ if os.name == 'nt':
+ returncode = ret
+ else:
+ returncode = ret >> 8
+ self.assertEqual(returncode, len(data))
+
def test_main():
support.run_unittest(
diff --git a/Lib/test/test_poplib.py b/Lib/test/test_poplib.py
index 81af569..0a3adcc 100644
--- a/Lib/test/test_poplib.py
+++ b/Lib/test/test_poplib.py
@@ -108,6 +108,10 @@ class DummyPOP3Handler(asynchat.async_chat):
def cmd_apop(self, arg):
self.push('+OK done nothing.')
+ def cmd_quit(self, arg):
+ self.push('+OK closing.')
+ self.close_when_done()
+
class DummyPOP3Server(asyncore.dispatcher, threading.Thread):
@@ -165,10 +169,10 @@ class TestPOP3Class(TestCase):
def setUp(self):
self.server = DummyPOP3Server((HOST, PORT))
self.server.start()
- self.client = poplib.POP3(self.server.host, self.server.port)
+ self.client = poplib.POP3(self.server.host, self.server.port, timeout=3)
def tearDown(self):
- self.client.quit()
+ self.client.close()
self.server.stop()
def test_getwelcome(self):
@@ -228,6 +232,12 @@ class TestPOP3Class(TestCase):
self.client.uidl()
self.client.uidl('foo')
+ def test_quit(self):
+ resp = self.client.quit()
+ self.assertTrue(resp)
+ self.assertIsNone(self.client.sock)
+ self.assertIsNone(self.client.file)
+
SUPPORTS_SSL = False
if hasattr(poplib, 'POP3_SSL'):
@@ -274,6 +284,7 @@ if hasattr(poplib, 'POP3_SSL'):
else:
DummyPOP3Handler.handle_read(self)
+
class TestPOP3_SSLClass(TestPOP3Class):
# repeat previous tests by using poplib.POP3_SSL
diff --git a/Lib/test/test_posix.py b/Lib/test/test_posix.py
index 45b3afc..0e9ac75 100644
--- a/Lib/test/test_posix.py
+++ b/Lib/test/test_posix.py
@@ -37,7 +37,7 @@ class PosixTester(unittest.TestCase):
NO_ARG_FUNCTIONS = [ "ctermid", "getcwd", "getcwdb", "uname",
"times", "getloadavg",
"getegid", "geteuid", "getgid", "getgroups",
- "getpid", "getpgrp", "getppid", "getuid",
+ "getpid", "getpgrp", "getppid", "getuid", "sync",
]
for name in NO_ARG_FUNCTIONS:
@@ -132,6 +132,156 @@ class PosixTester(unittest.TestCase):
finally:
fp.close()
+ @unittest.skipUnless(hasattr(posix, 'truncate'), "test needs posix.truncate()")
+ def test_truncate(self):
+ with open(support.TESTFN, 'w') as fp:
+ fp.write('test')
+ fp.flush()
+ posix.truncate(support.TESTFN, 0)
+
+ @unittest.skipUnless(hasattr(posix, 'fexecve'), "test needs posix.fexecve()")
+ @unittest.skipUnless(hasattr(os, 'fork'), "test needs os.fork()")
+ @unittest.skipUnless(hasattr(os, 'waitpid'), "test needs os.waitpid()")
+ def test_fexecve(self):
+ fp = os.open(sys.executable, os.O_RDONLY)
+ try:
+ pid = os.fork()
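+ # Child: re-exec the interpreter through the already-open fd.
+ # Parent: wait for the child and expect a clean exit status.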
+ if pid == 0:
+ os.chdir(os.path.split(sys.executable)[0])
+ posix.fexecve(fp, [sys.executable, '-c', 'pass'], os.environ)
+ else:
+ self.assertEqual(os.waitpid(pid, 0), (pid, 0))
+ finally:
+ os.close(fp)
+
+ @unittest.skipUnless(hasattr(posix, 'waitid'), "test needs posix.waitid()")
+ @unittest.skipUnless(hasattr(os, 'fork'), "test needs os.fork()")
+ def test_waitid(self):
+ pid = os.fork()
+ if pid == 0:
+ os.chdir(os.path.split(sys.executable)[0])
+ posix.execve(sys.executable, [sys.executable, '-c', 'pass'], os.environ)
+ else:
+ res = posix.waitid(posix.P_PID, pid, posix.WEXITED)
+ self.assertEqual(pid, res.si_pid)
+
+ @unittest.skipUnless(hasattr(posix, 'lockf'), "test needs posix.lockf()")
+ def test_lockf(self):
+ fd = os.open(support.TESTFN, os.O_WRONLY | os.O_CREAT)
+ try:
+ os.write(fd, b'test')
+ os.lseek(fd, 0, os.SEEK_SET)
+ posix.lockf(fd, posix.F_LOCK, 4)
+ # section is locked
+ posix.lockf(fd, posix.F_ULOCK, 4)
+ finally:
+ os.close(fd)
+
+ @unittest.skipUnless(hasattr(posix, 'pread'), "test needs posix.pread()")
+ def test_pread(self):
+ fd = os.open(support.TESTFN, os.O_RDWR | os.O_CREAT)
+ try:
+ os.write(fd, b'test')
+ os.lseek(fd, 0, os.SEEK_SET)
+ self.assertEqual(b'es', posix.pread(fd, 2, 1))
+ # the first pread() shouldn't disturb the file offset
+ self.assertEqual(b'te', posix.read(fd, 2))
+ finally:
+ os.close(fd)
+
+ @unittest.skipUnless(hasattr(posix, 'pwrite'), "test needs posix.pwrite()")
+ def test_pwrite(self):
+ fd = os.open(support.TESTFN, os.O_RDWR | os.O_CREAT)
+ try:
+ os.write(fd, b'test')
+ os.lseek(fd, 0, os.SEEK_SET)
+ posix.pwrite(fd, b'xx', 1)
+ self.assertEqual(b'txxt', posix.read(fd, 4))
+ finally:
+ os.close(fd)
+
+ @unittest.skipUnless(hasattr(posix, 'posix_fallocate'),
+ "test needs posix.posix_fallocate()")
+ def test_posix_fallocate(self):
+ fd = os.open(support.TESTFN, os.O_WRONLY | os.O_CREAT)
+ try:
+ posix.posix_fallocate(fd, 0, 10)
+ except OSError as inst:
+ # issue10812, ZFS doesn't appear to support posix_fallocate,
+ # so ignore EINVAL on Solaris-based systems, which are likely to use ZFS.
+ if inst.errno != errno.EINVAL or not sys.platform.startswith("sunos"):
+ raise
+ finally:
+ os.close(fd)
+
+ @unittest.skipUnless(hasattr(posix, 'posix_fadvise'),
+ "test needs posix.posix_fadvise()")
+ def test_posix_fadvise(self):
+ fd = os.open(support.TESTFN, os.O_RDONLY)
+ try:
+ posix.posix_fadvise(fd, 0, 0, posix.POSIX_FADV_WILLNEED)
+ finally:
+ os.close(fd)
+
+ @unittest.skipUnless(hasattr(posix, 'futimes'), "test needs posix.futimes()")
+ def test_futimes(self):
+ now = time.time()
+ fd = os.open(support.TESTFN, os.O_RDONLY)
+ try:
+ posix.futimes(fd, None)
+ self.assertRaises(TypeError, posix.futimes, fd, (None, None))
+ self.assertRaises(TypeError, posix.futimes, fd, (now, None))
+ self.assertRaises(TypeError, posix.futimes, fd, (None, now))
+ posix.futimes(fd, (int(now), int(now)))
+ posix.futimes(fd, (now, now))
+ finally:
+ os.close(fd)
+
+ @unittest.skipUnless(hasattr(posix, 'lutimes'), "test needs posix.lutimes()")
+ def test_lutimes(self):
+ now = time.time()
+ posix.lutimes(support.TESTFN, None)
+ self.assertRaises(TypeError, posix.lutimes, support.TESTFN, (None, None))
+ self.assertRaises(TypeError, posix.lutimes, support.TESTFN, (now, None))
+ self.assertRaises(TypeError, posix.lutimes, support.TESTFN, (None, now))
+ posix.lutimes(support.TESTFN, (int(now), int(now)))
+ posix.lutimes(support.TESTFN, (now, now))
+
+ @unittest.skipUnless(hasattr(posix, 'futimens'), "test needs posix.futimens()")
+ def test_futimens(self):
+ now = time.time()
+ fd = os.open(support.TESTFN, os.O_RDONLY)
+ try:
+ self.assertRaises(TypeError, posix.futimens, fd, (None, None), (None, None))
+ self.assertRaises(TypeError, posix.futimens, fd, (now, 0), None)
+ self.assertRaises(TypeError, posix.futimens, fd, None, (now, 0))
+ posix.futimens(fd, (int(now), int((now - int(now)) * 1e9)),
+ (int(now), int((now - int(now)) * 1e9)))
+ finally:
+ os.close(fd)
+
+ @unittest.skipUnless(hasattr(posix, 'writev'), "test needs posix.writev()")
+ def test_writev(self):
+ fd = os.open(support.TESTFN, os.O_RDWR | os.O_CREAT)
+ try:
+ os.writev(fd, (b'test1', b'tt2', b't3'))
+ os.lseek(fd, 0, os.SEEK_SET)
+ self.assertEqual(b'test1tt2t3', posix.read(fd, 10))
+ finally:
+ os.close(fd)
+
+ @unittest.skipUnless(hasattr(posix, 'readv'), "test needs posix.readv()")
+ def test_readv(self):
+ fd = os.open(support.TESTFN, os.O_RDWR | os.O_CREAT)
+ try:
+ os.write(fd, b'test1tt2t3')
+ os.lseek(fd, 0, os.SEEK_SET)
+ buf = [bytearray(i) for i in [5, 3, 2]]
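+ # Three buffers sized to match the chunks written above (5 + 3 + 2 == 10 bytes).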
+ self.assertEqual(posix.readv(fd, buf), 10)
+ self.assertEqual([b'test1', b'tt2', b't3'], [bytes(i) for i in buf])
+ finally:
+ os.close(fd)
+
def test_dup(self):
if hasattr(posix, 'dup'):
fp = open(support.TESTFN)
@@ -285,6 +435,18 @@ class PosixTester(unittest.TestCase):
if hasattr(posix, 'listdir'):
self.assertTrue(support.TESTFN in posix.listdir())
+ @unittest.skipUnless(hasattr(posix, 'fdlistdir'), "test needs posix.fdlistdir()")
+ def test_fdlistdir(self):
+ f = posix.open(posix.getcwd(), posix.O_RDONLY)
+ self.assertEqual(
+ sorted(posix.listdir('.')),
+ sorted(posix.fdlistdir(f))
+ )
+ # Check the fd was closed by fdlistdir
+ with self.assertRaises(OSError) as ctx:
+ posix.close(f)
+ self.assertEqual(ctx.exception.errno, errno.EBADF)
+
def test_access(self):
if hasattr(posix, 'access'):
self.assertTrue(posix.access(support.TESTFN, os.R_OK))
@@ -389,6 +551,198 @@ class PosixTester(unittest.TestCase):
set([int(x) for x in groups.split()]),
set(posix.getgroups() + [posix.getegid()]))
+ # tests for the posix *at functions follow
+
+ @unittest.skipUnless(hasattr(posix, 'faccessat'), "test needs posix.faccessat()")
+ def test_faccessat(self):
+ f = posix.open(posix.getcwd(), posix.O_RDONLY)
+ try:
+ self.assertTrue(posix.faccessat(f, support.TESTFN, os.R_OK))
+ finally:
+ posix.close(f)
+
+ @unittest.skipUnless(hasattr(posix, 'fchmodat'), "test needs posix.fchmodat()")
+ def test_fchmodat(self):
+ os.chmod(support.TESTFN, stat.S_IRUSR)
+
+ f = posix.open(posix.getcwd(), posix.O_RDONLY)
+ try:
+ posix.fchmodat(f, support.TESTFN, stat.S_IRUSR | stat.S_IWUSR)
+
+ s = posix.stat(support.TESTFN)
+ self.assertEqual(s[0] & stat.S_IRWXU, stat.S_IRUSR | stat.S_IWUSR)
+ finally:
+ posix.close(f)
+
+ @unittest.skipUnless(hasattr(posix, 'fchownat'), "test needs posix.fchownat()")
+ def test_fchownat(self):
+ support.unlink(support.TESTFN)
+ open(support.TESTFN, 'w').close()
+
+ f = posix.open(posix.getcwd(), posix.O_RDONLY)
+ try:
+ posix.fchownat(f, support.TESTFN, os.getuid(), os.getgid())
+ finally:
+ posix.close(f)
+
+ @unittest.skipUnless(hasattr(posix, 'fstatat'), "test needs posix.fstatat()")
+ def test_fstatat(self):
+ support.unlink(support.TESTFN)
+ with open(support.TESTFN, 'w') as outfile:
+ outfile.write("testline\n")
+
+ f = posix.open(posix.getcwd(), posix.O_RDONLY)
+ try:
+ s1 = posix.stat(support.TESTFN)
+ s2 = posix.fstatat(f, support.TESTFN)
+ self.assertEqual(s1, s2)
+ finally:
+ posix.close(f)
+
+ @unittest.skipUnless(hasattr(posix, 'futimesat'), "test needs posix.futimesat()")
+ def test_futimesat(self):
+ f = posix.open(posix.getcwd(), posix.O_RDONLY)
+ try:
+ now = time.time()
+ posix.futimesat(f, support.TESTFN, None)
+ self.assertRaises(TypeError, posix.futimesat, f, support.TESTFN, (None, None))
+ self.assertRaises(TypeError, posix.futimesat, f, support.TESTFN, (now, None))
+ self.assertRaises(TypeError, posix.futimesat, f, support.TESTFN, (None, now))
+ posix.futimesat(f, support.TESTFN, (int(now), int(now)))
+ posix.futimesat(f, support.TESTFN, (now, now))
+ finally:
+ posix.close(f)
+
+ @unittest.skipUnless(hasattr(posix, 'linkat'), "test needs posix.linkat()")
+ def test_linkat(self):
+ f = posix.open(posix.getcwd(), posix.O_RDONLY)
+ try:
+ posix.linkat(f, support.TESTFN, f, support.TESTFN + 'link')
+ # should have same inodes
+ self.assertEqual(posix.stat(support.TESTFN)[1],
+ posix.stat(support.TESTFN + 'link')[1])
+ finally:
+ posix.close(f)
+ support.unlink(support.TESTFN + 'link')
+
+ @unittest.skipUnless(hasattr(posix, 'mkdirat'), "test needs posix.mkdirat()")
+ def test_mkdirat(self):
+ f = posix.open(posix.getcwd(), posix.O_RDONLY)
+ try:
+ posix.mkdirat(f, support.TESTFN + 'dir')
+ posix.stat(support.TESTFN + 'dir') # should not raise exception
+ finally:
+ posix.close(f)
+ support.rmtree(support.TESTFN + 'dir')
+
+ @unittest.skipUnless(hasattr(posix, 'mknodat') and hasattr(stat, 'S_IFIFO'),
+ "don't have mknodat()/S_IFIFO")
+ def test_mknodat(self):
+ # Test using mknodat() to create a FIFO (the only use specified
+ # by POSIX).
+ support.unlink(support.TESTFN)
+ mode = stat.S_IFIFO | stat.S_IRUSR | stat.S_IWUSR
+ f = posix.open(posix.getcwd(), posix.O_RDONLY)
+ try:
+ posix.mknodat(f, support.TESTFN, mode, 0)
+ except OSError as e:
+ # Some old systems don't allow unprivileged users to use
+ # mknod(), or only support creating device nodes.
+ self.assertIn(e.errno, (errno.EPERM, errno.EINVAL))
+ else:
+ self.assertTrue(stat.S_ISFIFO(posix.stat(support.TESTFN).st_mode))
+ finally:
+ posix.close(f)
+
+ @unittest.skipUnless(hasattr(posix, 'openat'), "test needs posix.openat()")
+ def test_openat(self):
+ support.unlink(support.TESTFN)
+ with open(support.TESTFN, 'w') as outfile:
+ outfile.write("testline\n")
+ a = posix.open(posix.getcwd(), posix.O_RDONLY)
+ b = posix.openat(a, support.TESTFN, posix.O_RDONLY)
+ try:
+ res = posix.read(b, 9).decode(encoding="utf-8")
+ self.assertEqual("testline\n", res)
+ finally:
+ posix.close(a)
+ posix.close(b)
+
+ @unittest.skipUnless(hasattr(posix, 'readlinkat'), "test needs posix.readlinkat()")
+ def test_readlinkat(self):
+ os.symlink(support.TESTFN, support.TESTFN + 'link')
+ f = posix.open(posix.getcwd(), posix.O_RDONLY)
+ try:
+ self.assertEqual(posix.readlink(support.TESTFN + 'link'),
+ posix.readlinkat(f, support.TESTFN + 'link'))
+ finally:
+ support.unlink(support.TESTFN + 'link')
+ posix.close(f)
+
+ @unittest.skipUnless(hasattr(posix, 'renameat'), "test needs posix.renameat()")
+ def test_renameat(self):
+ support.unlink(support.TESTFN)
+ open(support.TESTFN + 'ren', 'w').close()
+ f = posix.open(posix.getcwd(), posix.O_RDONLY)
+ try:
+ posix.renameat(f, support.TESTFN + 'ren', f, support.TESTFN)
+ except:
+ posix.rename(support.TESTFN + 'ren', support.TESTFN)
+ raise
+ else:
+ posix.stat(support.TESTFN) # should not throw exception
+ finally:
+ posix.close(f)
+
+ @unittest.skipUnless(hasattr(posix, 'symlinkat'), "test needs posix.symlinkat()")
+ def test_symlinkat(self):
+ f = posix.open(posix.getcwd(), posix.O_RDONLY)
+ try:
+ posix.symlinkat(support.TESTFN, f, support.TESTFN + 'link')
+ self.assertEqual(posix.readlink(support.TESTFN + 'link'), support.TESTFN)
+ finally:
+ posix.close(f)
+ support.unlink(support.TESTFN + 'link')
+
+ @unittest.skipUnless(hasattr(posix, 'unlinkat'), "test needs posix.unlinkat()")
+ def test_unlinkat(self):
+ f = posix.open(posix.getcwd(), posix.O_RDONLY)
+ open(support.TESTFN + 'del', 'w').close()
+ posix.stat(support.TESTFN + 'del') # should not throw exception
+ try:
+ posix.unlinkat(f, support.TESTFN + 'del')
+ except:
+ support.unlink(support.TESTFN + 'del')
+ raise
+ else:
+ self.assertRaises(OSError, posix.stat, support.TESTFN + 'del')
+ finally:
+ posix.close(f)
+
+ @unittest.skipUnless(hasattr(posix, 'utimensat'), "test needs posix.utimensat()")
+ def test_utimensat(self):
+ f = posix.open(posix.getcwd(), posix.O_RDONLY)
+ try:
+ now = time.time()
+ posix.utimensat(f, support.TESTFN, None, None)
+ self.assertRaises(TypeError, posix.utimensat, f, support.TESTFN, (None, None), (None, None))
+ self.assertRaises(TypeError, posix.utimensat, f, support.TESTFN, (now, 0), None)
+ self.assertRaises(TypeError, posix.utimensat, f, support.TESTFN, None, (now, 0))
+ posix.utimensat(f, support.TESTFN, (int(now), int((now - int(now)) * 1e9)),
+ (int(now), int((now - int(now)) * 1e9)))
+ finally:
+ posix.close(f)
+
+ @unittest.skipUnless(hasattr(posix, 'mkfifoat'), "don't have mkfifoat()")
+ def test_mkfifoat(self):
+ support.unlink(support.TESTFN)
+ f = posix.open(posix.getcwd(), posix.O_RDONLY)
+ try:
+ posix.mkfifoat(f, support.TESTFN, stat.S_IRUSR | stat.S_IWUSR)
+ self.assertTrue(stat.S_ISFIFO(posix.stat(support.TESTFN).st_mode))
+ finally:
+ posix.close(f)
+
class PosixGroupsTester(unittest.TestCase):
def setUp(self):
@@ -426,7 +780,10 @@ class PosixGroupsTester(unittest.TestCase):
def test_main():
- support.run_unittest(PosixTester, PosixGroupsTester)
+ try:
+ support.run_unittest(PosixTester, PosixGroupsTester)
+ finally:
+ support.reap_children()
if __name__ == '__main__':
test_main()
diff --git a/Lib/test/test_pulldom.py b/Lib/test/test_pulldom.py
new file mode 100644
index 0000000..b81a595
--- /dev/null
+++ b/Lib/test/test_pulldom.py
@@ -0,0 +1,347 @@
+import io
+import unittest
+import sys
+import xml.sax
+
+from xml.sax.xmlreader import AttributesImpl
+from xml.dom import pulldom
+
+from test.support import run_unittest, findfile
+
+
+tstfile = findfile("test.xml", subdir="xmltestdata")
+
+# A handy XML snippet, containing attributes, a namespace prefix, and a
+# self-closing tag:
+SMALL_SAMPLE = """<?xml version="1.0"?>
+<html xmlns="http://www.w3.org/1999/xhtml" xmlns:xdc="http://www.xml.com/books">
+<!-- A comment -->
+<title>Introduction to XSL</title>
+<hr/>
+<p><xdc:author xdc:attrib="prefixed attribute" attrib="other attrib">A. Namespace</xdc:author></p>
+</html>"""
+
+
+class PullDOMTestCase(unittest.TestCase):
+
+ def test_parse(self):
+ """Minimal test of DOMEventStream.parse()"""
+
+ # This just tests that parsing from a stream works. Actual parser
+ # semantics are tested using parseString with a more focused XML
+ # fragment.
+
+ # Test with a filename:
+ handler = pulldom.parse(tstfile)
+ self.addCleanup(handler.stream.close)
+ list(handler)
+
+ # Test with a file object:
+ with open(tstfile, "rb") as fin:
+ list(pulldom.parse(fin))
+
+ def test_parse_semantics(self):
+ """Test DOMEventStream parsing semantics."""
+
+ items = pulldom.parseString(SMALL_SAMPLE)
+ evt, node = next(items)
+ # Just check the node is a Document:
+ self.assertTrue(hasattr(node, "createElement"))
+ self.assertEqual(pulldom.START_DOCUMENT, evt)
+ evt, node = next(items)
+ self.assertEqual(pulldom.START_ELEMENT, evt)
+ self.assertEqual("html", node.tagName)
+ self.assertEqual(2, len(node.attributes))
+ self.assertEqual(node.attributes.getNamedItem("xmlns:xdc").value,
+ "http://www.xml.com/books")
+ evt, node = next(items)
+ self.assertEqual(pulldom.CHARACTERS, evt) # Line break
+ evt, node = next(items)
+ # XXX - A comment should be reported here!
+ # self.assertEqual(pulldom.COMMENT, evt)
+ # Line break after swallowed comment:
+ self.assertEqual(pulldom.CHARACTERS, evt)
+ evt, node = next(items)
+ self.assertEqual("title", node.tagName)
+ title_node = node
+ evt, node = next(items)
+ self.assertEqual(pulldom.CHARACTERS, evt)
+ self.assertEqual("Introduction to XSL", node.data)
+ evt, node = next(items)
+ self.assertEqual(pulldom.END_ELEMENT, evt)
+ self.assertEqual("title", node.tagName)
+ self.assertTrue(title_node is node)
+ evt, node = next(items)
+ self.assertEqual(pulldom.CHARACTERS, evt)
+ evt, node = next(items)
+ self.assertEqual(pulldom.START_ELEMENT, evt)
+ self.assertEqual("hr", node.tagName)
+ evt, node = next(items)
+ self.assertEqual(pulldom.END_ELEMENT, evt)
+ self.assertEqual("hr", node.tagName)
+ evt, node = next(items)
+ self.assertEqual(pulldom.CHARACTERS, evt)
+ evt, node = next(items)
+ self.assertEqual(pulldom.START_ELEMENT, evt)
+ self.assertEqual("p", node.tagName)
+ evt, node = next(items)
+ self.assertEqual(pulldom.START_ELEMENT, evt)
+ self.assertEqual("xdc:author", node.tagName)
+ evt, node = next(items)
+ self.assertEqual(pulldom.CHARACTERS, evt)
+ evt, node = next(items)
+ self.assertEqual(pulldom.END_ELEMENT, evt)
+ self.assertEqual("xdc:author", node.tagName)
+ evt, node = next(items)
+ self.assertEqual(pulldom.END_ELEMENT, evt)
+ evt, node = next(items)
+ self.assertEqual(pulldom.CHARACTERS, evt)
+ evt, node = next(items)
+ self.assertEqual(pulldom.END_ELEMENT, evt)
+ # XXX No END_DOCUMENT item is ever obtained:
+ #evt, node = next(items)
+ #self.assertEqual(pulldom.END_DOCUMENT, evt)
+
+ def test_expandItem(self):
+ """Ensure expandItem works as expected."""
+ items = pulldom.parseString(SMALL_SAMPLE)
+ # Loop through the nodes until we get to a "title" start tag:
+ for evt, item in items:
+ if evt == pulldom.START_ELEMENT and item.tagName == "title":
+ items.expandNode(item)
+ self.assertEqual(1, len(item.childNodes))
+ break
+ else:
+ self.fail("No \"title\" element detected in SMALL_SAMPLE!")
+ # Loop until we get to the next start-element:
+ for evt, node in items:
+ if evt == pulldom.START_ELEMENT:
+ break
+ self.assertEqual("hr", node.tagName,
+ "expandNode did not leave DOMEventStream in the correct state.")
+ # Attempt to expand a standalone element:
+ items.expandNode(node)
+ self.assertEqual(next(items)[0], pulldom.CHARACTERS)
+ evt, node = next(items)
+ self.assertEqual(node.tagName, "p")
+ items.expandNode(node)
+ next(items) # Skip character data
+ evt, node = next(items)
+ self.assertEqual(node.tagName, "html")
+ with self.assertRaises(StopIteration):
+ next(items)
+ items.clear()
+ self.assertIsNone(items.parser)
+ self.assertIsNone(items.stream)
+
+ @unittest.expectedFailure
+ def test_comment(self):
+ """PullDOM does not receive "comment" events."""
+ items = pulldom.parseString(SMALL_SAMPLE)
+ for evt, _ in items:
+ if evt == pulldom.COMMENT:
+ break
+ else:
+ self.fail("No comment was encountered")
+
+ @unittest.expectedFailure
+ def test_end_document(self):
+ """PullDOM does not receive "end-document" events."""
+ items = pulldom.parseString(SMALL_SAMPLE)
+ # Read all of the nodes up to and including </html>:
+ for evt, node in items:
+ if evt == pulldom.END_ELEMENT and node.tagName == "html":
+ break
+ try:
+ # Assert that the next node is END_DOCUMENT:
+ evt, node = next(items)
+ self.assertEqual(pulldom.END_DOCUMENT, evt)
+ except StopIteration:
+ self.fail(
+ "Ran out of events, but should have received END_DOCUMENT")
+
+
+class ThoroughTestCase(unittest.TestCase):
+ """Test the hard-to-reach parts of pulldom."""
+
+ def test_thorough_parse(self):
+ """Test some of the hard-to-reach parts of PullDOM."""
+ self._test_thorough(pulldom.parse(None, parser=SAXExerciser()))
+
+ @unittest.expectedFailure
+ def test_sax2dom_fail(self):
+ """SAX2DOM can"t handle a PI before the root element."""
+ pd = SAX2DOMTestHelper(None, SAXExerciser(), 12)
+ self._test_thorough(pd)
+
+ def test_thorough_sax2dom(self):
+ """Test some of the hard-to-reach parts of SAX2DOM."""
+ pd = SAX2DOMTestHelper(None, SAX2DOMExerciser(), 12)
+ self._test_thorough(pd, False)
+
+ def _test_thorough(self, pd, before_root=True):
+ """Test some of the hard-to-reach parts of the parser, using a mock
+ parser."""
+
+ evt, node = next(pd)
+ self.assertEqual(pulldom.START_DOCUMENT, evt)
+ # Just check the node is a Document:
+ self.assertTrue(hasattr(node, "createElement"))
+
+ if before_root:
+ evt, node = next(pd)
+ self.assertEqual(pulldom.COMMENT, evt)
+ self.assertEqual("a comment", node.data)
+ evt, node = next(pd)
+ self.assertEqual(pulldom.PROCESSING_INSTRUCTION, evt)
+ self.assertEqual("target", node.target)
+ self.assertEqual("data", node.data)
+
+ evt, node = next(pd)
+ self.assertEqual(pulldom.START_ELEMENT, evt)
+ self.assertEqual("html", node.tagName)
+
+ evt, node = next(pd)
+ self.assertEqual(pulldom.COMMENT, evt)
+ self.assertEqual("a comment", node.data)
+ evt, node = next(pd)
+ self.assertEqual(pulldom.PROCESSING_INSTRUCTION, evt)
+ self.assertEqual("target", node.target)
+ self.assertEqual("data", node.data)
+
+ evt, node = next(pd)
+ self.assertEqual(pulldom.START_ELEMENT, evt)
+ self.assertEqual("p", node.tagName)
+
+ evt, node = next(pd)
+ self.assertEqual(pulldom.CHARACTERS, evt)
+ self.assertEqual("text", node.data)
+ evt, node = next(pd)
+ self.assertEqual(pulldom.END_ELEMENT, evt)
+ self.assertEqual("p", node.tagName)
+ evt, node = next(pd)
+ self.assertEqual(pulldom.END_ELEMENT, evt)
+ self.assertEqual("html", node.tagName)
+ evt, node = next(pd)
+ self.assertEqual(pulldom.END_DOCUMENT, evt)
+
+
+class SAXExerciser(object):
+ """A fake sax parser that calls some of the harder-to-reach sax methods to
+ ensure it emits the correct events"""
+
+ def setContentHandler(self, handler):
+ self._handler = handler
+
+ def parse(self, _):
+ h = self._handler
+ h.startDocument()
+
+ # The next two items ensure that items preceding the first
+ # start_element are properly stored and emitted:
+ h.comment("a comment")
+ h.processingInstruction("target", "data")
+
+ h.startElement("html", AttributesImpl({}))
+
+ h.comment("a comment")
+ h.processingInstruction("target", "data")
+
+ h.startElement("p", AttributesImpl({"class": "paraclass"}))
+ h.characters("text")
+ h.endElement("p")
+ h.endElement("html")
+ h.endDocument()
+
+ def stub(self, *args, **kwargs):
+ """Stub method. Does nothing."""
+ pass
+ setProperty = stub
+ setFeature = stub
+
+
+class SAX2DOMExerciser(SAXExerciser):
+ """The same as SAXExerciser, but without the processing instruction and
+ comment before the root element, because SAX2DOM can't handle it"""
+
+ def parse(self, _):
+ h = self._handler
+ h.startDocument()
+ h.startElement("html", AttributesImpl({}))
+ h.comment("a comment")
+ h.processingInstruction("target", "data")
+ h.startElement("p", AttributesImpl({"class": "paraclass"}))
+ h.characters("text")
+ h.endElement("p")
+ h.endElement("html")
+ h.endDocument()
+
+
+class SAX2DOMTestHelper(pulldom.DOMEventStream):
+ """Allows us to drive SAX2DOM from a DOMEventStream."""
+
+ def reset(self):
+ self.pulldom = pulldom.SAX2DOM()
+ # This content handler relies on namespace support
+ self.parser.setFeature(xml.sax.handler.feature_namespaces, 1)
+ self.parser.setContentHandler(self.pulldom)
+
+
+class SAX2DOMTestCase(unittest.TestCase):
+
+ def confirm(self, test, testname="Test"):
+ self.assertTrue(test, testname)
+
+ def test_basic(self):
+ """Ensure SAX2DOM can parse from a stream."""
+ with io.StringIO(SMALL_SAMPLE) as fin:
+ sd = SAX2DOMTestHelper(fin, xml.sax.make_parser(),
+ len(SMALL_SAMPLE))
+ for evt, node in sd:
+ if evt == pulldom.START_ELEMENT and node.tagName == "html":
+ break
+ # Because the buffer is the same length as the XML, all the
+ # nodes should have been parsed and added:
+ self.assertGreater(len(node.childNodes), 0)
+
+ def testSAX2DOM(self):
+ """Ensure SAX2DOM expands nodes as expected."""
+ sax2dom = pulldom.SAX2DOM()
+ sax2dom.startDocument()
+ sax2dom.startElement("doc", {})
+ sax2dom.characters("text")
+ sax2dom.startElement("subelm", {})
+ sax2dom.characters("text")
+ sax2dom.endElement("subelm")
+ sax2dom.characters("text")
+ sax2dom.endElement("doc")
+ sax2dom.endDocument()
+
+ doc = sax2dom.document
+ root = doc.documentElement
+ (text1, elm1, text2) = root.childNodes
+ text3 = elm1.childNodes[0]
+
+ self.assertIsNone(text1.previousSibling)
+ self.assertIs(text1.nextSibling, elm1)
+ self.assertIs(elm1.previousSibling, text1)
+ self.assertIs(elm1.nextSibling, text2)
+ self.assertIs(text2.previousSibling, elm1)
+ self.assertIsNone(text2.nextSibling)
+ self.assertIsNone(text3.previousSibling)
+ self.assertIsNone(text3.nextSibling)
+
+ self.assertIs(root.parentNode, doc)
+ self.assertIs(text1.parentNode, root)
+ self.assertIs(elm1.parentNode, root)
+ self.assertIs(text2.parentNode, root)
+ self.assertIs(text3.parentNode, elm1)
+ doc.unlink()
+
+
+def test_main():
+ run_unittest(PullDOMTestCase, ThoroughTestCase, SAX2DOMTestCase)
+
+
+if __name__ == "__main__":
+ test_main()
diff --git a/Lib/test/test_pydoc.py b/Lib/test/test_pydoc.py
index 92f7963..4a83dff 100644
--- a/Lib/test/test_pydoc.py
+++ b/Lib/test/test_pydoc.py
@@ -191,7 +191,7 @@ war</tt></dd></dl>
missing_pattern = "no Python documentation found for '%s'"
# output pattern for module with bad imports
-badimport_pattern = "problem in %s - ImportError: No module named %s"
+badimport_pattern = "problem in %s - ImportError: No module named %r"
def run_pydoc(module_name, *args):
"""
@@ -249,6 +249,8 @@ class PydocDocTest(unittest.TestCase):
@unittest.skipIf(sys.flags.optimize >= 2,
"Docstrings are omitted with -O2 and above")
+ @unittest.skipIf(hasattr(sys, 'gettrace') and sys.gettrace(),
+ 'trace function introduces __locals__ unexpectedly')
def test_html_doc(self):
result, doc_loc = get_pydoc_html(pydoc_mod)
mod_file = inspect.getabsfile(pydoc_mod)
@@ -264,6 +266,8 @@ class PydocDocTest(unittest.TestCase):
@unittest.skipIf(sys.flags.optimize >= 2,
"Docstrings are omitted with -O2 and above")
+ @unittest.skipIf(hasattr(sys, 'gettrace') and sys.gettrace(),
+ 'trace function introduces __locals__ unexpectedly')
def test_text_doc(self):
result, doc_loc = get_pydoc_text(pydoc_mod)
expected_text = expected_text_pattern % \
@@ -341,6 +345,8 @@ class PydocDocTest(unittest.TestCase):
@unittest.skipIf(sys.flags.optimize >= 2,
'Docstrings are omitted with -O2 and above')
+ @unittest.skipIf(hasattr(sys, 'gettrace') and sys.gettrace(),
+ 'trace function introduces __locals__ unexpectedly')
def test_help_output_redirect(self):
# issue 940286, if output is set in Helper, then all output from
# Helper.help should be redirected
diff --git a/Lib/test/test_re.py b/Lib/test/test_re.py
index fe8bc34..e3d10f7 100644
--- a/Lib/test/test_re.py
+++ b/Lib/test/test_re.py
@@ -428,7 +428,7 @@ class ReTests(unittest.TestCase):
self.assertEqual(m.span(), span)
def test_re_escape(self):
- alnum_chars = string.ascii_letters + string.digits
+ alnum_chars = string.ascii_letters + string.digits + '_'
p = ''.join(chr(i) for i in range(256))
for c in p:
if c in alnum_chars:
@@ -441,7 +441,7 @@ class ReTests(unittest.TestCase):
self.assertMatch(re.escape(p), p)
def test_re_escape_byte(self):
- alnum_chars = (string.ascii_letters + string.digits).encode('ascii')
+ alnum_chars = (string.ascii_letters + string.digits + '_').encode('ascii')
p = bytes(range(256))
for i in p:
b = bytes([i])
diff --git a/Lib/test/test_reprlib.py b/Lib/test/test_reprlib.py
index b0dc4d7..e476941 100644
--- a/Lib/test/test_reprlib.py
+++ b/Lib/test/test_reprlib.py
@@ -234,7 +234,7 @@ class LongReprTest(unittest.TestCase):
touch(os.path.join(self.subpkgname, self.pkgname + '.py'))
from areallylongpackageandmodulenametotestreprtruncation.areallylongpackageandmodulenametotestreprtruncation import areallylongpackageandmodulenametotestreprtruncation
eq(repr(areallylongpackageandmodulenametotestreprtruncation),
- "<module '%s' from '%s'>" % (areallylongpackageandmodulenametotestreprtruncation.__name__, areallylongpackageandmodulenametotestreprtruncation.__file__))
+ "<module %r from %r>" % (areallylongpackageandmodulenametotestreprtruncation.__name__, areallylongpackageandmodulenametotestreprtruncation.__file__))
eq(repr(sys), "<module 'sys' (built-in)>")
def test_type(self):
diff --git a/Lib/test/test_richcmp.py b/Lib/test/test_richcmp.py
index f8f3717..0b629dc 100644
--- a/Lib/test/test_richcmp.py
+++ b/Lib/test/test_richcmp.py
@@ -220,6 +220,7 @@ class MiscTest(unittest.TestCase):
for func in (do, operator.not_):
self.assertRaises(Exc, func, Bad())
+ @support.no_tracing
def test_recursion(self):
# Check that comparison for recursive objects fails gracefully
from collections import UserList
diff --git a/Lib/test/test_runpy.py b/Lib/test/test_runpy.py
index ad3ab39..00f34b1 100644
--- a/Lib/test/test_runpy.py
+++ b/Lib/test/test_runpy.py
@@ -6,7 +6,8 @@ import sys
import re
import tempfile
import py_compile
-from test.support import forget, make_legacy_pyc, run_unittest, unload, verbose
+from test.support import (
+ forget, make_legacy_pyc, run_unittest, unload, verbose, no_tracing)
from test.script_helper import (
make_pkg, make_script, make_zip_pkg, make_zip_script, temp_dir)
@@ -395,6 +396,7 @@ argv0 = sys.argv[0]
msg = "can't find '__main__' module in %r" % zip_name
self._check_import_error(zip_name, msg)
+ @no_tracing
def test_main_recursion_error(self):
with temp_dir() as script_dir, temp_dir() as dummy_dir:
mod_name = '__main__'
diff --git a/Lib/test/test_sax.py b/Lib/test/test_sax.py
index 0f6a1ca..bddb375 100644
--- a/Lib/test/test_sax.py
+++ b/Lib/test/test_sax.py
@@ -20,8 +20,8 @@ import unittest
TEST_XMLFILE = findfile("test.xml", subdir="xmltestdata")
TEST_XMLFILE_OUT = findfile("test.xml.out", subdir="xmltestdata")
try:
- TEST_XMLFILE.encode("utf8")
- TEST_XMLFILE_OUT.encode("utf8")
+ TEST_XMLFILE.encode("utf-8")
+ TEST_XMLFILE_OUT.encode("utf-8")
except UnicodeEncodeError:
raise unittest.SkipTest("filename is not encodable to utf8")
diff --git a/Lib/test/test_scope.py b/Lib/test/test_scope.py
index fbc87aa..129a18a 100644
--- a/Lib/test/test_scope.py
+++ b/Lib/test/test_scope.py
@@ -1,5 +1,5 @@
import unittest
-from test.support import check_syntax_error, run_unittest
+from test.support import check_syntax_error, cpython_only, run_unittest
class ScopeTests(unittest.TestCase):
@@ -496,23 +496,22 @@ class ScopeTests(unittest.TestCase):
self.assertNotIn("x", varnames)
self.assertIn("y", varnames)
+ @cpython_only
def testLocalsClass_WithTrace(self):
# Issue23728: after the trace function returns, the locals()
# dictionary is used to update all variables, this used to
# include free variables. But in class statements, free
# variables are not inserted...
import sys
+ self.addCleanup(sys.settrace, sys.gettrace())
sys.settrace(lambda a,b,c:None)
- try:
- x = 12
+ x = 12
- class C:
- def f(self):
- return x
+ class C:
+ def f(self):
+ return x
- self.assertEqual(x, 12) # Used to raise UnboundLocalError
- finally:
- sys.settrace(None)
+ self.assertEqual(x, 12) # Used to raise UnboundLocalError
def testBoundAndFree(self):
# var is bound and free in class
@@ -527,6 +526,7 @@ class ScopeTests(unittest.TestCase):
inst = f(3)()
self.assertEqual(inst.a, inst.m())
+ @cpython_only
def testInteractionWithTraceFunc(self):
import sys
@@ -543,6 +543,7 @@ class ScopeTests(unittest.TestCase):
class TestClass:
pass
+ self.addCleanup(sys.settrace, sys.gettrace())
sys.settrace(tracer)
adaptgetter("foo", TestClass, (1, ""))
sys.settrace(None)
diff --git a/Lib/test/test_select.py b/Lib/test/test_select.py
index fe92f45..4a13ade 100644
--- a/Lib/test/test_select.py
+++ b/Lib/test/test_select.py
@@ -20,6 +20,7 @@ class SelectTestCase(unittest.TestCase):
self.assertRaises(TypeError, select.select, [self.Nope()], [], [])
self.assertRaises(TypeError, select.select, [self.Almost()], [], [])
self.assertRaises(TypeError, select.select, [], [], [], "not a number")
+ self.assertRaises(ValueError, select.select, [], [], [], -1)
def test_returned_list_identity(self):
# See issue #8329
diff --git a/Lib/test/test_shelve.py b/Lib/test/test_shelve.py
index 3e73f52..13c1265 100644
--- a/Lib/test/test_shelve.py
+++ b/Lib/test/test_shelve.py
@@ -2,7 +2,7 @@ import unittest
import shelve
import glob
from test import support
-from collections import MutableMapping
+from collections.abc import MutableMapping
from test.test_dbm import dbm_iterator
def L1(s):
@@ -129,8 +129,8 @@ class TestCase(unittest.TestCase):
shelve.Shelf(d)[key] = [1]
self.assertIn(key.encode('utf-8'), d)
# but a different one can be given
- shelve.Shelf(d, keyencoding='latin1')[key] = [1]
- self.assertIn(key.encode('latin1'), d)
+ shelve.Shelf(d, keyencoding='latin-1')[key] = [1]
+ self.assertIn(key.encode('latin-1'), d)
# with all consequences
s = shelve.Shelf(d, keyencoding='ascii')
self.assertRaises(UnicodeEncodeError, s.__setitem__, key, [1])
diff --git a/Lib/test/test_smtplib.py b/Lib/test/test_smtplib.py
index 4651f37..d973faa 100644
--- a/Lib/test/test_smtplib.py
+++ b/Lib/test/test_smtplib.py
@@ -424,6 +424,9 @@ sim_lists = {'list-1':['Mr.A@somewhere.com','Mrs.C@somewhereesle.com'],
# Simulated SMTP channel & server
class SimSMTPChannel(smtpd.SMTPChannel):
+ # For testing failures in QUIT when using the context manager API.
+ quit_response = None
+
def __init__(self, extra_features, *args, **kw):
self._extrafeatures = ''.join(
[ "250-{0}\r\n".format(x) for x in extra_features ])
@@ -475,19 +478,31 @@ class SimSMTPChannel(smtpd.SMTPChannel):
else:
self.push('550 No access for you!')
+ def smtp_QUIT(self, arg):
+ # args is ignored
+ if self.quit_response is None:
+ super(SimSMTPChannel, self).smtp_QUIT(arg)
+ else:
+ self.push(self.quit_response)
+ self.close_when_done()
+
def handle_error(self):
raise
class SimSMTPServer(smtpd.SMTPServer):
+ # For testing failures in QUIT when using the context manager API.
+ quit_response = None
+
def __init__(self, *args, **kw):
self._extra_features = []
smtpd.SMTPServer.__init__(self, *args, **kw)
def handle_accepted(self, conn, addr):
- self._SMTPchannel = SimSMTPChannel(self._extra_features,
- self, conn, addr)
+ self._SMTPchannel = SimSMTPChannel(
+ self._extra_features, self, conn, addr)
+ self._SMTPchannel.quit_response = self.quit_response
def process_message(self, peer, mailfrom, rcpttos, data):
pass
@@ -620,6 +635,25 @@ class SMTPSimTests(unittest.TestCase):
self.assertIn(sim_auth_credentials['cram-md5'], str(err))
smtp.close()
+ def test_with_statement(self):
+ with smtplib.SMTP(HOST, self.port) as smtp:
+ code, message = smtp.noop()
+ self.assertEqual(code, 250)
+ self.assertRaises(smtplib.SMTPServerDisconnected, smtp.send, b'foo')
+ with smtplib.SMTP(HOST, self.port) as smtp:
+ smtp.close()
+ self.assertRaises(smtplib.SMTPServerDisconnected, smtp.send, b'foo')
+
+ def test_with_statement_QUIT_failure(self):
+ self.serv.quit_response = '421 QUIT FAILED'
+ with self.assertRaises(smtplib.SMTPResponseException) as error:
+ with smtplib.SMTP(HOST, self.port) as smtp:
+ smtp.noop()
+ self.assertEqual(error.exception.smtp_code, 421)
+ self.assertEqual(error.exception.smtp_error, b'QUIT FAILED')
+ # We don't need to clean up self.serv.quit_response because a new
+ # server is always instantiated in setUp().
+
#TODO: add tests for correct AUTH method fallback now that the
#test infrastructure can support it.
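
The with-statement tests above rely on the new context manager support in smtplib.SMTP; a minimal usage sketch, assuming an SMTP server reachable at a hypothetical localhost:8025:

    import smtplib
    with smtplib.SMTP('localhost', 8025) as smtp:
        code, message = smtp.noop()
    # __exit__ has issued QUIT and closed the connection, so any further
    # smtp.send(...) raises smtplib.SMTPServerDisconnected.
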
diff --git a/Lib/test/test_socket.py b/Lib/test/test_socket.py
index e745502..6ee94b5 100644
--- a/Lib/test/test_socket.py
+++ b/Lib/test/test_socket.py
@@ -18,6 +18,7 @@ import contextlib
from weakref import proxy
import signal
import math
+import pickle
try:
import fcntl
except ImportError:
@@ -44,7 +45,7 @@ def linux_version():
return 0, 0, 0
HOST = support.HOST
-MSG = 'Michael Gilfix was here\u1234\r\n'.encode('utf8') ## test unicode string and carriage return
+MSG = 'Michael Gilfix was here\u1234\r\n'.encode('utf-8') ## test unicode string and carriage return
SUPPORTS_IPV6 = socket.has_ipv6 and try_address('::1', family=socket.AF_INET6)
try:
@@ -325,6 +326,26 @@ class GeneralModuleTests(unittest.TestCase):
if not fqhn in all_host_names:
self.fail("Error testing host resolution mechanisms. (fqdn: %s, all: %s)" % (fqhn, repr(all_host_names)))
+ @unittest.skipUnless(hasattr(socket, 'sethostname'), "test needs socket.sethostname()")
+ @unittest.skipUnless(hasattr(socket, 'gethostname'), "test needs socket.gethostname()")
+ def test_sethostname(self):
+ oldhn = socket.gethostname()
+ try:
+ socket.sethostname('new')
+ except socket.error as e:
+ if e.errno == errno.EPERM:
+ self.skipTest("test should be run as root")
+ else:
+ raise
+ try:
+ # running test as root!
+ self.assertEqual(socket.gethostname(), 'new')
+ # Should work with bytes objects too
+ socket.sethostname(b'bar')
+ self.assertEqual(socket.gethostname(), 'bar')
+ finally:
+ socket.sethostname(oldhn)
+
def testRefCountGetNameInfo(self):
# Testing reference count for getnameinfo
if hasattr(sys, "getrefcount"):
@@ -744,6 +765,12 @@ class GeneralModuleTests(unittest.TestCase):
fp.close()
self.assertEqual(repr(fp), "<_io.BufferedReader name=-1>")
+ def test_pickle(self):
+ sock = socket.socket()
+ with sock:
+ for protocol in range(pickle.HIGHEST_PROTOCOL + 1):
+ self.assertRaises(TypeError, pickle.dumps, sock, protocol)
+
@unittest.skipUnless(thread, 'Threading required for this test.')
class BasicTCPTest(SocketConnectedTest):
@@ -1065,7 +1092,7 @@ class FileObjectClassTestCase(SocketConnectedTest):
"""
bufsize = -1 # Use default buffer size
- encoding = 'utf8'
+ encoding = 'utf-8'
errors = 'strict'
newline = None
@@ -1286,7 +1313,7 @@ class FileObjectInterruptedTestCase(unittest.TestCase):
data = b''
else:
data = ''
- expecting = expecting.decode('utf8')
+ expecting = expecting.decode('utf-8')
while len(data) != len(expecting):
part = fo.read(size)
if not part:
@@ -1448,7 +1475,7 @@ class UnicodeReadFileObjectClassTestCase(FileObjectClassTestCase):
"""Tests for socket.makefile() in text mode (rather than binary)"""
read_mode = 'r'
- read_msg = MSG.decode('utf8')
+ read_msg = MSG.decode('utf-8')
write_mode = 'wb'
write_msg = MSG
newline = ''
@@ -1460,7 +1487,7 @@ class UnicodeWriteFileObjectClassTestCase(FileObjectClassTestCase):
read_mode = 'rb'
read_msg = MSG
write_mode = 'w'
- write_msg = MSG.decode('utf8')
+ write_msg = MSG.decode('utf-8')
newline = ''
@@ -1468,9 +1495,9 @@ class UnicodeReadWriteFileObjectClassTestCase(FileObjectClassTestCase):
"""Tests for socket.makefile() in text mode (rather than binary)"""
read_mode = 'r'
- read_msg = MSG.decode('utf8')
+ read_msg = MSG.decode('utf-8')
write_mode = 'w'
- write_msg = MSG.decode('utf8')
+ write_msg = MSG.decode('utf-8')
newline = ''
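
The new test_pickle asserts that socket objects refuse pickling at every protocol; roughly:

    import pickle
    import socket
    with socket.socket() as sock:
        for proto in range(pickle.HIGHEST_PROTOCOL + 1):
            try:
                pickle.dumps(sock, proto)
            except TypeError:
                pass  # sockets cannot be pickled at any protocol
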
diff --git a/Lib/test/test_strlit.py b/Lib/test/test_strlit.py
index 9eb30e9..fb9cdbb 100644
--- a/Lib/test/test_strlit.py
+++ b/Lib/test/test_strlit.py
@@ -130,7 +130,7 @@ class TestLiterals(unittest.TestCase):
self.assertRaises(SyntaxError, self.check_encoding, "utf-8", extra)
def test_file_utf8(self):
- self.check_encoding("utf8")
+ self.check_encoding("utf-8")
def test_file_iso_8859_1(self):
self.check_encoding("iso-8859-1")
diff --git a/Lib/test/test_subprocess.py b/Lib/test/test_subprocess.py
index 4ec754c..e8abfef 100644
--- a/Lib/test/test_subprocess.py
+++ b/Lib/test/test_subprocess.py
@@ -58,6 +58,8 @@ class BaseTestCase(unittest.TestCase):
# shutdown time. That frustrates tests trying to check stderr produced
# from a spawned Python process.
actual = support.strip_python_stderr(stderr)
+ # strip_python_stderr also strips whitespace, so we do too.
+ expected = expected.strip()
self.assertEqual(actual, expected, msg)
@@ -69,6 +71,15 @@ class ProcessTestCase(BaseTestCase):
"import sys; sys.exit(47)"])
self.assertEqual(rc, 47)
+ def test_call_timeout(self):
+ # call() function with timeout argument; we want to test that the child
+ # process gets killed when the timeout expires. If the child isn't
+ # killed, this call will deadlock since subprocess.call waits for the
+ # child.
+ self.assertRaises(subprocess.TimeoutExpired, subprocess.call,
+ [sys.executable, "-c", "while True: pass"],
+ timeout=0.1)
+
def test_check_call_zero(self):
# check_call() function with zero return code
rc = subprocess.check_call([sys.executable, "-c",
@@ -111,6 +122,20 @@ class ProcessTestCase(BaseTestCase):
self.fail("Expected ValueError when stdout arg supplied.")
self.assertIn('stdout', c.exception.args[0])
+ def test_check_output_timeout(self):
+ # check_output() function with timeout arg
+ with self.assertRaises(subprocess.TimeoutExpired) as c:
+ output = subprocess.check_output(
+ [sys.executable, "-c",
+ "import sys; sys.stdout.write('BDFL')\n"
+ "sys.stdout.flush()\n"
+ "while True: pass"],
+ # Some heavily loaded buildbots (sparc Debian 3.x) require
+ # this much time to start and print.
+ timeout=3)
+ self.fail("Expected TimeoutExpired.")
+ self.assertEqual(c.exception.output, b'BDFL')
+
def test_call_kwargs(self):
# call() function with keyword args
newenv = os.environ.copy()
@@ -300,6 +325,31 @@ class ProcessTestCase(BaseTestCase):
rc = subprocess.call([sys.executable, "-c", cmd], stdout=1)
self.assertEqual(rc, 2)
+ def test_stdout_devnull(self):
+ p = subprocess.Popen([sys.executable, "-c",
+ 'for i in range(10240):'
+ 'print("x" * 1024)'],
+ stdout=subprocess.DEVNULL)
+ p.wait()
+ self.assertEqual(p.stdout, None)
+
+ def test_stderr_devnull(self):
+ p = subprocess.Popen([sys.executable, "-c",
+ 'import sys\n'
+ 'for i in range(10240):'
+ 'sys.stderr.write("x" * 1024)'],
+ stderr=subprocess.DEVNULL)
+ p.wait()
+ self.assertEqual(p.stderr, None)
+
+ def test_stdin_devnull(self):
+ p = subprocess.Popen([sys.executable, "-c",
+ 'import sys;'
+ 'sys.stdin.read(1)'],
+ stdin=subprocess.DEVNULL)
+ p.wait()
+ self.assertEqual(p.stdin, None)
+
def test_cwd(self):
tmpdir = tempfile.gettempdir()
# We cannot use os.path.realpath to canonicalize the path,
@@ -368,6 +418,41 @@ class ProcessTestCase(BaseTestCase):
self.assertEqual(stdout, b"banana")
self.assertStderrEqual(stderr, b"pineapple")
+ def test_communicate_timeout(self):
+ p = subprocess.Popen([sys.executable, "-c",
+ 'import sys,os,time;'
+ 'sys.stderr.write("pineapple\\n");'
+ 'time.sleep(1);'
+ 'sys.stderr.write("pear\\n");'
+ 'sys.stdout.write(sys.stdin.read())'],
+ universal_newlines=True,
+ stdin=subprocess.PIPE,
+ stdout=subprocess.PIPE,
+ stderr=subprocess.PIPE)
+ self.assertRaises(subprocess.TimeoutExpired, p.communicate, "banana",
+ timeout=0.3)
+ # Make sure we can keep waiting for it, and that we get the whole output
+ # after it completes.
+ (stdout, stderr) = p.communicate()
+ self.assertEqual(stdout, "banana")
+ self.assertStderrEqual(stderr.encode(), b"pineapple\npear\n")
+
+ def test_communicate_timeout_large_output(self):
+ # Test an expiring timeout while the child is outputting lots of data.
+ p = subprocess.Popen([sys.executable, "-c",
+ 'import sys,os,time;'
+ 'sys.stdout.write("a" * (64 * 1024));'
+ 'time.sleep(0.2);'
+ 'sys.stdout.write("a" * (64 * 1024));'
+ 'time.sleep(0.2);'
+ 'sys.stdout.write("a" * (64 * 1024));'
+ 'time.sleep(0.2);'
+ 'sys.stdout.write("a" * (64 * 1024));'],
+ stdout=subprocess.PIPE)
+ self.assertRaises(subprocess.TimeoutExpired, p.communicate, timeout=0.4)
+ (stdout, _) = p.communicate()
+ self.assertEqual(len(stdout), 4 * 64 * 1024)
+
# Test for the fd leak reported in http://bugs.python.org/issue2791.
def test_communicate_pipe_fd_leak(self):
for stdin_pipe in (False, True):
@@ -564,6 +649,15 @@ class ProcessTestCase(BaseTestCase):
# Subsequent invocations should just return the returncode
self.assertEqual(p.wait(), 0)
+ def test_wait_timeout(self):
+ p = subprocess.Popen([sys.executable,
+ "-c", "import time; time.sleep(0.1)"])
+ with self.assertRaises(subprocess.TimeoutExpired) as c:
+ p.wait(timeout=0.01)
+ self.assertIn("0.01", str(c.exception)) # For coverage of __str__.
+ # Some heavily loaded buildbots (sparc Debian 3.x) require this much
+ # time to start.
+ self.assertEqual(p.wait(timeout=3), 0)
def test_invalid_bufsize(self):
# an invalid type of the bufsize argument should raise
@@ -1083,6 +1177,11 @@ class POSIXProcessTestCase(BaseTestCase):
exitcode = subprocess.call([abs_program, "-c", "pass"])
self.assertEqual(exitcode, 0)
+ # absolute bytes path as a string
+ cmd = b"'" + abs_program + b"' -c pass"
+ exitcode = subprocess.call(cmd, shell=True)
+ self.assertEqual(exitcode, 0)
+
# bytes program, unicode PATH
env = os.environ.copy()
env["PATH"] = path
@@ -1233,7 +1332,7 @@ class POSIXProcessTestCase(BaseTestCase):
stdout, stderr = p.communicate()
self.assertEqual(0, p.returncode, "sigchild_ignore.py exited"
" non-zero with this error:\n%s" %
- stderr.decode('utf8'))
+ stderr.decode('utf-8'))
def test_select_unbuffered(self):
# Issue #11459: bufsize=0 should really set the pipes as
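
A condensed sketch of the timeout and DEVNULL features these tests cover (the timings here are illustrative only):

    import subprocess
    import sys
    try:
        subprocess.call([sys.executable, "-c", "while True: pass"], timeout=0.1)
    except subprocess.TimeoutExpired:
        pass  # the child is killed once the timeout expires
    p = subprocess.Popen([sys.executable, "-c", "print('x' * 1024)"],
                         stdout=subprocess.DEVNULL)
    p.wait()
    assert p.stdout is None  # DEVNULL means no pipe is created
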
diff --git a/Lib/test/test_sys.py b/Lib/test/test_sys.py
index db27cdf..e18019c 100644
--- a/Lib/test/test_sys.py
+++ b/Lib/test/test_sys.py
@@ -303,6 +303,7 @@ class SysModuleTest(unittest.TestCase):
self.assertEqual(sys.getdlopenflags(), oldflags+1)
sys.setdlopenflags(oldflags)
+ @test.support.refcount_test
def test_refcount(self):
# n here must be a global in order for this test to pass while
# tracing with a python function. Tracing calls PyFrame_FastToLocals
@@ -500,7 +501,7 @@ class SysModuleTest(unittest.TestCase):
def test_sys_flags(self):
self.assertTrue(sys.flags)
- attrs = ("debug", "division_warning",
+ attrs = ("debug",
"inspect", "interactive", "optimize", "dont_write_bytecode",
"no_user_site", "no_site", "ignore_environment", "verbose",
"bytes_warning", "quiet")
diff --git a/Lib/test/test_sys_settrace.py b/Lib/test/test_sys_settrace.py
index 43134e9..acb8e29 100644
--- a/Lib/test/test_sys_settrace.py
+++ b/Lib/test/test_sys_settrace.py
@@ -251,6 +251,7 @@ class TraceTestCase(unittest.TestCase):
def setUp(self):
self.using_gc = gc.isenabled()
gc.disable()
+ self.addCleanup(sys.settrace, sys.gettrace())
def tearDown(self):
if self.using_gc:
@@ -389,6 +390,9 @@ class TraceTestCase(unittest.TestCase):
class RaisingTraceFuncTestCase(unittest.TestCase):
+ def setUp(self):
+ self.addCleanup(sys.settrace, sys.gettrace())
+
def trace(self, frame, event, arg):
"""A trace function that raises an exception in response to a
specific trace event."""
@@ -688,6 +692,10 @@ def no_jump_without_trace_function():
class JumpTestCase(unittest.TestCase):
+ def setUp(self):
+ self.addCleanup(sys.settrace, sys.gettrace())
+ sys.settrace(None)
+
def compare_jump_output(self, expected, received):
if received != expected:
self.fail( "Outputs don't match:\n" +
@@ -739,6 +747,8 @@ class JumpTestCase(unittest.TestCase):
def test_18_no_jump_to_non_integers(self):
self.run_test(no_jump_to_non_integers)
def test_19_no_jump_without_trace_function(self):
+ # Must set sys.settrace(None) in setUp(), else the condition is not
+ # triggered.
no_jump_without_trace_function()
def test_20_large_function(self):
diff --git a/Lib/test/test_tarfile.py b/Lib/test/test_tarfile.py
index 68e094d..a645bf2 100644
--- a/Lib/test/test_tarfile.py
+++ b/Lib/test/test_tarfile.py
@@ -1289,7 +1289,7 @@ class UstarUnicodeTest(unittest.TestCase):
self._test_unicode_filename("utf7")
def test_utf8_filename(self):
- self._test_unicode_filename("utf8")
+ self._test_unicode_filename("utf-8")
def _test_unicode_filename(self, encoding):
tar = tarfile.open(tmpname, "w", format=self.format, encoding=encoding, errors="strict")
@@ -1368,7 +1368,7 @@ class GNUUnicodeTest(UstarUnicodeTest):
def test_bad_pax_header(self):
# Test for issue #8633. GNU tar <= 1.23 creates raw binary fields
# without a hdrcharset=BINARY header.
- for encoding, name in (("utf8", "pax/bad-pax-\udce4\udcf6\udcfc"),
+ for encoding, name in (("utf-8", "pax/bad-pax-\udce4\udcf6\udcfc"),
("iso8859-1", "pax/bad-pax-\xe4\xf6\xfc"),):
with tarfile.open(tarname, encoding=encoding, errors="surrogateescape") as tar:
try:
@@ -1383,7 +1383,7 @@ class PAXUnicodeTest(UstarUnicodeTest):
def test_binary_header(self):
# Test a POSIX.1-2008 compatible header with a hdrcharset=BINARY field.
- for encoding, name in (("utf8", "pax/hdrcharset-\udce4\udcf6\udcfc"),
+ for encoding, name in (("utf-8", "pax/hdrcharset-\udce4\udcf6\udcfc"),
("iso8859-1", "pax/hdrcharset-\xe4\xf6\xfc"),):
with tarfile.open(tarname, encoding=encoding, errors="surrogateescape") as tar:
try:
diff --git a/Lib/test/test_threading.py b/Lib/test/test_threading.py
index 5f99b2e..c107652 100644
--- a/Lib/test/test_threading.py
+++ b/Lib/test/test_threading.py
@@ -427,6 +427,14 @@ class ThreadTests(BaseTestCase):
t.daemon = True
self.assertTrue('daemon' in repr(t))
+ def test_daemon_param(self):
+ t = threading.Thread()
+ self.assertFalse(t.daemon)
+ t = threading.Thread(daemon=False)
+ self.assertFalse(t.daemon)
+ t = threading.Thread(daemon=True)
+ self.assertTrue(t.daemon)
+
class ThreadJoinOnShutdown(BaseTestCase):
@@ -677,6 +685,10 @@ class ThreadingExceptionTests(BaseTestCase):
thread.start()
self.assertRaises(RuntimeError, setattr, thread, "daemon", True)
+ def test_releasing_unacquired_lock(self):
+ lock = threading.Lock()
+ self.assertRaises(RuntimeError, lock.release)
+
class LockTests(lock_tests.LockTests):
locktype = staticmethod(threading.Lock)
diff --git a/Lib/test/test_threadsignals.py b/Lib/test/test_threadsignals.py
index 46e405a..fa26583 100644
--- a/Lib/test/test_threadsignals.py
+++ b/Lib/test/test_threadsignals.py
@@ -30,9 +30,14 @@ def handle_signals(sig,frame):
# a function that will be spawned as a separate thread.
def send_signals():
+ print("send_signals: enter (thread %s)" % thread.get_ident(), file=sys.stderr)
+ print("send_signals: raise SIGUSR1", file=sys.stderr)
os.kill(process_pid, signal.SIGUSR1)
+ print("send_signals: raise SIGUSR2", file=sys.stderr)
os.kill(process_pid, signal.SIGUSR2)
+ print("send_signals: release signalled_all", file=sys.stderr)
signalled_all.release()
+ print("send_signals: exit (thread %s)" % thread.get_ident(), file=sys.stderr)
class ThreadSignals(unittest.TestCase):
@@ -41,9 +46,12 @@ class ThreadSignals(unittest.TestCase):
# We spawn a thread, have the thread send two signals, and
# wait for it to finish. Check that we got both signals
# and that they were run by the main thread.
+ print("test_signals: acquire lock (thread %s)" % thread.get_ident(), file=sys.stderr)
signalled_all.acquire()
self.spawnSignallingThread()
+ print("test_signals: wait lock (thread %s)" % thread.get_ident(), file=sys.stderr)
signalled_all.acquire()
+ print("test_signals: lock acquired", file=sys.stderr)
# the signals that we asked the kernel to send
# will come back, but we don't know when.
# (it might even be after the thread exits
diff --git a/Lib/test/test_trace.py b/Lib/test/test_trace.py
index 461d1d8..d9bef38 100644
--- a/Lib/test/test_trace.py
+++ b/Lib/test/test_trace.py
@@ -102,6 +102,7 @@ class TracedClass(object):
class TestLineCounts(unittest.TestCase):
"""White-box testing of line-counting, via runfunc"""
def setUp(self):
+ self.addCleanup(sys.settrace, sys.gettrace())
self.tracer = Trace(count=1, trace=0, countfuncs=0, countcallers=0)
self.my_py_filename = fix_ext_py(__file__)
@@ -192,6 +193,7 @@ class TestRunExecCounts(unittest.TestCase):
"""A simple sanity test of line-counting, via runctx (exec)"""
def setUp(self):
self.my_py_filename = fix_ext_py(__file__)
+ self.addCleanup(sys.settrace, sys.gettrace())
def test_exec_counts(self):
self.tracer = Trace(count=1, trace=0, countfuncs=0, countcallers=0)
@@ -218,6 +220,7 @@ class TestRunExecCounts(unittest.TestCase):
class TestFuncs(unittest.TestCase):
"""White-box testing of funcs tracing"""
def setUp(self):
+ self.addCleanup(sys.settrace, sys.gettrace())
self.tracer = Trace(count=0, trace=0, countfuncs=1)
self.filemod = my_file_and_modname()
@@ -242,6 +245,8 @@ class TestFuncs(unittest.TestCase):
}
self.assertEqual(self.tracer.results().calledfuncs, expected)
+ @unittest.skipIf(hasattr(sys, 'gettrace') and sys.gettrace(),
+ 'pre-existing trace function throws off measurements')
def test_inst_method_calling(self):
obj = TracedClass(20)
self.tracer.runfunc(obj.inst_method_calling, 1)
@@ -257,9 +262,12 @@ class TestFuncs(unittest.TestCase):
class TestCallers(unittest.TestCase):
"""White-box testing of callers tracing"""
def setUp(self):
+ self.addCleanup(sys.settrace, sys.gettrace())
self.tracer = Trace(count=0, trace=0, countcallers=1)
self.filemod = my_file_and_modname()
+ @unittest.skipIf(hasattr(sys, 'gettrace') and sys.gettrace(),
+ 'pre-existing trace function throws off measurements')
def test_loop_caller_importing(self):
self.tracer.runfunc(traced_func_importing_caller, 1)
@@ -280,6 +288,9 @@ class TestCallers(unittest.TestCase):
# Created separately for issue #3821
class TestCoverage(unittest.TestCase):
+ def setUp(self):
+ self.addCleanup(sys.settrace, sys.gettrace())
+
def tearDown(self):
rmtree(TESTFN)
unlink(TESTFN)
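
The recurring setUp() additions in these trace tests follow one pattern: capture the currently installed trace function and register it for restoration, so a failing test cannot leave a stray tracer behind. In isolation (the class name is hypothetical):

    import sys
    import unittest
    class TraceRestoringTest(unittest.TestCase):
        def setUp(self):
            # Re-install whatever trace function was active before the test,
            # even if the test body fails or installs its own tracer.
            self.addCleanup(sys.settrace, sys.gettrace())
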
diff --git a/Lib/test/test_unicode.py b/Lib/test/test_unicode.py
index 65b26c5..ace3736 100644
--- a/Lib/test/test_unicode.py
+++ b/Lib/test/test_unicode.py
@@ -614,7 +614,7 @@ class UnicodeTest(string_tests.CommonTest,
self.assertEqual('{0!s}'.format(G('data')), 'string is data')
msg = 'object.__format__ with a non-empty format string is deprecated'
- with support.check_warnings((msg, PendingDeprecationWarning)):
+ with support.check_warnings((msg, DeprecationWarning)):
self.assertEqual('{0:^10}'.format(E('data')), ' E(data) ')
self.assertEqual('{0:^10s}'.format(E('data')), ' E(data) ')
self.assertEqual('{0:>15s}'.format(G('data')), ' string is data')
@@ -1182,11 +1182,14 @@ class UnicodeTest(string_tests.CommonTest,
self.assertEqual('hello'.encode('ascii'), b'hello')
self.assertEqual('hello'.encode('utf-7'), b'hello')
self.assertEqual('hello'.encode('utf-8'), b'hello')
- self.assertEqual('hello'.encode('utf8'), b'hello')
+ self.assertEqual('hello'.encode('utf-8'), b'hello')
self.assertEqual('hello'.encode('utf-16-le'), b'h\000e\000l\000l\000o\000')
self.assertEqual('hello'.encode('utf-16-be'), b'\000h\000e\000l\000l\000o')
self.assertEqual('hello'.encode('latin-1'), b'hello')
+ # Default encoding is utf-8
+ self.assertEqual('\u2603'.encode(), b'\xe2\x98\x83')
+
# Roundtrip safety for BMP (just the first 1024 chars)
for c in range(1024):
u = chr(c)
@@ -1427,7 +1430,9 @@ class UnicodeTest(string_tests.CommonTest,
# Test PyUnicode_FromFormat()
def test_from_format(self):
support.import_module('ctypes')
- from ctypes import pythonapi, py_object, c_int
+ from ctypes import (pythonapi, py_object,
+ c_int, c_long, c_longlong, c_ssize_t,
+ c_uint, c_ulong, c_ulonglong, c_size_t)
if sys.maxunicode == 65535:
name = "PyUnicodeUCS2_FromFormat"
else:
@@ -1452,13 +1457,40 @@ class UnicodeTest(string_tests.CommonTest,
'string, got a non-ASCII byte: 0xe9$',
PyUnicode_FromFormat, b'unicode\xe9=%s', 'ascii')
+ # test "%c"
self.assertEqual(PyUnicode_FromFormat(b'%c', c_int(0xabcd)), '\uabcd')
self.assertEqual(PyUnicode_FromFormat(b'%c', c_int(0x10ffff)), '\U0010ffff')
- # other tests
+ # test "%"
+ self.assertEqual(PyUnicode_FromFormat(b'%'), '%')
+ self.assertEqual(PyUnicode_FromFormat(b'%%'), '%')
+ self.assertEqual(PyUnicode_FromFormat(b'%%s'), '%s')
+ self.assertEqual(PyUnicode_FromFormat(b'[%%]'), '[%]')
+ self.assertEqual(PyUnicode_FromFormat(b'%%%s', b'abc'), '%abc')
+
+ # test integer formats (%i, %d, %u)
+ self.assertEqual(PyUnicode_FromFormat(b'%03i', c_int(10)), '010')
+ self.assertEqual(PyUnicode_FromFormat(b'%0.4i', c_int(10)), '0010')
+ self.assertEqual(PyUnicode_FromFormat(b'%i', c_int(-123)), '-123')
+ self.assertEqual(PyUnicode_FromFormat(b'%li', c_long(-123)), '-123')
+ self.assertEqual(PyUnicode_FromFormat(b'%lli', c_longlong(-123)), '-123')
+ self.assertEqual(PyUnicode_FromFormat(b'%zi', c_ssize_t(-123)), '-123')
+
+ self.assertEqual(PyUnicode_FromFormat(b'%d', c_int(-123)), '-123')
+ self.assertEqual(PyUnicode_FromFormat(b'%ld', c_long(-123)), '-123')
+ self.assertEqual(PyUnicode_FromFormat(b'%lld', c_longlong(-123)), '-123')
+ self.assertEqual(PyUnicode_FromFormat(b'%zd', c_ssize_t(-123)), '-123')
+
+ self.assertEqual(PyUnicode_FromFormat(b'%u', c_uint(123)), '123')
+ self.assertEqual(PyUnicode_FromFormat(b'%lu', c_ulong(123)), '123')
+ self.assertEqual(PyUnicode_FromFormat(b'%llu', c_ulonglong(123)), '123')
+ self.assertEqual(PyUnicode_FromFormat(b'%zu', c_size_t(123)), '123')
+
+ # test %A
text = PyUnicode_FromFormat(b'%%A:%A', 'abc\xe9\uabcd\U0010ffff')
self.assertEqual(text, r"%A:'abc\xe9\uabcd\U0010ffff'")
+ # test %V
text = PyUnicode_FromFormat(b'repr=%V', 'abc', b'xyz')
self.assertEqual(text, 'repr=abc')
@@ -1472,6 +1504,13 @@ class UnicodeTest(string_tests.CommonTest,
text = PyUnicode_FromFormat(b'repr=%V', None, b'abc\xff')
self.assertEqual(text, 'repr=abc\ufffd')
+ # not supported: copy the raw format string. These tests are just here
+ # to check for crashes and should not be considered as specifications.
+ self.assertEqual(PyUnicode_FromFormat(b'%1%s', b'abc'), '%s')
+ self.assertEqual(PyUnicode_FromFormat(b'%1abc'), '%1abc')
+ self.assertEqual(PyUnicode_FromFormat(b'%+i', c_int(10)), '%+i')
+ self.assertEqual(PyUnicode_FromFormat(b'%.%s', b'abc'), '%.%s')
+
# Test PyUnicode_AsWideChar()
def test_aswidechar(self):
from _testcapi import unicode_aswidechar
diff --git a/Lib/test/test_urllib.py b/Lib/test/test_urllib.py
index 275b2eb..526760d 100644
--- a/Lib/test/test_urllib.py
+++ b/Lib/test/test_urllib.py
@@ -285,7 +285,7 @@ class urlretrieve_FileTests(unittest.TestCase):
def constructLocalFileUrl(self, filePath):
filePath = os.path.abspath(filePath)
try:
- filePath.encode("utf8")
+ filePath.encode("utf-8")
except UnicodeEncodeError:
raise unittest.SkipTest("filePath is not encodable to utf8")
return "file://%s" % urllib.request.pathname2url(filePath)
diff --git a/Lib/test/test_urllib2.py b/Lib/test/test_urllib2.py
index e9fb2fc..b6e4e91 100644
--- a/Lib/test/test_urllib2.py
+++ b/Lib/test/test_urllib2.py
@@ -602,7 +602,7 @@ class OpenerDirectorTests(unittest.TestCase):
def sanepathname2url(path):
try:
- path.encode("utf8")
+ path.encode("utf-8")
except UnicodeEncodeError:
raise unittest.SkipTest("path is not encodable to utf8")
urlpath = urllib.request.pathname2url(path)
diff --git a/Lib/test/test_uuid.py b/Lib/test/test_uuid.py
index 43fa656..7bc59ed 100644
--- a/Lib/test/test_uuid.py
+++ b/Lib/test/test_uuid.py
@@ -471,14 +471,14 @@ class TestUUID(TestCase):
if pid == 0:
os.close(fds[0])
value = uuid.uuid4()
- os.write(fds[1], value.hex.encode('latin1'))
+ os.write(fds[1], value.hex.encode('latin-1'))
os._exit(0)
else:
os.close(fds[1])
parent_value = uuid.uuid4().hex
os.waitpid(pid, 0)
- child_value = os.read(fds[0], 100).decode('latin1')
+ child_value = os.read(fds[0], 100).decode('latin-1')
self.assertNotEqual(parent_value, child_value)
diff --git a/Lib/test/test_xml_etree.py b/Lib/test/test_xml_etree.py
index 22fafa9..40c2291 100644
--- a/Lib/test/test_xml_etree.py
+++ b/Lib/test/test_xml_etree.py
@@ -22,7 +22,7 @@ from xml.etree import ElementTree as ET
SIMPLE_XMLFILE = findfile("simple.xml", subdir="xmltestdata")
try:
- SIMPLE_XMLFILE.encode("utf8")
+ SIMPLE_XMLFILE.encode("utf-8")
except UnicodeEncodeError:
raise unittest.SkipTest("filename is not encodable to utf8")
SIMPLE_NS_XMLFILE = findfile("simple-ns.xml", subdir="xmltestdata")
@@ -1255,8 +1255,8 @@ def processinginstruction():
>>> ET.tostring(ET.PI('test', '<testing&>'))
b'<?test <testing&>?>'
- >>> ET.tostring(ET.PI('test', '<testing&>\xe3'), 'latin1')
- b"<?xml version='1.0' encoding='latin1'?>\\n<?test <testing&>\\xe3?>"
+ >>> ET.tostring(ET.PI('test', '<testing&>\xe3'), 'latin-1')
+ b"<?xml version='1.0' encoding='latin-1'?>\\n<?test <testing&>\\xe3?>"
"""
#
diff --git a/Lib/test/test_xmlrpc_net.py b/Lib/test/test_xmlrpc_net.py
index 5df79f0..b9853ed 100644
--- a/Lib/test/test_xmlrpc_net.py
+++ b/Lib/test/test_xmlrpc_net.py
@@ -1,6 +1,6 @@
#!/usr/bin/env python3
-import collections
+import collections.abc
import errno
import socket
import sys
@@ -48,7 +48,7 @@ class CurrentTimeTest(unittest.TestCase):
# Perform a minimal sanity check on the result, just to be sure
# the request means what we think it means.
- self.assertIsInstance(builders, collections.Sequence)
+ self.assertIsInstance(builders, collections.abc.Sequence)
self.assertTrue([x for x in builders if "3.x" in x], builders)
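
With the collections package split, the ABCs are imported from collections.abc; the isinstance check itself is unchanged:

    import collections.abc
    assert isinstance([], collections.abc.Sequence)
    assert isinstance({}, collections.abc.MutableMapping)
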
diff --git a/Lib/test/test_zipimport_support.py b/Lib/test/test_zipimport_support.py
index 610dfa0..f0e2fbc 100644
--- a/Lib/test/test_zipimport_support.py
+++ b/Lib/test/test_zipimport_support.py
@@ -163,20 +163,19 @@ class ZipSupportTests(unittest.TestCase):
test_zipped_doctest.test_DocTestRunner.verbose_flag,
test_zipped_doctest.test_Example,
test_zipped_doctest.test_debug,
- test_zipped_doctest.test_pdb_set_trace,
- test_zipped_doctest.test_pdb_set_trace_nested,
test_zipped_doctest.test_testsource,
test_zipped_doctest.test_trailing_space_in_test,
test_zipped_doctest.test_DocTestSuite,
test_zipped_doctest.test_DocTestFinder,
]
- # These remaining tests are the ones which need access
+ # These tests are the ones which need access
# to the data files, so we don't run them
fail_due_to_missing_data_files = [
test_zipped_doctest.test_DocFileSuite,
test_zipped_doctest.test_testfile,
test_zipped_doctest.test_unittest_reportflags,
]
+
for obj in known_good_tests:
_run_object_doctest(obj, test_zipped_doctest)
finally:
diff --git a/Lib/threading.py b/Lib/threading.py
index fd93f74..cb09afa 100644
--- a/Lib/threading.py
+++ b/Lib/threading.py
@@ -622,7 +622,7 @@ class Thread(_Verbose):
#XXX __exc_clear = _sys.exc_clear
def __init__(self, group=None, target=None, name=None,
- args=(), kwargs=None, verbose=None):
+ args=(), kwargs=None, verbose=None, *, daemon=None):
assert group is None, "group argument must be None for now"
_Verbose.__init__(self, verbose)
if kwargs is None:
@@ -631,7 +631,10 @@ class Thread(_Verbose):
self._name = str(name or _newname())
self._args = args
self._kwargs = kwargs
- self._daemonic = self._set_daemon()
+ if daemon is not None:
+ self._daemonic = daemon
+ else:
+ self._daemonic = current_thread().daemon
self._ident = None
self._started = Event()
self._stopped = False
@@ -648,10 +651,6 @@ class Thread(_Verbose):
self._block.__init__()
self._started._reset_internal_locks()
- def _set_daemon(self):
- # Overridden in _MainThread and _DummyThread
- return current_thread().daemon
-
def __repr__(self):
assert self._initialized, "Thread.__init__() was not called"
status = "initial"
@@ -948,15 +947,12 @@ class _Timer(Thread):
class _MainThread(Thread):
def __init__(self):
- Thread.__init__(self, name="MainThread")
+ Thread.__init__(self, name="MainThread", daemon=False)
self._started.set()
self._set_ident()
with _active_limbo_lock:
_active[self._ident] = self
- def _set_daemon(self):
- return False
-
def _exitfunc(self):
self._stop()
t = _pickSomeNonDaemonThread()
@@ -988,7 +984,7 @@ def _pickSomeNonDaemonThread():
class _DummyThread(Thread):
def __init__(self):
- Thread.__init__(self, name=_newname("Dummy-%d"))
+ Thread.__init__(self, name=_newname("Dummy-%d"), daemon=True)
# Thread._block consumes an OS-level locking primitive, which
# can never be used by a _DummyThread. Since a _DummyThread
@@ -1000,9 +996,6 @@ class _DummyThread(Thread):
with _active_limbo_lock:
_active[self._ident] = self
- def _set_daemon(self):
- return True
-
def join(self, timeout=None):
assert False, "cannot join a dummy thread"
diff --git a/Lib/timeit.py b/Lib/timeit.py
index 8e04645..f2510ea 100644
--- a/Lib/timeit.py
+++ b/Lib/timeit.py
@@ -79,10 +79,10 @@ else:
# being indented 8 spaces.
template = """
def inner(_it, _timer):
- %(setup)s
+ {setup}
_t0 = _timer()
for _i in _it:
- %(stmt)s
+ {stmt}
_t1 = _timer()
return _t1 - _t0
"""
@@ -126,9 +126,9 @@ class Timer:
stmt = reindent(stmt, 8)
if isinstance(setup, str):
setup = reindent(setup, 4)
- src = template % {'stmt': stmt, 'setup': setup}
+ src = template.format(stmt=stmt, setup=setup)
elif hasattr(setup, '__call__'):
- src = template % {'stmt': stmt, 'setup': '_setup()'}
+ src = template.format(stmt=stmt, setup='_setup()')
ns['_setup'] = setup
else:
raise ValueError("setup is neither a string nor callable")
diff --git a/Lib/unittest/case.py b/Lib/unittest/case.py
index 01c5a7b..8086bf9 100644
--- a/Lib/unittest/case.py
+++ b/Lib/unittest/case.py
@@ -469,7 +469,7 @@ class TestCase(object):
warnings.warn("TestResult has no addExpectedFailure method, reporting as passes",
RuntimeWarning)
result.addSuccess(self)
-
+ return result
finally:
result.stopTest(self)
if orig_result is None:
@@ -966,48 +966,6 @@ class TestCase(object):
self.fail(self._formatMessage(msg, standardMsg))
- def assertSameElements(self, expected_seq, actual_seq, msg=None):
- """An unordered sequence specific comparison.
-
- Raises with an error message listing which elements of expected_seq
- are missing from actual_seq and vice versa if any.
-
- Duplicate elements are ignored when comparing *expected_seq* and
- *actual_seq*. It is the equivalent of ``assertEqual(set(expected),
- set(actual))`` but it works with sequences of unhashable objects as
- well.
- """
- warnings.warn('assertSameElements is deprecated',
- DeprecationWarning)
- try:
- expected = set(expected_seq)
- actual = set(actual_seq)
- missing = sorted(expected.difference(actual))
- unexpected = sorted(actual.difference(expected))
- except TypeError:
- # Fall back to slower list-compare if any of the objects are
- # not hashable.
- expected = list(expected_seq)
- actual = list(actual_seq)
- try:
- expected.sort()
- actual.sort()
- except TypeError:
- missing, unexpected = unorderable_list_difference(expected,
- actual)
- else:
- missing, unexpected = sorted_list_difference(expected, actual)
- errors = []
- if missing:
- errors.append('Expected, but missing:\n %s' %
- safe_repr(missing))
- if unexpected:
- errors.append('Unexpected, but present:\n %s' %
- safe_repr(unexpected))
- if errors:
- standardMsg = '\n'.join(errors)
- self.fail(self._formatMessage(msg, standardMsg))
-
def assertCountEqual(self, first, second, msg=None):
"""An unordered sequence comparison asserting that the same elements,
diff --git a/Lib/unittest/test/test_case.py b/Lib/unittest/test/test_case.py
index 1db433f..82931c0 100644
--- a/Lib/unittest/test/test_case.py
+++ b/Lib/unittest/test/test_case.py
@@ -386,27 +386,62 @@ class Test_TestCase(unittest.TestCase, TestEquality, TestHashing):
self.assertIsInstance(Foo().id(), str)
- # "If result is omitted or None, a temporary result object is created
- # and used, but is not made available to the caller. As TestCase owns the
+ # "If result is omitted or None, a temporary result object is created,
+ # used, and is made available to the caller. As TestCase owns the
# temporary result startTestRun and stopTestRun are called.
def test_run__uses_defaultTestResult(self):
events = []
+ defaultResult = LoggingResult(events)
class Foo(unittest.TestCase):
def test(self):
events.append('test')
def defaultTestResult(self):
- return LoggingResult(events)
+ return defaultResult
# Make run() find a result object on its own
- Foo('test').run()
+ result = Foo('test').run()
+ self.assertIs(result, defaultResult)
expected = ['startTestRun', 'startTest', 'test', 'addSuccess',
'stopTest', 'stopTestRun']
self.assertEqual(events, expected)
+
+ # "The result object is returned to run's caller"
+ def test_run__returns_given_result(self):
+
+ class Foo(unittest.TestCase):
+ def test(self):
+ pass
+
+ result = unittest.TestResult()
+
+ retval = Foo('test').run(result)
+ self.assertIs(retval, result)
+
+
+ # "The same effect [as method run] may be had by simply calling the
+ # TestCase instance."
+ def test_call__invoking_an_instance_delegates_to_run(self):
+ resultIn = unittest.TestResult()
+ resultOut = unittest.TestResult()
+
+ class Foo(unittest.TestCase):
+ def test(self):
+ pass
+
+ def run(self, result):
+ self.assertIs(result, resultIn)
+ return resultOut
+
+ retval = Foo('test')(resultIn)
+
+ self.assertIs(retval, resultOut)
+
+
def testShortDescriptionWithoutDocstring(self):
self.assertIsNone(self.shortDescription())
@@ -1104,7 +1139,6 @@ test case
(self.assert_, (True,)),
(self.failUnlessRaises, (TypeError, lambda _: 3.14 + 'spam')),
(self.failIf, (False,)),
- (self.assertSameElements, ([1, 1, 2, 3], [1, 2, 3])),
(self.assertDictContainsSubset, (dict(a=1, b=2), dict(a=1, b=2, c=3))),
(self.assertRaisesRegexp, (KeyError, 'foo', lambda: {}['foo'])),
(self.assertRegexpMatches, ('bar', 'bar')),
@@ -1113,18 +1147,20 @@ test case
with self.assertWarns(DeprecationWarning):
meth(*args)
- def testDeprecatedFailMethods(self):
- """Test that the deprecated fail* methods get removed in 3.3"""
+ # Disable this test for now. When the version in which the fail* methods
+ # will be removed is decided, re-enable it and update the version.
+ def _testDeprecatedFailMethods(self):
+ """Test that the deprecated fail* methods get removed in 3.x"""
if sys.version_info[:2] < (3, 3):
return
deprecated_names = [
'failIfEqual', 'failUnlessEqual', 'failUnlessAlmostEqual',
'failIfAlmostEqual', 'failUnless', 'failUnlessRaises', 'failIf',
- 'assertSameElements', 'assertDictContainsSubset',
+ 'assertDictContainsSubset',
]
for deprecated_name in deprecated_names:
with self.assertRaises(AttributeError):
- getattr(self, deprecated_name) # remove these in 3.3
+ getattr(self, deprecated_name) # remove these in 3.x
def testDeepcopy(self):
# Issue: 5660
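
The unittest change makes TestCase.run() return the result object even when the caller did not supply one; in short (the test class name here is hypothetical):

    import unittest
    class Smoke(unittest.TestCase):
        def test_ok(self):
            pass
    result = Smoke('test_ok').run()   # run() now returns the TestResult it used
    assert result.wasSuccessful()
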
diff --git a/Lib/unittest/test/test_loader.py b/Lib/unittest/test/test_loader.py
index f7e31a5..d1b9ef5 100644
--- a/Lib/unittest/test/test_loader.py
+++ b/Lib/unittest/test/test_loader.py
@@ -239,7 +239,7 @@ class Test_TestLoader(unittest.TestCase):
try:
loader.loadTestsFromName('sdasfasfasdf')
except ImportError as e:
- self.assertEqual(str(e), "No module named sdasfasfasdf")
+ self.assertEqual(str(e), "No module named 'sdasfasfasdf'")
else:
self.fail("TestLoader.loadTestsFromName failed to raise ImportError")
@@ -619,7 +619,7 @@ class Test_TestLoader(unittest.TestCase):
try:
loader.loadTestsFromNames(['sdasfasfasdf'])
except ImportError as e:
- self.assertEqual(str(e), "No module named sdasfasfasdf")
+ self.assertEqual(str(e), "No module named 'sdasfasfasdf'")
else:
self.fail("TestLoader.loadTestsFromNames failed to raise ImportError")
diff --git a/Lib/urllib/request.py b/Lib/urllib/request.py
index 5325d62..c80b7d1 100644
--- a/Lib/urllib/request.py
+++ b/Lib/urllib/request.py
@@ -1866,7 +1866,7 @@ class URLopener:
if encoding == 'base64':
import base64
# XXX is this encoding/decoding ok?
- data = base64.decodebytes(data.encode('ascii')).decode('latin1')
+ data = base64.decodebytes(data.encode('ascii')).decode('latin-1')
else:
data = unquote(data)
msg.append('Content-Length: %d' % len(data))
diff --git a/Lib/urllib/response.py b/Lib/urllib/response.py
index 9859642..bce2287 100644
--- a/Lib/urllib/response.py
+++ b/Lib/urllib/response.py
@@ -33,12 +33,15 @@ class addbase(object):
id(self), self.fp)
def close(self):
+ if self.fp:
+ self.fp.close()
+ self.fp = None
self.read = None
self.readline = None
self.readlines = None
self.fileno = None
- if self.fp: self.fp.close()
- self.fp = None
+ self.__iter__ = None
+ self.__next__ = None
def __enter__(self):
if self.fp is None: