Diffstat (limited to 'Lib')
-rw-r--r--  Lib/_osx_support.py | 4
-rw-r--r--  Lib/_pyio.py | 34
-rw-r--r--  Lib/_strptime.py | 2
-rw-r--r--  Lib/abc.py | 6
-rw-r--r--  Lib/aifc.py | 12
-rw-r--r--  Lib/argparse.py | 28
-rw-r--r--  Lib/ast.py | 6
-rw-r--r--  Lib/asynchat.py | 8
-rw-r--r--  Lib/asyncore.py | 20
-rw-r--r--  Lib/bz2.py | 44
-rwxr-xr-x  Lib/cProfile.py | 53
-rwxr-xr-x  Lib/cgi.py | 6
-rw-r--r--  Lib/chunk.py | 6
-rw-r--r--  Lib/collections/__init__.py | 11
-rw-r--r--  Lib/collections/abc.py | 3
-rw-r--r--  Lib/compileall.py | 8
-rw-r--r--  Lib/concurrent/futures/_base.py | 3
-rw-r--r--  Lib/concurrent/futures/process.py | 6
-rw-r--r--  Lib/concurrent/futures/thread.py | 2
-rw-r--r--  Lib/configparser.py | 2
-rw-r--r--  Lib/contextlib.py | 14
-rw-r--r--  Lib/ctypes/__init__.py | 6
-rw-r--r--  Lib/ctypes/test/test_checkretval.py | 2
-rw-r--r--  Lib/ctypes/test/test_win32.py | 2
-rw-r--r--  Lib/ctypes/util.py | 5
-rw-r--r--  Lib/dbm/__init__.py | 16
-rw-r--r--  Lib/dbm/dumb.py | 10
-rw-r--r--  Lib/decimal.py | 4
-rw-r--r--  Lib/difflib.py | 35
-rw-r--r--  Lib/distutils/__init__.py | 2
-rw-r--r--  Lib/distutils/ccompiler.py | 5
-rw-r--r--  Lib/distutils/command/bdist.py | 3
-rw-r--r--  Lib/distutils/command/bdist_dumb.py | 8
-rw-r--r--  Lib/distutils/command/build_ext.py | 26
-rw-r--r--  Lib/distutils/command/build_scripts.py | 2
-rw-r--r--  Lib/distutils/command/install.py | 15
-rw-r--r--  Lib/distutils/command/upload.py | 2
-rw-r--r--  Lib/distutils/config.py | 11
-rw-r--r--  Lib/distutils/core.py | 2
-rw-r--r--  Lib/distutils/cygwinccompiler.py | 2
-rw-r--r--  Lib/distutils/dir_util.py | 4
-rw-r--r--  Lib/distutils/emxccompiler.py | 315
-rw-r--r--  Lib/distutils/errors.py | 4
-rw-r--r--  Lib/distutils/file_util.py | 16
-rw-r--r--  Lib/distutils/msvc9compiler.py | 2
-rw-r--r--  Lib/distutils/spawn.py | 24
-rw-r--r--  Lib/distutils/sysconfig.py | 30
-rw-r--r--  Lib/distutils/tests/test_bdist_dumb.py | 2
-rw-r--r--  Lib/distutils/tests/test_config.py | 4
-rw-r--r--  Lib/distutils/tests/test_install.py | 2
-rw-r--r--  Lib/distutils/tests/test_upload.py | 8
-rw-r--r--  Lib/distutils/tests/test_util.py | 2
-rw-r--r--  Lib/distutils/util.py | 10
-rw-r--r--  Lib/doctest.py | 8
-rw-r--r--  Lib/email/_header_value_parser.py | 3
-rw-r--r--  Lib/email/feedparser.py | 27
-rw-r--r--  Lib/email/iterators.py | 6
-rw-r--r--  Lib/email/parser.py | 3
-rw-r--r--  Lib/filecmp.py | 16
-rw-r--r--  Lib/fileinput.py | 12
-rw-r--r--  Lib/fractions.py | 14
-rw-r--r--  Lib/ftplib.py | 163
-rw-r--r--  Lib/functools.py | 29
-rw-r--r--  Lib/genericpath.py | 34
-rw-r--r--  Lib/getpass.py | 2
-rw-r--r--  Lib/gettext.py | 14
-rw-r--r--  Lib/glob.py | 5
-rw-r--r--  Lib/gzip.py | 20
-rw-r--r--  Lib/hashlib.py | 15
-rw-r--r--  Lib/http/client.py | 44
-rw-r--r--  Lib/http/cookiejar.py | 31
-rw-r--r--  Lib/http/server.py | 28
-rw-r--r--  Lib/idlelib/EditorWindow.py | 6
-rw-r--r--  Lib/idlelib/FileList.py | 2
-rw-r--r--  Lib/idlelib/GrepDialog.py | 4
-rw-r--r--  Lib/idlelib/IOBinding.py | 6
-rw-r--r--  Lib/idlelib/NEWS.txt | 4
-rw-r--r--  Lib/idlelib/OutputWindow.py | 2
-rw-r--r--  Lib/idlelib/PathBrowser.py | 2
-rw-r--r--  Lib/idlelib/PyShell.py | 20
-rw-r--r--  Lib/idlelib/TreeWidget.py | 4
-rw-r--r--  Lib/idlelib/configHandler.py | 14
-rw-r--r--  Lib/idlelib/help.txt | 558
-rw-r--r--  Lib/idlelib/idlever.py | 2
-rw-r--r--  Lib/idlelib/rpc.py | 10
-rw-r--r--  Lib/idlelib/run.py | 4
-rw-r--r--  Lib/idlelib/textView.py | 2
-rw-r--r--  Lib/imaplib.py | 8
-rw-r--r--  Lib/imghdr.py | 2
-rw-r--r--  Lib/importlib/__init__.py | 2
-rw-r--r--  Lib/importlib/_bootstrap.py | 248
-rw-r--r--  Lib/importlib/abc.py | 177
-rw-r--r--  Lib/inspect.py | 20
-rw-r--r--  Lib/io.py | 2
-rw-r--r--  Lib/json/__init__.py | 25
-rw-r--r--  Lib/json/decoder.py | 28
-rw-r--r--  Lib/json/encoder.py | 29
-rw-r--r--  Lib/json/scanner.py | 4
-rw-r--r--  Lib/json/tool.py | 3
-rwxr-xr-x  Lib/keyword.py | 5
-rw-r--r--  Lib/lib2to3/btm_utils.py | 6
-rw-r--r--  Lib/lib2to3/fixer_util.py | 23
-rw-r--r--  Lib/lib2to3/fixes/fix_intern.py | 21
-rw-r--r--  Lib/lib2to3/fixes/fix_reload.py | 28
-rw-r--r--  Lib/lib2to3/main.py | 4
-rw-r--r--  Lib/lib2to3/pgen2/conv.py | 4
-rw-r--r--  Lib/lib2to3/pgen2/driver.py | 2
-rw-r--r--  Lib/lib2to3/pgen2/grammar.py | 10
-rw-r--r--  Lib/lib2to3/pytree.py | 9
-rw-r--r--  Lib/lib2to3/refactor.py | 6
-rwxr-xr-x  Lib/lib2to3/tests/pytree_idempotency.py | 2
-rw-r--r--  Lib/lib2to3/tests/test_fixers.py | 59
-rw-r--r--  Lib/linecache.py | 8
-rw-r--r--  Lib/logging/__init__.py | 35
-rw-r--r--  Lib/logging/config.py | 70
-rw-r--r--  Lib/logging/handlers.py | 63
-rw-r--r--  Lib/lzma.py | 130
-rw-r--r--  Lib/macpath.py | 2
-rw-r--r--  Lib/mailbox.py | 83
-rw-r--r--  Lib/mailcap.py | 2
-rw-r--r--  Lib/mimetypes.py | 8
-rw-r--r--  Lib/modulefinder.py | 2
-rw-r--r--  Lib/multiprocessing/__init__.py | 13
-rw-r--r--  Lib/multiprocessing/connection.py | 16
-rw-r--r--  Lib/multiprocessing/dummy/__init__.py | 27
-rw-r--r--  Lib/multiprocessing/dummy/connection.py | 27
-rw-r--r--  Lib/multiprocessing/forking.py | 24
-rw-r--r--  Lib/multiprocessing/managers.py | 2
-rw-r--r--  Lib/multiprocessing/pool.py | 20
-rw-r--r--  Lib/multiprocessing/queues.py | 4
-rw-r--r--  Lib/netrc.py | 2
-rw-r--r--  Lib/nntplib.py | 4
-rw-r--r--  Lib/ntpath.py | 31
-rw-r--r--  Lib/nturl2path.py | 4
-rw-r--r--  Lib/os.py | 53
-rw-r--r--  Lib/os2emxpath.py | 158
-rwxr-xr-x  Lib/pdb.py | 10
-rw-r--r--  Lib/pkgutil.py | 25
-rw-r--r--  Lib/plat-os2emx/IN.py | 82
-rw-r--r--  Lib/plat-os2emx/SOCKET.py | 106
-rw-r--r--  Lib/plat-os2emx/_emx_link.py | 79
-rw-r--r--  Lib/plat-os2emx/grp.py | 182
-rw-r--r--  Lib/plat-os2emx/pwd.py | 208
-rwxr-xr-x  Lib/plat-os2emx/regen | 7
-rwxr-xr-x  Lib/platform.py | 27
-rw-r--r--  Lib/poplib.py | 91
-rw-r--r--  Lib/posixpath.py | 34
-rw-r--r--  Lib/pprint.py | 39
-rwxr-xr-x  Lib/profile.py | 63
-rw-r--r--  Lib/pstats.py | 12
-rw-r--r--  Lib/pty.py | 13
-rw-r--r--  Lib/py_compile.py | 58
-rwxr-xr-x  Lib/pydoc.py | 26
-rwxr-xr-x  Lib/quopri.py | 2
-rw-r--r--  Lib/random.py | 7
-rw-r--r--  Lib/sched.py | 11
-rw-r--r--  Lib/shelve.py | 9
-rw-r--r--  Lib/shutil.py | 39
-rw-r--r--  Lib/site.py | 36
-rwxr-xr-x  Lib/smtpd.py | 7
-rw-r--r--  Lib/smtplib.py | 8
-rw-r--r--  Lib/sndhdr.py | 17
-rw-r--r--  Lib/socket.py | 2
-rw-r--r--  Lib/socketserver.py | 8
-rw-r--r--  Lib/sqlite3/test/dbapi.py | 18
-rw-r--r--  Lib/ssl.py | 102
-rw-r--r--  Lib/subprocess.py | 62
-rwxr-xr-x  Lib/symbol.py | 4
-rw-r--r--  Lib/symtable.py | 3
-rw-r--r--  Lib/sysconfig.py | 31
-rwxr-xr-x  Lib/tabnanny.py | 2
-rw-r--r--  Lib/tarfile.py | 29
-rw-r--r--  Lib/telnetlib.py | 6
-rw-r--r--  Lib/tempfile.py | 9
-rw-r--r--  Lib/test/__main__.py | 14
-rw-r--r--  Lib/test/badsyntax_future10.py | 3
-rw-r--r--  Lib/test/fork_wait.py | 2
-rw-r--r--  Lib/test/json_tests/test_fail.py | 89
-rw-r--r--  Lib/test/json_tests/test_indent.py | 4
-rw-r--r--  Lib/test/keycert3.pem | 73
-rw-r--r--  Lib/test/keycert4.pem | 73
-rw-r--r--  Lib/test/make_ssl_certs.py | 112
-rw-r--r--  Lib/test/mock_socket.py | 8
-rw-r--r--  Lib/test/multibytecodec_support.py | 2
-rw-r--r--  Lib/test/pycacert.pem | 78
-rw-r--r--  Lib/test/pycakey.pem | 28
-rwxr-xr-x  Lib/test/regrtest.py | 646
-rw-r--r--  Lib/test/sortperf.py | 6
-rw-r--r--  Lib/test/ssl_servers.py | 10
-rw-r--r--  Lib/test/support.py | 52
-rw-r--r--  Lib/test/test_abc.py | 13
-rw-r--r--  Lib/test/test_aifc.py | 18
-rw-r--r--  Lib/test/test_argparse.py | 32
-rwxr-xr-x  Lib/test/test_array.py | 4
-rw-r--r--  Lib/test/test_ast.py | 93
-rw-r--r--  Lib/test/test_asyncore.py | 6
-rw-r--r--  Lib/test/test_bisect.py | 2
-rw-r--r--  Lib/test/test_builtin.py | 19
-rw-r--r--  Lib/test/test_bytes.py | 25
-rw-r--r--  Lib/test/test_bz2.py | 141
-rw-r--r--  Lib/test/test_cmd_line.py | 19
-rw-r--r--  Lib/test/test_codecs.py | 4
-rw-r--r--  Lib/test/test_collections.py | 32
-rw-r--r--  Lib/test/test_complex.py | 2
-rw-r--r--  Lib/test/test_concurrent_futures.py | 22
-rw-r--r--  Lib/test/test_contextlib.py | 22
-rw-r--r--  Lib/test/test_cprofile.py | 2
-rw-r--r--  Lib/test/test_crypt.py | 6
-rw-r--r--  Lib/test/test_csv.py | 21
-rw-r--r--  Lib/test/test_dbm.py | 2
-rw-r--r--  Lib/test/test_devpoll.py | 2
-rw-r--r--  Lib/test/test_doctest.py | 47
-rw-r--r--  Lib/test/test_email/torture_test.py | 2
-rw-r--r--  Lib/test/test_enumerate.py | 10
-rw-r--r--  Lib/test/test_epoll.py | 19
-rw-r--r--  Lib/test/test_exceptions.py | 25
-rw-r--r--  Lib/test/test_fcntl.py | 15
-rw-r--r--  Lib/test/test_file.py | 10
-rw-r--r--  Lib/test/test_filecmp.py | 16
-rw-r--r--  Lib/test/test_fileinput.py | 4
-rw-r--r--  Lib/test/test_fileio.py | 20
-rw-r--r--  Lib/test/test_format.py | 16
-rw-r--r--  Lib/test/test_fractions.py | 32
-rw-r--r--  Lib/test/test_ftplib.py | 25
-rw-r--r--  Lib/test/test_functools.py | 191
-rw-r--r--  Lib/test/test_future.py | 8
-rw-r--r--  Lib/test/test_gc.py | 26
-rw-r--r--  Lib/test/test_generators.py | 21
-rw-r--r--  Lib/test/test_genericpath.py | 82
-rw-r--r--  Lib/test/test_hashlib.py | 127
-rw-r--r--  Lib/test/test_httplib.py | 30
-rw-r--r--  Lib/test/test_httpservers.py | 21
-rw-r--r--  Lib/test/test_imaplib.py | 2
-rw-r--r--  Lib/test/test_imp.py | 14
-rw-r--r--  Lib/test/test_importlib/import_/test___loader__.py | 44
-rw-r--r--  Lib/test/test_importlib/source/test_abc_loader.py | 532
-rw-r--r--  Lib/test/test_importlib/source/test_file_loader.py | 2
-rw-r--r--  Lib/test/test_importlib/test_abc.py | 18
-rw-r--r--  Lib/test/test_importlib/test_api.py | 35
-rw-r--r--  Lib/test/test_importlib/test_util.py | 42
-rw-r--r--  Lib/test/test_inspect.py | 8
-rw-r--r--  Lib/test/test_int.py | 41
-rw-r--r--  Lib/test/test_io.py | 62
-rw-r--r--  Lib/test/test_ioctl.py | 2
-rw-r--r--  Lib/test/test_iterlen.py | 62
-rw-r--r--  Lib/test/test_itertools.py | 5
-rw-r--r--  Lib/test/test_keywordonlyarg.py | 12
-rw-r--r--  Lib/test/test_kqueue.py | 2
-rw-r--r--  Lib/test/test_largefile.py | 4
-rw-r--r--  Lib/test/test_logging.py | 195
-rw-r--r--  Lib/test/test_mailbox.py | 2
-rw-r--r--  Lib/test/test_marshal.py | 127
-rw-r--r--  Lib/test/test_memoryio.py | 12
-rw-r--r--  Lib/test/test_mimetypes.py | 2
-rw-r--r--  Lib/test/test_mmap.py | 14
-rw-r--r--  Lib/test/test_multiprocessing.py | 197
-rw-r--r--  Lib/test/test_nntplib.py | 2
-rw-r--r--  Lib/test/test_normalization.py | 2
-rw-r--r--  Lib/test/test_openpty.py | 2
-rw-r--r--  Lib/test/test_operator.py | 25
-rw-r--r--  Lib/test/test_os.py | 128
-rw-r--r--  Lib/test/test_ossaudiodev.py | 4
-rw-r--r--  Lib/test/test_pep277.py | 4
-rw-r--r--  Lib/test/test_poll.py | 21
-rw-r--r--  Lib/test/test_poplib.py | 157
-rw-r--r--  Lib/test/test_posix.py | 2
-rw-r--r--  Lib/test/test_posixpath.py | 62
-rw-r--r--  Lib/test/test_pprint.py | 38
-rw-r--r--  Lib/test/test_profile.py | 29
-rw-r--r--  Lib/test/test_pty.py | 2
-rw-r--r--  Lib/test/test_pydoc.py | 25
-rw-r--r--  Lib/test/test_random.py | 33
-rw-r--r--  Lib/test/test_range.py | 2
-rw-r--r--  Lib/test/test_regrtest.py | 100
-rw-r--r--  Lib/test/test_resource.py | 2
-rw-r--r--  Lib/test/test_select.py | 4
-rw-r--r--  Lib/test/test_set.py | 2
-rw-r--r--  Lib/test/test_shelve.py | 13
-rw-r--r--  Lib/test/test_shutil.py | 31
-rw-r--r--  Lib/test/test_signal.py | 9
-rw-r--r--  Lib/test/test_site.py | 6
-rw-r--r--  Lib/test/test_slice.py | 114
-rw-r--r--  Lib/test/test_smtplib.py | 14
-rw-r--r--  Lib/test/test_sndhdr.py | 2
-rw-r--r--  Lib/test/test_socket.py | 224
-rw-r--r--  Lib/test/test_socketserver.py | 18
-rw-r--r--  Lib/test/test_ssl.py | 186
-rw-r--r--  Lib/test/test_struct.py | 6
-rw-r--r--  Lib/test/test_subprocess.py | 16
-rw-r--r--  Lib/test/test_sundry.py | 34
-rw-r--r--  Lib/test/test_syntax.py | 4
-rw-r--r--  Lib/test/test_sys.py | 34
-rw-r--r--  Lib/test/test_sysconfig.py | 44
-rw-r--r--  Lib/test/test_tarfile.py | 12
-rw-r--r--  Lib/test/test_tempfile.py | 10
-rw-r--r--  Lib/test/test_thread.py | 2
-rw-r--r--  Lib/test/test_threading.py | 5
-rw-r--r--  Lib/test/test_threadsignals.py | 2
-rw-r--r--  Lib/test/test_timeout.py | 2
-rw-r--r--  Lib/test/test_types.py | 37
-rw-r--r--  Lib/test/test_ucn.py | 2
-rw-r--r--  Lib/test/test_unicode.py | 38
-rw-r--r--  Lib/test/test_unicodedata.py | 2
-rw-r--r--  Lib/test/test_urllib.py | 102
-rw-r--r--  Lib/test/test_urllib2.py | 133
-rw-r--r--  Lib/test/test_urllib2_localnet.py | 30
-rw-r--r--  Lib/test/test_urllib2net.py | 24
-rw-r--r--  Lib/test/test_urllibnet.py | 19
-rwxr-xr-x  Lib/test/test_urlparse.py | 8
-rw-r--r--  Lib/test/test_venv.py | 59
-rw-r--r--  Lib/test/test_wait3.py | 12
-rw-r--r--  Lib/test/test_weakref.py | 140
-rw-r--r--  Lib/test/test_winreg.py | 28
-rw-r--r--  Lib/test/test_winsound.py | 2
-rw-r--r--  Lib/test/test_xml_etree.py | 17
-rw-r--r--  Lib/test/test_xmlrpc.py | 28
-rw-r--r--  Lib/test/test_xmlrpc_net.py | 4
-rw-r--r--  Lib/test/test_zipfile.py | 8
-rw-r--r--  Lib/test/test_zipimport.py | 4
-rw-r--r--  Lib/test/tf_inherit_check.py | 2
-rw-r--r--  Lib/threading.py | 25
-rw-r--r--  Lib/timeit.py | 46
-rw-r--r--  Lib/tkinter/__init__.py | 39
-rw-r--r--  Lib/tkinter/filedialog.py | 4
-rwxr-xr-x  Lib/token.py | 6
-rw-r--r--  Lib/tokenize.py | 2
-rw-r--r--  Lib/trace.py | 10
-rw-r--r--  Lib/traceback.py | 3
-rw-r--r--  Lib/turtle.py | 22
-rw-r--r--  Lib/unittest/case.py | 252
-rw-r--r--  Lib/unittest/loader.py | 18
-rw-r--r--  Lib/unittest/main.py | 5
-rw-r--r--  Lib/unittest/mock.py | 229
-rw-r--r--  Lib/unittest/result.py | 16
-rw-r--r--  Lib/unittest/test/support.py | 38
-rw-r--r--  Lib/unittest/test/test_case.py | 94
-rw-r--r--  Lib/unittest/test/test_discovery.py | 34
-rw-r--r--  Lib/unittest/test/test_program.py | 35
-rw-r--r--  Lib/unittest/test/test_result.py | 58
-rw-r--r--  Lib/unittest/test/test_runner.py | 12
-rw-r--r--  Lib/unittest/test/test_skipping.py | 74
-rw-r--r--  Lib/unittest/test/testmock/testhelpers.py | 59
-rw-r--r--  Lib/unittest/test/testmock/testmock.py | 125
-rw-r--r--  Lib/unittest/test/testmock/testwith.py | 83
-rw-r--r--  Lib/urllib/error.py | 14
-rw-r--r--  Lib/urllib/request.py | 151
-rw-r--r--  Lib/uuid.py | 4
-rw-r--r--  Lib/venv/__init__.py | 23
-rw-r--r--  Lib/venv/scripts/posix/activate.csh | 37
-rw-r--r--  Lib/venv/scripts/posix/activate.fish | 74
-rw-r--r--  Lib/warnings.py | 2
-rw-r--r--  Lib/weakref.py | 59
-rw-r--r--  Lib/webbrowser.py | 88
-rw-r--r--  Lib/xml/dom/expatbuilder.py | 10
-rw-r--r--  Lib/xml/etree/ElementInclude.py | 6
-rw-r--r--  Lib/xml/etree/ElementPath.py | 15
-rw-r--r--  Lib/xml/etree/ElementTree.py | 1103
-rw-r--r--  Lib/xmlrpc/client.py | 9
-rw-r--r--  Lib/zipfile.py | 12
359 files changed, 7555 insertions, 6306 deletions
diff --git a/Lib/_osx_support.py b/Lib/_osx_support.py
index b3aad56..8412e45 100644
--- a/Lib/_osx_support.py
+++ b/Lib/_osx_support.py
@@ -38,7 +38,7 @@ def _find_executable(executable, path=None):
paths = path.split(os.pathsep)
base, ext = os.path.splitext(executable)
- if (sys.platform == 'win32' or os.name == 'os2') and (ext != '.exe'):
+ if (sys.platform == 'win32') and (ext != '.exe'):
executable = executable + '.exe'
if not os.path.isfile(executable):
@@ -94,7 +94,7 @@ def _get_system_version():
_SYSTEM_VERSION = ''
try:
f = open('/System/Library/CoreServices/SystemVersion.plist')
- except IOError:
+ except OSError:
# We're on a plain darwin box, fall back to the default
# behaviour.
pass
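
Note on the IOError -> OSError substitutions above (and repeated throughout this diff): since Python 3.3, PEP 3151 makes IOError, EnvironmentError and socket.error plain aliases of OSError, so catching OSError is behaviour-preserving. A minimal check, assuming a Python 3.3+ interpreter:

    import socket

    # PEP 3151: the legacy names are now aliases of OSError.
    assert IOError is OSError
    assert EnvironmentError is OSError
    assert socket.error is OSError

    try:
        open("/nonexistent/path")
    except OSError as exc:    # also catches what used to be raised as IOError
        print(type(exc).__name__, exc.errno)
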
diff --git a/Lib/_pyio.py b/Lib/_pyio.py
index aab60db..c44f47c 100644
--- a/Lib/_pyio.py
+++ b/Lib/_pyio.py
@@ -34,7 +34,7 @@ BlockingIOError = BlockingIOError
def open(file, mode="r", buffering=-1, encoding=None, errors=None,
newline=None, closefd=True, opener=None):
- r"""Open file and return a stream. Raise IOError upon failure.
+ r"""Open file and return a stream. Raise OSError upon failure.
file is either a text or byte string giving the name (and the path
if the file isn't in the current working directory) of the file to
@@ -200,7 +200,7 @@ def open(file, mode="r", buffering=-1, encoding=None, errors=None,
buffering = DEFAULT_BUFFER_SIZE
try:
bs = os.fstat(raw.fileno()).st_blksize
- except (os.error, AttributeError):
+ except (OSError, AttributeError):
pass
else:
if bs > 1:
@@ -254,7 +254,7 @@ class OpenWrapper:
try:
UnsupportedOperation = io.UnsupportedOperation
except AttributeError:
- class UnsupportedOperation(ValueError, IOError):
+ class UnsupportedOperation(ValueError, OSError):
pass
@@ -278,7 +278,7 @@ class IOBase(metaclass=abc.ABCMeta):
readinto) needed. Text I/O classes work with str data.
Note that calling any method (even inquiries) on a closed stream is
- undefined. Implementations may raise IOError in this case.
+ undefined. Implementations may raise OSError in this case.
IOBase (and its subclasses) support the iterator protocol, meaning
that an IOBase object can be iterated over yielding the lines in a
@@ -294,7 +294,7 @@ class IOBase(metaclass=abc.ABCMeta):
### Internal ###
def _unsupported(self, name):
- """Internal: raise an IOError exception for unsupported operations."""
+ """Internal: raise an OSError exception for unsupported operations."""
raise UnsupportedOperation("%s.%s() not supported" %
(self.__class__.__name__, name))
@@ -441,7 +441,7 @@ class IOBase(metaclass=abc.ABCMeta):
def fileno(self):
"""Returns underlying file descriptor (an int) if one exists.
- An IOError is raised if the IO object does not use a file descriptor.
+ An OSError is raised if the IO object does not use a file descriptor.
"""
self._unsupported("fileno")
@@ -699,13 +699,13 @@ class _BufferedIOMixin(BufferedIOBase):
def seek(self, pos, whence=0):
new_position = self.raw.seek(pos, whence)
if new_position < 0:
- raise IOError("seek() returned an invalid position")
+ raise OSError("seek() returned an invalid position")
return new_position
def tell(self):
pos = self.raw.tell()
if pos < 0:
- raise IOError("tell() returned an invalid position")
+ raise OSError("tell() returned an invalid position")
return pos
def truncate(self, pos=None):
@@ -927,7 +927,7 @@ class BufferedReader(_BufferedIOMixin):
"""Create a new buffered reader using the given readable raw IO object.
"""
if not raw.readable():
- raise IOError('"raw" argument must be readable.')
+ raise OSError('"raw" argument must be readable.')
_BufferedIOMixin.__init__(self, raw)
if buffer_size <= 0:
@@ -1074,7 +1074,7 @@ class BufferedWriter(_BufferedIOMixin):
def __init__(self, raw, buffer_size=DEFAULT_BUFFER_SIZE):
if not raw.writable():
- raise IOError('"raw" argument must be writable.')
+ raise OSError('"raw" argument must be writable.')
_BufferedIOMixin.__init__(self, raw)
if buffer_size <= 0:
@@ -1138,7 +1138,7 @@ class BufferedWriter(_BufferedIOMixin):
errno.EAGAIN,
"write could not complete without blocking", 0)
if n > len(self._write_buf) or n < 0:
- raise IOError("write() returned incorrect number of bytes")
+ raise OSError("write() returned incorrect number of bytes")
del self._write_buf[:n]
def tell(self):
@@ -1174,10 +1174,10 @@ class BufferedRWPair(BufferedIOBase):
The arguments are two RawIO instances.
"""
if not reader.readable():
- raise IOError('"reader" argument must be readable.')
+ raise OSError('"reader" argument must be readable.')
if not writer.writable():
- raise IOError('"writer" argument must be writable.')
+ raise OSError('"writer" argument must be writable.')
self.reader = BufferedReader(reader, buffer_size)
self.writer = BufferedWriter(writer, buffer_size)
@@ -1248,7 +1248,7 @@ class BufferedRandom(BufferedWriter, BufferedReader):
with self._read_lock:
self._reset_read_buf()
if pos < 0:
- raise IOError("seek() returned invalid position")
+ raise OSError("seek() returned invalid position")
return pos
def tell(self):
@@ -1727,7 +1727,7 @@ class TextIOWrapper(TextIOBase):
if not self._seekable:
raise UnsupportedOperation("underlying stream is not seekable")
if not self._telling:
- raise IOError("telling position disabled by next() call")
+ raise OSError("telling position disabled by next() call")
self.flush()
position = self.buffer.tell()
decoder = self._decoder
@@ -1814,7 +1814,7 @@ class TextIOWrapper(TextIOBase):
chars_decoded += len(decoder.decode(b'', final=True))
need_eof = 1
if chars_decoded < chars_to_skip:
- raise IOError("can't reconstruct logical file position")
+ raise OSError("can't reconstruct logical file position")
# The returned cookie corresponds to the last safe start point.
return self._pack_cookie(
@@ -1891,7 +1891,7 @@ class TextIOWrapper(TextIOBase):
# Skip chars_to_skip of the decoded characters.
if len(self._decoded_chars) < chars_to_skip:
- raise IOError("can't restore logical file position")
+ raise OSError("can't restore logical file position")
self._decoded_chars_used = chars_to_skip
# Finally, reset the encoder (merely useful for proper BOM handling)
diff --git a/Lib/_strptime.py b/Lib/_strptime.py
index b0cd3d6..3ecfc2a 100644
--- a/Lib/_strptime.py
+++ b/Lib/_strptime.py
@@ -21,7 +21,7 @@ from datetime import (date as datetime_date,
timezone as datetime_timezone)
try:
from _thread import allocate_lock as _thread_allocate_lock
-except:
+except ImportError:
from _dummy_thread import allocate_lock as _thread_allocate_lock
__all__ = []
diff --git a/Lib/abc.py b/Lib/abc.py
index 09778e8..e807895 100644
--- a/Lib/abc.py
+++ b/Lib/abc.py
@@ -226,3 +226,9 @@ class ABCMeta(type):
# No dice; update negative cache
cls._abc_negative_cache.add(subclass)
return False
+
+class ABC(metaclass=ABCMeta):
+ """Helper class that provides a standard way to create an ABC using
+ inheritance.
+ """
+ pass
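
The ABC helper added above lets an abstract base class be declared by plain inheritance instead of spelling out the metaclass. A minimal sketch of the two equivalent spellings (the Serializer names are illustrative, not part of the diff):

    from abc import ABC, ABCMeta, abstractmethod

    class Serializer(ABC):                        # new spelling
        @abstractmethod
        def dumps(self, obj): ...

    class LegacySerializer(metaclass=ABCMeta):    # pre-existing spelling
        @abstractmethod
        def dumps(self, obj): ...

    # Both are abstract: instantiating either raises TypeError.
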
diff --git a/Lib/aifc.py b/Lib/aifc.py
index a19b38f..67ea5da 100644
--- a/Lib/aifc.py
+++ b/Lib/aifc.py
@@ -334,6 +334,12 @@ class Aifc_read:
# else, assume it is an open file object already
self.initfp(f)
+ def __enter__(self):
+ return self
+
+ def __exit__(self, *args):
+ self.close()
+
#
# User visible methods.
#
@@ -553,6 +559,12 @@ class Aifc_write:
def __del__(self):
self.close()
+ def __enter__(self):
+ return self
+
+ def __exit__(self, *args):
+ self.close()
+
#
# User visible methods.
#
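
With the __enter__/__exit__ methods added above, aifc reader and writer objects can be used in a with statement so the underlying file is closed automatically. A short sketch ("sample.aiff" is a placeholder path, not part of the diff):

    import aifc

    with aifc.open("sample.aiff", "rb") as f:
        print(f.getnchannels(), f.getframerate(), f.getnframes())
    # f.close() has already been called by __exit__ at this point
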
diff --git a/Lib/argparse.py b/Lib/argparse.py
index f25b1b6..5ff755c 100644
--- a/Lib/argparse.py
+++ b/Lib/argparse.py
@@ -606,8 +606,7 @@ class HelpFormatter(object):
pass
else:
self._indent()
- for subaction in get_subactions():
- yield subaction
+ yield from get_subactions()
self._dedent()
def _split_lines(self, text, width):
@@ -1141,11 +1140,17 @@ class FileType(object):
same values as the builtin open() function.
- bufsize -- The file's desired buffer size. Accepts the same values as
the builtin open() function.
+ - encoding -- The file's encoding. Accepts the same values as the
+ builtin open() function.
+ - errors -- A string indicating how encoding and decoding errors are to
+ be handled. Accepts the same value as the builtin open() function.
"""
- def __init__(self, mode='r', bufsize=-1):
+ def __init__(self, mode='r', bufsize=-1, encoding=None, errors=None):
self._mode = mode
self._bufsize = bufsize
+ self._encoding = encoding
+ self._errors = errors
def __call__(self, string):
# the special argument "-" means sys.std{in,out}
@@ -1160,14 +1165,18 @@ class FileType(object):
# all other arguments are used as file names
try:
- return open(string, self._mode, self._bufsize)
- except IOError as e:
+ return open(string, self._mode, self._bufsize, self._encoding,
+ self._errors)
+ except OSError as e:
message = _("can't open '%s': %s")
raise ArgumentTypeError(message % (string, e))
def __repr__(self):
args = self._mode, self._bufsize
- args_str = ', '.join(repr(arg) for arg in args if arg != -1)
+ kwargs = [('encoding', self._encoding), ('errors', self._errors)]
+ args_str = ', '.join([repr(arg) for arg in args if arg != -1] +
+ ['%s=%r' % (kw, arg) for kw, arg in kwargs
+ if arg is not None])
return '%s(%s)' % (type(self).__name__, args_str)
# ===========================
@@ -2001,17 +2010,14 @@ class ArgumentParser(_AttributeHolder, _ActionsContainer):
# replace arguments referencing files with the file content
else:
try:
- args_file = open(arg_string[1:])
- try:
+ with open(arg_string[1:]) as args_file:
arg_strings = []
for arg_line in args_file.read().splitlines():
for arg in self.convert_arg_line_to_args(arg_line):
arg_strings.append(arg)
arg_strings = self._read_args_from_files(arg_strings)
new_arg_strings.extend(arg_strings)
- finally:
- args_file.close()
- except IOError:
+ except OSError:
err = _sys.exc_info()[1]
self.error(str(err))
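
The FileType changes above add encoding and errors parameters that are passed straight through to open(). A usage sketch (the argument name and file name are illustrative):

    import argparse

    parser = argparse.ArgumentParser()
    # The file named on the command line is opened as UTF-8 text and
    # undecodable bytes are replaced instead of raising an error.
    parser.add_argument("infile",
                        type=argparse.FileType("r", encoding="utf-8",
                                               errors="replace"))
    args = parser.parse_args(["notes.txt"])
    print(args.infile.read())
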
diff --git a/Lib/ast.py b/Lib/ast.py
index 13f59f9..02c3b28 100644
--- a/Lib/ast.py
+++ b/Lib/ast.py
@@ -42,7 +42,6 @@ def literal_eval(node_or_string):
Python literal structures: strings, bytes, numbers, tuples, lists, dicts,
sets, booleans, and None.
"""
- _safe_names = {'None': None, 'True': True, 'False': False}
if isinstance(node_or_string, str):
node_or_string = parse(node_or_string, mode='eval')
if isinstance(node_or_string, Expression):
@@ -61,9 +60,8 @@ def literal_eval(node_or_string):
elif isinstance(node, Dict):
return dict((_convert(k), _convert(v)) for k, v
in zip(node.keys, node.values))
- elif isinstance(node, Name):
- if node.id in _safe_names:
- return _safe_names[node.id]
+ elif isinstance(node, NameConstant):
+ return node.value
elif isinstance(node, UnaryOp) and \
isinstance(node.op, (UAdd, USub)) and \
isinstance(node.operand, (Num, UnaryOp, BinOp)):
diff --git a/Lib/asynchat.py b/Lib/asynchat.py
index 4e26bb5..f055d63 100644
--- a/Lib/asynchat.py
+++ b/Lib/asynchat.py
@@ -56,8 +56,8 @@ class async_chat (asyncore.dispatcher):
# these are overridable defaults
- ac_in_buffer_size = 4096
- ac_out_buffer_size = 4096
+ ac_in_buffer_size = 65536
+ ac_out_buffer_size = 65536
# we don't want to enable the use of encoding by default, because that is a
# sign of an application bug that we don't want to pass silently
@@ -114,7 +114,7 @@ class async_chat (asyncore.dispatcher):
try:
data = self.recv (self.ac_in_buffer_size)
- except socket.error as why:
+ except OSError as why:
self.handle_error()
return
@@ -243,7 +243,7 @@ class async_chat (asyncore.dispatcher):
# send the data
try:
num_sent = self.send(data)
- except socket.error:
+ except OSError:
self.handle_error()
return
diff --git a/Lib/asyncore.py b/Lib/asyncore.py
index 909d9f6..f146643 100644
--- a/Lib/asyncore.py
+++ b/Lib/asyncore.py
@@ -112,7 +112,7 @@ def readwrite(obj, flags):
obj.handle_expt_event()
if flags & (select.POLLHUP | select.POLLERR | select.POLLNVAL):
obj.handle_close()
- except socket.error as e:
+ except OSError as e:
if e.args[0] not in _DISCONNECTED:
obj.handle_error()
else:
@@ -240,7 +240,7 @@ class dispatcher:
# passed be connected.
try:
self.addr = sock.getpeername()
- except socket.error as err:
+ except OSError as err:
if err.args[0] in (ENOTCONN, EINVAL):
# To handle the case where we got an unconnected
# socket.
@@ -304,7 +304,7 @@ class dispatcher:
self.socket.getsockopt(socket.SOL_SOCKET,
socket.SO_REUSEADDR) | 1
)
- except socket.error:
+ except OSError:
pass
# ==================================================
@@ -345,7 +345,7 @@ class dispatcher:
self.addr = address
self.handle_connect_event()
else:
- raise socket.error(err, errorcode[err])
+ raise OSError(err, errorcode[err])
def accept(self):
# XXX can return either an address pair or None
@@ -353,7 +353,7 @@ class dispatcher:
conn, addr = self.socket.accept()
except TypeError:
return None
- except socket.error as why:
+ except OSError as why:
if why.args[0] in (EWOULDBLOCK, ECONNABORTED, EAGAIN):
return None
else:
@@ -365,7 +365,7 @@ class dispatcher:
try:
result = self.socket.send(data)
return result
- except socket.error as why:
+ except OSError as why:
if why.args[0] == EWOULDBLOCK:
return 0
elif why.args[0] in _DISCONNECTED:
@@ -384,7 +384,7 @@ class dispatcher:
return b''
else:
return data
- except socket.error as why:
+ except OSError as why:
# winsock sometimes raises ENOTCONN
if why.args[0] in _DISCONNECTED:
self.handle_close()
@@ -399,7 +399,7 @@ class dispatcher:
self.del_channel()
try:
self.socket.close()
- except socket.error as why:
+ except OSError as why:
if why.args[0] not in (ENOTCONN, EBADF):
raise
@@ -443,7 +443,7 @@ class dispatcher:
def handle_connect_event(self):
err = self.socket.getsockopt(socket.SOL_SOCKET, socket.SO_ERROR)
if err != 0:
- raise socket.error(err, _strerror(err))
+ raise OSError(err, _strerror(err))
self.handle_connect()
self.connected = True
self.connecting = False
@@ -532,7 +532,7 @@ class dispatcher_with_send(dispatcher):
def initiate_send(self):
num_sent = 0
- num_sent = dispatcher.send(self, self.out_buffer[:512])
+ num_sent = dispatcher.send(self, self.out_buffer[:65536])
self.out_buffer = self.out_buffer[num_sent:]
def handle_write(self):
diff --git a/Lib/bz2.py b/Lib/bz2.py
index c307507..6e6a2b9 100644
--- a/Lib/bz2.py
+++ b/Lib/bz2.py
@@ -9,7 +9,6 @@ __all__ = ["BZ2File", "BZ2Compressor", "BZ2Decompressor",
__author__ = "Nadeem Vawda <nadeem.vawda@gmail.com>"
-import builtins
import io
import warnings
@@ -28,6 +27,8 @@ _MODE_WRITE = 3
_BUFFER_SIZE = 8192
+_builtin_open = open
+
class BZ2File(io.BufferedIOBase):
@@ -43,12 +44,13 @@ class BZ2File(io.BufferedIOBase):
def __init__(self, filename, mode="r", buffering=None, compresslevel=9):
"""Open a bzip2-compressed file.
- If filename is a str or bytes object, is gives the name of the file to
- be opened. Otherwise, it should be a file object, which will be used to
- read or write the compressed data.
+ If filename is a str or bytes object, it gives the name
+ of the file to be opened. Otherwise, it should be a file object,
+ which will be used to read or write the compressed data.
- mode can be 'r' for reading (default), 'w' for (over)writing, or 'a' for
- appending. These can equivalently be given as 'rb', 'wb', and 'ab'.
+ mode can be 'r' for reading (default), 'w' for (over)writing,
+ or 'a' for appending. These can equivalently be given as 'rb',
+ 'wb', and 'ab'.
buffering is ignored. Its use is deprecated.
@@ -90,10 +92,10 @@ class BZ2File(io.BufferedIOBase):
mode_code = _MODE_WRITE
self._compressor = BZ2Compressor(compresslevel)
else:
- raise ValueError("Invalid mode: {!r}".format(mode))
+ raise ValueError("Invalid mode: %r" % (mode,))
if isinstance(filename, (str, bytes)):
- self._fp = builtins.open(filename, mode)
+ self._fp = _builtin_open(filename, mode)
self._closefp = True
self._mode = mode_code
elif hasattr(filename, "read") or hasattr(filename, "write"):
@@ -189,15 +191,17 @@ class BZ2File(io.BufferedIOBase):
if not rawblock:
if self._decompressor.eof:
+ # End-of-stream marker and end of file. We're good.
self._mode = _MODE_READ_EOF
self._size = self._pos
return False
else:
+ # Problem - we were expecting more compressed data.
raise EOFError("Compressed file ended before the "
"end-of-stream marker was reached")
- # Continue to next stream.
if self._decompressor.eof:
+ # Continue to next stream.
self._decompressor = BZ2Decompressor()
self._buffer = self._decompressor.decompress(rawblock)
@@ -412,7 +416,7 @@ class BZ2File(io.BufferedIOBase):
self._read_all(return_data=False)
offset = self._size + offset
else:
- raise ValueError("Invalid value for whence: {}".format(whence))
+ raise ValueError("Invalid value for whence: %s" % (whence,))
# Make it so that offset is the number of bytes to skip forward.
if offset < self._pos:
@@ -436,20 +440,20 @@ def open(filename, mode="rb", compresslevel=9,
encoding=None, errors=None, newline=None):
"""Open a bzip2-compressed file in binary or text mode.
- The filename argument can be an actual filename (a str or bytes object), or
- an existing file object to read from or write to.
+ The filename argument can be an actual filename (a str or bytes
+ object), or an existing file object to read from or write to.
- The mode argument can be "r", "rb", "w", "wb", "a" or "ab" for binary mode,
- or "rt", "wt" or "at" for text mode. The default mode is "rb", and the
- default compresslevel is 9.
+ The mode argument can be "r", "rb", "w", "wb", "a" or "ab" for
+ binary mode, or "rt", "wt" or "at" for text mode. The default mode
+ is "rb", and the default compresslevel is 9.
- For binary mode, this function is equivalent to the BZ2File constructor:
- BZ2File(filename, mode, compresslevel). In this case, the encoding, errors
- and newline arguments must not be provided.
+ For binary mode, this function is equivalent to the BZ2File
+ constructor: BZ2File(filename, mode, compresslevel). In this case,
+ the encoding, errors and newline arguments must not be provided.
For text mode, a BZ2File object is created, and wrapped in an
- io.TextIOWrapper instance with the specified encoding, error handling
- behavior, and line ending(s).
+ io.TextIOWrapper instance with the specified encoding, error
+ handling behavior, and line ending(s).
"""
if "t" in mode:
diff --git a/Lib/cProfile.py b/Lib/cProfile.py
index c24d45b..1184385 100755
--- a/Lib/cProfile.py
+++ b/Lib/cProfile.py
@@ -7,54 +7,20 @@
__all__ = ["run", "runctx", "Profile"]
import _lsprof
+import profile as _pyprofile
# ____________________________________________________________
# Simple interface
def run(statement, filename=None, sort=-1):
- """Run statement under profiler optionally saving results in filename
-
- This function takes a single argument that can be passed to the
- "exec" statement, and an optional file name. In all cases this
- routine attempts to "exec" its first argument and gather profiling
- statistics from the execution. If no file name is present, then this
- function automatically prints a simple profiling report, sorted by the
- standard name string (file/line/function-name) that is presented in
- each line.
- """
- prof = Profile()
- result = None
- try:
- try:
- prof = prof.run(statement)
- except SystemExit:
- pass
- finally:
- if filename is not None:
- prof.dump_stats(filename)
- else:
- result = prof.print_stats(sort)
- return result
+ return _pyprofile._Utils(Profile).run(statement, filename, sort)
def runctx(statement, globals, locals, filename=None, sort=-1):
- """Run statement under profiler, supplying your own globals and locals,
- optionally saving results in filename.
+ return _pyprofile._Utils(Profile).runctx(statement, globals, locals,
+ filename, sort)
- statement and filename have the same semantics as profile.run
- """
- prof = Profile()
- result = None
- try:
- try:
- prof = prof.runctx(statement, globals, locals)
- except SystemExit:
- pass
- finally:
- if filename is not None:
- prof.dump_stats(filename)
- else:
- result = prof.print_stats(sort)
- return result
+run.__doc__ = _pyprofile.run.__doc__
+runctx.__doc__ = _pyprofile.runctx.__doc__
# ____________________________________________________________
@@ -77,10 +43,9 @@ class Profile(_lsprof.Profiler):
def dump_stats(self, file):
import marshal
- f = open(file, 'wb')
- self.create_stats()
- marshal.dump(self.stats, f)
- f.close()
+ with open(file, 'wb') as f:
+ self.create_stats()
+ marshal.dump(self.stats, f)
def create_stats(self):
self.disable()
diff --git a/Lib/cgi.py b/Lib/cgi.py
index 96b1f57..385a57c 100755
--- a/Lib/cgi.py
+++ b/Lib/cgi.py
@@ -80,7 +80,7 @@ def initlog(*allargs):
if logfile and not logfp:
try:
logfp = open(logfile, "a")
- except IOError:
+ except OSError:
pass
if not logfp:
log = nolog
@@ -949,8 +949,8 @@ def print_directory():
print("<H3>Current Working Directory:</H3>")
try:
pwd = os.getcwd()
- except os.error as msg:
- print("os.error:", html.escape(str(msg)))
+ except OSError as msg:
+ print("OSError:", html.escape(str(msg)))
else:
print(html.escape(pwd))
print()
diff --git a/Lib/chunk.py b/Lib/chunk.py
index 5863ed0..dc90a75 100644
--- a/Lib/chunk.py
+++ b/Lib/chunk.py
@@ -70,7 +70,7 @@ class Chunk:
self.size_read = 0
try:
self.offset = self.file.tell()
- except (AttributeError, IOError):
+ except (AttributeError, OSError):
self.seekable = False
else:
self.seekable = True
@@ -102,7 +102,7 @@ class Chunk:
if self.closed:
raise ValueError("I/O operation on closed file")
if not self.seekable:
- raise IOError("cannot seek")
+ raise OSError("cannot seek")
if whence == 1:
pos = pos + self.size_read
elif whence == 2:
@@ -158,7 +158,7 @@ class Chunk:
self.file.seek(n, 1)
self.size_read = self.size_read + n
return
- except IOError:
+ except OSError:
pass
while self.size_read < self.chunksize:
n = min(8192, self.chunksize - self.size_read)
diff --git a/Lib/collections/__init__.py b/Lib/collections/__init__.py
index 707c53b..ca92c3c 100644
--- a/Lib/collections/__init__.py
+++ b/Lib/collections/__init__.py
@@ -816,9 +816,14 @@ class ChainMap(MutableMapping):
__copy__ = copy
- def new_child(self): # like Django's Context.push()
- 'New ChainMap with a new dict followed by all previous maps.'
- return self.__class__({}, *self.maps)
+ def new_child(self, m=None): # like Django's Context.push()
+ '''
+ New ChainMap with a new map followed by all previous maps. If no
+ map is provided, an empty dict is used.
+ '''
+ if m is None:
+ m = {}
+ return self.__class__(m, *self.maps)
@property
def parents(self): # like Django's Context.pop()
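
new_child() above now accepts an optional mapping to push onto the chain instead of always creating an empty dict. A minimal sketch:

    from collections import ChainMap

    defaults = ChainMap({"colour": "red", "user": "guest"})
    overrides = defaults.new_child({"user": "admin"})   # push a given mapping
    scratch = defaults.new_child()                      # old behaviour: new {}

    print(overrides["user"])     # 'admin', found in the new front map
    print(overrides["colour"])   # 'red', falls through to the parent map
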
diff --git a/Lib/collections/abc.py b/Lib/collections/abc.py
index c23b7dd..18c07bf 100644
--- a/Lib/collections/abc.py
+++ b/Lib/collections/abc.py
@@ -430,8 +430,7 @@ class KeysView(MappingView, Set):
return key in self._mapping
def __iter__(self):
- for key in self._mapping:
- yield key
+ yield from self._mapping
KeysView.register(dict_keys)
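
This hunk, like the ones in argparse, difflib and concurrent.futures above, replaces an explicit for/yield loop with the "yield from" delegation syntax from PEP 380 (Python 3.3+); for plain iteration the two spellings are equivalent. A minimal comparison:

    def keys_old(mapping):
        for key in mapping:        # pre-PEP 380 spelling
            yield key

    def keys_new(mapping):
        yield from mapping         # equivalent, and also forwards send()/throw()

    data = {"a": 1, "b": 2}
    assert list(keys_old(data)) == list(keys_new(data))
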
diff --git a/Lib/compileall.py b/Lib/compileall.py
index d3cff6a..a8e9a31 100644
--- a/Lib/compileall.py
+++ b/Lib/compileall.py
@@ -38,7 +38,7 @@ def compile_dir(dir, maxlevels=10, ddir=None, force=False, rx=None,
print('Listing {!r}...'.format(dir))
try:
names = os.listdir(dir)
- except os.error:
+ except OSError:
print("Can't list {!r}".format(dir))
names = []
names.sort()
@@ -106,7 +106,7 @@ def compile_file(fullname, ddir=None, force=False, rx=None, quiet=False,
actual = chandle.read(8)
if expect == actual:
return success
- except IOError:
+ except OSError:
pass
if not quiet:
print('Compiling {!r}...'.format(fullname))
@@ -124,7 +124,7 @@ def compile_file(fullname, ddir=None, force=False, rx=None, quiet=False,
msg = msg.decode(sys.stdout.encoding)
print(msg)
success = 0
- except (SyntaxError, UnicodeError, IOError) as e:
+ except (SyntaxError, UnicodeError, OSError) as e:
if quiet:
print('*** Error compiling {!r}...'.format(fullname))
else:
@@ -209,7 +209,7 @@ def main():
with (sys.stdin if args.flist=='-' else open(args.flist)) as f:
for line in f:
compile_dests.append(line.strip())
- except EnvironmentError:
+ except OSError:
print("Error reading file list {}".format(args.flist))
return False
diff --git a/Lib/concurrent/futures/_base.py b/Lib/concurrent/futures/_base.py
index ca3aebd..3d03280 100644
--- a/Lib/concurrent/futures/_base.py
+++ b/Lib/concurrent/futures/_base.py
@@ -198,8 +198,7 @@ def as_completed(fs, timeout=None):
waiter = _create_and_install_waiters(fs, _AS_COMPLETED)
try:
- for future in finished:
- yield future
+ yield from finished
while pending:
if timeout is None:
diff --git a/Lib/concurrent/futures/process.py b/Lib/concurrent/futures/process.py
index 04238a7..3c20b93 100644
--- a/Lib/concurrent/futures/process.py
+++ b/Lib/concurrent/futures/process.py
@@ -40,7 +40,7 @@ Local worker thread:
Process #1..n:
- reads _CallItems from "Call Q", executes the calls, and puts the resulting
- _ResultItems in "Request Q"
+ _ResultItems in "Result Q"
"""
__author__ = 'Brian Quinlan (brian@sweetapp.com)'
@@ -240,6 +240,8 @@ def _queue_management_worker(executor_reference,
"terminated abruptly while the future was "
"running or pending."
))
+ # Delete references to object. See issue16284
+ del work_item
pending_work_items.clear()
# Terminate remaining workers forcibly: the queues or their
# locks may be in a dirty state and block forever.
@@ -264,6 +266,8 @@ def _queue_management_worker(executor_reference,
work_item.future.set_exception(result_item.exception)
else:
work_item.future.set_result(result_item.result)
+ # Delete references to object. See issue16284
+ del work_item
# Check whether we should start shutting down.
executor = executor_reference()
# No more work items can be added if:
diff --git a/Lib/concurrent/futures/thread.py b/Lib/concurrent/futures/thread.py
index 95bb682..f9beb0f 100644
--- a/Lib/concurrent/futures/thread.py
+++ b/Lib/concurrent/futures/thread.py
@@ -63,6 +63,8 @@ def _worker(executor_reference, work_queue):
work_item = work_queue.get(block=True)
if work_item is not None:
work_item.run()
+ # Delete references to object. See issue16284
+ del work_item
continue
executor = executor_reference()
# Exit if:
diff --git a/Lib/configparser.py b/Lib/configparser.py
index c7bee6b..f9584cd 100644
--- a/Lib/configparser.py
+++ b/Lib/configparser.py
@@ -687,7 +687,7 @@ class RawConfigParser(MutableMapping):
try:
with open(filename, encoding=encoding) as fp:
self._read(fp, filename)
- except IOError:
+ except OSError:
continue
read_ok.append(filename)
return read_ok
diff --git a/Lib/contextlib.py b/Lib/contextlib.py
index 0b6bf71..03c56da 100644
--- a/Lib/contextlib.py
+++ b/Lib/contextlib.py
@@ -4,7 +4,7 @@ import sys
from collections import deque
from functools import wraps
-__all__ = ["contextmanager", "closing", "ContextDecorator", "ExitStack"]
+__all__ = ["contextmanager", "closing", "ContextDecorator", "ExitStack", "ignored"]
class ContextDecorator(object):
@@ -140,6 +140,18 @@ class closing(object):
def __exit__(self, *exc_info):
self.thing.close()
+@contextmanager
+def ignored(*exceptions):
+ """Context manager to ignore specifed exceptions
+
+ with ignored(OSError):
+ os.remove(somefile)
+
+ """
+ try:
+ yield
+ except exceptions:
+ pass
# Inspired by discussions on http://bugs.python.org/issue13585
class ExitStack(object):
diff --git a/Lib/ctypes/__init__.py b/Lib/ctypes/__init__.py
index c92e130..e2f75c5 100644
--- a/Lib/ctypes/__init__.py
+++ b/Lib/ctypes/__init__.py
@@ -395,7 +395,7 @@ if _os.name in ("nt", "ce"):
_type_ = "l"
# _check_retval_ is called with the function's result when it
# is used as restype. It checks for the FAILED bit, and
- # raises a WindowsError if it is set.
+ # raises an OSError if it is set.
#
# The _check_retval_ method is implemented in C, so that the
# method definition itself is not included in the traceback
@@ -407,7 +407,7 @@ if _os.name in ("nt", "ce"):
class OleDLL(CDLL):
"""This class represents a dll exporting functions using the
Windows stdcall calling convention, and returning HRESULT.
- HRESULT error values are automatically raised as WindowsError
+ HRESULT error values are automatically raised as OSError
exceptions.
"""
_func_flags_ = _FUNCFLAG_STDCALL
@@ -456,7 +456,7 @@ if _os.name in ("nt", "ce"):
code = GetLastError()
if descr is None:
descr = FormatError(code).strip()
- return WindowsError(None, descr, None, code)
+ return OSError(None, descr, None, code)
if sizeof(c_uint) == sizeof(c_void_p):
c_size_t = c_uint
diff --git a/Lib/ctypes/test/test_checkretval.py b/Lib/ctypes/test/test_checkretval.py
index 01ccc57..19bb813 100644
--- a/Lib/ctypes/test/test_checkretval.py
+++ b/Lib/ctypes/test/test_checkretval.py
@@ -31,7 +31,7 @@ class Test(unittest.TestCase):
pass
else:
def test_oledll(self):
- self.assertRaises(WindowsError,
+ self.assertRaises(OSError,
oledll.oleaut32.CreateTypeLib2,
0, None, None)
diff --git a/Lib/ctypes/test/test_win32.py b/Lib/ctypes/test/test_win32.py
index da21336..91ad314 100644
--- a/Lib/ctypes/test/test_win32.py
+++ b/Lib/ctypes/test/test_win32.py
@@ -41,7 +41,7 @@ if sys.platform == "win32":
# Call functions with invalid arguments, and make sure
# that access violations are trapped and raise an
# exception.
- self.assertRaises(WindowsError, windll.kernel32.GetModuleHandleA, 32)
+ self.assertRaises(OSError, windll.kernel32.GetModuleHandleA, 32)
def test_noargs(self):
# This is a special case on win32 x64
diff --git a/Lib/ctypes/util.py b/Lib/ctypes/util.py
index 1515604..c427d31 100644
--- a/Lib/ctypes/util.py
+++ b/Lib/ctypes/util.py
@@ -102,9 +102,8 @@ elif os.name == "posix":
finally:
try:
os.unlink(ccout)
- except OSError as e:
- if e.errno != errno.ENOENT:
- raise
+ except FileNotFoundError:
+ pass
if rv == 10:
raise OSError('gcc or cc command not found')
res = re.search(expr, trace)
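
The ctypes/util.py hunk above relies on the FileNotFoundError subclass introduced by PEP 3151 instead of inspecting errno on a generic OSError; on Python 3.3+ the two idioms below behave the same:

    import errno, os

    # Older idiom: catch OSError and re-raise anything that is not ENOENT.
    try:
        os.unlink("does-not-exist.tmp")
    except OSError as e:
        if e.errno != errno.ENOENT:
            raise

    # PEP 3151 idiom: catch the specific subclass directly.
    try:
        os.unlink("does-not-exist.tmp")
    except FileNotFoundError:
        pass
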
diff --git a/Lib/dbm/__init__.py b/Lib/dbm/__init__.py
index 813a29d..0609e49 100644
--- a/Lib/dbm/__init__.py
+++ b/Lib/dbm/__init__.py
@@ -42,7 +42,7 @@ _names = ['dbm.gnu', 'dbm.ndbm', 'dbm.dumb']
_defaultmod = None
_modules = {}
-error = (error, IOError)
+error = (error, OSError)
def open(file, flag='r', mode=0o666):
@@ -106,12 +106,10 @@ def whichdb(filename):
try:
f = io.open(filename + ".pag", "rb")
f.close()
- # dbm linked with gdbm on OS/2 doesn't have .dir file
- if not (ndbm.library == "GNU gdbm" and sys.platform == "os2emx"):
- f = io.open(filename + ".dir", "rb")
- f.close()
+ f = io.open(filename + ".dir", "rb")
+ f.close()
return "dbm.ndbm"
- except IOError:
+ except OSError:
# some dbm emulations based on Berkeley DB generate a .db file
# some do not, but they should be caught by the bsd checks
try:
@@ -124,7 +122,7 @@ def whichdb(filename):
d = ndbm.open(filename)
d.close()
return "dbm.ndbm"
- except IOError:
+ except OSError:
pass
# Check for dumbdbm next -- this has a .dir and a .dat file
@@ -141,13 +139,13 @@ def whichdb(filename):
return "dbm.dumb"
finally:
f.close()
- except (OSError, IOError):
+ except OSError:
pass
# See if the file exists, return None if not
try:
f = io.open(filename, "rb")
- except IOError:
+ except OSError:
return None
# Read the start of the file -- the magic number
diff --git a/Lib/dbm/dumb.py b/Lib/dbm/dumb.py
index cfb9123..9ac7852 100644
--- a/Lib/dbm/dumb.py
+++ b/Lib/dbm/dumb.py
@@ -29,7 +29,7 @@ __all__ = ["error", "open"]
_BLOCKSIZE = 512
-error = IOError
+error = OSError
class _Database(collections.MutableMapping):
@@ -67,7 +67,7 @@ class _Database(collections.MutableMapping):
# Mod by Jack: create data file if needed
try:
f = _io.open(self._datfile, 'r', encoding="Latin-1")
- except IOError:
+ except OSError:
f = _io.open(self._datfile, 'w', encoding="Latin-1")
self._chmod(self._datfile)
f.close()
@@ -78,7 +78,7 @@ class _Database(collections.MutableMapping):
self._index = {}
try:
f = _io.open(self._dirfile, 'r', encoding="Latin-1")
- except IOError:
+ except OSError:
pass
else:
for line in f:
@@ -100,12 +100,12 @@ class _Database(collections.MutableMapping):
try:
self._os.unlink(self._bakfile)
- except self._os.error:
+ except OSError:
pass
try:
self._os.rename(self._dirfile, self._bakfile)
- except self._os.error:
+ except OSError:
pass
f = self._io.open(self._dirfile, 'w', encoding="Latin-1")
diff --git a/Lib/decimal.py b/Lib/decimal.py
index 746b34a..25f8fbc 100644
--- a/Lib/decimal.py
+++ b/Lib/decimal.py
@@ -703,8 +703,7 @@ class Decimal(object):
raise TypeError("Cannot convert %r to Decimal" % value)
- # @classmethod, but @decorator is not valid Python 2.3 syntax, so
- # don't use it (see notes on Py2.3 compatibility at top of file)
+ @classmethod
def from_float(cls, f):
"""Converts a float to a decimal number, exactly.
@@ -743,7 +742,6 @@ class Decimal(object):
return result
else:
return cls(result)
- from_float = classmethod(from_float)
def _isnan(self):
"""Returns whether the number is not actually one.
diff --git a/Lib/difflib.py b/Lib/difflib.py
index ae377d7..8097744 100644
--- a/Lib/difflib.py
+++ b/Lib/difflib.py
@@ -336,20 +336,6 @@ class SequenceMatcher:
for elt in popular: # ditto; as fast for 1% deletion
del b2j[elt]
- def isbjunk(self, item):
- "Deprecated; use 'item in SequenceMatcher().bjunk'."
- warnings.warn("'SequenceMatcher().isbjunk(item)' is deprecated;\n"
- "use 'item in SMinstance.bjunk' instead.",
- DeprecationWarning, 2)
- return item in self.bjunk
-
- def isbpopular(self, item):
- "Deprecated; use 'item in SequenceMatcher().bpopular'."
- warnings.warn("'SequenceMatcher().isbpopular(item)' is deprecated;\n"
- "use 'item in SMinstance.bpopular' instead.",
- DeprecationWarning, 2)
- return item in self.bpopular
-
def find_longest_match(self, alo, ahi, blo, bhi):
"""Find longest matching block in a[alo:ahi] and b[blo:bhi].
@@ -922,8 +908,7 @@ class Differ:
else:
raise ValueError('unknown tag %r' % (tag,))
- for line in g:
- yield line
+ yield from g
def _dump(self, tag, x, lo, hi):
"""Generate comparison results for a same-tagged range."""
@@ -942,8 +927,7 @@ class Differ:
second = self._dump('+', b, blo, bhi)
for g in first, second:
- for line in g:
- yield line
+ yield from g
def _fancy_replace(self, a, alo, ahi, b, blo, bhi):
r"""
@@ -997,8 +981,7 @@ class Differ:
# no non-identical "pretty close" pair
if eqi is None:
# no identical pair either -- treat it as a straight replace
- for line in self._plain_replace(a, alo, ahi, b, blo, bhi):
- yield line
+ yield from self._plain_replace(a, alo, ahi, b, blo, bhi)
return
# no close pair, but an identical pair -- synch up on that
best_i, best_j, best_ratio = eqi, eqj, 1.0
@@ -1010,8 +993,7 @@ class Differ:
# identical
# pump out diffs from before the synch point
- for line in self._fancy_helper(a, alo, best_i, b, blo, best_j):
- yield line
+ yield from self._fancy_helper(a, alo, best_i, b, blo, best_j)
# do intraline marking on the synch pair
aelt, belt = a[best_i], b[best_j]
@@ -1033,15 +1015,13 @@ class Differ:
btags += ' ' * lb
else:
raise ValueError('unknown tag %r' % (tag,))
- for line in self._qformat(aelt, belt, atags, btags):
- yield line
+ yield from self._qformat(aelt, belt, atags, btags)
else:
# the synch pair is identical
yield ' ' + aelt
# pump out diffs from after the synch point
- for line in self._fancy_helper(a, best_i+1, ahi, b, best_j+1, bhi):
- yield line
+ yield from self._fancy_helper(a, best_i+1, ahi, b, best_j+1, bhi)
def _fancy_helper(self, a, alo, ahi, b, blo, bhi):
g = []
@@ -1053,8 +1033,7 @@ class Differ:
elif blo < bhi:
g = self._dump('+', b, blo, bhi)
- for line in g:
- yield line
+ yield from g
def _qformat(self, aline, bline, atags, btags):
r"""
diff --git a/Lib/distutils/__init__.py b/Lib/distutils/__init__.py
index 345ac4f..70a72b0 100644
--- a/Lib/distutils/__init__.py
+++ b/Lib/distutils/__init__.py
@@ -13,5 +13,5 @@ used from a setup script as
# Updated automatically by the Python release process.
#
#--start constants--
-__version__ = "3.3.0"
+__version__ = "3.4.0a0"
#--end constants--
diff --git a/Lib/distutils/ccompiler.py b/Lib/distutils/ccompiler.py
index c795c95..911e84d 100644
--- a/Lib/distutils/ccompiler.py
+++ b/Lib/distutils/ccompiler.py
@@ -351,7 +351,7 @@ class CCompiler:
return macros, objects, extra, pp_opts, build
def _get_cc_args(self, pp_opts, debug, before):
- # works for unixccompiler, emxccompiler, cygwinccompiler
+ # works for unixccompiler, cygwinccompiler
cc_args = pp_opts + ['-c']
if debug:
cc_args[:0] = ['-g']
@@ -926,7 +926,6 @@ _default_compilers = (
# on a cygwin built python we can use gcc like an ordinary UNIXish
# compiler
('cygwin.*', 'unix'),
- ('os2emx', 'emx'),
# OS name mappings
('posix', 'unix'),
@@ -968,8 +967,6 @@ compiler_class = { 'unix': ('unixccompiler', 'UnixCCompiler',
"Mingw32 port of GNU C Compiler for Win32"),
'bcpp': ('bcppcompiler', 'BCPPCompiler',
"Borland C++ Compiler"),
- 'emx': ('emxccompiler', 'EMXCCompiler',
- "EMX port of GNU C Compiler for OS/2"),
}
def show_compilers():
diff --git a/Lib/distutils/command/bdist.py b/Lib/distutils/command/bdist.py
index c5188eb..38b169a 100644
--- a/Lib/distutils/command/bdist.py
+++ b/Lib/distutils/command/bdist.py
@@ -52,8 +52,7 @@ class bdist(Command):
# This won't do in reality: will need to distinguish RPM-ish Linux,
# Debian-ish Linux, Solaris, FreeBSD, ..., Windows, Mac OS.
default_format = {'posix': 'gztar',
- 'nt': 'zip',
- 'os2': 'zip'}
+ 'nt': 'zip'}
# Establish the preferred order (for the --help-formats option).
format_commands = ['rpm', 'gztar', 'bztar', 'ztar', 'tar',
diff --git a/Lib/distutils/command/bdist_dumb.py b/Lib/distutils/command/bdist_dumb.py
index 1ab09d1..eefdfea 100644
--- a/Lib/distutils/command/bdist_dumb.py
+++ b/Lib/distutils/command/bdist_dumb.py
@@ -38,8 +38,7 @@ class bdist_dumb(Command):
boolean_options = ['keep-temp', 'skip-build', 'relative']
default_format = { 'posix': 'gztar',
- 'nt': 'zip',
- 'os2': 'zip' }
+ 'nt': 'zip' }
def initialize_options(self):
self.bdist_dir = None
@@ -85,11 +84,6 @@ class bdist_dumb(Command):
archive_basename = "%s.%s" % (self.distribution.get_fullname(),
self.plat_name)
- # OS/2 objects to any ":" characters in a filename (such as when
- # a timestamp is used in a version) so change them to hyphens.
- if os.name == "os2":
- archive_basename = archive_basename.replace(":", "-")
-
pseudoinstall_root = os.path.join(self.dist_dir, archive_basename)
if not self.relative:
archive_root = self.bdist_dir
diff --git a/Lib/distutils/command/build_ext.py b/Lib/distutils/command/build_ext.py
index 1ad0d5f..a6aad53 100644
--- a/Lib/distutils/command/build_ext.py
+++ b/Lib/distutils/command/build_ext.py
@@ -230,11 +230,6 @@ class build_ext(Command):
self.library_dirs.append(os.path.join(sys.exec_prefix,
'PC', 'VC6'))
- # OS/2 (EMX) doesn't support Debug vs Release builds, but has the
- # import libraries in its "Config" subdirectory
- if os.name == 'os2':
- self.library_dirs.append(os.path.join(sys.exec_prefix, 'Config'))
-
# for extensions under Cygwin and AtheOS Python's library directory must be
# appended to library_dirs
if sys.platform[:6] == 'cygwin' or sys.platform[:6] == 'atheos':
@@ -620,9 +615,6 @@ class build_ext(Command):
return fn
else:
return "swig.exe"
- elif os.name == "os2":
- # assume swig available in the PATH.
- return "swig.exe"
else:
raise DistutilsPlatformError(
"I don't know how to find (much less run) SWIG "
@@ -673,9 +665,6 @@ class build_ext(Command):
"""
from distutils.sysconfig import get_config_var
ext_path = ext_name.split('.')
- # OS/2 has an 8 character module (extension) limit :-(
- if os.name == "os2":
- ext_path[len(ext_path) - 1] = ext_path[len(ext_path) - 1][:8]
# extensions in debug_mode are named 'module_d.pyd' under windows
ext_suffix = get_config_var('EXT_SUFFIX')
if os.name == 'nt' and self.debug:
@@ -696,7 +685,7 @@ class build_ext(Command):
def get_libraries(self, ext):
"""Return the list of libraries to link against when building a
shared extension. On most platforms, this is just 'ext.libraries';
- on Windows and OS/2, we add the Python library (eg. python20.dll).
+ on Windows, we add the Python library (eg. python20.dll).
"""
# The python library is always needed on Windows. For MSVC, this
# is redundant, since the library is mentioned in a pragma in
@@ -716,19 +705,6 @@ class build_ext(Command):
return ext.libraries + [pythonlib]
else:
return ext.libraries
- elif sys.platform == "os2emx":
- # EMX/GCC requires the python library explicitly, and I
- # believe VACPP does as well (though not confirmed) - AIM Apr01
- template = "python%d%d"
- # debug versions of the main DLL aren't supported, at least
- # not at this time - AIM Apr01
- #if self.debug:
- # template = template + '_d'
- pythonlib = (template %
- (sys.hexversion >> 24, (sys.hexversion >> 16) & 0xff))
- # don't extend ext.libraries, it may be shared with other
- # extensions, it is a reference to the original list
- return ext.libraries + [pythonlib]
elif sys.platform[:6] == "cygwin":
template = "python%d.%d"
pythonlib = (template %
diff --git a/Lib/distutils/command/build_scripts.py b/Lib/distutils/command/build_scripts.py
index 4b5b22e..90a8380 100644
--- a/Lib/distutils/command/build_scripts.py
+++ b/Lib/distutils/command/build_scripts.py
@@ -74,7 +74,7 @@ class build_scripts(Command):
# script.
try:
f = open(script, "rb")
- except IOError:
+ except OSError:
if not self.dry_run:
raise
f = None
diff --git a/Lib/distutils/command/install.py b/Lib/distutils/command/install.py
index 9b1c36a..cfc5d5e 100644
--- a/Lib/distutils/command/install.py
+++ b/Lib/distutils/command/install.py
@@ -58,13 +58,6 @@ INSTALL_SCHEMES = {
'data' : '$base',
},
'nt': WINDOWS_SCHEME,
- 'os2': {
- 'purelib': '$base/Lib/site-packages',
- 'platlib': '$base/Lib/site-packages',
- 'headers': '$base/Include/$dist_name',
- 'scripts': '$base/Scripts',
- 'data' : '$base',
- },
}
# user site schemes
@@ -86,14 +79,6 @@ if HAS_USER_SITE:
'data' : '$userbase',
}
- INSTALL_SCHEMES['os2_home'] = {
- 'purelib': '$usersite',
- 'platlib': '$usersite',
- 'headers': '$userbase/include/python$py_version_short/$dist_name',
- 'scripts': '$userbase/bin',
- 'data' : '$userbase',
- }
-
# The keys to an installation scheme; if any new types of files are to be
# installed, be sure to add an entry to every installation scheme above,
# and to SCHEME_KEYS here.
diff --git a/Lib/distutils/command/upload.py b/Lib/distutils/command/upload.py
index 8b36851..88990b2 100644
--- a/Lib/distutils/command/upload.py
+++ b/Lib/distutils/command/upload.py
@@ -186,7 +186,7 @@ class upload(PyPIRCCommand):
http.putheader('Authorization', auth)
http.endheaders()
http.send(body)
- except socket.error as e:
+ except OSError as e:
self.announce(str(e), log.ERROR)
return
diff --git a/Lib/distutils/config.py b/Lib/distutils/config.py
index 1fd5334..7439a83 100644
--- a/Lib/distutils/config.py
+++ b/Lib/distutils/config.py
@@ -21,7 +21,7 @@ password:%s
class PyPIRCCommand(Command):
"""Base command that knows how to handle the .pypirc file
"""
- DEFAULT_REPOSITORY = 'http://pypi.python.org/pypi'
+ DEFAULT_REPOSITORY = 'https://pypi.python.org/pypi'
DEFAULT_REALM = 'pypi'
repository = None
realm = None
@@ -83,6 +83,15 @@ class PyPIRCCommand(Command):
current[key] = config.get(server, key)
else:
current[key] = default
+
+ # work around people having "repository" for the "pypi"
+ # section of their config set to the HTTP (rather than
+ # HTTPS) URL
+ if (server == 'pypi' and
+ repository in (self.DEFAULT_REPOSITORY, 'pypi')):
+ current['repository'] = self.DEFAULT_REPOSITORY
+ return current
+
if (current['server'] == repository or
current['repository'] == repository):
return current
diff --git a/Lib/distutils/core.py b/Lib/distutils/core.py
index 260332a..91e5132 100644
--- a/Lib/distutils/core.py
+++ b/Lib/distutils/core.py
@@ -148,7 +148,7 @@ def setup (**attrs):
dist.run_commands()
except KeyboardInterrupt:
raise SystemExit("interrupted")
- except (IOError, os.error) as exc:
+ except OSError as exc:
error = grok_environment_error(exc)
if DEBUG:
diff --git a/Lib/distutils/cygwinccompiler.py b/Lib/distutils/cygwinccompiler.py
index 0bdd539..0c23ab1 100644
--- a/Lib/distutils/cygwinccompiler.py
+++ b/Lib/distutils/cygwinccompiler.py
@@ -359,7 +359,7 @@ def check_config_h():
return CONFIG_H_NOTOK, "'%s' does not mention '__GNUC__'" % fn
finally:
config_h.close()
- except IOError as exc:
+ except OSError as exc:
return (CONFIG_H_UNCERTAIN,
"couldn't read '%s': %s" % (fn, exc.strerror))
diff --git a/Lib/distutils/dir_util.py b/Lib/distutils/dir_util.py
index 2826ff8..2b35aa3 100644
--- a/Lib/distutils/dir_util.py
+++ b/Lib/distutils/dir_util.py
@@ -124,7 +124,7 @@ def copy_tree(src, dst, preserve_mode=1, preserve_times=1,
"cannot copy tree '%s': not a directory" % src)
try:
names = os.listdir(src)
- except os.error as e:
+ except OSError as e:
(errno, errstr) = e
if dry_run:
names = []
@@ -198,7 +198,7 @@ def remove_tree(directory, verbose=1, dry_run=0):
abspath = os.path.abspath(cmd[1])
if abspath in _path_created:
del _path_created[abspath]
- except (IOError, OSError) as exc:
+ except OSError as exc:
log.warn(grok_environment_error(
exc, "error removing %s: " % directory))
diff --git a/Lib/distutils/emxccompiler.py b/Lib/distutils/emxccompiler.py
deleted file mode 100644
index 3675f8d..0000000
--- a/Lib/distutils/emxccompiler.py
+++ /dev/null
@@ -1,315 +0,0 @@
-"""distutils.emxccompiler
-
-Provides the EMXCCompiler class, a subclass of UnixCCompiler that
-handles the EMX port of the GNU C compiler to OS/2.
-"""
-
-# issues:
-#
-# * OS/2 insists that DLLs can have names no longer than 8 characters
-# We put export_symbols in a def-file, as though the DLL can have
-# an arbitrary length name, but truncate the output filename.
-#
-# * only use OMF objects and use LINK386 as the linker (-Zomf)
-#
-# * always build for multithreading (-Zmt) as the accompanying OS/2 port
-# of Python is only distributed with threads enabled.
-#
-# tested configurations:
-#
-# * EMX gcc 2.81/EMX 0.9d fix03
-
-import os,sys,copy
-from distutils.ccompiler import gen_preprocess_options, gen_lib_options
-from distutils.unixccompiler import UnixCCompiler
-from distutils.file_util import write_file
-from distutils.errors import DistutilsExecError, CompileError, UnknownFileError
-from distutils import log
-
-class EMXCCompiler (UnixCCompiler):
-
- compiler_type = 'emx'
- obj_extension = ".obj"
- static_lib_extension = ".lib"
- shared_lib_extension = ".dll"
- static_lib_format = "%s%s"
- shared_lib_format = "%s%s"
- res_extension = ".res" # compiled resource file
- exe_extension = ".exe"
-
- def __init__ (self,
- verbose=0,
- dry_run=0,
- force=0):
-
- UnixCCompiler.__init__ (self, verbose, dry_run, force)
-
- (status, details) = check_config_h()
- self.debug_print("Python's GCC status: %s (details: %s)" %
- (status, details))
- if status is not CONFIG_H_OK:
- self.warn(
- "Python's pyconfig.h doesn't seem to support your compiler. " +
- ("Reason: %s." % details) +
- "Compiling may fail because of undefined preprocessor macros.")
-
- (self.gcc_version, self.ld_version) = \
- get_versions()
- self.debug_print(self.compiler_type + ": gcc %s, ld %s\n" %
- (self.gcc_version,
- self.ld_version) )
-
- # Hard-code GCC because that's what this is all about.
- # XXX optimization, warnings etc. should be customizable.
- self.set_executables(compiler='gcc -Zomf -Zmt -O3 -fomit-frame-pointer -mprobe -Wall',
- compiler_so='gcc -Zomf -Zmt -O3 -fomit-frame-pointer -mprobe -Wall',
- linker_exe='gcc -Zomf -Zmt -Zcrtdll',
- linker_so='gcc -Zomf -Zmt -Zcrtdll -Zdll')
-
- # want the gcc library statically linked (so that we don't have
- # to distribute a version dependent on the compiler we have)
- self.dll_libraries=["gcc"]
-
- # __init__ ()
-
- def _compile(self, obj, src, ext, cc_args, extra_postargs, pp_opts):
- if ext == '.rc':
- # gcc requires '.rc' compiled to binary ('.res') files !!!
- try:
- self.spawn(["rc", "-r", src])
- except DistutilsExecError as msg:
- raise CompileError(msg)
- else: # for other files use the C-compiler
- try:
- self.spawn(self.compiler_so + cc_args + [src, '-o', obj] +
- extra_postargs)
- except DistutilsExecError as msg:
- raise CompileError(msg)
-
- def link (self,
- target_desc,
- objects,
- output_filename,
- output_dir=None,
- libraries=None,
- library_dirs=None,
- runtime_library_dirs=None,
- export_symbols=None,
- debug=0,
- extra_preargs=None,
- extra_postargs=None,
- build_temp=None,
- target_lang=None):
-
- # use separate copies, so we can modify the lists
- extra_preargs = copy.copy(extra_preargs or [])
- libraries = copy.copy(libraries or [])
- objects = copy.copy(objects or [])
-
- # Additional libraries
- libraries.extend(self.dll_libraries)
-
- # handle export symbols by creating a def-file
- # with executables this only works with gcc/ld as linker
- if ((export_symbols is not None) and
- (target_desc != self.EXECUTABLE)):
- # (The linker doesn't do anything if output is up-to-date.
- # So it would probably better to check if we really need this,
- # but for this we had to insert some unchanged parts of
- # UnixCCompiler, and this is not what we want.)
-
- # we want to put some files in the same directory as the
- # object files are, build_temp doesn't help much
- # where are the object files
- temp_dir = os.path.dirname(objects[0])
- # name of dll to give the helper files the same base name
- (dll_name, dll_extension) = os.path.splitext(
- os.path.basename(output_filename))
-
- # generate the filenames for these files
- def_file = os.path.join(temp_dir, dll_name + ".def")
-
- # Generate .def file
- contents = [
- "LIBRARY %s INITINSTANCE TERMINSTANCE" % \
- os.path.splitext(os.path.basename(output_filename))[0],
- "DATA MULTIPLE NONSHARED",
- "EXPORTS"]
- for sym in export_symbols:
- contents.append(' "%s"' % sym)
- self.execute(write_file, (def_file, contents),
- "writing %s" % def_file)
-
- # next add options for def-file and to creating import libraries
- # for gcc/ld the def-file is specified as any other object files
- objects.append(def_file)
-
- #end: if ((export_symbols is not None) and
- # (target_desc != self.EXECUTABLE or self.linker_dll == "gcc")):
-
- # who wants symbols and a many times larger output file
- # should explicitly switch the debug mode on
- # otherwise we let dllwrap/ld strip the output file
- # (On my machine: 10KB < stripped_file < ??100KB
- # unstripped_file = stripped_file + XXX KB
- # ( XXX=254 for a typical python extension))
- if not debug:
- extra_preargs.append("-s")
-
- UnixCCompiler.link(self,
- target_desc,
- objects,
- output_filename,
- output_dir,
- libraries,
- library_dirs,
- runtime_library_dirs,
- None, # export_symbols, we do this in our def-file
- debug,
- extra_preargs,
- extra_postargs,
- build_temp,
- target_lang)
-
- # link ()
-
- # -- Miscellaneous methods -----------------------------------------
-
- # override the object_filenames method from CCompiler to
- # support rc and res-files
- def object_filenames (self,
- source_filenames,
- strip_dir=0,
- output_dir=''):
- if output_dir is None: output_dir = ''
- obj_names = []
- for src_name in source_filenames:
- # use normcase to make sure '.rc' is really '.rc' and not '.RC'
- (base, ext) = os.path.splitext (os.path.normcase(src_name))
- if ext not in (self.src_extensions + ['.rc']):
- raise UnknownFileError("unknown file type '%s' (from '%s')" % \
- (ext, src_name))
- if strip_dir:
- base = os.path.basename (base)
- if ext == '.rc':
- # these need to be compiled to object files
- obj_names.append (os.path.join (output_dir,
- base + self.res_extension))
- else:
- obj_names.append (os.path.join (output_dir,
- base + self.obj_extension))
- return obj_names
-
- # object_filenames ()
-
- # override the find_library_file method from UnixCCompiler
- # to deal with file naming/searching differences
- def find_library_file(self, dirs, lib, debug=0):
- shortlib = '%s.lib' % lib
- longlib = 'lib%s.lib' % lib # this form very rare
-
- # get EMX's default library directory search path
- try:
- emx_dirs = os.environ['LIBRARY_PATH'].split(';')
- except KeyError:
- emx_dirs = []
-
- for dir in dirs + emx_dirs:
- shortlibp = os.path.join(dir, shortlib)
- longlibp = os.path.join(dir, longlib)
- if os.path.exists(shortlibp):
- return shortlibp
- elif os.path.exists(longlibp):
- return longlibp
-
- # Oops, didn't find it in *any* of 'dirs'
- return None
-
-# class EMXCCompiler
-
-
-# Because these compilers aren't configured in Python's pyconfig.h file by
-# default, we should at least warn the user if he is using a unmodified
-# version.
-
-CONFIG_H_OK = "ok"
-CONFIG_H_NOTOK = "not ok"
-CONFIG_H_UNCERTAIN = "uncertain"
-
-def check_config_h():
-
- """Check if the current Python installation (specifically, pyconfig.h)
- appears amenable to building extensions with GCC. Returns a tuple
- (status, details), where 'status' is one of the following constants:
- CONFIG_H_OK
- all is well, go ahead and compile
- CONFIG_H_NOTOK
- doesn't look good
- CONFIG_H_UNCERTAIN
- not sure -- unable to read pyconfig.h
- 'details' is a human-readable string explaining the situation.
-
- Note there are two ways to conclude "OK": either 'sys.version' contains
- the string "GCC" (implying that this Python was built with GCC), or the
- installed "pyconfig.h" contains the string "__GNUC__".
- """
-
- # XXX since this function also checks sys.version, it's not strictly a
- # "pyconfig.h" check -- should probably be renamed...
-
- from distutils import sysconfig
- # if sys.version contains GCC then python was compiled with
- # GCC, and the pyconfig.h file should be OK
- if sys.version.find("GCC") >= 0:
- return (CONFIG_H_OK, "sys.version mentions 'GCC'")
-
- fn = sysconfig.get_config_h_filename()
- try:
- # It would probably better to read single lines to search.
- # But we do this only once, and it is fast enough
- f = open(fn)
- try:
- s = f.read()
- finally:
- f.close()
-
- except IOError as exc:
- # if we can't read this file, we cannot say it is wrong
- # the compiler will complain later about this file as missing
- return (CONFIG_H_UNCERTAIN,
- "couldn't read '%s': %s" % (fn, exc.strerror))
-
- else:
- # "pyconfig.h" contains an "#ifdef __GNUC__" or something similar
- if s.find("__GNUC__") >= 0:
- return (CONFIG_H_OK, "'%s' mentions '__GNUC__'" % fn)
- else:
- return (CONFIG_H_NOTOK, "'%s' does not mention '__GNUC__'" % fn)
-
-
-def get_versions():
- """ Try to find out the versions of gcc and ld.
- If not possible it returns None for it.
- """
- from distutils.version import StrictVersion
- from distutils.spawn import find_executable
- import re
-
- gcc_exe = find_executable('gcc')
- if gcc_exe:
- out = os.popen(gcc_exe + ' -dumpversion','r')
- try:
- out_string = out.read()
- finally:
- out.close()
- result = re.search('(\d+\.\d+\.\d+)', out_string, re.ASCII)
- if result:
- gcc_version = StrictVersion(result.group(1))
- else:
- gcc_version = None
- else:
- gcc_version = None
- # EMX ld has no way of reporting version number, and we use GCC
- # anyway - so we can link OMF DLLs
- ld_version = None
- return (gcc_version, ld_version)
diff --git a/Lib/distutils/errors.py b/Lib/distutils/errors.py
index eb13c98..8b93059 100644
--- a/Lib/distutils/errors.py
+++ b/Lib/distutils/errors.py
@@ -35,8 +35,8 @@ class DistutilsArgError (DistutilsError):
class DistutilsFileError (DistutilsError):
"""Any problems in the filesystem: expected file not found, etc.
- Typically this is for problems that we detect before IOError or
- OSError could be raised."""
+ Typically this is for problems that we detect before OSError
+ could be raised."""
pass
class DistutilsOptionError (DistutilsError):
diff --git a/Lib/distutils/file_util.py b/Lib/distutils/file_util.py
index 9bdd14e..f6ed290 100644
--- a/Lib/distutils/file_util.py
+++ b/Lib/distutils/file_util.py
@@ -27,26 +27,26 @@ def _copy_file_contents(src, dst, buffer_size=16*1024):
try:
try:
fsrc = open(src, 'rb')
- except os.error as e:
+ except OSError as e:
raise DistutilsFileError("could not open '%s': %s" % (src, e.strerror))
if os.path.exists(dst):
try:
os.unlink(dst)
- except os.error as e:
+ except OSError as e:
raise DistutilsFileError(
"could not delete '%s': %s" % (dst, e.strerror))
try:
fdst = open(dst, 'wb')
- except os.error as e:
+ except OSError as e:
raise DistutilsFileError(
"could not create '%s': %s" % (dst, e.strerror))
while True:
try:
buf = fsrc.read(buffer_size)
- except os.error as e:
+ except OSError as e:
raise DistutilsFileError(
"could not read from '%s': %s" % (src, e.strerror))
@@ -55,7 +55,7 @@ def _copy_file_contents(src, dst, buffer_size=16*1024):
try:
fdst.write(buf)
- except os.error as e:
+ except OSError as e:
raise DistutilsFileError(
"could not write to '%s': %s" % (dst, e.strerror))
finally:
@@ -193,7 +193,7 @@ def move_file (src, dst,
copy_it = False
try:
os.rename(src, dst)
- except os.error as e:
+ except OSError as e:
(num, msg) = e
if num == errno.EXDEV:
copy_it = True
@@ -205,11 +205,11 @@ def move_file (src, dst,
copy_file(src, dst, verbose=verbose)
try:
os.unlink(src)
- except os.error as e:
+ except OSError as e:
(num, msg) = e
try:
os.unlink(dst)
- except os.error:
+ except OSError:
pass
raise DistutilsFileError(
"couldn't move '%s' to '%s' by copy/delete: "
diff --git a/Lib/distutils/msvc9compiler.py b/Lib/distutils/msvc9compiler.py
index b3f6ce1..9688f20 100644
--- a/Lib/distutils/msvc9compiler.py
+++ b/Lib/distutils/msvc9compiler.py
@@ -729,7 +729,7 @@ class MSVCCompiler(CCompiler) :
return manifest_file
finally:
manifest_f.close()
- except IOError:
+ except OSError:
pass
# -- Miscellaneous methods -----------------------------------------
diff --git a/Lib/distutils/spawn.py b/Lib/distutils/spawn.py
index f58c55f..b1c5a44 100644
--- a/Lib/distutils/spawn.py
+++ b/Lib/distutils/spawn.py
@@ -32,8 +32,6 @@ def spawn(cmd, search_path=1, verbose=0, dry_run=0):
_spawn_posix(cmd, search_path, dry_run=dry_run)
elif os.name == 'nt':
_spawn_nt(cmd, search_path, dry_run=dry_run)
- elif os.name == 'os2':
- _spawn_os2(cmd, search_path, dry_run=dry_run)
else:
raise DistutilsPlatformError(
"don't know how to spawn programs on platform '%s'" % os.name)
@@ -74,26 +72,6 @@ def _spawn_nt(cmd, search_path=1, verbose=0, dry_run=0):
raise DistutilsExecError(
"command '%s' failed with exit status %d" % (cmd[0], rc))
-def _spawn_os2(cmd, search_path=1, verbose=0, dry_run=0):
- executable = cmd[0]
- if search_path:
- # either we find one or it stays the same
- executable = find_executable(executable) or executable
- log.info(' '.join([executable] + cmd[1:]))
- if not dry_run:
- # spawnv for OS/2 EMX requires a full path to the .exe
- try:
- rc = os.spawnv(os.P_WAIT, executable, cmd)
- except OSError as exc:
- # this seems to happen when the command isn't found
- raise DistutilsExecError(
- "command '%s' failed: %s" % (cmd[0], exc.args[-1]))
- if rc != 0:
- # and this reflects the command running but failing
- log.debug("command '%s' failed with exit status %d" % (cmd[0], rc))
- raise DistutilsExecError(
- "command '%s' failed with exit status %d" % (cmd[0], rc))
-
if sys.platform == 'darwin':
from distutils import sysconfig
_cfg_target = None
@@ -180,7 +158,7 @@ def find_executable(executable, path=None):
paths = path.split(os.pathsep)
base, ext = os.path.splitext(executable)
- if (sys.platform == 'win32' or os.name == 'os2') and (ext != '.exe'):
+ if (sys.platform == 'win32') and (ext != '.exe'):
executable = executable + '.exe'
if not os.path.isfile(executable):
diff --git a/Lib/distutils/sysconfig.py b/Lib/distutils/sysconfig.py
index b73503d..898aa26 100644
--- a/Lib/distutils/sysconfig.py
+++ b/Lib/distutils/sysconfig.py
@@ -114,8 +114,6 @@ def get_python_inc(plat_specific=0, prefix=None):
return os.path.join(prefix, "include", python_dir)
elif os.name == "nt":
return os.path.join(prefix, "include")
- elif os.name == "os2":
- return os.path.join(prefix, "Include")
else:
raise DistutilsPlatformError(
"I don't know where Python installs its C header files "
@@ -157,11 +155,6 @@ def get_python_lib(plat_specific=0, standard_lib=0, prefix=None):
return prefix
else:
return os.path.join(prefix, "Lib", "site-packages")
- elif os.name == "os2":
- if standard_lib:
- return os.path.join(prefix, "Lib")
- else:
- return os.path.join(prefix, "Lib", "site-packages")
else:
raise DistutilsPlatformError(
"I don't know where Python installs its library "
@@ -436,7 +429,7 @@ def _init_posix():
try:
filename = get_makefile_filename()
parse_makefile(filename, g)
- except IOError as msg:
+ except OSError as msg:
my_msg = "invalid Python installation: unable to open %s" % filename
if hasattr(msg, "strerror"):
my_msg = my_msg + " (%s)" % msg.strerror
@@ -448,7 +441,7 @@ def _init_posix():
filename = get_config_h_filename()
with open(filename) as file:
parse_config_h(file, g)
- except IOError as msg:
+ except OSError as msg:
my_msg = "invalid Python installation: unable to open %s" % filename
if hasattr(msg, "strerror"):
my_msg = my_msg + " (%s)" % msg.strerror
@@ -486,7 +479,6 @@ def _init_nt():
# XXX hmmm.. a normal install puts include files here
g['INCLUDEPY'] = get_python_inc(plat_specific=0)
- g['SO'] = '.pyd'
g['EXT_SUFFIX'] = '.pyd'
g['EXE'] = ".exe"
g['VERSION'] = get_python_version().replace(".", "")
@@ -496,24 +488,6 @@ def _init_nt():
_config_vars = g
-def _init_os2():
- """Initialize the module as appropriate for OS/2"""
- g = {}
- # set basic install directories
- g['LIBDEST'] = get_python_lib(plat_specific=0, standard_lib=1)
- g['BINLIBDEST'] = get_python_lib(plat_specific=1, standard_lib=1)
-
- # XXX hmmm.. a normal install puts include files here
- g['INCLUDEPY'] = get_python_inc(plat_specific=0)
-
- g['SO'] = '.pyd'
- g['EXT_SUFFIX'] = '.pyd'
- g['EXE'] = ".exe"
-
- global _config_vars
- _config_vars = g
-
-
def get_config_vars(*args):
"""With no arguments, return a dictionary of all configuration
variables relevant for the current platform. Generally this includes
diff --git a/Lib/distutils/tests/test_bdist_dumb.py b/Lib/distutils/tests/test_bdist_dumb.py
index 0ad32d4..7306166 100644
--- a/Lib/distutils/tests/test_bdist_dumb.py
+++ b/Lib/distutils/tests/test_bdist_dumb.py
@@ -75,8 +75,6 @@ class BuildDumbTestCase(support.TempdirManager,
# see what we have
dist_created = os.listdir(os.path.join(pkg_dir, 'dist'))
base = "%s.%s.zip" % (dist.get_fullname(), cmd.plat_name)
- if os.name == 'os2':
- base = base.replace(':', '-')
self.assertEqual(dist_created, [base])
diff --git a/Lib/distutils/tests/test_config.py b/Lib/distutils/tests/test_config.py
index 525bee9..1259361 100644
--- a/Lib/distutils/tests/test_config.py
+++ b/Lib/distutils/tests/test_config.py
@@ -87,7 +87,7 @@ class PyPIRCCommandTestCase(support.TempdirManager,
config = list(sorted(config.items()))
waited = [('password', 'secret'), ('realm', 'pypi'),
- ('repository', 'http://pypi.python.org/pypi'),
+ ('repository', 'https://pypi.python.org/pypi'),
('server', 'server1'), ('username', 'me')]
self.assertEqual(config, waited)
@@ -96,7 +96,7 @@ class PyPIRCCommandTestCase(support.TempdirManager,
config = cmd._read_pypirc()
config = list(sorted(config.items()))
waited = [('password', 'secret'), ('realm', 'pypi'),
- ('repository', 'http://pypi.python.org/pypi'),
+ ('repository', 'https://pypi.python.org/pypi'),
('server', 'server-login'), ('username', 'tarek')]
self.assertEqual(config, waited)
diff --git a/Lib/distutils/tests/test_install.py b/Lib/distutils/tests/test_install.py
index b190127..e9b2642 100644
--- a/Lib/distutils/tests/test_install.py
+++ b/Lib/distutils/tests/test_install.py
@@ -94,7 +94,7 @@ class InstallTestCase(support.TempdirManager,
self.addCleanup(cleanup)
- for key in ('nt_user', 'unix_user', 'os2_home'):
+ for key in ('nt_user', 'unix_user'):
self.assertIn(key, INSTALL_SCHEMES)
dist = Distribution({'name': 'xx'})
diff --git a/Lib/distutils/tests/test_upload.py b/Lib/distutils/tests/test_upload.py
index 4c6464a..d269686 100644
--- a/Lib/distutils/tests/test_upload.py
+++ b/Lib/distutils/tests/test_upload.py
@@ -72,11 +72,11 @@ class uploadTestCase(PyPIRCCommandTestCase):
def setUp(self):
super(uploadTestCase, self).setUp()
- self.old_class = httpclient.HTTPConnection
- self.conn = httpclient.HTTPConnection = FakeConnection()
+ self.old_class = httpclient.HTTPSConnection
+ self.conn = httpclient.HTTPSConnection = FakeConnection()
def tearDown(self):
- httpclient.HTTPConnection = self.old_class
+ httpclient.HTTPSConnection = self.old_class
super(uploadTestCase, self).tearDown()
def test_finalize_options(self):
@@ -88,7 +88,7 @@ class uploadTestCase(PyPIRCCommandTestCase):
cmd.finalize_options()
for attr, waited in (('username', 'me'), ('password', 'secret'),
('realm', 'pypi'),
- ('repository', 'http://pypi.python.org/pypi')):
+ ('repository', 'https://pypi.python.org/pypi')):
self.assertEqual(getattr(cmd, attr), waited)
def test_saved_password(self):
diff --git a/Lib/distutils/tests/test_util.py b/Lib/distutils/tests/test_util.py
index eac9b51..b73cfe9 100644
--- a/Lib/distutils/tests/test_util.py
+++ b/Lib/distutils/tests/test_util.py
@@ -236,7 +236,7 @@ class UtilTestCase(support.EnvironGuard, unittest.TestCase):
self.assertRaises(DistutilsPlatformError,
change_root, 'c:\\root', 'its\\here')
- # XXX platforms to be covered: os2, mac
+ # XXX platforms to be covered: mac
def test_check_environ(self):
util._environ_checked = 0
diff --git a/Lib/distutils/util.py b/Lib/distutils/util.py
index 67d8166..a2f9544 100644
--- a/Lib/distutils/util.py
+++ b/Lib/distutils/util.py
@@ -154,12 +154,6 @@ def change_root (new_root, pathname):
path = path[1:]
return os.path.join(new_root, path)
- elif os.name == 'os2':
- (drive, path) = os.path.splitdrive(pathname)
- if path[0] == os.sep:
- path = path[1:]
- return os.path.join(new_root, path)
-
else:
raise DistutilsPlatformError("nothing known about platform '%s'" % os.name)
@@ -213,8 +207,8 @@ def subst_vars (s, local_vars):
def grok_environment_error (exc, prefix="error: "):
- """Generate a useful error message from an EnvironmentError (IOError or
- OSError) exception object. Handles Python 1.5.1 and 1.5.2 styles, and
+ """Generate a useful error message from an OSError
+ exception object. Handles Python 1.5.1 and 1.5.2 styles, and
does what it can to deal with exception objects that don't have a
filename (which happens when the error is due to a two-file operation,
such as 'rename()' or 'link()'. Returns the error message as a string
diff --git a/Lib/doctest.py b/Lib/doctest.py
index 3af05fb..16a732d 100644
--- a/Lib/doctest.py
+++ b/Lib/doctest.py
@@ -62,6 +62,7 @@ __all__ = [
'REPORT_NDIFF',
'REPORT_ONLY_FIRST_FAILURE',
'REPORTING_FLAGS',
+ 'FAIL_FAST',
# 1. Utility Functions
# 2. Example & DocTest
'Example',
@@ -150,11 +151,13 @@ REPORT_UDIFF = register_optionflag('REPORT_UDIFF')
REPORT_CDIFF = register_optionflag('REPORT_CDIFF')
REPORT_NDIFF = register_optionflag('REPORT_NDIFF')
REPORT_ONLY_FIRST_FAILURE = register_optionflag('REPORT_ONLY_FIRST_FAILURE')
+FAIL_FAST = register_optionflag('FAIL_FAST')
REPORTING_FLAGS = (REPORT_UDIFF |
REPORT_CDIFF |
REPORT_NDIFF |
- REPORT_ONLY_FIRST_FAILURE)
+ REPORT_ONLY_FIRST_FAILURE |
+ FAIL_FAST)
# Special string markers for use in `want` strings:
BLANKLINE_MARKER = '<BLANKLINE>'
@@ -1342,6 +1345,9 @@ class DocTestRunner:
else:
assert False, ("unknown outcome", outcome)
+ if failures and self.optionflags & FAIL_FAST:
+ break
+
# Restore the option flags (in case they were modified)
self.optionflags = original_optionflags
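
A short usage sketch of the new FAIL_FAST flag: the runner reports the first failing example and skips the rest of that test. The sample function is made up for illustration.

    import doctest

    def add(a, b):
        """
        >>> add(2, 2)
        4
        >>> add(2, 2)
        5
        >>> add(3, 3)
        6
        """
        return a + b

    if __name__ == '__main__':
        # Stops at the first failure (4 != 5); the remaining examples in the
        # docstring are not run.  The command-line equivalent is
        # "python -m doctest -o FAIL_FAST yourfile.py".
        doctest.testmod(optionflags=doctest.FAIL_FAST)
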
diff --git a/Lib/email/_header_value_parser.py b/Lib/email/_header_value_parser.py
index 26cfa52..1928505 100644
--- a/Lib/email/_header_value_parser.py
+++ b/Lib/email/_header_value_parser.py
@@ -367,8 +367,7 @@ class TokenList(list):
yield (indent + ' !! invalid element in token '
'list: {!r}'.format(token))
else:
- for line in token._pp(indent+' '):
- yield line
+ yield from token._pp(indent+' ')
if self.defects:
extra = ' Defects: {}'.format(self.defects)
else:
diff --git a/Lib/email/feedparser.py b/Lib/email/feedparser.py
index ea41e95..eb75fe3 100644
--- a/Lib/email/feedparser.py
+++ b/Lib/email/feedparser.py
@@ -98,24 +98,15 @@ class BufferedSubFile(object):
"""Push some new data into this object."""
# Handle any previous leftovers
data, self._partial = self._partial + data, ''
- # Crack into lines, but preserve the newlines on the end of each
- parts = NLCRE_crack.split(data)
- # The *ahem* interesting behaviour of re.split when supplied grouping
- # parentheses is that the last element of the resulting list is the
- # data after the final RE. In the case of a NL/CR terminated string,
- # this is the empty string.
- self._partial = parts.pop()
- #GAN 29Mar09 bugs 1555570, 1721862 Confusion at 8K boundary ending with \r:
- # is there a \n to follow later?
- if not self._partial and parts and parts[-1].endswith('\r'):
- self._partial = parts.pop(-2)+parts.pop()
- # parts is a list of strings, alternating between the line contents
- # and the eol character(s). Gather up a list of lines after
- # re-attaching the newlines.
- lines = []
- for i in range(len(parts) // 2):
- lines.append(parts[i*2] + parts[i*2+1])
- self.pushlines(lines)
+ # Crack into lines, but preserve the linesep characters on the end of each
+ parts = data.splitlines(True)
+ # If the last element of the list does not end in a newline, then treat
+ # it as a partial line. We only check for '\n' here because a line
+ # ending with '\r' might be a line that was split in the middle of a
+ # '\r\n' sequence (see bugs 1555570 and 1721862).
+ if parts and not parts[-1].endswith('\n'):
+ self._partial = parts.pop()
+ self.pushlines(parts)
def pushlines(self, lines):
# Reverse and insert at the front of the lines.
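
A standalone illustration (not taken from the module) of why str.splitlines(True) plus a trailing '\n' check suffices here: line endings are preserved, and a chunk that ends mid-line, including one ending in a bare '\r' that may be the first half of '\r\n', is held back as a partial line.

    def split_push(data, partial=''):
        # Mimic the new BufferedSubFile.push() logic: prepend the leftover,
        # split keeping line endings, and hold back anything that is not
        # newline-terminated.
        parts = (partial + data).splitlines(True)
        if parts and not parts[-1].endswith('\n'):
            partial = parts.pop()
        else:
            partial = ''
        return parts, partial

    lines, rest = split_push('From: a@b\r\nSubject: hi')
    print(lines, rest)   # ['From: a@b\r\n'] 'Subject: hi'

    lines, rest = split_push('\nBody\r', rest)
    print(lines, rest)   # ['Subject: hi\n'] 'Body\r'  (the '\r' waits for a possible '\n')
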
diff --git a/Lib/email/iterators.py b/Lib/email/iterators.py
index 3adc4a0..b5502ee 100644
--- a/Lib/email/iterators.py
+++ b/Lib/email/iterators.py
@@ -26,8 +26,7 @@ def walk(self):
yield self
if self.is_multipart():
for subpart in self.get_payload():
- for subsubpart in subpart.walk():
- yield subsubpart
+ yield from subpart.walk()
@@ -40,8 +39,7 @@ def body_line_iterator(msg, decode=False):
for subpart in msg.walk():
payload = subpart.get_payload(decode=decode)
if isinstance(payload, str):
- for line in StringIO(payload):
- yield line
+ yield from StringIO(payload)
def typed_subpart_iterator(msg, maintype='text', subtype=None):
diff --git a/Lib/email/parser.py b/Lib/email/parser.py
index 752bf35..f49d31d 100644
--- a/Lib/email/parser.py
+++ b/Lib/email/parser.py
@@ -4,7 +4,8 @@
"""A parser of RFC 2822 and MIME email messages."""
-__all__ = ['Parser', 'HeaderParser', 'BytesParser', 'BytesHeaderParser']
+__all__ = ['Parser', 'HeaderParser', 'BytesParser', 'BytesHeaderParser',
+ 'FeedParser', 'BytesFeedParser']
import warnings
from io import StringIO, TextIOWrapper
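
With FeedParser and BytesFeedParser now re-exported from email.parser, a brief usage sketch of the incremental-feeding API; the message content is invented for the example.

    from email.parser import FeedParser

    parser = FeedParser()
    # Data may arrive in arbitrary chunks, e.g. straight off a socket.
    parser.feed('Subject: hello\r\n')
    parser.feed('\r\n')
    parser.feed('Body line 1\r\n')
    msg = parser.close()
    print(msg['Subject'])          # hello
    print(repr(msg.get_payload())) # body text, line endings preserved
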
diff --git a/Lib/filecmp.py b/Lib/filecmp.py
index f5cea1d..014c0d0 100644
--- a/Lib/filecmp.py
+++ b/Lib/filecmp.py
@@ -13,11 +13,15 @@ import os
import stat
from itertools import filterfalse
-__all__ = ["cmp", "dircmp", "cmpfiles"]
+__all__ = ['cmp', 'dircmp', 'cmpfiles', 'DEFAULT_IGNORES']
_cache = {}
BUFSIZE = 8*1024
+DEFAULT_IGNORES = [
+ 'RCS', 'CVS', 'tags', '.git', '.hg', '.bzr', '_darcs', '__pycache__']
+
+
def cmp(f1, f2, shallow=True):
"""Compare two files.
@@ -80,7 +84,7 @@ class dircmp:
dircmp(a, b, ignore=None, hide=None)
A and B are directories.
IGNORE is a list of names to ignore,
- defaults to ['RCS', 'CVS', 'tags'].
+ defaults to DEFAULT_IGNORES.
HIDE is a list of names to hide,
defaults to [os.curdir, os.pardir].
@@ -116,7 +120,7 @@ class dircmp:
else:
self.hide = hide
if ignore is None:
- self.ignore = ['RCS', 'CVS', 'tags'] # Names ignored in comparison
+ self.ignore = DEFAULT_IGNORES
else:
self.ignore = ignore
@@ -147,12 +151,12 @@ class dircmp:
ok = 1
try:
a_stat = os.stat(a_path)
- except os.error as why:
+ except OSError as why:
# print('Can\'t stat', a_path, ':', why.args[1])
ok = 0
try:
b_stat = os.stat(b_path)
- except os.error as why:
+ except OSError as why:
# print('Can\'t stat', b_path, ':', why.args[1])
ok = 0
@@ -268,7 +272,7 @@ def cmpfiles(a, b, common, shallow=True):
def _cmp(a, b, sh, abs=abs, cmp=cmp):
try:
return not abs(cmp(a, b, sh))
- except os.error:
+ except OSError:
return 2
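
The new DEFAULT_IGNORES constant can be extended instead of re-listing the defaults; a small sketch, with hypothetical directory names.

    import filecmp

    # Compare two trees, ignoring the usual VCS/cache directories plus a
    # project-specific 'build' directory.
    ignore = filecmp.DEFAULT_IGNORES + ['build']
    comparison = filecmp.dircmp('pkg-1.0', 'pkg-1.1', ignore=ignore)
    comparison.report()              # summary of identical/different/only-in-one files
    print(comparison.diff_files)     # names present in both trees but differing
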
diff --git a/Lib/fileinput.py b/Lib/fileinput.py
index dbbbb21..f9bb88a 100644
--- a/Lib/fileinput.py
+++ b/Lib/fileinput.py
@@ -30,7 +30,7 @@ pertaining to the last line read; nextfile() has no effect.
All files are opened in text mode by default, you can override this by
setting the mode parameter to input() or FileInput.__init__().
-If an I/O error occurs during opening or reading a file, the IOError
+If an I/O error occurs during opening or reading a file, the OSError
exception is raised.
If sys.stdin is used more than once, the second and further use will
@@ -324,9 +324,11 @@ class FileInput:
if self._inplace:
self._backupfilename = (
self._filename + (self._backup or ".bak"))
- try: os.unlink(self._backupfilename)
- except os.error: pass
- # The next few lines may raise IOError
+ try:
+ os.unlink(self._backupfilename)
+ except OSError:
+ pass
+ # The next few lines may raise OSError
os.rename(self._filename, self._backupfilename)
self._file = open(self._backupfilename, self._mode)
try:
@@ -348,7 +350,7 @@ class FileInput:
self._savestdout = sys.stdout
sys.stdout = self._output
else:
- # This may raise IOError
+ # This may raise OSError
if self._openhook:
self._file = self._openhook(self._filename, self._mode)
else:
diff --git a/Lib/fractions.py b/Lib/fractions.py
index 8be52d2..79e83ff 100644
--- a/Lib/fractions.py
+++ b/Lib/fractions.py
@@ -182,8 +182,10 @@ class Fraction(numbers.Rational):
elif not isinstance(f, float):
raise TypeError("%s.from_float() only takes floats, not %r (%s)" %
(cls.__name__, f, type(f).__name__))
- if math.isnan(f) or math.isinf(f):
- raise TypeError("Cannot convert %r to %s." % (f, cls.__name__))
+ if math.isnan(f):
+ raise ValueError("Cannot convert %r to %s." % (f, cls.__name__))
+ if math.isinf(f):
+ raise OverflowError("Cannot convert %r to %s." % (f, cls.__name__))
return cls(*f.as_integer_ratio())
@classmethod
@@ -196,9 +198,11 @@ class Fraction(numbers.Rational):
raise TypeError(
"%s.from_decimal() only takes Decimals, not %r (%s)" %
(cls.__name__, dec, type(dec).__name__))
- if not dec.is_finite():
- # Catches infinities and nans.
- raise TypeError("Cannot convert %s to %s." % (dec, cls.__name__))
+ if dec.is_infinite():
+ raise OverflowError(
+ "Cannot convert %s to %s." % (dec, cls.__name__))
+ if dec.is_nan():
+ raise ValueError("Cannot convert %s to %s." % (dec, cls.__name__))
sign, digits, exp = dec.as_tuple()
digits = int(''.join(map(str, digits)))
if sign:
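
The new behaviour distinguishes NaN from infinity; a minimal check, assuming an interpreter with this change applied.

    from fractions import Fraction

    for value in (float('nan'), float('inf')):
        try:
            Fraction.from_float(value)
        except ValueError as exc:        # raised for NaN
            print('ValueError:', exc)
        except OverflowError as exc:     # raised for +/- infinity
            print('OverflowError:', exc)
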
diff --git a/Lib/ftplib.py b/Lib/ftplib.py
index 741e647..c5554f8 100644
--- a/Lib/ftplib.py
+++ b/Lib/ftplib.py
@@ -39,9 +39,10 @@ python ftplib.py -d localhost -l -p -l
import os
import sys
import socket
+import warnings
from socket import _GLOBAL_DEFAULT_TIMEOUT
-__all__ = ["FTP","Netrc"]
+__all__ = ["FTP", "Netrc"]
# Magic number from <socket.h>
MSG_OOB = 0x1 # Process data out of band
@@ -61,7 +62,7 @@ class error_proto(Error): pass # response does not begin with [1-5]
# All exceptions (hopefully) that may be raised here and that aren't
# (always) programming errors on our side
-all_errors = (Error, IOError, EOFError)
+all_errors = (Error, OSError, EOFError)
# Line terminators (we always output CRLF, but accept any of CRLF, CR, LF)
@@ -123,7 +124,7 @@ class FTP:
if self.sock is not None:
try:
self.quit()
- except (socket.error, EOFError):
+ except (OSError, EOFError):
pass
finally:
if self.sock is not None:
@@ -183,7 +184,8 @@ class FTP:
# Internal: send one line to the server, appending CRLF
def putline(self, line):
line = line + CRLF
- if self.debugging > 1: print('*put*', self.sanitize(line))
+ if self.debugging > 1:
+ print('*put*', self.sanitize(line))
self.sock.sendall(line.encode(self.encoding))
# Internal: send one command to the server (through putline())
@@ -197,9 +199,12 @@ class FTP:
line = self.file.readline()
if self.debugging > 1:
print('*get*', self.sanitize(line))
- if not line: raise EOFError
- if line[-2:] == CRLF: line = line[:-2]
- elif line[-1:] in CRLF: line = line[:-1]
+ if not line:
+ raise EOFError
+ if line[-2:] == CRLF:
+ line = line[:-2]
+ elif line[-1:] in CRLF:
+ line = line[:-1]
return line
# Internal: get a response from the server, which may possibly
@@ -222,7 +227,8 @@ class FTP:
# Raise various errors if the response indicates an error
def getresp(self):
resp = self.getmultiline()
- if self.debugging: print('*resp*', self.sanitize(resp))
+ if self.debugging:
+ print('*resp*', self.sanitize(resp))
self.lastresp = resp[:3]
c = resp[:1]
if c in {'1', '2', '3'}:
@@ -246,7 +252,8 @@ class FTP:
IP and Synch; that doesn't seem to work with the servers I've
tried. Instead, just send the ABOR command as OOB data.'''
line = b'ABOR' + B_CRLF
- if self.debugging > 1: print('*put urgent*', self.sanitize(line))
+ if self.debugging > 1:
+ print('*put urgent*', self.sanitize(line))
self.sock.sendall(line, MSG_OOB)
resp = self.getmultiline()
if resp[:3] not in {'426', '225', '226'}:
@@ -295,7 +302,7 @@ class FTP:
try:
sock = socket.socket(af, socktype, proto)
sock.bind(sa)
- except socket.error as _:
+ except OSError as _:
err = _
if sock:
sock.close()
@@ -306,8 +313,8 @@ class FTP:
if err is not None:
raise err
else:
- raise socket.error("getaddrinfo returns an empty list")
- raise socket.error(msg)
+ raise OSError("getaddrinfo returns an empty list")
+ raise OSError(msg)
sock.listen(1)
port = sock.getsockname()[1] # Get proper port
host = self.sock.getsockname()[0] # Get proper host
@@ -387,9 +394,12 @@ class FTP:
def login(self, user = '', passwd = '', acct = ''):
'''Login, default anonymous.'''
- if not user: user = 'anonymous'
- if not passwd: passwd = ''
- if not acct: acct = ''
+ if not user:
+ user = 'anonymous'
+ if not passwd:
+ passwd = ''
+ if not acct:
+ acct = ''
if user == 'anonymous' and passwd in {'', '-'}:
# If there is no anonymous ftp password specified
# then we'll just use anonymous@
@@ -400,8 +410,10 @@ class FTP:
# host or country.
passwd = passwd + 'anonymous@'
resp = self.sendcmd('USER ' + user)
- if resp[0] == '3': resp = self.sendcmd('PASS ' + passwd)
- if resp[0] == '3': resp = self.sendcmd('ACCT ' + acct)
+ if resp[0] == '3':
+ resp = self.sendcmd('PASS ' + passwd)
+ if resp[0] == '3':
+ resp = self.sendcmd('ACCT ' + acct)
if resp[0] != '2':
raise error_reply(resp)
return resp
@@ -427,6 +439,9 @@ class FTP:
if not data:
break
callback(data)
+ # shutdown ssl layer
+ if _SSLSocket is not None and isinstance(conn, _SSLSocket):
+ conn.unwrap()
return self.voidresp()
def retrlines(self, cmd, callback = None):
@@ -441,13 +456,15 @@ class FTP:
Returns:
The response code.
"""
- if callback is None: callback = print_line
+ if callback is None:
+ callback = print_line
resp = self.sendcmd('TYPE A')
with self.transfercmd(cmd) as conn, \
conn.makefile('r', encoding=self.encoding) as fp:
while 1:
line = fp.readline()
- if self.debugging > 2: print('*retr*', repr(line))
+ if self.debugging > 2:
+ print('*retr*', repr(line))
if not line:
break
if line[-2:] == CRLF:
@@ -455,6 +472,9 @@ class FTP:
elif line[-1:] == '\n':
line = line[:-1]
callback(line)
+ # shutdown ssl layer
+ if _SSLSocket is not None and isinstance(conn, _SSLSocket):
+ conn.unwrap()
return self.voidresp()
def storbinary(self, cmd, fp, blocksize=8192, callback=None, rest=None):
@@ -476,9 +496,14 @@ class FTP:
with self.transfercmd(cmd, rest) as conn:
while 1:
buf = fp.read(blocksize)
- if not buf: break
+ if not buf:
+ break
conn.sendall(buf)
- if callback: callback(buf)
+ if callback:
+ callback(buf)
+ # shutdown ssl layer
+ if _SSLSocket is not None and isinstance(conn, _SSLSocket):
+ conn.unwrap()
return self.voidresp()
def storlines(self, cmd, fp, callback=None):
@@ -497,12 +522,17 @@ class FTP:
with self.transfercmd(cmd) as conn:
while 1:
buf = fp.readline()
- if not buf: break
+ if not buf:
+ break
if buf[-2:] != B_CRLF:
if buf[-1] in B_CRLF: buf = buf[:-1]
buf = buf + B_CRLF
conn.sendall(buf)
- if callback: callback(buf)
+ if callback:
+ callback(buf)
+ # shutdown ssl layer
+ if _SSLSocket is not None and isinstance(conn, _SSLSocket):
+ conn.unwrap()
return self.voidresp()
def acct(self, password):
@@ -637,8 +667,10 @@ class FTP:
try:
import ssl
except ImportError:
- pass
+ _SSLSocket = None
else:
+ _SSLSocket = ssl.SSLSocket
+
class FTP_TLS(FTP):
'''A FTP subclass which adds TLS support to FTP as described
in RFC-4217.
@@ -753,69 +785,6 @@ else:
ssl_version=self.ssl_version)
return conn, size
- def retrbinary(self, cmd, callback, blocksize=8192, rest=None):
- self.voidcmd('TYPE I')
- with self.transfercmd(cmd, rest) as conn:
- while 1:
- data = conn.recv(blocksize)
- if not data:
- break
- callback(data)
- # shutdown ssl layer
- if isinstance(conn, ssl.SSLSocket):
- conn.unwrap()
- return self.voidresp()
-
- def retrlines(self, cmd, callback = None):
- if callback is None: callback = print_line
- resp = self.sendcmd('TYPE A')
- conn = self.transfercmd(cmd)
- fp = conn.makefile('r', encoding=self.encoding)
- with fp, conn:
- while 1:
- line = fp.readline()
- if self.debugging > 2: print('*retr*', repr(line))
- if not line:
- break
- if line[-2:] == CRLF:
- line = line[:-2]
- elif line[-1:] == '\n':
- line = line[:-1]
- callback(line)
- # shutdown ssl layer
- if isinstance(conn, ssl.SSLSocket):
- conn.unwrap()
- return self.voidresp()
-
- def storbinary(self, cmd, fp, blocksize=8192, callback=None, rest=None):
- self.voidcmd('TYPE I')
- with self.transfercmd(cmd, rest) as conn:
- while 1:
- buf = fp.read(blocksize)
- if not buf: break
- conn.sendall(buf)
- if callback: callback(buf)
- # shutdown ssl layer
- if isinstance(conn, ssl.SSLSocket):
- conn.unwrap()
- return self.voidresp()
-
- def storlines(self, cmd, fp, callback=None):
- self.voidcmd('TYPE A')
- with self.transfercmd(cmd) as conn:
- while 1:
- buf = fp.readline()
- if not buf: break
- if buf[-2:] != B_CRLF:
- if buf[-1] in B_CRLF: buf = buf[:-1]
- buf = buf + B_CRLF
- conn.sendall(buf)
- if callback: callback(buf)
- # shutdown ssl layer
- if isinstance(conn, ssl.SSLSocket):
- conn.unwrap()
- return self.voidresp()
-
def abort(self):
# overridden as we can't pass MSG_OOB flag to sendall()
line = b'ABOR' + B_CRLF
@@ -826,7 +795,7 @@ else:
return resp
__all__.append('FTP_TLS')
- all_errors = (Error, IOError, EOFError, ssl.SSLError)
+ all_errors = (Error, OSError, EOFError, ssl.SSLError)
_150_re = None
@@ -923,7 +892,8 @@ def print_line(line):
def ftpcp(source, sourcename, target, targetname = '', type = 'I'):
'''Copy file from one FTP-instance to another.'''
- if not targetname: targetname = sourcename
+ if not targetname:
+ targetname = sourcename
type = 'TYPE ' + type
source.voidcmd(type)
target.voidcmd(type)
@@ -933,9 +903,11 @@ def ftpcp(source, sourcename, target, targetname = '', type = 'I'):
# transfer request.
# So: STOR before RETR, because here the target is a "user".
treply = target.sendcmd('STOR ' + targetname)
- if treply[:3] not in {'125', '150'}: raise error_proto # RFC 959
+ if treply[:3] not in {'125', '150'}:
+ raise error_proto # RFC 959
sreply = source.sendcmd('RETR ' + sourcename)
- if sreply[:3] not in {'125', '150'}: raise error_proto # RFC 959
+ if sreply[:3] not in {'125', '150'}:
+ raise error_proto # RFC 959
source.voidresp()
target.voidresp()
@@ -953,19 +925,22 @@ class Netrc:
__defacct = None
def __init__(self, filename=None):
+ warnings.warn("This class is deprecated, use the netrc module instead",
+ DeprecationWarning, 2)
if filename is None:
if "HOME" in os.environ:
filename = os.path.join(os.environ["HOME"],
".netrc")
else:
- raise IOError("specify file to load or set $HOME")
+ raise OSError("specify file to load or set $HOME")
self.__hosts = {}
self.__macros = {}
fp = open(filename, "r")
in_macro = 0
while 1:
line = fp.readline()
- if not line: break
+ if not line:
+ break
if in_macro and line.strip():
macro_lines.append(line)
continue
@@ -1074,7 +1049,7 @@ def test():
userid = passwd = acct = ''
try:
netrc = Netrc(rcfile)
- except IOError:
+ except OSError:
if rcfile is not None:
sys.stderr.write("Could not open account file"
" -- using anonymous login.")
diff --git a/Lib/functools.py b/Lib/functools.py
index 053e44e..fa11d2f 100644
--- a/Lib/functools.py
+++ b/Lib/functools.py
@@ -11,7 +11,10 @@
__all__ = ['update_wrapper', 'wraps', 'WRAPPER_ASSIGNMENTS', 'WRAPPER_UPDATES',
'total_ordering', 'cmp_to_key', 'lru_cache', 'reduce', 'partial']
-from _functools import partial, reduce
+try:
+ from _functools import reduce
+except ImportError:
+ pass
from collections import namedtuple
try:
from _thread import RLock
@@ -140,6 +143,29 @@ except ImportError:
################################################################################
+### partial() argument application
+################################################################################
+
+def partial(func, *args, **keywords):
+ """new function with partial application of the given arguments
+ and keywords.
+ """
+ def newfunc(*fargs, **fkeywords):
+ newkeywords = keywords.copy()
+ newkeywords.update(fkeywords)
+ return func(*(args + fargs), **newkeywords)
+ newfunc.func = func
+ newfunc.args = args
+ newfunc.keywords = keywords
+ return newfunc
+
+try:
+ from _functools import partial
+except ImportError:
+ pass
+
+
+################################################################################
### LRU Cache function decorator
################################################################################
@@ -220,7 +246,6 @@ def lru_cache(maxsize=128, typed=False):
PREV, NEXT, KEY, RESULT = 0, 1, 2, 3 # names for the link fields
def decorating_function(user_function):
-
cache = {}
hits = misses = 0
full = False
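
A quick demonstration of the behaviour the pure-Python partial() fallback has to preserve; the arithmetic is deliberately trivial.

    from functools import partial

    def power(base, exponent):
        return base ** exponent

    square = partial(power, exponent=2)
    print(square(5))                 # 25
    # The wrapper exposes the captured call pieces, as the fallback does too.
    print(square.func is power, square.args, square.keywords)
    # True () {'exponent': 2}
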
diff --git a/Lib/genericpath.py b/Lib/genericpath.py
index 2174187..5292aa8 100644
--- a/Lib/genericpath.py
+++ b/Lib/genericpath.py
@@ -7,7 +7,8 @@ import os
import stat
__all__ = ['commonprefix', 'exists', 'getatime', 'getctime', 'getmtime',
- 'getsize', 'isdir', 'isfile']
+ 'getsize', 'isdir', 'isfile', 'samefile', 'sameopenfile',
+ 'samestat']
# Does a path exist?
@@ -16,7 +17,7 @@ def exists(path):
"""Test whether a path exists. Returns False for broken symbolic links"""
try:
os.stat(path)
- except os.error:
+ except OSError:
return False
return True
@@ -27,7 +28,7 @@ def isfile(path):
"""Test whether a path is a regular file"""
try:
st = os.stat(path)
- except os.error:
+ except OSError:
return False
return stat.S_ISREG(st.st_mode)
@@ -39,7 +40,7 @@ def isdir(s):
"""Return true if the pathname refers to an existing directory."""
try:
st = os.stat(s)
- except os.error:
+ except OSError:
return False
return stat.S_ISDIR(st.st_mode)
@@ -75,6 +76,31 @@ def commonprefix(m):
return s1[:i]
return s1
+# Are two stat buffers (obtained from stat, fstat or lstat)
+# describing the same file?
+def samestat(s1, s2):
+ """Test whether two stat buffers reference the same file"""
+ return (s1.st_ino == s2.st_ino and
+ s1.st_dev == s2.st_dev)
+
+
+# Are two filenames really pointing to the same file?
+def samefile(f1, f2):
+ """Test whether two pathnames reference the same actual file"""
+ s1 = os.stat(f1)
+ s2 = os.stat(f2)
+ return samestat(s1, s2)
+
+
+# Are two open files really referencing the same file?
+# (Not necessarily the same file descriptor!)
+def sameopenfile(fp1, fp2):
+ """Test whether two open file objects reference the same file"""
+ s1 = os.fstat(fp1)
+ s2 = os.fstat(fp2)
+ return samestat(s1, s2)
+
+
# Split a path in root and extension.
# The extension is everything starting at the last dot in the last
# pathname component; the root is everything before that.
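
These helpers back os.path.samefile()/sameopenfile() on every platform; a short sketch through the public os.path layer, assuming a filesystem that supports hard links.

    import os
    import os.path
    import tempfile

    with tempfile.TemporaryDirectory() as tmp:
        original = os.path.join(tmp, 'data.txt')
        with open(original, 'w') as f:
            f.write('hello\n')
        alias = os.path.join(tmp, 'alias.txt')
        os.link(original, alias)      # hard link: same device and inode

        print(os.path.samefile(original, alias))                      # True
        with open(original) as a, open(alias) as b:
            print(os.path.sameopenfile(a.fileno(), b.fileno()))       # True
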
diff --git a/Lib/getpass.py b/Lib/getpass.py
index 0044742..53689e9 100644
--- a/Lib/getpass.py
+++ b/Lib/getpass.py
@@ -47,7 +47,7 @@ def unix_getpass(prompt='Password: ', stream=None):
input = tty
if not stream:
stream = tty
- except EnvironmentError as e:
+ except OSError as e:
# If that fails, see if stdin can be controlled.
try:
fd = sys.stdin.fileno()
diff --git a/Lib/gettext.py b/Lib/gettext.py
index e43f044..05d9c1e 100644
--- a/Lib/gettext.py
+++ b/Lib/gettext.py
@@ -244,7 +244,7 @@ class GNUTranslations(NullTranslations):
version, msgcount, masteridx, transidx = unpack('>4I', buf[4:20])
ii = '>II'
else:
- raise IOError(0, 'Bad magic number', filename)
+ raise OSError(0, 'Bad magic number', filename)
# Now put all messages from the .mo file buffer into the catalog
# dictionary.
for i in range(0, msgcount):
@@ -256,7 +256,7 @@ class GNUTranslations(NullTranslations):
msg = buf[moff:mend]
tmsg = buf[toff:tend]
else:
- raise IOError(0, 'File is corrupt', filename)
+ raise OSError(0, 'File is corrupt', filename)
# See if we're looking at GNU .mo conventions for metadata
if mlen == 0:
# Catalog description
@@ -398,7 +398,7 @@ def translation(domain, localedir=None, languages=None,
if not mofiles:
if fallback:
return NullTranslations()
- raise IOError(ENOENT, 'No translation file found for domain', domain)
+ raise OSError(ENOENT, 'No translation file found for domain', domain)
# Avoid opening, reading, and parsing the .mo file after it's been done
# once.
result = None
@@ -460,7 +460,7 @@ def dgettext(domain, message):
try:
t = translation(domain, _localedirs.get(domain, None),
codeset=_localecodesets.get(domain))
- except IOError:
+ except OSError:
return message
return t.gettext(message)
@@ -468,7 +468,7 @@ def ldgettext(domain, message):
try:
t = translation(domain, _localedirs.get(domain, None),
codeset=_localecodesets.get(domain))
- except IOError:
+ except OSError:
return message
return t.lgettext(message)
@@ -476,7 +476,7 @@ def dngettext(domain, msgid1, msgid2, n):
try:
t = translation(domain, _localedirs.get(domain, None),
codeset=_localecodesets.get(domain))
- except IOError:
+ except OSError:
if n == 1:
return msgid1
else:
@@ -487,7 +487,7 @@ def ldngettext(domain, msgid1, msgid2, n):
try:
t = translation(domain, _localedirs.get(domain, None),
codeset=_localecodesets.get(domain))
- except IOError:
+ except OSError:
if n == 1:
return msgid1
else:
diff --git a/Lib/glob.py b/Lib/glob.py
index 1f60265..1a268a3 100644
--- a/Lib/glob.py
+++ b/Lib/glob.py
@@ -32,8 +32,7 @@ def iglob(pathname):
return
dirname, basename = os.path.split(pathname)
if not dirname:
- for name in glob1(None, basename):
- yield name
+ yield from glob1(None, basename)
return
# `os.path.split()` returns the argument itself as a dirname if it is a
# drive or UNC path. Prevent an infinite recursion if a drive or UNC path
@@ -62,7 +61,7 @@ def glob1(dirname, pattern):
dirname = os.curdir
try:
names = os.listdir(dirname)
- except os.error:
+ except OSError:
return []
if not _ishidden(pattern):
names = [x for x in names if not _ishidden(x)]
diff --git a/Lib/gzip.py b/Lib/gzip.py
index d7da02c..d5b5743 100644
--- a/Lib/gzip.py
+++ b/Lib/gzip.py
@@ -294,11 +294,11 @@ class GzipFile(io.BufferedIOBase):
return False
if magic != b'\037\213':
- raise IOError('Not a gzipped file')
+ raise OSError('Not a gzipped file')
method, flag, self.mtime = struct.unpack("<BBIxx", self._read_exact(8))
if method != 8:
- raise IOError('Unknown compression method')
+ raise OSError('Unknown compression method')
if flag & FEXTRA:
# Read & discard the extra field, if present
@@ -328,7 +328,7 @@ class GzipFile(io.BufferedIOBase):
self._check_closed()
if self.mode != WRITE:
import errno
- raise IOError(errno.EBADF, "write() on read-only GzipFile object")
+ raise OSError(errno.EBADF, "write() on read-only GzipFile object")
if self.fileobj is None:
raise ValueError("write() on closed GzipFile object")
@@ -349,7 +349,7 @@ class GzipFile(io.BufferedIOBase):
self._check_closed()
if self.mode != READ:
import errno
- raise IOError(errno.EBADF, "read() on write-only GzipFile object")
+ raise OSError(errno.EBADF, "read() on write-only GzipFile object")
if self.extrasize <= 0 and self.fileobj is None:
return b''
@@ -378,7 +378,7 @@ class GzipFile(io.BufferedIOBase):
self._check_closed()
if self.mode != READ:
import errno
- raise IOError(errno.EBADF, "read1() on write-only GzipFile object")
+ raise OSError(errno.EBADF, "read1() on write-only GzipFile object")
if self.extrasize <= 0 and self.fileobj is None:
return b''
@@ -399,7 +399,7 @@ class GzipFile(io.BufferedIOBase):
def peek(self, n):
if self.mode != READ:
import errno
- raise IOError(errno.EBADF, "peek() on write-only GzipFile object")
+ raise OSError(errno.EBADF, "peek() on write-only GzipFile object")
# Do not return ridiculously small buffers, for one common idiom
# is to call peek(1) and expect more bytes in return.
@@ -480,10 +480,10 @@ class GzipFile(io.BufferedIOBase):
# stored is the true file size mod 2**32.
crc32, isize = struct.unpack("<II", self._read_exact(8))
if crc32 != self.crc:
- raise IOError("CRC check failed %s != %s" % (hex(crc32),
+ raise OSError("CRC check failed %s != %s" % (hex(crc32),
hex(self.crc)))
elif isize != (self.size & 0xffffffff):
- raise IOError("Incorrect length of data produced")
+ raise OSError("Incorrect length of data produced")
# Gzip files can be padded with zeroes and still have archives.
# Consume all zero bytes and set the file position to the first
@@ -532,7 +532,7 @@ class GzipFile(io.BufferedIOBase):
'''Return the uncompressed stream file position indicator to the
beginning of the file'''
if self.mode != READ:
- raise IOError("Can't rewind in write mode")
+ raise OSError("Can't rewind in write mode")
self.fileobj.seek(0)
self._new_member = True
self.extrabuf = b""
@@ -557,7 +557,7 @@ class GzipFile(io.BufferedIOBase):
raise ValueError('Seek from end not supported')
if self.mode == WRITE:
if offset < self.offset:
- raise IOError('Negative seek in write mode')
+ raise OSError('Negative seek in write mode')
count = offset - self.offset
chunk = bytes(1024)
for i in range(count // 1024):
diff --git a/Lib/hashlib.py b/Lib/hashlib.py
index 21454c7..a1bd8b2 100644
--- a/Lib/hashlib.py
+++ b/Lib/hashlib.py
@@ -54,7 +54,8 @@ More condensed:
# This tuple and __get_builtin_constructor() must be modified if a new
# always available algorithm is added.
-__always_supported = ('md5', 'sha1', 'sha224', 'sha256', 'sha384', 'sha512')
+__always_supported = ('md5', 'sha1', 'sha224', 'sha256', 'sha384', 'sha512',
+ 'sha3_224', 'sha3_256', 'sha3_384', 'sha3_512')
algorithms_guaranteed = set(__always_supported)
algorithms_available = set(__always_supported)
@@ -85,6 +86,18 @@ def __get_builtin_constructor(name):
return _sha512.sha512
elif bs == '384':
return _sha512.sha384
+ elif name in {'sha3_224', 'sha3_256', 'sha3_384', 'sha3_512',
+ 'SHA3_224', 'SHA3_256', 'SHA3_384', 'SHA3_512'}:
+ import _sha3
+ bs = name[5:]
+ if bs == '224':
+ return _sha3.sha3_224
+ elif bs == '256':
+ return _sha3.sha3_256
+ elif bs == '384':
+ return _sha3.sha3_384
+ elif bs == '512':
+ return _sha3.sha3_512
except ImportError:
pass # no extension module, this hash is unsupported.
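
Assuming an interpreter whose hashlib actually exposes the SHA-3 constructors (as this change arranges), usage mirrors the existing algorithms.

    import hashlib

    data = b'The quick brown fox jumps over the lazy dog'

    # The named constructor and the generic new() interface are equivalent.
    digest_a = hashlib.sha3_256(data).hexdigest()
    digest_b = hashlib.new('sha3_256', data).hexdigest()
    print(digest_a == digest_b)                   # True
    print(sorted(hashlib.algorithms_guaranteed))  # now includes the sha3_* names
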
diff --git a/Lib/http/client.py b/Lib/http/client.py
index 4663d43..08311d7 100644
--- a/Lib/http/client.py
+++ b/Lib/http/client.py
@@ -267,8 +267,6 @@ def parse_headers(fp, _class=HTTPMessage):
return email.parser.Parser(_class=_class).parsestr(hstring)
-_strict_sentinel = object()
-
class HTTPResponse(io.RawIOBase):
# See RFC 2616 sec 19.6 and RFC 1945 sec 6 for details.
@@ -278,7 +276,7 @@ class HTTPResponse(io.RawIOBase):
# text following RFC 2047. The basic status line parsing only
# accepts iso-8859-1.
- def __init__(self, sock, debuglevel=0, strict=_strict_sentinel, method=None, url=None):
+ def __init__(self, sock, debuglevel=0, method=None, url=None):
# If the response includes a content-length header, we need to
# make sure that the client doesn't read more than the
# specified number of bytes. If it does, it will block until
@@ -288,10 +286,6 @@ class HTTPResponse(io.RawIOBase):
# clients unless they know what they are doing.
self.fp = sock.makefile("rb")
self.debuglevel = debuglevel
- if strict is not _strict_sentinel:
- warnings.warn("the 'strict' argument isn't supported anymore; "
- "http.client now always assumes HTTP/1.x compliant servers.",
- DeprecationWarning, 2)
self._method = method
# The HTTPResponse object is returned via urllib. The clients
@@ -728,13 +722,17 @@ class HTTPConnection:
default_port = HTTP_PORT
auto_open = 1
debuglevel = 0
-
- def __init__(self, host, port=None, strict=_strict_sentinel,
- timeout=socket._GLOBAL_DEFAULT_TIMEOUT, source_address=None):
- if strict is not _strict_sentinel:
- warnings.warn("the 'strict' argument isn't supported anymore; "
- "http.client now always assumes HTTP/1.x compliant servers.",
- DeprecationWarning, 2)
+ # TCP Maximum Segment Size (MSS) is determined by the TCP stack on
+ # a per-connection basis. There is no simple and efficient
+ # platform independent mechanism for determining the MSS, so
+ # instead a reasonable estimate is chosen. The getsockopt()
+ # interface using the TCP_MAXSEG parameter may be a suitable
+ # approach on some operating systems. A value of 16KiB is chosen
+ # as a reasonable estimate of the maximum MSS.
+ mss = 16384
+
+ def __init__(self, host, port=None, timeout=socket._GLOBAL_DEFAULT_TIMEOUT,
+ source_address=None):
self.timeout = timeout
self.source_address = source_address
self.sock = None
@@ -800,8 +798,8 @@ class HTTPConnection:
if code != 200:
self.close()
- raise socket.error("Tunnel connection failed: %d %s" % (code,
- message.strip()))
+ raise OSError("Tunnel connection failed: %d %s" % (code,
+ message.strip()))
while True:
line = response.fp.readline(_MAXLINE + 1)
if len(line) > _MAXLINE:
@@ -895,8 +893,11 @@ class HTTPConnection:
del self._buffer[:]
# If msg and message_body are sent in a single send() call,
# it will avoid performance problems caused by the interaction
- # between delayed ack and the Nagle algorithm.
- if isinstance(message_body, bytes):
+ # between delayed ack and the Nagle algorithm. However,
+ # there is no performance gain if the message is larger
+ # than MSS (and there is a memory penalty for the message
+ # copy).
+ if isinstance(message_body, bytes) and len(message_body) < self.mss:
msg += message_body
message_body = None
self.send(msg)
@@ -1166,9 +1167,10 @@ else:
# XXX Should key_file and cert_file be deprecated in favour of context?
def __init__(self, host, port=None, key_file=None, cert_file=None,
- strict=_strict_sentinel, timeout=socket._GLOBAL_DEFAULT_TIMEOUT,
- source_address=None, *, context=None, check_hostname=None):
- super(HTTPSConnection, self).__init__(host, port, strict, timeout,
+ timeout=socket._GLOBAL_DEFAULT_TIMEOUT,
+ source_address=None, *, context=None,
+ check_hostname=None):
+ super(HTTPSConnection, self).__init__(host, port, timeout,
source_address)
self.key_file = key_file
self.cert_file = cert_file
diff --git a/Lib/http/cookiejar.py b/Lib/http/cookiejar.py
index 901e762..95f2f44 100644
--- a/Lib/http/cookiejar.py
+++ b/Lib/http/cookiejar.py
@@ -1193,8 +1193,7 @@ def deepvalues(mapping):
pass
else:
mapping = True
- for subobj in deepvalues(obj):
- yield subobj
+ yield from deepvalues(obj)
if not mapping:
yield obj
@@ -1731,8 +1730,8 @@ class CookieJar:
return "<%s[%s]>" % (self.__class__, ", ".join(r))
-# derives from IOError for backwards-compatibility with Python 2.4.0
-class LoadError(IOError): pass
+# derives from OSError for backwards-compatibility with Python 2.4.0
+class LoadError(OSError): pass
class FileCookieJar(CookieJar):
"""CookieJar that can be loaded from and saved to a file."""
@@ -1762,17 +1761,14 @@ class FileCookieJar(CookieJar):
if self.filename is not None: filename = self.filename
else: raise ValueError(MISSING_FILENAME_TEXT)
- f = open(filename)
- try:
+ with open(filename) as f:
self._really_load(f, filename, ignore_discard, ignore_expires)
- finally:
- f.close()
def revert(self, filename=None,
ignore_discard=False, ignore_expires=False):
"""Clear all cookies and reload cookies from a saved file.
- Raises LoadError (or IOError) if reversion is not successful; the
+ Raises LoadError (or OSError) if reversion is not successful; the
object's state will not be altered if this happens.
"""
@@ -1787,7 +1783,7 @@ class FileCookieJar(CookieJar):
self._cookies = {}
try:
self.load(filename, ignore_discard, ignore_expires)
- except (LoadError, IOError):
+ except OSError:
self._cookies = old_state
raise
@@ -1857,15 +1853,12 @@ class LWPCookieJar(FileCookieJar):
if self.filename is not None: filename = self.filename
else: raise ValueError(MISSING_FILENAME_TEXT)
- f = open(filename, "w")
- try:
+ with open(filename, "w") as f:
# There really isn't an LWP Cookies 2.0 format, but this indicates
# that there is extra information in here (domain_dot and
# port_spec) while still being compatible with libwww-perl, I hope.
f.write("#LWP-Cookies-2.0\n")
f.write(self.as_lwp_str(ignore_discard, ignore_expires))
- finally:
- f.close()
def _really_load(self, f, filename, ignore_discard, ignore_expires):
magic = f.readline()
@@ -1938,8 +1931,7 @@ class LWPCookieJar(FileCookieJar):
if not ignore_expires and c.is_expired(now):
continue
self.set_cookie(c)
-
- except IOError:
+ except OSError:
raise
except Exception:
_warn_unhandled_exception()
@@ -2045,7 +2037,7 @@ class MozillaCookieJar(FileCookieJar):
continue
self.set_cookie(c)
- except IOError:
+ except OSError:
raise
except Exception:
_warn_unhandled_exception()
@@ -2057,8 +2049,7 @@ class MozillaCookieJar(FileCookieJar):
if self.filename is not None: filename = self.filename
else: raise ValueError(MISSING_FILENAME_TEXT)
- f = open(filename, "w")
- try:
+ with open(filename, "w") as f:
f.write(self.header)
now = time.time()
for cookie in self:
@@ -2087,5 +2078,3 @@ class MozillaCookieJar(FileCookieJar):
"\t".join([cookie.domain, initial_dot, cookie.path,
secure, expires, name, value])+
"\n")
- finally:
- f.close()
diff --git a/Lib/http/server.py b/Lib/http/server.py
index c4ac703..e47e034 100644
--- a/Lib/http/server.py
+++ b/Lib/http/server.py
@@ -401,12 +401,17 @@ class BaseHTTPRequestHandler(socketserver.StreamRequestHandler):
while not self.close_connection:
self.handle_one_request()
- def send_error(self, code, message=None):
+ def send_error(self, code, message=None, explain=None):
"""Send and log an error reply.
- Arguments are the error code, and a detailed message.
- The detailed message defaults to the short entry matching the
- response code.
+ Arguments are
+ * code: an HTTP error code
+ 3 digits
+ * message: a simple optional 1 line reason phrase.
+ *( HTAB / SP / VCHAR / %x80-FF )
+ defaults to short entry matching the response code
+ * explain: a detailed message, defaults to the long entry
+ matching the response code.
This sends an error response (so it must be called before any
output has been generated), logs the error, and finally sends
@@ -420,17 +425,20 @@ class BaseHTTPRequestHandler(socketserver.StreamRequestHandler):
shortmsg, longmsg = '???', '???'
if message is None:
message = shortmsg
- explain = longmsg
+ if explain is None:
+ explain = longmsg
self.log_error("code %d, message %s", code, message)
# using _quote_html to prevent Cross Site Scripting attacks (see bug #1100201)
content = (self.error_message_format %
- {'code': code, 'message': _quote_html(message), 'explain': explain})
+ {'code': code, 'message': _quote_html(message), 'explain': _quote_html(explain)})
+ body = content.encode('UTF-8', 'replace')
self.send_response(code, message)
self.send_header("Content-Type", self.error_content_type)
self.send_header('Connection', 'close')
+ self.send_header('Content-Length', int(len(body)))
self.end_headers()
if self.command != 'HEAD' and code >= 200 and code not in (204, 304):
- self.wfile.write(content.encode('UTF-8', 'replace'))
+ self.wfile.write(body)
def send_response(self, code, message=None):
"""Add the response header to the headers buffer and log the
@@ -709,7 +717,7 @@ class SimpleHTTPRequestHandler(BaseHTTPRequestHandler):
ctype = self.guess_type(path)
try:
f = open(path, 'rb')
- except IOError:
+ except OSError:
self.send_error(404, "File not found")
return None
self.send_response(200)
@@ -730,7 +738,7 @@ class SimpleHTTPRequestHandler(BaseHTTPRequestHandler):
"""
try:
list = os.listdir(path)
- except os.error:
+ except OSError:
self.send_error(404, "No permission to list directory")
return None
list.sort(key=lambda a: a.lower())
@@ -1121,7 +1129,7 @@ class CGIHTTPRequestHandler(SimpleHTTPRequestHandler):
try:
try:
os.setuid(nobody)
- except os.error:
+ except OSError:
pass
os.dup2(self.rfile.fileno(), 0)
os.dup2(self.wfile.fileno(), 1)
diff --git a/Lib/idlelib/EditorWindow.py b/Lib/idlelib/EditorWindow.py
index 3397415..565cf36 100644
--- a/Lib/idlelib/EditorWindow.py
+++ b/Lib/idlelib/EditorWindow.py
@@ -542,7 +542,7 @@ class EditorWindow(object):
if sys.platform[:3] == 'win':
try:
os.startfile(self.help_url)
- except WindowsError as why:
+ except OSError as why:
tkMessageBox.showerror(title='Document Start Failure',
message=str(why), parent=self.text)
else:
@@ -852,7 +852,7 @@ class EditorWindow(object):
if sys.platform[:3] == 'win':
try:
os.startfile(helpfile)
- except WindowsError as why:
+ except OSError as why:
tkMessageBox.showerror(title='Document Start Failure',
message=str(why), parent=self.text)
else:
@@ -886,7 +886,7 @@ class EditorWindow(object):
with open(self.recent_files_path, 'w',
encoding='utf_8', errors='replace') as rf_file:
rf_file.writelines(rf_list)
- except IOError as err:
+ except OSError as err:
if not getattr(self.root, "recentfilelist_error_displayed", False):
self.root.recentfilelist_error_displayed = True
tkMessageBox.showerror(title='IDLE Error',
diff --git a/Lib/idlelib/FileList.py b/Lib/idlelib/FileList.py
index 37a337e..a9989a8 100644
--- a/Lib/idlelib/FileList.py
+++ b/Lib/idlelib/FileList.py
@@ -103,7 +103,7 @@ class FileList:
if not os.path.isabs(filename):
try:
pwd = os.getcwd()
- except os.error:
+ except OSError:
pass
else:
filename = os.path.join(pwd, filename)
diff --git a/Lib/idlelib/GrepDialog.py b/Lib/idlelib/GrepDialog.py
index 27fcc33..1d8d51b 100644
--- a/Lib/idlelib/GrepDialog.py
+++ b/Lib/idlelib/GrepDialog.py
@@ -82,7 +82,7 @@ class GrepDialog(SearchDialogBase):
for fn in list:
try:
f = open(fn, errors='replace')
- except IOError as msg:
+ except OSError as msg:
print(msg)
continue
lineno = 0
@@ -110,7 +110,7 @@ class GrepDialog(SearchDialogBase):
def findfiles(self, dir, base, rec):
try:
names = os.listdir(dir or os.curdir)
- except os.error as msg:
+ except OSError as msg:
print(msg)
return []
list = []
diff --git a/Lib/idlelib/IOBinding.py b/Lib/idlelib/IOBinding.py
index 9fe0701..dda3634 100644
--- a/Lib/idlelib/IOBinding.py
+++ b/Lib/idlelib/IOBinding.py
@@ -213,7 +213,7 @@ class IOBinding:
f.seek(0)
bytes = f.read()
f.close()
- except IOError as msg:
+ except OSError as msg:
tkMessageBox.showerror("I/O Error", str(msg), master=self.text)
return False
chars, converted = self._decode(two_lines, bytes)
@@ -378,7 +378,7 @@ class IOBinding:
f.flush()
f.close()
return True
- except IOError as msg:
+ except OSError as msg:
tkMessageBox.showerror("I/O Error", str(msg),
master=self.text)
return False
@@ -504,7 +504,7 @@ class IOBinding:
else:
try:
pwd = os.getcwd()
- except os.error:
+ except OSError:
pwd = ""
return pwd, ""
diff --git a/Lib/idlelib/NEWS.txt b/Lib/idlelib/NEWS.txt
index f6e8917d..21752d7 100644
--- a/Lib/idlelib/NEWS.txt
+++ b/Lib/idlelib/NEWS.txt
@@ -1,6 +1,8 @@
-What's New in IDLE 3.3.1?
+What's New in IDLE 3.4.0?
=========================
+- Issue #5066: Update IDLE docs. Patch by Todd Rovito.
+
- Issue #16226: Fix IDLE Path Browser crash.
(Patch by Roger Serwy)
diff --git a/Lib/idlelib/OutputWindow.py b/Lib/idlelib/OutputWindow.py
index 745ccd2..9dacc49 100644
--- a/Lib/idlelib/OutputWindow.py
+++ b/Lib/idlelib/OutputWindow.py
@@ -106,7 +106,7 @@ class OutputWindow(EditorWindow):
f = open(filename, "r")
f.close()
break
- except IOError:
+ except OSError:
continue
else:
return None
diff --git a/Lib/idlelib/PathBrowser.py b/Lib/idlelib/PathBrowser.py
index 55bf1aa..ab05c67 100644
--- a/Lib/idlelib/PathBrowser.py
+++ b/Lib/idlelib/PathBrowser.py
@@ -45,7 +45,7 @@ class DirBrowserTreeItem(TreeItem):
def GetSubList(self):
try:
names = os.listdir(self.dir or os.curdir)
- except os.error:
+ except OSError:
return []
packages = []
for name in names:
diff --git a/Lib/idlelib/PyShell.py b/Lib/idlelib/PyShell.py
index 38ed3af..9ccb31c 100644
--- a/Lib/idlelib/PyShell.py
+++ b/Lib/idlelib/PyShell.py
@@ -58,7 +58,7 @@ else:
try:
file.write(warnings.formatwarning(message, category, filename,
lineno, line=line))
- except IOError:
+ except OSError:
pass ## file (probably __stderr__) is invalid, warning dropped.
warnings.showwarning = idle_showwarning
def idle_formatwarning(message, category, filename, lineno, line=None):
@@ -211,7 +211,7 @@ class PyShellEditorWindow(EditorWindow):
try:
with open(self.breakpointPath, "r") as fp:
lines = fp.readlines()
- except IOError:
+ except OSError:
lines = []
try:
with open(self.breakpointPath, "w") as new_file:
@@ -222,7 +222,7 @@ class PyShellEditorWindow(EditorWindow):
breaks = self.breakpoints
if breaks:
new_file.write(filename + '=' + str(breaks) + '\n')
- except IOError as err:
+ except OSError as err:
if not getattr(self.root, "breakpoint_error_displayed", False):
self.root.breakpoint_error_displayed = True
tkMessageBox.showerror(title='IDLE Error',
@@ -393,7 +393,7 @@ class ModifiedInterpreter(InteractiveInterpreter):
try:
self.rpcclt = MyRPCClient(addr)
break
- except socket.error as err:
+ except OSError as err:
pass
else:
self.display_port_binding_error()
@@ -526,7 +526,7 @@ class ModifiedInterpreter(InteractiveInterpreter):
return
try:
response = clt.pollresponse(self.active_seq, wait=0.05)
- except (EOFError, IOError, KeyboardInterrupt):
+ except (EOFError, OSError, KeyboardInterrupt):
# lost connection or subprocess terminated itself, restart
# [the KBI is from rpc.SocketIO.handle_EOF()]
if self.tkconsole.closing:
@@ -1012,7 +1012,10 @@ class PyShell(OutputWindow):
self.close()
return False
else:
- nosub = "==== No Subprocess ===="
+ nosub = ("==== No Subprocess ====\n\n" +
+ "WARNING: Running IDLE without a Subprocess is deprecated\n" +
+ "and will be removed in a later version. See Help/IDLE Help\n" +
+ "for details.\n\n")
sys.displayhook = rpc.displayhook
self.write("Python %s on %s\n%s\n%s" %
@@ -1358,7 +1361,8 @@ USAGE: idle [-deins] [-t title] [file]*
idle [-dns] [-t title] - [arg]*
-h print this help message and exit
- -n run IDLE without a subprocess (see Help/IDLE Help for details)
+ -n run IDLE without a subprocess (DEPRECATED,
+ see Help/IDLE Help for details)
The following options will override the IDLE 'settings' configuration:
@@ -1436,6 +1440,8 @@ def main():
if o == '-i':
enable_shell = True
if o == '-n':
+ print(" Warning: running IDLE without a subprocess is deprecated.",
+ file=sys.stderr)
use_subprocess = False
if o == '-r':
script = a
diff --git a/Lib/idlelib/TreeWidget.py b/Lib/idlelib/TreeWidget.py
index d4e524b..833896c 100644
--- a/Lib/idlelib/TreeWidget.py
+++ b/Lib/idlelib/TreeWidget.py
@@ -382,7 +382,7 @@ class FileTreeItem(TreeItem):
try:
os.rename(self.path, newpath)
self.path = newpath
- except os.error:
+ except OSError:
pass
def GetIconName(self):
@@ -395,7 +395,7 @@ class FileTreeItem(TreeItem):
def GetSubList(self):
try:
names = os.listdir(self.path)
- except os.error:
+ except OSError:
return []
names.sort(key = os.path.normcase)
sublist = []
diff --git a/Lib/idlelib/configHandler.py b/Lib/idlelib/configHandler.py
index 5bf2668..ea2010e 100644
--- a/Lib/idlelib/configHandler.py
+++ b/Lib/idlelib/configHandler.py
@@ -142,7 +142,7 @@ class IdleUserConfParser(IdleConfParser):
fname = self.file
try:
cfgFile = open(fname, 'w')
- except IOError:
+ except OSError:
os.unlink(fname)
cfgFile = open(fname, 'w')
with cfgFile:
@@ -207,7 +207,7 @@ class IdleConf:
userDir+',\n but the path does not exist.\n')
try:
sys.stderr.write(warn)
- except IOError:
+ except OSError:
pass
userDir = '~'
if userDir == "~": # still no path to home!
@@ -217,7 +217,7 @@ class IdleConf:
if not os.path.exists(userDir):
try:
os.mkdir(userDir)
- except (OSError, IOError):
+ except OSError:
warn = ('\n Warning: unable to create user config directory\n'+
userDir+'\n Check path and permissions.\n Exiting!\n\n')
sys.stderr.write(warn)
@@ -251,7 +251,7 @@ class IdleConf:
raw=raw)))
try:
sys.stderr.write(warning)
- except IOError:
+ except OSError:
pass
try:
if self.defaultCfg[configType].has_option(section,option):
@@ -268,7 +268,7 @@ class IdleConf:
(option, section, default))
try:
sys.stderr.write(warning)
- except IOError:
+ except OSError:
pass
return default
@@ -380,7 +380,7 @@ class IdleConf:
(element, themeName, theme[element]))
try:
sys.stderr.write(warning)
- except IOError:
+ except OSError:
pass
colour=cfgParser.Get(themeName,element,default=theme[element])
theme[element]=colour
@@ -637,7 +637,7 @@ class IdleConf:
(event, keySetName, keyBindings[event]))
try:
sys.stderr.write(warning)
- except IOError:
+ except OSError:
pass
return keyBindings
diff --git a/Lib/idlelib/help.txt b/Lib/idlelib/help.txt
index 815ee40..c4e1231 100644
--- a/Lib/idlelib/help.txt
+++ b/Lib/idlelib/help.txt
@@ -1,142 +1,185 @@
[See the end of this file for ** TIPS ** on using IDLE !!]
-Click on the dotted line at the top of a menu to "tear it off": a
-separate window containing the menu is created.
-
-File Menu:
-
- New Window -- Create a new editing window
- Open... -- Open an existing file
- Recent Files... -- Open a list of recent files
- Open Module... -- Open an existing module (searches sys.path)
- Class Browser -- Show classes and methods in current file
- Path Browser -- Show sys.path directories, modules, classes
+IDLE is the Python IDE built with the tkinter GUI toolkit.
+
+IDLE has the following features:
+-coded in 100% pure Python, using the tkinter GUI toolkit
+-cross-platform: works on Windows, Unix, and OS X
+-multi-window text editor with multiple undo, Python colorizing, smart indent,
+call tips, and many other features
+-Python shell window (a.k.a. interactive interpreter)
+-debugger (not complete, but you can set breakpoints, view and step)
+
+Menus:
+
+IDLE has two window types, the Shell window and the Editor window. It is
+possible to have multiple editor windows simultaneously. IDLE's
+menus dynamically change based on which window is currently selected. Each menu
+documented below indicates which window type it is associated with. Click on
+the dotted line at the top of a menu to "tear it off": a separate window
+containing the menu is created (for Unix and Windows only).
+
+File Menu (Shell and Editor):
+
+ New Window -- Create a new editing window
+ Open... -- Open an existing file
+ Open Module... -- Open an existing module (searches sys.path)
+ Recent Files... -- Open a list of recent files
+ Class Browser -- Show classes and methods in current file
+ Path Browser -- Show sys.path directories, modules, classes,
and methods
- ---
- Save -- Save current window to the associated file (unsaved
- windows have a * before and after the window title)
-
- Save As... -- Save current window to new file, which becomes
- the associated file
- Save Copy As... -- Save current window to different file
- without changing the associated file
- ---
- Print Window -- Print the current window
- ---
- Close -- Close current window (asks to save if unsaved)
- Exit -- Close all windows, quit (asks to save if unsaved)
-
-Edit Menu:
-
- Undo -- Undo last change to current window
- (A maximum of 1000 changes may be undone)
- Redo -- Redo last undone change to current window
- ---
- Cut -- Copy a selection into system-wide clipboard,
+ ---
+ Save -- Save current window to the associated file (unsaved
+ windows have a * before and after the window title)
+
+ Save As... -- Save current window to new file, which becomes
+ the associated file
+ Save Copy As... -- Save current window to different file
+ without changing the associated file
+ ---
+ Print Window -- Print the current window
+ ---
+ Close -- Close current window (asks to save if unsaved)
+ Exit -- Close all windows, quit (asks to save if unsaved)
+
+Edit Menu (Shell and Editor):
+
+ Undo -- Undo last change to current window
+ (a maximum of 1000 changes may be undone)
+ Redo -- Redo last undone change to current window
+ ---
+ Cut -- Copy a selection into system-wide clipboard,
then delete the selection
- Copy -- Copy selection into system-wide clipboard
- Paste -- Insert system-wide clipboard into window
- Select All -- Select the entire contents of the edit buffer
- ---
- Find... -- Open a search dialog box with many options
- Find Again -- Repeat last search
- Find Selection -- Search for the string in the selection
- Find in Files... -- Open a search dialog box for searching files
- Replace... -- Open a search-and-replace dialog box
- Go to Line -- Ask for a line number and show that line
- Show Calltip -- Open a small window with function param hints
- Show Completions -- Open a scroll window allowing selection keywords
- and attributes. (see '*TIPS*', below)
- Show Parens -- Highlight the surrounding parenthesis
- Expand Word -- Expand the word you have typed to match another
- word in the same buffer; repeat to get a
+ Copy -- Copy selection into system-wide clipboard
+ Paste -- Insert system-wide clipboard into window
+ Select All -- Select the entire contents of the edit buffer
+ ---
+ Find... -- Open a search dialog box with many options
+ Find Again -- Repeat last search
+ Find Selection -- Search for the string in the selection
+ Find in Files... -- Open a search dialog box for searching files
+ Replace... -- Open a search-and-replace dialog box
+ Go to Line -- Ask for a line number and show that line
+ Expand Word -- Expand the word you have typed to match another
+ word in the same buffer; repeat to get a
different expansion
-
-Format Menu (only in Edit window):
-
- Indent Region -- Shift selected lines right 4 spaces
- Dedent Region -- Shift selected lines left 4 spaces
- Comment Out Region -- Insert ## in front of selected lines
- Uncomment Region -- Remove leading # or ## from selected lines
- Tabify Region -- Turns *leading* stretches of spaces into tabs
- (Note: We recommend using 4 space blocks to indent Python code.)
- Untabify Region -- Turn *all* tabs into the right number of spaces
- New Indent Width... -- Open dialog to change indent width
- Format Paragraph -- Reformat the current blank-line-separated
- paragraph
-
-Run Menu (only in Edit window):
-
- Python Shell -- Open or wake up the Python shell window
- ---
- Check Module -- Run a syntax check on the module
- Run Module -- Execute the current file in the __main__ namespace
-
-Shell Menu (only in Shell window):
-
- View Last Restart -- Scroll the shell window to the last restart
- Restart Shell -- Restart the interpreter with a fresh environment
-
-Debug Menu (only in Shell window):
-
- Go to File/Line -- look around the insert point for a filename
- and line number, open the file, and show the line
- Debugger (toggle) -- Run commands in the shell under the debugger
- Stack Viewer -- Show the stack traceback of the last exception
- Auto-open Stack Viewer (toggle) -- Open stack viewer on traceback
-
-Options Menu:
-
- Configure IDLE -- Open a configuration dialog. Fonts, indentation,
+ Show Calltip -- After an unclosed parenthesis for a function, open
+ a small window with function parameter hints
+ Show Parens -- Highlight the surrounding parentheses
+ Show Completions -- Open a scroll window allowing selection of keywords
+ and attributes. (see '*TIPS*', below)
+
+Format Menu (Editor window only):
+
+ Indent Region -- Shift selected lines right by the indent width
+ (default 4 spaces)
+ Dedent Region -- Shift selected lines left by the indent width
+ (default 4 spaces)
+ Comment Out Region -- Insert ## in front of selected lines
+ Uncomment Region -- Remove leading # or ## from selected lines
+ Tabify Region -- Turns *leading* stretches of spaces into tabs.
+ (Note: We recommend using 4 space blocks to indent Python code.)
+ Untabify Region -- Turn *all* tabs into the correct number of spaces
+ Toggle tabs -- Open a dialog to switch between indenting with
+ spaces and tabs.
+ New Indent Width... -- Open a dialog to change indent width. The
+ accepted default by the Python community is 4
+ spaces.
+ Format Paragraph -- Reformat the current blank-line-separated
+ paragraph. All lines in the paragraph will be
+ formatted to less than 80 columns.
+ ---
+ Strip trailing whitespace -- Remove any space characters after the end
+ of the last non-space character
+
+Run Menu (Editor window only):
+
+ Python Shell -- Open or wake up the Python shell window
+ ---
+ Check Module -- Check the syntax of the module currently open in the
+ Editor window. If the module has not been saved, IDLE
+ will prompt the user to save the code.
+ Run Module -- Restart the shell to clean the environment, then
+ execute the currently open module. If the module has
+ not been saved, IDLE will prompt the user to save the
+ code.
+
+Shell Menu (Shell window only):
+
+ View Last Restart -- Scroll the shell window to the last Shell restart
+ Restart Shell -- Restart the shell to clean the environment
+
+Debug Menu (Shell window only):
+
+ Go to File/Line -- Look around the insert point for a filename
+ and line number, open the file, and show the line.
+ Useful to view the source lines referenced in an
+ exception traceback. Available in the context
+ menu of the Shell window.
+ Debugger (toggle) -- This feature is not complete and considered
+ experimental. Run commands in the shell under the
+ debugger.
+ Stack Viewer -- Show the stack traceback of the last exception
+ Auto-open Stack Viewer (toggle) -- Toggle automatically opening the
+ stack viewer on unhandled
+ exception
+
+Options Menu (Shell and Editor):
+
+ Configure IDLE -- Open a configuration dialog. Fonts, indentation,
keybindings, and color themes may be altered.
- Startup Preferences may be set, and Additional Help
- Sources can be specified.
-
- On OS X this menu is not present, use
- menu 'IDLE -> Preferences...' instead.
- ---
- Code Context -- Open a pane at the top of the edit window which
- shows the block context of the section of code
- which is scrolling off the top or the window.
- (Not present in Shell window.)
-
-Windows Menu:
-
- Zoom Height -- toggles the window between configured size
- and maximum height.
- ---
- The rest of this menu lists the names of all open windows;
- select one to bring it to the foreground (deiconifying it if
- necessary).
+ Startup Preferences may be set, and additional Help
+ sources can be specified.
+
+ ---
+ Code Context (toggle) -- Open a pane at the top of the edit window
+ which shows the block context of the section
+ of code which is scrolling off the top or the
+ window. This is present in the Editor
+ window only, not in the Shell window.
+
+Windows Menu (Shell and Editor):
+
+ Zoom Height -- Toggles the window between normal size (40x80 initial
+ setting) and maximum height. The initial size is set in the Configure
+ IDLE dialog under the General tab.
+ ---
+ The rest of this menu lists the names of all open windows;
+ select one to bring it to the foreground (deiconifying it if
+ necessary).
Help Menu:
- About IDLE -- Version, copyright, license, credits
- IDLE Readme -- Background discussion and change details
- ---
- IDLE Help -- Display this file
- Python Docs -- Access local Python documentation, if
- installed. Otherwise, access www.python.org.
- ---
- (Additional Help Sources may be added here)
-
-Edit context menu (Right-click / Control-click on OS X in Edit window):
-
- Cut -- Copy a selection into system-wide clipboard,
+ About IDLE -- Version, copyright, license, credits
+ ---
+ IDLE Help -- Display this file, which is a help file for IDLE
+ detailing the menu options, basic editing and navigation,
+ and other tips.
+ Python Docs -- Access local Python documentation, if
+ installed. Otherwise, start a web browser and open
+ docs.python.org to show the latest Python documentation.
+ ---
+ Additional help sources may be added here with the Configure IDLE
+ dialog under the General tab.
+
+Editor context menu (Right-click / Control-click on OS X in Edit window):
+
+ Cut -- Copy a selection into system-wide clipboard,
then delete the selection
- Copy -- Copy selection into system-wide clipboard
- Paste -- Insert system-wide clipboard into window
- Set Breakpoint -- Sets a breakpoint (when debugger open)
- Clear Breakpoint -- Clears the breakpoint on that line
+ Copy -- Copy selection into system-wide clipboard
+ Paste -- Insert system-wide clipboard into window
+ Set Breakpoint -- Sets a breakpoint. Breakpoints are only enabled
+ when the debugger is open.
+ Clear Breakpoint -- Clears the breakpoint on that line
Shell context menu (Right-click / Control-click on OS X in Shell window):
- Cut -- Copy a selection into system-wide clipboard,
+ Cut -- Copy a selection into system-wide clipboard,
then delete the selection
- Copy -- Copy selection into system-wide clipboard
- Paste -- Insert system-wide clipboard into window
- ---
- Go to file/line -- Same as in Debug menu
+ Copy -- Copy selection into system-wide clipboard
+ Paste -- Insert system-wide clipboard into window
+ ---
+ Go to file/line -- Same as in Debug menu
** TIPS **
@@ -144,160 +187,183 @@ Shell context menu (Right-click / Control-click on OS X in Shell window):
Additional Help Sources:
- Windows users can Google on zopeshelf.chm to access Zope help files in
- the Windows help format. The Additional Help Sources feature of the
- configuration GUI supports .chm, along with any other filetypes
- supported by your browser. Supply a Menu Item title, and enter the
- location in the Help File Path slot of the New Help Source dialog. Use
- http:// and/or www. to identify external URLs, or download the file and
- browse for its path on your machine using the Browse button.
+ Windows users can Google on zopeshelf.chm to access Zope help files in
+ the Windows help format. The Additional Help Sources feature of the
+ configuration GUI supports .chm, along with any other filetypes
+ supported by your browser. Supply a Menu Item title, and enter the
+ location in the Help File Path slot of the New Help Source dialog. Use
+ http:// and/or www. to identify external URLs, or download the file and
+ browse for its path on your machine using the Browse button.
- All users can access the extensive sources of help, including
- tutorials, available at www.python.org/doc. Selected URLs can be added
- or removed from the Help menu at any time using Configure IDLE.
+ All users can access the extensive sources of help, including
+ tutorials, available at docs.python.org. Selected URLs can be added
+ or removed from the Help menu at any time using Configure IDLE.
Basic editing and navigation:
- Backspace deletes char to the left; DEL deletes char to the right.
- Control-backspace deletes word left, Control-DEL deletes word right.
- Arrow keys and Page Up/Down move around.
- Control-left/right Arrow moves by words in a strange but useful way.
- Home/End go to begin/end of line.
- Control-Home/End go to begin/end of file.
- Some useful Emacs bindings are inherited from Tcl/Tk:
- Control-a beginning of line
- Control-e end of line
- Control-k kill line (but doesn't put it in clipboard)
- Control-l center window around the insertion point
- Standard Windows bindings may work on that platform.
- Keybindings are selected in the Settings Dialog, look there.
+ Backspace deletes char to the left; DEL deletes char to the right.
+ Control-backspace deletes word left, Control-DEL deletes word right.
+ Arrow keys and Page Up/Down move around.
+ Control-left/right Arrow moves by words in a strange but useful way.
+ Home/End go to begin/end of line.
+ Control-Home/End go to begin/end of file.
+ Some useful Emacs bindings are inherited from Tcl/Tk:
+ Control-a beginning of line
+ Control-e end of line
+ Control-k kill line (but doesn't put it in clipboard)
+ Control-l center window around the insertion point
+ Standard keybindings (like Control-c to copy and Control-v to
+ paste) may work. Keybindings are selected in the Configure IDLE
+ dialog.
Automatic indentation:
- After a block-opening statement, the next line is indented by 4 spaces
- (in the Python Shell window by one tab). After certain keywords
- (break, return etc.) the next line is dedented. In leading
- indentation, Backspace deletes up to 4 spaces if they are there. Tab
- inserts spaces (in the Python Shell window one tab), number depends on
- Indent Width. (N.B. Currently tabs are restricted to four spaces due
- to Tcl/Tk issues.)
+ After a block-opening statement, the next line is indented by 4 spaces
+ (in the Python Shell window by one tab). After certain keywords
+ (break, return etc.) the next line is dedented. In leading
+ indentation, Backspace deletes up to 4 spaces if they are there. Tab
+ inserts spaces (in the Python Shell window one tab), number depends on
+ Indent Width. Currently tabs are restricted to four spaces due
+ to Tcl/Tk limitations.
See also the indent/dedent region commands in the edit menu.
Completions:
- Completions are supplied for functions, classes, and attributes of
- classes, both built-in and user-defined. Completions are also provided
- for filenames.
-
- The AutoCompleteWindow (ACW) will open after a predefined delay
- (default is two seconds) after a '.' or (in a string) an os.sep is
- typed. If after one of those characters (plus zero or more other
- characters) you type a Tab the ACW will open immediately if a possible
- continuation is found.
-
- If there is only one possible completion for the characters entered, a
- Tab will supply that completion without opening the ACW.
-
- 'Show Completions' will force open a completions window. In an empty
- string, this will contain the files in the current directory. On a
- blank line, it will contain the built-in and user-defined functions and
- classes in the current name spaces, plus any modules imported. If some
- characters have been entered, the ACW will attempt to be more specific.
-
- If string of characters is typed, the ACW selection will jump to the
- entry most closely matching those characters. Entering a Tab will cause
- the longest non-ambiguous match to be entered in the Edit window or
- Shell. Two Tabs in a row will supply the current ACW selection, as
- will Return or a double click. Cursor keys, Page Up/Down, mouse
- selection, and the scrollwheel all operate on the ACW.
-
- 'Hidden' attributes can be accessed by typing the beginning of hidden
- name after a '.'. e.g. '_'. This allows access to modules with
- '__all__' set, or to class-private attributes.
-
- Completions and the 'Expand Word' facility can save a lot of typing!
-
- Completions are currently limited to those in the namespaces. Names in
- an Edit window which are not via __main__ or sys.modules will not be
- found. Run the module once with your imports to correct this
- situation. Note that IDLE itself places quite a few modules in
- sys.modules, so much can be found by default, e.g. the re module.
-
- If you don't like the ACW popping up unbidden, simply make the delay
- longer or disable the extension. OTOH, you could make the delay zero.
-
- You could also switch off the CallTips extension. (We will be adding
- a delay to the call tip window.)
+ Completions are supplied for functions, classes, and attributes of
+ classes, both built-in and user-defined. Completions are also provided
+ for filenames.
+
+ The AutoCompleteWindow (ACW) will open after a predefined delay
+ (default is two seconds) after a '.' or (in a string) an os.sep is
+ typed. If after one of those characters (plus zero or more other
+ characters) a tab is typed the ACW will open immediately if a possible
+ continuation is found.
+
+ If there is only one possible completion for the characters entered, a
+ tab will supply that completion without opening the ACW.
+
+ 'Show Completions' will force open a completions window; by default,
+ Control-space will open a completions window. In an empty
+ string, this will contain the files in the current directory. On a
+ blank line, it will contain the built-in and user-defined functions and
+ classes in the current name spaces, plus any modules imported. If some
+ characters have been entered, the ACW will attempt to be more specific.
+
+ If a string of characters is typed, the ACW selection will jump to the
+ entry most closely matching those characters. Entering a tab will cause
+ the longest non-ambiguous match to be entered in the Edit window or
+ Shell. Two tabs in a row will supply the current ACW selection, as
+ will return or a double click. Cursor keys, Page Up/Down, mouse
+ selection, and the scroll wheel all operate on the ACW.
+
+ "Hidden" attributes can be accessed by typing the beginning of hidden
+ name after a '.', e.g. '_'. This allows access to modules with
+ '__all__' set, or to class-private attributes.
+
+ Completions and the 'Expand Word' facility can save a lot of typing!
+
+ Completions are currently limited to those in the namespaces. Names in
+ an Editor window which are not reachable via __main__ or sys.modules will not be
+ found. Run the module once with your imports to correct this
+ situation. Note that IDLE itself places quite a few modules in
+ sys.modules, so much can be found by default, e.g. the re module.
+
+ If you don't like the ACW popping up unbidden, simply make the delay
+ longer or disable the extension. Another option is to set the delay
+ to zero. A further alternative for preventing ACW popups is to
+ disable the call tips extension.
Python Shell window:
- Control-c interrupts executing command.
- Control-d sends end-of-file; closes window if typed at >>> prompt
- (this is Control-z on Windows).
+ Control-c interrupts executing command.
+ Control-d sends end-of-file; closes window if typed at >>> prompt
+ (this is Control-z on Windows).
+ Alt-/ (expand word) is also useful to reduce typing.
Command history:
- Alt-p retrieves previous command matching what you have typed.
- Alt-n retrieves next.
- (These are Control-p, Control-n on OS X)
- Return while cursor is on a previous command retrieves that command.
- Expand word is also useful to reduce typing.
+ Alt-p retrieves previous command matching what you have typed. On OS X
+ use Control-p.
+ Alt-n retrieves next. On OS X use Control-n.
+ Return while cursor is on a previous command retrieves that command.
Syntax colors:
- The coloring is applied in a background "thread", so you may
- occasionally see uncolorized text. To change the color
- scheme, use the Configure IDLE / Highlighting dialog.
+ The coloring is applied in a background "thread", so you may
+ occasionally see uncolorized text. To change the color
+ scheme, use the Configure IDLE / Highlighting dialog.
Python default syntax colors:
- Keywords orange
- Builtins royal purple
- Strings green
- Comments red
- Definitions blue
+ Keywords orange
+ Builtins royal purple
+ Strings green
+ Comments red
+ Definitions blue
Shell default colors:
- Console output brown
- stdout blue
- stderr red
- stdin black
+ Console output brown
+ stdout blue
+ stderr red
+ stdin black
Other preferences:
- The font preferences, keybinding, and startup preferences can
- be changed using the Settings dialog.
+ The font preferences, highlighting, keys, and general preferences can
+ be changed via the Configure IDLE menu option. Be sure to note that
+ keys can be user defined; IDLE ships with four built-in key sets. In
+ addition, a user can create a custom key set in the Configure IDLE
+ dialog under the Keys tab.
Command line usage:
- Enter idle -h at the command prompt to get a usage message.
-
-Running without a subprocess:
-
- If IDLE is started with the -n command line switch it will run in a
- single process and will not create the subprocess which runs the RPC
- Python execution server. This can be useful if Python cannot create
- the subprocess or the RPC socket interface on your platform. However,
- in this mode user code is not isolated from IDLE itself. Also, the
- environment is not restarted when Run/Run Module (F5) is selected. If
- your code has been modified, you must reload() the affected modules and
- re-import any specific items (e.g. from foo import baz) if the changes
- are to take effect. For these reasons, it is preferable to run IDLE
- with the default subprocess if at all possible.
+ Enter idle -h at the command prompt to get a usage message.
+
+ idle.py [-c command] [-d] [-e] [-s] [-t title] [arg] ...
+
+ -c command run this command
+ -d enable debugger
+ -e edit mode; arguments are files to be edited
+ -s run $IDLESTARTUP or $PYTHONSTARTUP first
+ -t title set title of shell window
+
+ If there are arguments:
+ 1. If -e is used, arguments are files opened for editing and sys.argv
+ reflects the arguments passed to IDLE itself.
+ 2. Otherwise, if -c is used, all arguments are placed in
+ sys.argv[1:...], with sys.argv[0] set to -c.
+ 3. Otherwise, if neither -e nor -c is used, the first argument is a
+ script which is executed with the remaining arguments in
+ sys.argv[1:...] and sys.argv[0] set to the script name. If the
+ script name is -, no script is executed but an interactive Python
+ session is started; the arguments are still available in sys.argv.
+
+Running without a subprocess: (DEPRECATED in Python 3.5; see Issue 16123)
+
+ If IDLE is started with the -n command line switch it will run in a
+ single process and will not create the subprocess which runs the RPC
+ Python execution server. This can be useful if Python cannot create
+ the subprocess or the RPC socket interface on your platform. However,
+ in this mode user code is not isolated from IDLE itself. Also, the
+ environment is not restarted when Run/Run Module (F5) is selected. If
+ your code has been modified, you must reload() the affected modules and
+ re-import any specific items (e.g. from foo import baz) if the changes
+ are to take effect. For these reasons, it is preferable to run IDLE
+ with the default subprocess if at all possible.
Extensions:
- IDLE contains an extension facility. See the beginning of
- config-extensions.def in the idlelib directory for further information.
- The default extensions are currently:
-
- FormatParagraph
- AutoExpand
- ZoomHeight
- ScriptBinding
- CallTips
- ParenMatch
- AutoComplete
- CodeContext
+ IDLE contains an extension facility. See the beginning of
+ config-extensions.def in the idlelib directory for further information.
+ The default extensions are currently:
+
+ FormatParagraph
+ AutoExpand
+ ZoomHeight
+ ScriptBinding
+ CallTips
+ ParenMatch
+ AutoComplete
+ CodeContext
diff --git a/Lib/idlelib/idlever.py b/Lib/idlelib/idlever.py
index 8d8317d..efe96a4 100644
--- a/Lib/idlelib/idlever.py
+++ b/Lib/idlelib/idlever.py
@@ -1 +1 @@
-IDLE_VERSION = "3.3.0"
+IDLE_VERSION = "3.4.0a0"
diff --git a/Lib/idlelib/rpc.py b/Lib/idlelib/rpc.py
index a0eb1b3..c86c975 100644
--- a/Lib/idlelib/rpc.py
+++ b/Lib/idlelib/rpc.py
@@ -199,7 +199,7 @@ class SocketIO(object):
raise
except KeyboardInterrupt:
raise
- except socket.error:
+ except OSError:
raise
except Exception as ex:
return ("CALLEXC", ex)
@@ -339,8 +339,8 @@ class SocketIO(object):
r, w, x = select.select([], [self.sock], [])
n = self.sock.send(s[:BUFSIZE])
except (AttributeError, TypeError):
- raise IOError("socket no longer exists")
- except socket.error:
+ raise OSError("socket no longer exists")
+ except OSError:
raise
else:
s = s[n:]
@@ -357,7 +357,7 @@ class SocketIO(object):
return None
try:
s = self.sock.recv(BUFSIZE)
- except socket.error:
+ except OSError:
raise EOFError
if len(s) == 0:
raise EOFError
@@ -537,7 +537,7 @@ class RPCClient(SocketIO):
SocketIO.__init__(self, working_sock)
else:
print("** Invalid host: ", address, file=sys.__stderr__)
- raise socket.error
+ raise OSError
def get_remote_proxy(self, oid):
return RPCProxy(self, oid)
diff --git a/Lib/idlelib/run.py b/Lib/idlelib/run.py
index d105912..c66679c 100644
--- a/Lib/idlelib/run.py
+++ b/Lib/idlelib/run.py
@@ -137,8 +137,8 @@ def manage_socket(address):
try:
server = MyRPCServer(address, MyHandler)
break
- except socket.error as err:
- print("IDLE Subprocess: socket error: " + err.args[1] +
+ except OSError as err:
+ print("IDLE Subprocess: OSError: " + err.args[1] +
", retrying....", file=sys.__stderr__)
socket_error = err
else:
diff --git a/Lib/idlelib/textView.py b/Lib/idlelib/textView.py
index 1eaa464..ae1f195 100644
--- a/Lib/idlelib/textView.py
+++ b/Lib/idlelib/textView.py
@@ -66,7 +66,7 @@ def view_file(parent, title, filename, encoding=None, modal=True):
try:
with open(filename, 'r', encoding=encoding) as file:
contents = file.read()
- except IOError:
+ except OSError:
import tkinter.messagebox as tkMessageBox
tkMessageBox.showerror(title='File Load Error',
message='Unable to load file %r .' % filename,
diff --git a/Lib/imaplib.py b/Lib/imaplib.py
index 724f9d1..c959f12 100644
--- a/Lib/imaplib.py
+++ b/Lib/imaplib.py
@@ -176,7 +176,7 @@ class IMAP4:
except Exception:
try:
self.shutdown()
- except socket.error:
+ except OSError:
pass
raise
@@ -269,7 +269,7 @@ class IMAP4:
self.file.close()
try:
self.sock.shutdown(socket.SHUT_RDWR)
- except socket.error as e:
+ except OSError as e:
# The server might already have closed the connection
if e.errno != errno.ENOTCONN:
raise
@@ -903,7 +903,7 @@ class IMAP4:
try:
self.send(data + CRLF)
- except (socket.error, OSError) as val:
+ except OSError as val:
raise self.abort('socket error: %s' % val)
if literal is None:
@@ -928,7 +928,7 @@ class IMAP4:
try:
self.send(literal)
self.send(CRLF)
- except (socket.error, OSError) as val:
+ except OSError as val:
raise self.abort('socket error: %s' % val)
if not literator:
diff --git a/Lib/imghdr.py b/Lib/imghdr.py
index 6ee45da..0cba063 100644
--- a/Lib/imghdr.py
+++ b/Lib/imghdr.py
@@ -149,7 +149,7 @@ def testall(list, recursive, toplevel):
sys.stdout.flush()
try:
print(what(filename))
- except IOError:
+ except OSError:
print('*** not found ***')
if __name__ == '__main__':
diff --git a/Lib/importlib/__init__.py b/Lib/importlib/__init__.py
index 22c90f2..d07f02e 100644
--- a/Lib/importlib/__init__.py
+++ b/Lib/importlib/__init__.py
@@ -68,6 +68,8 @@ def find_loader(name, path=None):
return loader
except KeyError:
pass
+ except AttributeError:
+ raise ValueError('{}.__loader__ is not set'.format(name))
return _bootstrap._find_module(name, path)
diff --git a/Lib/importlib/_bootstrap.py b/Lib/importlib/_bootstrap.py
index 7e348a4..9a48c16 100644
--- a/Lib/importlib/_bootstrap.py
+++ b/Lib/importlib/_bootstrap.py
@@ -48,13 +48,7 @@ def _w_long(x):
XXX Temporary until marshal's long functions are exposed.
"""
- x = int(x)
- int_bytes = []
- int_bytes.append(x & 0xFF)
- int_bytes.append((x >> 8) & 0xFF)
- int_bytes.append((x >> 16) & 0xFF)
- int_bytes.append((x >> 24) & 0xFF)
- return bytearray(int_bytes)
+ return (int(x) & 0xFFFFFFFF).to_bytes(4, 'little')
# TODO: Expose from marshal
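The rewritten _w_long packs with int.to_bytes(), and the next hunk gives _r_long the mirror-image int.from_bytes() form. A standalone round-trip sketch of the same idea, independent of importlib:

    def w_long(x):
        # Pack an int into 4 little-endian bytes, truncating to 32 bits.
        return (int(x) & 0xFFFFFFFF).to_bytes(4, 'little')

    def r_long(int_bytes):
        # Unpack 4 little-endian bytes back into an int.
        return int.from_bytes(int_bytes, 'little')

    assert r_long(w_long(1234567890)) == 1234567890
    assert w_long(-1) == b'\xff\xff\xff\xff'  # negative values wrap to 32 bits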
@@ -64,35 +58,25 @@ def _r_long(int_bytes):
XXX Temporary until marshal's long function are exposed.
"""
- x = int_bytes[0]
- x |= int_bytes[1] << 8
- x |= int_bytes[2] << 16
- x |= int_bytes[3] << 24
- return x
+ return int.from_bytes(int_bytes, 'little')
def _path_join(*path_parts):
"""Replacement for os.path.join()."""
- new_parts = []
- for part in path_parts:
- if not part:
- continue
- new_parts.append(part)
- if part[-1] not in path_separators:
- new_parts.append(path_sep)
- return ''.join(new_parts[:-1]) # Drop superfluous path separator.
+ return path_sep.join([part.rstrip(path_separators)
+ for part in path_parts if part])
def _path_split(path):
"""Replacement for os.path.split()."""
+ if len(path_separators) == 1:
+ front, _, tail = path.rpartition(path_sep)
+ return front, tail
for x in reversed(path):
if x in path_separators:
- sep = x
- break
- else:
- sep = path_sep
- front, _, tail = path.rpartition(sep)
- return front, tail
+ front, tail = path.rsplit(x, maxsplit=1)
+ return front, tail
+ return '', path
def _path_is_mode_type(path, mode):
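The simplified helpers strip trailing separators and lean on rpartition() when there is a single separator character. A standalone sketch with POSIX values filled in for illustration (the bootstrap chooses path_sep and path_separators per platform):

    path_sep = '/'
    path_separators = '/'

    def path_join(*path_parts):
        # Strip trailing separators from each piece, drop empties, rejoin.
        return path_sep.join(part.rstrip(path_separators)
                             for part in path_parts if part)

    def path_split(path):
        # With a single separator character, rpartition() is enough.
        front, _, tail = path.rpartition(path_sep)
        return front, tail

    assert path_join('usr', 'lib/', 'python3') == 'usr/lib/python3'
    assert path_split('usr/lib/python3') == ('usr/lib', 'python3')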
@@ -237,7 +221,7 @@ class _ModuleLock:
self.wakeup.release()
def __repr__(self):
- return "_ModuleLock(%r) at %d" % (self.name, id(self))
+ return "_ModuleLock({!r}) at {}".format(self.name, id(self))
class _DummyModuleLock:
@@ -258,7 +242,7 @@ class _DummyModuleLock:
self.count -= 1
def __repr__(self):
- return "_DummyModuleLock(%r) at %d" % (self.name, id(self))
+ return "_DummyModuleLock({!r}) at {}".format(self.name, id(self))
# The following two functions are for consumption by Python/import.c.
@@ -367,6 +351,12 @@ Known values:
Python 2.5c2: 62131 (fix wrong code: for x, in ... in listcomp/genexp)
Python 2.6a0: 62151 (peephole optimizations and STORE_MAP opcode)
Python 2.6a1: 62161 (WITH_CLEANUP optimization)
+ Python 2.7a0: 62171 (optimize list comprehensions/change LIST_APPEND)
+ Python 2.7a0: 62181 (optimize conditional branches:
+ introduce POP_JUMP_IF_FALSE and POP_JUMP_IF_TRUE)
+ Python 2.7a0: 62191 (introduce SETUP_WITH)
+ Python 2.7a0: 62201 (introduce BUILD_SET)
+ Python 2.7a0: 62211 (introduce MAP_ADD and SET_ADD)
Python 3000: 3000
3010 (removed UNARY_CONVERT)
3020 (added BUILD_SET)
@@ -396,14 +386,16 @@ Known values:
3210 (added size modulo 2**32 to the pyc header)
Python 3.3a1 3220 (changed PEP 380 implementation)
Python 3.3a4 3230 (revert changes to implicit __class__ closure)
+ Python 3.4a1 3250 (evaluate positional default arguments before
+ keyword-only defaults)
MAGIC must change whenever the bytecode emitted by the compiler may no
longer be understood by older implementations of the eval loop (usually
due to the addition of new opcodes).
"""
-_RAW_MAGIC_NUMBER = 3230 | ord('\r') << 16 | ord('\n') << 24
-_MAGIC_BYTES = bytes(_RAW_MAGIC_NUMBER >> n & 0xff for n in range(0, 25, 8))
+_MAGIC_BYTES = (3250).to_bytes(2, 'little') + b'\r\n'
+_RAW_MAGIC_NUMBER = int.from_bytes(_MAGIC_BYTES, 'little')
_PYCACHE = '__pycache__'
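The magic bytes are now built directly from the new magic number. A short sketch of what the construction yields, purely to show the 4-byte layout at the start of a .pyc file:

    MAGIC_BYTES = (3250).to_bytes(2, 'little') + b'\r\n'
    RAW_MAGIC_NUMBER = int.from_bytes(MAGIC_BYTES, 'little')

    print(MAGIC_BYTES)            # b'\xb2\x0c\r\n'
    print(hex(RAW_MAGIC_NUMBER))  # 0xa0d0cb2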
@@ -508,7 +500,7 @@ def set_loader(fxn):
"""Set __loader__ on the returned module."""
def set_loader_wrapper(self, *args, **kwargs):
module = fxn(self, *args, **kwargs)
- if not hasattr(module, '__loader__'):
+ if getattr(module, '__loader__', None) is None:
module.__loader__ = self
return module
_wrap(set_loader_wrapper, fxn)
@@ -623,6 +615,80 @@ def _find_module_shim(self, fullname):
return loader
+def _validate_bytecode_header(data, source_stats=None, name=None, path=None):
+ """Validate the header of the passed-in bytecode against source_stats (if
+ given) and return the bytecode that can be compiled by compile().
+
+ All other arguments are used to enhance error reporting.
+
+ ImportError is raised when the magic number is incorrect or the bytecode is
+ found to be stale. EOFError is raised when the data is found to be
+ truncated.
+
+ """
+ exc_details = {}
+ if name is not None:
+ exc_details['name'] = name
+ else:
+ # To prevent having to make all messages have a conditional name.
+ name = '<bytecode>'
+ if path is not None:
+ exc_details['path'] = path
+ magic = data[:4]
+ raw_timestamp = data[4:8]
+ raw_size = data[8:12]
+ if magic != _MAGIC_BYTES:
+ msg = 'bad magic number in {!r}: {!r}'.format(name, magic)
+ raise ImportError(msg, **exc_details)
+ elif len(raw_timestamp) != 4:
+ message = 'incomplete timestamp in {!r}'.format(name)
+ _verbose_message(message)
+ raise EOFError(message)
+ elif len(raw_size) != 4:
+ message = 'incomplete size in {!r}'.format(name)
+ _verbose_message(message)
+ raise EOFError(message)
+ if source_stats is not None:
+ try:
+ source_mtime = int(source_stats['mtime'])
+ except KeyError:
+ pass
+ else:
+ if _r_long(raw_timestamp) != source_mtime:
+ message = 'bytecode is stale for {!r}'.format(name)
+ _verbose_message(message)
+ raise ImportError(message, **exc_details)
+ try:
+ source_size = source_stats['size'] & 0xFFFFFFFF
+ except KeyError:
+ pass
+ else:
+ if _r_long(raw_size) != source_size:
+ raise ImportError("bytecode is stale for {!r}".format(name),
+ **exc_details)
+ return data[12:]
+
+
+def _compile_bytecode(data, name=None, bytecode_path=None, source_path=None):
+ """Compile bytecode as returned by _validate_bytecode_header()."""
+ code = marshal.loads(data)
+ if isinstance(code, _code_type):
+ _verbose_message('code object from {!r}', bytecode_path)
+ if source_path is not None:
+ _imp._fix_co_filename(code, source_path)
+ return code
+ else:
+ raise ImportError("Non-code object in {!r}".format(bytecode_path),
+ name=name, path=bytecode_path)
+
+def _code_to_bytecode(code, mtime=0, source_size=0):
+ """Compile a code object into bytecode for writing out to a byte-compiled
+ file."""
+ data = bytearray(_MAGIC_BYTES)
+ data.extend(_w_long(mtime))
+ data.extend(_w_long(source_size))
+ data.extend(marshal.dumps(code))
+ return data
# Loaders #####################################################################
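The new helpers agree on a 12-byte header: 4 bytes of magic, 4 bytes of source mtime, 4 bytes of source size, then the marshalled code object. A standalone round-trip sketch of that layout, using the same field order as the patch but invented function names:

    import marshal

    MAGIC = (3250).to_bytes(2, 'little') + b'\r\n'

    def to_bytecode(code, mtime=0, source_size=0):
        # Mirror of _code_to_bytecode(): header fields, then marshalled code.
        data = bytearray(MAGIC)
        data.extend(mtime.to_bytes(4, 'little'))
        data.extend(source_size.to_bytes(4, 'little'))
        data.extend(marshal.dumps(code))
        return bytes(data)

    def from_bytecode(data):
        # Mirror of _validate_bytecode_header() plus _compile_bytecode().
        assert data[:4] == MAGIC, 'bad magic number'
        mtime = int.from_bytes(data[4:8], 'little')
        size = int.from_bytes(data[8:12], 'little')
        return mtime, size, marshal.loads(data[12:])

    code = compile('x = 1 + 1', '<demo>', 'exec')
    blob = to_bytecode(code, mtime=1357000000, source_size=10)
    print(from_bytecode(blob)[:2])  # (1357000000, 10)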
@@ -755,7 +821,7 @@ class WindowsRegistryFinder:
def _open_registry(cls, key):
try:
return _winreg.OpenKey(_winreg.HKEY_CURRENT_USER, key)
- except WindowsError:
+ except OSError:
return _winreg.OpenKey(_winreg.HKEY_LOCAL_MACHINE, key)
@classmethod
@@ -769,7 +835,7 @@ class WindowsRegistryFinder:
try:
with cls._open_registry(key) as hkey:
filepath = _winreg.QueryValue(hkey, "")
- except WindowsError:
+ except OSError:
return None
return filepath
@@ -801,51 +867,6 @@ class _LoaderBasics:
tail_name = fullname.rpartition('.')[2]
return filename_base == '__init__' and tail_name != '__init__'
- def _bytes_from_bytecode(self, fullname, data, bytecode_path, source_stats):
- """Return the marshalled bytes from bytecode, verifying the magic
- number, timestamp and source size along the way.
-
- If source_stats is None then skip the timestamp check.
-
- """
- magic = data[:4]
- raw_timestamp = data[4:8]
- raw_size = data[8:12]
- if magic != _MAGIC_BYTES:
- msg = 'bad magic number in {!r}: {!r}'.format(fullname, magic)
- raise ImportError(msg, name=fullname, path=bytecode_path)
- elif len(raw_timestamp) != 4:
- message = 'bad timestamp in {}'.format(fullname)
- _verbose_message(message)
- raise EOFError(message)
- elif len(raw_size) != 4:
- message = 'bad size in {}'.format(fullname)
- _verbose_message(message)
- raise EOFError(message)
- if source_stats is not None:
- try:
- source_mtime = int(source_stats['mtime'])
- except KeyError:
- pass
- else:
- if _r_long(raw_timestamp) != source_mtime:
- message = 'bytecode is stale for {}'.format(fullname)
- _verbose_message(message)
- raise ImportError(message, name=fullname,
- path=bytecode_path)
- try:
- source_size = source_stats['size'] & 0xFFFFFFFF
- except KeyError:
- pass
- else:
- if _r_long(raw_size) != source_size:
- raise ImportError(
- "bytecode is stale for {}".format(fullname),
- name=fullname, path=bytecode_path)
- # Can't return the code object as errors from marshal loading need to
- # propagate even when source is available.
- return data[12:]
-
@module_for_loader
def _load_module(self, module, *, sourceless=False):
"""Helper for load_module able to handle either source or sourceless
@@ -860,12 +881,9 @@ class _LoaderBasics:
module.__cached__ = module.__file__
else:
module.__cached__ = module.__file__
- module.__package__ = name
if self.is_package(name):
module.__path__ = [_path_split(module.__file__)[0]]
- else:
- module.__package__ = module.__package__.rpartition('.')[0]
- module.__loader__ = self
+ # __package__ and __loader__ set by @module_for_loader.
_call_with_frames_removed(exec, code_object, module.__dict__)
return module
@@ -915,7 +933,7 @@ class SourceLoader(_LoaderBasics):
path = self.get_filename(fullname)
try:
source_bytes = self.get_data(path)
- except IOError as exc:
+ except OSError as exc:
raise ImportError("source not available through get_data()",
name=fullname) from exc
readsource = _io.BytesIO(source_bytes).readline
@@ -931,6 +949,14 @@ class SourceLoader(_LoaderBasics):
raise ImportError("Failed to decode source file",
name=fullname) from exc
+ def source_to_code(self, data, path, *, _optimize=-1):
+ """Return the code object compiled from source.
+
+ The 'data' argument can be any object type that compile() supports.
+ """
+ return _call_with_frames_removed(compile, data, path, 'exec',
+ dont_inherit=True, optimize=_optimize)
+
def get_code(self, fullname):
"""Concrete implementation of InspectLoader.get_code.
@@ -953,39 +979,28 @@ class SourceLoader(_LoaderBasics):
source_mtime = int(st['mtime'])
try:
data = self.get_data(bytecode_path)
- except IOError:
+ except OSError:
pass
else:
try:
- bytes_data = self._bytes_from_bytecode(fullname, data,
- bytecode_path,
- st)
+ bytes_data = _validate_bytecode_header(data,
+ source_stats=st, name=fullname,
+ path=bytecode_path)
except (ImportError, EOFError):
pass
else:
_verbose_message('{} matches {}', bytecode_path,
source_path)
- found = marshal.loads(bytes_data)
- if isinstance(found, _code_type):
- _imp._fix_co_filename(found, source_path)
- _verbose_message('code object from {}',
- bytecode_path)
- return found
- else:
- msg = "Non-code object in {}"
- raise ImportError(msg.format(bytecode_path),
- name=fullname, path=bytecode_path)
+ return _compile_bytecode(bytes_data, name=fullname,
+ bytecode_path=bytecode_path,
+ source_path=source_path)
source_bytes = self.get_data(source_path)
- code_object = _call_with_frames_removed(compile,
- source_bytes, source_path, 'exec',
- dont_inherit=True)
+ code_object = self.source_to_code(source_bytes, source_path)
_verbose_message('code object from {}', source_path)
if (not sys.dont_write_bytecode and bytecode_path is not None and
- source_mtime is not None):
- data = bytearray(_MAGIC_BYTES)
- data.extend(_w_long(source_mtime))
- data.extend(_w_long(len(source_bytes)))
- data.extend(marshal.dumps(code_object))
+ source_mtime is not None):
+ data = _code_to_bytecode(code_object, source_mtime,
+ len(source_bytes))
try:
self._cache_bytecode(source_path, bytecode_path, data)
_verbose_message('wrote {!r}', bytecode_path)
@@ -1092,14 +1107,8 @@ class SourcelessFileLoader(FileLoader, _LoaderBasics):
def get_code(self, fullname):
path = self.get_filename(fullname)
data = self.get_data(path)
- bytes_data = self._bytes_from_bytecode(fullname, data, path, None)
- found = marshal.loads(bytes_data)
- if isinstance(found, _code_type):
- _verbose_message('code object from {!r}', path)
- return found
- else:
- raise ImportError("Non-code object in {}".format(path),
- name=fullname, path=path)
+ bytes_data = _validate_bytecode_header(data, name=fullname, path=path)
+ return _compile_bytecode(bytes_data, name=fullname, bytecode_path=path)
def get_source(self, fullname):
"""Return None as there is no source code."""
@@ -1419,7 +1428,7 @@ class FileFinder:
lower_suffix_contents.add(new_name)
self._path_cache = lower_suffix_contents
if sys.platform.startswith(_CASE_INSENSITIVE_PLATFORMS):
- self._relaxed_path_cache = set(fn.lower() for fn in contents)
+ self._relaxed_path_cache = {fn.lower() for fn in contents}
@classmethod
def path_hook(cls, *loader_details):
@@ -1440,7 +1449,7 @@ class FileFinder:
return path_hook_for_FileFinder
def __repr__(self):
- return "FileFinder(%r)" % (self.path,)
+ return "FileFinder({!r})".format(self.path)
# Import itself ###############################################################
@@ -1545,7 +1554,7 @@ def _find_and_load_unlocked(name, import_):
except AttributeError:
pass
# Set loader if need be.
- if not hasattr(module, '__loader__'):
+ if getattr(module, '__loader__', None) is None:
try:
module.__loader__ = loader
except AttributeError:
@@ -1720,7 +1729,7 @@ def _setup(sys_module, _imp_module):
builtin_module = sys.modules[builtin_name]
setattr(self_module, builtin_name, builtin_module)
- os_details = ('posix', ['/']), ('nt', ['\\', '/']), ('os2', ['\\', '/'])
+ os_details = ('posix', ['/']), ('nt', ['\\', '/'])
for builtin_os, path_separators in os_details:
# Assumption made in _path_join()
assert all(len(sep) == 1 for sep in path_separators)
@@ -1731,9 +1740,6 @@ def _setup(sys_module, _imp_module):
else:
try:
os_module = BuiltinImporter.load_module(builtin_os)
- # TODO: rip out os2 code after 3.3 is released as per PEP 11
- if builtin_os == 'os2' and 'EMX GCC' in sys.version:
- path_sep = path_separators[1]
break
except ImportError:
continue
@@ -1755,7 +1761,7 @@ def _setup(sys_module, _imp_module):
setattr(self_module, '_thread', thread_module)
setattr(self_module, '_weakref', weakref_module)
setattr(self_module, 'path_sep', path_sep)
- setattr(self_module, 'path_separators', set(path_separators))
+ setattr(self_module, 'path_separators', ''.join(path_separators))
# Constants
setattr(self_module, '_relax_case', _make_relax_case())
EXTENSION_SUFFIXES.extend(_imp.extension_suffixes())
diff --git a/Lib/importlib/abc.py b/Lib/importlib/abc.py
index 387567a..11e45f4 100644
--- a/Lib/importlib/abc.py
+++ b/Lib/importlib/abc.py
@@ -225,180 +225,3 @@ class SourceLoader(_bootstrap.SourceLoader, ResourceLoader, ExecutionLoader):
raise NotImplementedError
_register(SourceLoader, machinery.SourceFileLoader)
-
-class PyLoader(SourceLoader):
-
- """Implement the deprecated PyLoader ABC in terms of SourceLoader.
-
- This class has been deprecated! It is slated for removal in Python 3.4.
- If compatibility with Python 3.1 is not needed then implement the
- SourceLoader ABC instead of this class. If Python 3.1 compatibility is
- needed, then use the following idiom to have a single class that is
- compatible with Python 3.1 onwards::
-
- try:
- from importlib.abc import SourceLoader
- except ImportError:
- from importlib.abc import PyLoader as SourceLoader
-
-
- class CustomLoader(SourceLoader):
- def get_filename(self, fullname):
- # Implement ...
-
- def source_path(self, fullname):
- '''Implement source_path in terms of get_filename.'''
- try:
- return self.get_filename(fullname)
- except ImportError:
- return None
-
- def is_package(self, fullname):
- filename = os.path.basename(self.get_filename(fullname))
- return os.path.splitext(filename)[0] == '__init__'
-
- """
-
- @abc.abstractmethod
- def is_package(self, fullname):
- raise NotImplementedError
-
- @abc.abstractmethod
- def source_path(self, fullname):
- """Abstract method. Accepts a str module name and returns the path to
- the source code for the module."""
- raise NotImplementedError
-
- def get_filename(self, fullname):
- """Implement get_filename in terms of source_path.
-
- As get_filename should only return a source file path there is no
- chance of the path not existing but loading still being possible, so
- ImportError should propagate instead of being turned into returning
- None.
-
- """
- warnings.warn("importlib.abc.PyLoader is deprecated and is "
- "slated for removal in Python 3.4; "
- "use SourceLoader instead. "
- "See the importlib documentation on how to be "
- "compatible with Python 3.1 onwards.",
- DeprecationWarning)
- path = self.source_path(fullname)
- if path is None:
- raise ImportError(name=fullname)
- else:
- return path
-
-
-class PyPycLoader(PyLoader):
-
- """Abstract base class to assist in loading source and bytecode by
- requiring only back-end storage methods to be implemented.
-
- This class has been deprecated! Removal is slated for Python 3.4. Implement
- the SourceLoader ABC instead. If Python 3.1 compatibility is needed, see
- PyLoader.
-
- The methods get_code, get_source, and load_module are implemented for the
- user.
-
- """
-
- def get_filename(self, fullname):
- """Return the source or bytecode file path."""
- path = self.source_path(fullname)
- if path is not None:
- return path
- path = self.bytecode_path(fullname)
- if path is not None:
- return path
- raise ImportError("no source or bytecode path available for "
- "{0!r}".format(fullname), name=fullname)
-
- def get_code(self, fullname):
- """Get a code object from source or bytecode."""
- warnings.warn("importlib.abc.PyPycLoader is deprecated and slated for "
- "removal in Python 3.4; use SourceLoader instead. "
- "If Python 3.1 compatibility is required, see the "
- "latest documentation for PyLoader.",
- DeprecationWarning)
- source_timestamp = self.source_mtime(fullname)
- # Try to use bytecode if it is available.
- bytecode_path = self.bytecode_path(fullname)
- if bytecode_path:
- data = self.get_data(bytecode_path)
- try:
- magic = data[:4]
- if len(magic) < 4:
- raise ImportError(
- "bad magic number in {}".format(fullname),
- name=fullname, path=bytecode_path)
- raw_timestamp = data[4:8]
- if len(raw_timestamp) < 4:
- raise EOFError("bad timestamp in {}".format(fullname))
- pyc_timestamp = _bootstrap._r_long(raw_timestamp)
- raw_source_size = data[8:12]
- if len(raw_source_size) != 4:
- raise EOFError("bad file size in {}".format(fullname))
- # Source size is unused as the ABC does not provide a way to
- # get the size of the source ahead of reading it.
- bytecode = data[12:]
- # Verify that the magic number is valid.
- if imp.get_magic() != magic:
- raise ImportError(
- "bad magic number in {}".format(fullname),
- name=fullname, path=bytecode_path)
- # Verify that the bytecode is not stale (only matters when
- # there is source to fall back on.
- if source_timestamp:
- if pyc_timestamp < source_timestamp:
- raise ImportError("bytecode is stale", name=fullname,
- path=bytecode_path)
- except (ImportError, EOFError):
- # If source is available give it a shot.
- if source_timestamp is not None:
- pass
- else:
- raise
- else:
- # Bytecode seems fine, so try to use it.
- return marshal.loads(bytecode)
- elif source_timestamp is None:
- raise ImportError("no source or bytecode available to create code "
- "object for {0!r}".format(fullname),
- name=fullname)
- # Use the source.
- source_path = self.source_path(fullname)
- if source_path is None:
- message = "a source path must exist to load {0}".format(fullname)
- raise ImportError(message, name=fullname)
- source = self.get_data(source_path)
- code_object = compile(source, source_path, 'exec', dont_inherit=True)
- # Generate bytecode and write it out.
- if not sys.dont_write_bytecode:
- data = bytearray(imp.get_magic())
- data.extend(_bootstrap._w_long(source_timestamp))
- data.extend(_bootstrap._w_long(len(source) & 0xFFFFFFFF))
- data.extend(marshal.dumps(code_object))
- self.write_bytecode(fullname, data)
- return code_object
-
- @abc.abstractmethod
- def source_mtime(self, fullname):
- """Abstract method. Accepts a str filename and returns an int
- modification time for the source of the module."""
- raise NotImplementedError
-
- @abc.abstractmethod
- def bytecode_path(self, fullname):
- """Abstract method. Accepts a str filename and returns the str pathname
- to the bytecode for the module."""
- raise NotImplementedError
-
- @abc.abstractmethod
- def write_bytecode(self, fullname, bytecode):
- """Abstract method. Accepts a str filename and bytes object
- representing the bytecode for the module. Returns a boolean
- representing whether the bytecode was written or not."""
- raise NotImplementedError
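With the deprecated PyLoader and PyPycLoader ABCs removed, custom loaders build on importlib.abc.SourceLoader directly, which only requires get_filename() and get_data(). A minimal sketch assuming an in-memory source mapping (SOURCES and StringSourceLoader are illustrative names, not part of the patch):

    import importlib.abc

    SOURCES = {'demo': 'x = 42\n'}                # hypothetical backing store

    class StringSourceLoader(importlib.abc.SourceLoader):
        def get_filename(self, fullname):
            if fullname not in SOURCES:
                raise ImportError(name=fullname)
            return '<string:%s>' % fullname

        def get_data(self, path):
            # path is whatever get_filename() returned above
            name = path[len('<string:'):-1]
            return SOURCES[name].encode('utf-8')

    # e.g. StringSourceLoader().load_module('demo').x should come out as 42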
diff --git a/Lib/inspect.py b/Lib/inspect.py
index 7a7bb91..3661316 100644
--- a/Lib/inspect.py
+++ b/Lib/inspect.py
@@ -545,13 +545,13 @@ def findsource(object):
The argument may be a module, class, method, function, traceback, frame,
or code object. The source code is returned as a list of all the lines
- in the file and the line number indexes a line in that list. An IOError
+ in the file and the line number indexes a line in that list. An OSError
is raised if the source code cannot be retrieved."""
file = getfile(object)
sourcefile = getsourcefile(object)
if not sourcefile and file[0] + file[-1] != '<>':
- raise IOError('source code not available')
+ raise OSError('source code not available')
file = sourcefile if sourcefile else file
module = getmodule(object, file)
@@ -560,7 +560,7 @@ def findsource(object):
else:
lines = linecache.getlines(file)
if not lines:
- raise IOError('could not get source code')
+ raise OSError('could not get source code')
if ismodule(object):
return lines, 0
@@ -586,7 +586,7 @@ def findsource(object):
candidates.sort()
return lines, candidates[0][1]
else:
- raise IOError('could not find class definition')
+ raise OSError('could not find class definition')
if ismethod(object):
object = object.__func__
@@ -598,14 +598,14 @@ def findsource(object):
object = object.f_code
if iscode(object):
if not hasattr(object, 'co_firstlineno'):
- raise IOError('could not find function definition')
+ raise OSError('could not find function definition')
lnum = object.co_firstlineno - 1
pat = re.compile(r'^(\s*def\s)|(.*(?<!\w)lambda(:|\s))|^(\s*@)')
while lnum > 0:
if pat.match(lines[lnum]): break
lnum = lnum - 1
return lines, lnum
- raise IOError('could not find code object')
+ raise OSError('could not find code object')
def getcomments(object):
"""Get lines of comments immediately preceding an object's source code.
@@ -614,7 +614,7 @@ def getcomments(object):
"""
try:
lines, lnum = findsource(object)
- except (IOError, TypeError):
+ except (OSError, TypeError):
return None
if ismodule(object):
@@ -710,7 +710,7 @@ def getsourcelines(object):
The argument may be a module, class, method, function, traceback, frame,
or code object. The source code is returned as a list of the lines
corresponding to the object and the line number indicates where in the
- original source file the first line of code was found. An IOError is
+ original source file the first line of code was found. An OSError is
raised if the source code cannot be retrieved."""
lines, lnum = findsource(object)
@@ -722,7 +722,7 @@ def getsource(object):
The argument may be a module, class, method, function, traceback, frame,
or code object. The source code is returned as a single string. An
- IOError is raised if the source code cannot be retrieved."""
+ OSError is raised if the source code cannot be retrieved."""
lines, lnum = getsourcelines(object)
return ''.join(lines)
@@ -1122,7 +1122,7 @@ def getframeinfo(frame, context=1):
start = lineno - 1 - context//2
try:
lines, lnum = findsource(frame)
- except IOError:
+ except OSError:
lines = index = None
else:
start = max(start, 1)
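Because PEP 3151 made IOError an alias of OSError, callers can simply catch OSError around inspect.getsource() and friends; code that still catches IOError keeps working, since both names refer to the same class. For example:

    import inspect

    def source_or_none(obj):
        try:
            return inspect.getsource(obj)
        except (OSError, TypeError):              # OSError also covers the old IOError
            return None

    source_or_none(len)                           # None -- built-ins carry no Python source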
diff --git a/Lib/io.py b/Lib/io.py
index bda4def..39878b8 100644
--- a/Lib/io.py
+++ b/Lib/io.py
@@ -4,7 +4,7 @@ builtin open function is defined in this module.
At the top of the I/O hierarchy is the abstract base class IOBase. It
defines the basic interface to a stream. Note, however, that there is no
separation between reading and writing to streams; implementations are
-allowed to raise an IOError if they do not support a given operation.
+allowed to raise an OSError if they do not support a given operation.
Extending IOBase is RawIOBase which deals simply with the reading and
writing of raw bytes to a stream. FileIO subclasses RawIOBase to provide
diff --git a/Lib/json/__init__.py b/Lib/json/__init__.py
index 48a4f8f..3f95cc3 100644
--- a/Lib/json/__init__.py
+++ b/Lib/json/__init__.py
@@ -39,8 +39,7 @@ Compact encoding::
Pretty printing::
>>> import json
- >>> print(json.dumps({'4': 5, '6': 7}, sort_keys=True,
- ... indent=4, separators=(',', ': ')))
+ >>> print(json.dumps({'4': 5, '6': 7}, sort_keys=True, indent=4))
{
"4": 5,
"6": 7
@@ -146,13 +145,12 @@ def dump(obj, fp, skipkeys=False, ensure_ascii=True, check_circular=True,
If ``indent`` is a non-negative integer, then JSON array elements and
object members will be pretty-printed with that indent level. An indent
level of 0 will only insert newlines. ``None`` is the most compact
- representation. Since the default item separator is ``', '``, the
- output might include trailing whitespace when ``indent`` is specified.
- You can use ``separators=(',', ': ')`` to avoid this.
+ representation.
- If ``separators`` is an ``(item_separator, dict_separator)`` tuple
- then it will be used instead of the default ``(', ', ': ')`` separators.
- ``(',', ':')`` is the most compact JSON representation.
+ If specified, ``separators`` should be an ``(item_separator, key_separator)``
+ tuple. The default is ``(', ', ': ')`` if *indent* is ``None`` and
+ ``(',', ': ')`` otherwise. To get the most compact JSON representation,
+ you should specify ``(',', ':')`` to eliminate whitespace.
``default(obj)`` is a function that should return a serializable version
of obj or raise TypeError. The default simply raises TypeError.
@@ -209,13 +207,12 @@ def dumps(obj, skipkeys=False, ensure_ascii=True, check_circular=True,
If ``indent`` is a non-negative integer, then JSON array elements and
object members will be pretty-printed with that indent level. An indent
level of 0 will only insert newlines. ``None`` is the most compact
- representation. Since the default item separator is ``', '``, the
- output might include trailing whitespace when ``indent`` is specified.
- You can use ``separators=(',', ': ')`` to avoid this.
+ representation.
- If ``separators`` is an ``(item_separator, dict_separator)`` tuple
- then it will be used instead of the default ``(', ', ': ')`` separators.
- ``(',', ':')`` is the most compact JSON representation.
+ If specified, ``separators`` should be an ``(item_separator, key_separator)``
+ tuple. The default is ``(', ', ': ')`` if *indent* is ``None`` and
+ ``(',', ': ')`` otherwise. To get the most compact JSON representation,
+ you should specify ``(',', ':')`` to eliminate whitespace.
``default(obj)`` is a function that should return a serializable version
of obj or raise TypeError. The default simply raises TypeError.
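The docstring change reflects the new behaviour: when indent is given, the item separator defaults to ',', so pretty-printed output no longer carries trailing spaces, and (',', ':') remains the way to ask for the most compact form. Roughly:

    >>> import json
    >>> json.dumps({'4': 5, '6': 7}, sort_keys=True, indent=4)
    '{\n    "4": 5,\n    "6": 7\n}'
    >>> json.dumps({'4': 5, '6': 7}, separators=(',', ':'), sort_keys=True)
    '{"4":5,"6":7}'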
diff --git a/Lib/json/decoder.py b/Lib/json/decoder.py
index 51c3aa7..da7ef9c 100644
--- a/Lib/json/decoder.py
+++ b/Lib/json/decoder.py
@@ -1,9 +1,6 @@
"""Implementation of JSONDecoder
"""
-import binascii
import re
-import sys
-import struct
from json import scanner
try:
@@ -15,14 +12,9 @@ __all__ = ['JSONDecoder']
FLAGS = re.VERBOSE | re.MULTILINE | re.DOTALL
-def _floatconstants():
- _BYTES = binascii.unhexlify(b'7FF80000000000007FF0000000000000')
- if sys.byteorder != 'big':
- _BYTES = _BYTES[:8][::-1] + _BYTES[8:][::-1]
- nan, inf = struct.unpack('dd', _BYTES)
- return nan, inf, -inf
-
-NaN, PosInf, NegInf = _floatconstants()
+NaN = float('nan')
+PosInf = float('inf')
+NegInf = float('-inf')
def linecol(doc, pos):
@@ -196,8 +188,8 @@ def JSONObject(s_and_end, strict, scan_once, object_hook, object_pairs_hook,
try:
value, end = scan_once(s, end)
- except StopIteration:
- raise ValueError(errmsg("Expecting object", s, end))
+ except StopIteration as err:
+ raise ValueError(errmsg("Expecting value", s, err.value)) from None
pairs_append((key, value))
try:
nextchar = s[end]
@@ -240,8 +232,8 @@ def JSONArray(s_and_end, scan_once, _w=WHITESPACE.match, _ws=WHITESPACE_STR):
while True:
try:
value, end = scan_once(s, end)
- except StopIteration:
- raise ValueError(errmsg("Expecting object", s, end))
+ except StopIteration as err:
+ raise ValueError(errmsg("Expecting value", s, err.value)) from None
_append(value)
nextchar = s[end:end + 1]
if nextchar in _ws:
@@ -251,7 +243,7 @@ def JSONArray(s_and_end, scan_once, _w=WHITESPACE.match, _ws=WHITESPACE_STR):
if nextchar == ']':
break
elif nextchar != ',':
- raise ValueError(errmsg("Expecting ',' delimiter", s, end))
+ raise ValueError(errmsg("Expecting ',' delimiter", s, end - 1))
try:
if s[end] in _ws:
end += 1
@@ -366,6 +358,6 @@ class JSONDecoder(object):
"""
try:
obj, end = self.scan_once(s, idx)
- except StopIteration:
- raise ValueError("No JSON object could be decoded")
+ except StopIteration as err:
+ raise ValueError(errmsg("Expecting value", s, err.value)) from None
return obj, end
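Since the scanner now raises StopIteration carrying the failing index, the decoder can point at the exact position instead of the old generic "No JSON object could be decoded" message; roughly:

    >>> import json
    >>> json.loads('[1, 2, ]')
    Traceback (most recent call last):
      ...
    ValueError: Expecting value: line 1 column 8 (char 7)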
diff --git a/Lib/json/encoder.py b/Lib/json/encoder.py
index 1d8b20c..39b550d 100644
--- a/Lib/json/encoder.py
+++ b/Lib/json/encoder.py
@@ -125,14 +125,12 @@ class JSONEncoder(object):
If indent is a non-negative integer, then JSON array
elements and object members will be pretty-printed with that
indent level. An indent level of 0 will only insert newlines.
- None is the most compact representation. Since the default
- item separator is ', ', the output might include trailing
- whitespace when indent is specified. You can use
- separators=(',', ': ') to avoid this.
+ None is the most compact representation.
- If specified, separators should be a (item_separator, key_separator)
- tuple. The default is (', ', ': '). To get the most compact JSON
- representation you should specify (',', ':') to eliminate whitespace.
+ If specified, separators should be an (item_separator, key_separator)
+ tuple. The default is (', ', ': ') if *indent* is ``None`` and
+ (',', ': ') otherwise. To get the most compact JSON representation,
+ you should specify (',', ':') to eliminate whitespace.
If specified, default is a function that gets called for objects
that can't otherwise be serialized. It should return a JSON encodable
@@ -148,6 +146,8 @@ class JSONEncoder(object):
self.indent = indent
if separators is not None:
self.item_separator, self.key_separator = separators
+ elif indent is not None:
+ self.item_separator = ','
if default is not None:
self.default = default
@@ -309,8 +309,7 @@ def _make_iterencode(markers, _default, _encoder, _indent, _floatstr,
chunks = _iterencode_dict(value, _current_indent_level)
else:
chunks = _iterencode(value, _current_indent_level)
- for chunk in chunks:
- yield chunk
+ yield from chunks
if newline_indent is not None:
_current_indent_level -= 1
yield '\n' + _indent * _current_indent_level
@@ -385,8 +384,7 @@ def _make_iterencode(markers, _default, _encoder, _indent, _floatstr,
chunks = _iterencode_dict(value, _current_indent_level)
else:
chunks = _iterencode(value, _current_indent_level)
- for chunk in chunks:
- yield chunk
+ yield from chunks
if newline_indent is not None:
_current_indent_level -= 1
yield '\n' + _indent * _current_indent_level
@@ -408,11 +406,9 @@ def _make_iterencode(markers, _default, _encoder, _indent, _floatstr,
elif isinstance(o, float):
yield _floatstr(o)
elif isinstance(o, (list, tuple)):
- for chunk in _iterencode_list(o, _current_indent_level):
- yield chunk
+ yield from _iterencode_list(o, _current_indent_level)
elif isinstance(o, dict):
- for chunk in _iterencode_dict(o, _current_indent_level):
- yield chunk
+ yield from _iterencode_dict(o, _current_indent_level)
else:
if markers is not None:
markerid = id(o)
@@ -420,8 +416,7 @@ def _make_iterencode(markers, _default, _encoder, _indent, _floatstr,
raise ValueError("Circular reference detected")
markers[markerid] = o
o = _default(o)
- for chunk in _iterencode(o, _current_indent_level):
- yield chunk
+ yield from _iterencode(o, _current_indent_level)
if markers is not None:
del markers[markerid]
return _iterencode
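The repeated "for chunk in ...: yield chunk" loops collapse into PEP 380's "yield from", which delegates straight to the sub-generator. The same rewrite appears in lib2to3 below; a generic sketch of the equivalence:

    def flatten(items):
        for item in items:
            if isinstance(item, (list, tuple)):
                yield from flatten(item)          # was: for x in flatten(item): yield x
            else:
                yield item

    list(flatten([1, [2, (3, 4)], 5]))            # [1, 2, 3, 4, 5]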
diff --git a/Lib/json/scanner.py b/Lib/json/scanner.py
index 23eef61..86426cd 100644
--- a/Lib/json/scanner.py
+++ b/Lib/json/scanner.py
@@ -29,7 +29,7 @@ def py_make_scanner(context):
try:
nextchar = string[idx]
except IndexError:
- raise StopIteration
+ raise StopIteration(idx)
if nextchar == '"':
return parse_string(string, idx + 1, strict)
@@ -60,7 +60,7 @@ def py_make_scanner(context):
elif nextchar == '-' and string[idx:idx + 9] == '-Infinity':
return parse_constant('-Infinity'), idx + 9
else:
- raise StopIteration
+ raise StopIteration(idx)
def scan_once(string, idx):
try:
diff --git a/Lib/json/tool.py b/Lib/json/tool.py
index ecf9c47..7db4528 100644
--- a/Lib/json/tool.py
+++ b/Lib/json/tool.py
@@ -31,8 +31,7 @@ def main():
except ValueError as e:
raise SystemExit(e)
with outfile:
- json.dump(obj, outfile, sort_keys=True,
- indent=4, separators=(',', ': '))
+ json.dump(obj, outfile, sort_keys=True, indent=4)
outfile.write('\n')
diff --git a/Lib/keyword.py b/Lib/keyword.py
index dad39cc..91528f7 100755
--- a/Lib/keyword.py
+++ b/Lib/keyword.py
@@ -85,9 +85,8 @@ def main():
sys.exit(1)
# write the output file
- fp = open(optfile, 'w')
- fp.write(''.join(format))
- fp.close()
+ with open(optfile, 'w') as fp:
+ fp.write(''.join(format))
if __name__ == "__main__":
main()
diff --git a/Lib/lib2to3/btm_utils.py b/Lib/lib2to3/btm_utils.py
index 2276dc9..339750e 100644
--- a/Lib/lib2to3/btm_utils.py
+++ b/Lib/lib2to3/btm_utils.py
@@ -96,8 +96,7 @@ class MinNode(object):
def leaves(self):
"Generator that returns the leaves of the tree"
for child in self.children:
- for x in child.leaves():
- yield x
+ yield from child.leaves()
if not self.children:
yield self
@@ -277,7 +276,6 @@ def rec_test(sequence, test_func):
sub-iterables"""
for x in sequence:
if isinstance(x, (list, tuple)):
- for y in rec_test(x, test_func):
- yield y
+ yield from rec_test(x, test_func)
else:
yield test_func(x)
diff --git a/Lib/lib2to3/fixer_util.py b/Lib/lib2to3/fixer_util.py
index 60d219f..6e259c5 100644
--- a/Lib/lib2to3/fixer_util.py
+++ b/Lib/lib2to3/fixer_util.py
@@ -129,6 +129,29 @@ def FromImport(package_name, name_leafs):
imp = Node(syms.import_from, children)
return imp
+def ImportAndCall(node, results, names):
+ """Returns an import statement and calls a method
+ of the module:
+
+ import module
+ module.name()"""
+ obj = results["obj"].clone()
+ if obj.type == syms.arglist:
+ newarglist = obj.clone()
+ else:
+ newarglist = Node(syms.arglist, [obj.clone()])
+ after = results["after"]
+ if after:
+ after = [n.clone() for n in after]
+ new = Node(syms.power,
+ Attr(Name(names[0]), Name(names[1])) +
+ [Node(syms.trailer,
+ [results["lpar"].clone(),
+ newarglist,
+ results["rpar"].clone()])] + after)
+ new.prefix = node.prefix
+ return new
+
###########################################################
### Determine whether a node represents a given literal
diff --git a/Lib/lib2to3/fixes/fix_intern.py b/Lib/lib2to3/fixes/fix_intern.py
index 6be11cd..fb2973c 100644
--- a/Lib/lib2to3/fixes/fix_intern.py
+++ b/Lib/lib2to3/fixes/fix_intern.py
@@ -6,9 +6,8 @@
intern(s) -> sys.intern(s)"""
# Local imports
-from .. import pytree
from .. import fixer_base
-from ..fixer_util import Name, Attr, touch_import
+from ..fixer_util import ImportAndCall, touch_import
class FixIntern(fixer_base.BaseFix):
@@ -26,21 +25,7 @@ class FixIntern(fixer_base.BaseFix):
"""
def transform(self, node, results):
- syms = self.syms
- obj = results["obj"].clone()
- if obj.type == syms.arglist:
- newarglist = obj.clone()
- else:
- newarglist = pytree.Node(syms.arglist, [obj.clone()])
- after = results["after"]
- if after:
- after = [n.clone() for n in after]
- new = pytree.Node(syms.power,
- Attr(Name("sys"), Name("intern")) +
- [pytree.Node(syms.trailer,
- [results["lpar"].clone(),
- newarglist,
- results["rpar"].clone()])] + after)
- new.prefix = node.prefix
+ names = ('sys', 'intern')
+ new = ImportAndCall(node, results, names)
touch_import(None, 'sys', node)
return new
diff --git a/Lib/lib2to3/fixes/fix_reload.py b/Lib/lib2to3/fixes/fix_reload.py
new file mode 100644
index 0000000..1855357
--- /dev/null
+++ b/Lib/lib2to3/fixes/fix_reload.py
@@ -0,0 +1,28 @@
+"""Fixer for reload().
+
+reload(s) -> imp.reload(s)"""
+
+# Local imports
+from .. import fixer_base
+from ..fixer_util import ImportAndCall, touch_import
+
+
+class FixReload(fixer_base.BaseFix):
+ BM_compatible = True
+ order = "pre"
+
+ PATTERN = """
+ power< 'reload'
+ trailer< lpar='('
+ ( not(arglist | argument<any '=' any>) obj=any
+ | obj=arglist<(not argument<any '=' any>) any ','> )
+ rpar=')' >
+ after=any*
+ >
+ """
+
+ def transform(self, node, results):
+ names = ('imp', 'reload')
+ new = ImportAndCall(node, results, names)
+ touch_import(None, 'imp', node)
+ return new
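The new fixer reuses the ImportAndCall helper added to fixer_util above; as the tests further down show, running 2to3 with it rewrites bare reload() calls along these lines (the module name is illustrative):

    # before: 2to3 -f reload
    reload(mymodule)

    # after
    import imp
    imp.reload(mymodule)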
diff --git a/Lib/lib2to3/main.py b/Lib/lib2to3/main.py
index f9cc18b..93bae90 100644
--- a/Lib/lib2to3/main.py
+++ b/Lib/lib2to3/main.py
@@ -90,11 +90,11 @@ class StdoutRefactoringTool(refactor.MultiprocessRefactoringTool):
if os.path.lexists(backup):
try:
os.remove(backup)
- except os.error as err:
+ except OSError as err:
self.log_message("Can't remove backup %s", backup)
try:
os.rename(filename, backup)
- except os.error as err:
+ except OSError as err:
self.log_message("Can't rename %s to %s", filename, backup)
# Actually write the new file
write = super(StdoutRefactoringTool, self).write_file
diff --git a/Lib/lib2to3/pgen2/conv.py b/Lib/lib2to3/pgen2/conv.py
index bf49762..ed0cac5 100644
--- a/Lib/lib2to3/pgen2/conv.py
+++ b/Lib/lib2to3/pgen2/conv.py
@@ -60,7 +60,7 @@ class Converter(grammar.Grammar):
"""
try:
f = open(filename)
- except IOError as err:
+ except OSError as err:
print("Can't open %s: %s" % (filename, err))
return False
self.symbol2number = {}
@@ -111,7 +111,7 @@ class Converter(grammar.Grammar):
"""
try:
f = open(filename)
- except IOError as err:
+ except OSError as err:
print("Can't open %s: %s" % (filename, err))
return False
# The code below essentially uses f's iterator-ness!
diff --git a/Lib/lib2to3/pgen2/driver.py b/Lib/lib2to3/pgen2/driver.py
index 4c611c6..3ccc69d 100644
--- a/Lib/lib2to3/pgen2/driver.py
+++ b/Lib/lib2to3/pgen2/driver.py
@@ -123,7 +123,7 @@ def load_grammar(gt="Grammar.txt", gp=None,
logger.info("Writing grammar tables to %s", gp)
try:
g.dump(gp)
- except IOError as e:
+ except OSError as e:
logger.info("Writing failed:"+str(e))
else:
g = grammar.Grammar()
diff --git a/Lib/lib2to3/pgen2/grammar.py b/Lib/lib2to3/pgen2/grammar.py
index 14c5f70..7f1c564 100644
--- a/Lib/lib2to3/pgen2/grammar.py
+++ b/Lib/lib2to3/pgen2/grammar.py
@@ -86,15 +86,13 @@ class Grammar(object):
def dump(self, filename):
"""Dump the grammar tables to a pickle file."""
- f = open(filename, "wb")
- pickle.dump(self.__dict__, f, 2)
- f.close()
+ with open(filename, "wb") as f:
+ pickle.dump(self.__dict__, f, 2)
def load(self, filename):
"""Load the grammar tables from a pickle file."""
- f = open(filename, "rb")
- d = pickle.load(f)
- f.close()
+ with open(filename, "rb") as f:
+ d = pickle.load(f)
self.__dict__.update(d)
def copy(self):
diff --git a/Lib/lib2to3/pytree.py b/Lib/lib2to3/pytree.py
index 17cbf0a..c4a1be3 100644
--- a/Lib/lib2to3/pytree.py
+++ b/Lib/lib2to3/pytree.py
@@ -194,8 +194,7 @@ class Base(object):
def leaves(self):
for child in self.children:
- for x in child.leaves():
- yield x
+ yield from child.leaves()
def depth(self):
if self.parent is None:
@@ -274,16 +273,14 @@ class Node(Base):
def post_order(self):
"""Return a post-order iterator for the tree."""
for child in self.children:
- for node in child.post_order():
- yield node
+ yield from child.post_order()
yield self
def pre_order(self):
"""Return a pre-order iterator for the tree."""
yield self
for child in self.children:
- for node in child.pre_order():
- yield node
+ yield from child.pre_order()
def _prefix_getter(self):
"""
diff --git a/Lib/lib2to3/refactor.py b/Lib/lib2to3/refactor.py
index 201e193..8100317 100644
--- a/Lib/lib2to3/refactor.py
+++ b/Lib/lib2to3/refactor.py
@@ -326,7 +326,7 @@ class RefactoringTool(object):
"""
try:
f = open(filename, "rb")
- except IOError as err:
+ except OSError as err:
self.log_error("Can't open %s: %s", filename, err)
return None, None
try:
@@ -534,12 +534,12 @@ class RefactoringTool(object):
"""
try:
f = _open_with_encoding(filename, "w", encoding=encoding)
- except os.error as err:
+ except OSError as err:
self.log_error("Can't create %s: %s", filename, err)
return
try:
f.write(_to_system_newlines(new_text))
- except os.error as err:
+ except OSError as err:
self.log_error("Can't write %s: %s", filename, err)
finally:
f.close()
diff --git a/Lib/lib2to3/tests/pytree_idempotency.py b/Lib/lib2to3/tests/pytree_idempotency.py
index a02bbfe..731c403 100755
--- a/Lib/lib2to3/tests/pytree_idempotency.py
+++ b/Lib/lib2to3/tests/pytree_idempotency.py
@@ -53,7 +53,7 @@ def main():
for dir in sys.path:
try:
names = os.listdir(dir)
- except os.error:
+ except OSError:
continue
print("Scanning", dir, "...", file=sys.stderr)
for name in names:
diff --git a/Lib/lib2to3/tests/test_fixers.py b/Lib/lib2to3/tests/test_fixers.py
index 914b3bf..d7659fa 100644
--- a/Lib/lib2to3/tests/test_fixers.py
+++ b/Lib/lib2to3/tests/test_fixers.py
@@ -282,6 +282,65 @@ class Test_apply(FixerTestCase):
b = """f(*args, **kwds)"""
self.check(a, b)
+class Test_reload(FixerTestCase):
+ fixer = "reload"
+
+ def test(self):
+ b = """reload(a)"""
+ a = """import imp\nimp.reload(a)"""
+ self.check(b, a)
+
+ def test_comment(self):
+ b = """reload( a ) # comment"""
+ a = """import imp\nimp.reload( a ) # comment"""
+ self.check(b, a)
+
+ # PEP 8 comments
+ b = """reload( a ) # comment"""
+ a = """import imp\nimp.reload( a ) # comment"""
+ self.check(b, a)
+
+ def test_space(self):
+ b = """reload( a )"""
+ a = """import imp\nimp.reload( a )"""
+ self.check(b, a)
+
+ b = """reload( a)"""
+ a = """import imp\nimp.reload( a)"""
+ self.check(b, a)
+
+ b = """reload(a )"""
+ a = """import imp\nimp.reload(a )"""
+ self.check(b, a)
+
+ def test_unchanged(self):
+ s = """reload(a=1)"""
+ self.unchanged(s)
+
+ s = """reload(f, g)"""
+ self.unchanged(s)
+
+ s = """reload(f, *h)"""
+ self.unchanged(s)
+
+ s = """reload(f, *h, **i)"""
+ self.unchanged(s)
+
+ s = """reload(f, **i)"""
+ self.unchanged(s)
+
+ s = """reload(*h, **i)"""
+ self.unchanged(s)
+
+ s = """reload(*h)"""
+ self.unchanged(s)
+
+ s = """reload(**i)"""
+ self.unchanged(s)
+
+ s = """reload()"""
+ self.unchanged(s)
+
class Test_intern(FixerTestCase):
fixer = "intern"
diff --git a/Lib/linecache.py b/Lib/linecache.py
index c3f2c3f..02a9eb5 100644
--- a/Lib/linecache.py
+++ b/Lib/linecache.py
@@ -59,7 +59,7 @@ def checkcache(filename=None):
continue # no-op for files loaded via a __loader__
try:
stat = os.stat(fullname)
- except os.error:
+ except OSError:
del cache[filename]
continue
if size != stat.st_size or mtime != stat.st_mtime:
@@ -91,7 +91,7 @@ def updatecache(filename, module_globals=None):
if name and get_source:
try:
data = get_source(name)
- except (ImportError, IOError):
+ except (ImportError, OSError):
pass
else:
if data is None:
@@ -118,14 +118,14 @@ def updatecache(filename, module_globals=None):
try:
stat = os.stat(fullname)
break
- except os.error:
+ except OSError:
pass
else:
return []
try:
with tokenize.open(fullname) as fp:
lines = fp.readlines()
- except IOError:
+ except OSError:
return []
if lines and not lines[-1].endswith('\n'):
lines[-1] += '\n'
diff --git a/Lib/logging/__init__.py b/Lib/logging/__init__.py
index fa03f78..244c915 100644
--- a/Lib/logging/__init__.py
+++ b/Lib/logging/__init__.py
@@ -67,7 +67,7 @@ else: #pragma: no cover
"""Return the frame object for the caller's stack frame."""
try:
raise Exception
- except:
+ except Exception:
return sys.exc_info()[2].tb_frame.f_back
# _srcfile is only used in conjunction with sys._getframe().
@@ -880,16 +880,27 @@ class Handler(Filterer):
The record which was being processed is passed in to this method.
"""
if raiseExceptions and sys.stderr: # see issue 13807
- ei = sys.exc_info()
+ t, v, tb = sys.exc_info()
try:
- traceback.print_exception(ei[0], ei[1], ei[2],
- None, sys.stderr)
- sys.stderr.write('Logged from file %s, line %s\n' % (
- record.filename, record.lineno))
- except IOError: #pragma: no cover
+ sys.stderr.write('--- Logging error ---\n')
+ traceback.print_exception(t, v, tb, None, sys.stderr)
+ sys.stderr.write('Call stack:\n')
+ # Walk the stack frame up until we're out of logging,
+ # so as to print the calling context.
+ frame = tb.tb_frame
+ while (frame and os.path.dirname(frame.f_code.co_filename) ==
+ __path__[0]):
+ frame = frame.f_back
+ if frame:
+ traceback.print_stack(frame, file=sys.stderr)
+ else:
+ # couldn't find the right stack frame, for some reason
+ sys.stderr.write('Logged from file %s, line %s\n' % (
+ record.filename, record.lineno))
+ except OSError: #pragma: no cover
pass # see issue 5971
finally:
- del ei
+ del t, v, tb
class StreamHandler(Handler):
"""
@@ -939,9 +950,7 @@ class StreamHandler(Handler):
stream.write(msg)
stream.write(self.terminator)
self.flush()
- except (KeyboardInterrupt, SystemExit): #pragma: no cover
- raise
- except:
+ except Exception:
self.handleError(record)
class FileHandler(StreamHandler):
@@ -1830,7 +1839,7 @@ def shutdown(handlerList=_handlerList):
h.acquire()
h.flush()
h.close()
- except (IOError, ValueError):
+ except (OSError, ValueError):
# Ignore errors which might be caused
# because handlers have been closed but
# references to them are still around at
@@ -1838,7 +1847,7 @@ def shutdown(handlerList=_handlerList):
pass
finally:
h.release()
- except:
+ except: # ignore everything, as we're shutting down
if raiseExceptions:
raise
#else, swallow
diff --git a/Lib/logging/config.py b/Lib/logging/config.py
index 11e79a2..b0c0971 100644
--- a/Lib/logging/config.py
+++ b/Lib/logging/config.py
@@ -61,11 +61,14 @@ def fileConfig(fname, defaults=None, disable_existing_loggers=True):
"""
import configparser
- cp = configparser.ConfigParser(defaults)
- if hasattr(fname, 'readline'):
- cp.read_file(fname)
+ if isinstance(fname, configparser.RawConfigParser):
+ cp = fname
else:
- cp.read(fname)
+ cp = configparser.ConfigParser(defaults)
+ if hasattr(fname, 'readline'):
+ cp.read_file(fname)
+ else:
+ cp.read(fname)
formatters = _create_formatters(cp)
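fileConfig() now also accepts an already-populated RawConfigParser (or subclass), so the configuration can come from anything configparser can read. A sketch using the in-memory read_dict() API (section contents are illustrative):

    import configparser
    import logging.config

    cp = configparser.ConfigParser()
    cp.read_dict({
        'loggers':         {'keys': 'root'},
        'handlers':        {'keys': 'console'},
        'formatters':      {'keys': 'plain'},
        'logger_root':     {'level': 'INFO', 'handlers': 'console'},
        'handler_console': {'class': 'StreamHandler', 'level': 'INFO',
                            'formatter': 'plain', 'args': '(sys.stderr,)'},
        'formatter_plain': {'format': '%(levelname)s:%(name)s:%(message)s'},
    })
    logging.config.fileConfig(cp, disable_existing_loggers=False)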
@@ -727,6 +730,7 @@ class DictConfigurator(BaseConfigurator):
'address' in config:
config['address'] = self.as_tuple(config['address'])
factory = klass
+ props = config.pop('.', None)
kwargs = dict([(k, config[k]) for k in config if valid_ident(k)])
try:
result = factory(**kwargs)
@@ -745,6 +749,9 @@ class DictConfigurator(BaseConfigurator):
result.setLevel(logging._checkLevel(level))
if filters:
self.add_filters(result, filters)
+ if props:
+ for name, value in props.items():
+ setattr(result, name, value)
return result
def add_handlers(self, logger, handlers):
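The popped '.' entry lets a dictConfig handler description attach arbitrary extra attributes to the handler object once it has been constructed. A hedged example (the 'owner' attribute is purely illustrative):

    import logging.config

    logging.config.dictConfig({
        'version': 1,
        'handlers': {
            'console': {
                'class': 'logging.StreamHandler',
                '.': {'owner': 'metrics-team'},   # applied as plain attributes afterwards
            },
        },
        'root': {'handlers': ['console'], 'level': 'INFO'},
    })
    # logging.getLogger().handlers[0].owner == 'metrics-team'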
@@ -793,7 +800,7 @@ def dictConfig(config):
dictConfigClass(config).configure()
-def listen(port=DEFAULT_LOGGING_CONFIG_PORT):
+def listen(port=DEFAULT_LOGGING_CONFIG_PORT, verify=None):
"""
Start up a socket server on the specified port, and listen for new
configurations.
@@ -802,6 +809,15 @@ def listen(port=DEFAULT_LOGGING_CONFIG_PORT):
Returns a Thread object on which you can call start() to start the server,
and which you can join() when appropriate. To stop the server, call
stopListening().
+
+ Use the ``verify`` argument to verify any bytes received across the wire
+ from a client. If specified, it should be a callable which receives a
+ single argument - the bytes of configuration data received across the
+ network - and it should return either ``None``, to indicate that the
+ passed in bytes could not be verified and should be discarded, or a
+ byte string which is then passed to the configuration machinery as
+ normal. Note that you can return transformed bytes, e.g. by decrypting
+ the bytes passed in.
"""
if not thread: #pragma: no cover
raise NotImplementedError("listen() needs threading to work")
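A verify callable sees the raw bytes from the client and returns either None (discard) or the bytes to hand on, so it can check a signature or decrypt. A minimal sketch assuming a shared-secret prefix (the secret and the scheme are illustrative only):

    import logging.config

    SECRET = b'not-really-secret'                 # illustrative shared secret

    def verify(data):
        if data.startswith(SECRET):
            return data[len(SECRET):]             # hand the real payload onwards
        return None                               # discard unverified data

    t = logging.config.listen(verify=verify)      # returns a threading.Thread
    t.start()
    # ... later:
    logging.config.stopListening()
    t.join()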
@@ -829,25 +845,26 @@ def listen(port=DEFAULT_LOGGING_CONFIG_PORT):
chunk = self.connection.recv(slen)
while len(chunk) < slen:
chunk = chunk + conn.recv(slen - len(chunk))
- chunk = chunk.decode("utf-8")
- try:
- import json
- d =json.loads(chunk)
- assert isinstance(d, dict)
- dictConfig(d)
- except:
- #Apply new configuration.
-
- file = io.StringIO(chunk)
+ if self.server.verify is not None:
+ chunk = self.server.verify(chunk)
+ if chunk is not None: # verified, can process
+ chunk = chunk.decode("utf-8")
try:
- fileConfig(file)
- except (KeyboardInterrupt, SystemExit): #pragma: no cover
- raise
- except:
- traceback.print_exc()
+ import json
+ d =json.loads(chunk)
+ assert isinstance(d, dict)
+ dictConfig(d)
+ except Exception:
+ #Apply new configuration.
+
+ file = io.StringIO(chunk)
+ try:
+ fileConfig(file)
+ except Exception:
+ traceback.print_exc()
if self.server.ready:
self.server.ready.set()
- except socket.error as e:
+ except OSError as e:
if not isinstance(e.args, tuple):
raise
else:
@@ -863,13 +880,14 @@ def listen(port=DEFAULT_LOGGING_CONFIG_PORT):
allow_reuse_address = 1
def __init__(self, host='localhost', port=DEFAULT_LOGGING_CONFIG_PORT,
- handler=None, ready=None):
+ handler=None, ready=None, verify=None):
ThreadingTCPServer.__init__(self, (host, port), handler)
logging._acquireLock()
self.abort = 0
logging._releaseLock()
self.timeout = 1
self.ready = ready
+ self.verify = verify
def serve_until_stopped(self):
import select
@@ -887,16 +905,18 @@ def listen(port=DEFAULT_LOGGING_CONFIG_PORT):
class Server(threading.Thread):
- def __init__(self, rcvr, hdlr, port):
+ def __init__(self, rcvr, hdlr, port, verify):
super(Server, self).__init__()
self.rcvr = rcvr
self.hdlr = hdlr
self.port = port
+ self.verify = verify
self.ready = threading.Event()
def run(self):
server = self.rcvr(port=self.port, handler=self.hdlr,
- ready=self.ready)
+ ready=self.ready,
+ verify=self.verify)
if self.port == 0:
self.port = server.server_address[1]
self.ready.set()
@@ -906,7 +926,7 @@ def listen(port=DEFAULT_LOGGING_CONFIG_PORT):
logging._releaseLock()
server.serve_until_stopped()
- return Server(ConfigSocketReceiver, ConfigStreamHandler, port)
+ return Server(ConfigSocketReceiver, ConfigStreamHandler, port, verify)
def stopListening():
"""
diff --git a/Lib/logging/handlers.py b/Lib/logging/handlers.py
index f286cd6..263acc9 100644
--- a/Lib/logging/handlers.py
+++ b/Lib/logging/handlers.py
@@ -72,9 +72,7 @@ class BaseRotatingHandler(logging.FileHandler):
if self.shouldRollover(record):
self.doRollover()
logging.FileHandler.emit(self, record)
- except (KeyboardInterrupt, SystemExit): #pragma: no cover
- raise
- except:
+ except Exception:
self.handleError(record)
def rotation_filename(self, default_name):
@@ -440,11 +438,8 @@ class WatchedFileHandler(logging.FileHandler):
try:
# stat the file by path, checking for existence
sres = os.stat(self.baseFilename)
- except OSError as err:
- if err.errno == errno.ENOENT:
- sres = None
- else:
- raise
+ except FileNotFoundError:
+ sres = None
# compare file system stat with that of our stream file handle
if not sres or sres[ST_DEV] != self.dev or sres[ST_INO] != self.ino:
if self.stream is not None:
@@ -496,15 +491,7 @@ class SocketHandler(logging.Handler):
A factory method which allows subclasses to define the precise
type of socket they want.
"""
- s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
- if hasattr(s, 'settimeout'):
- s.settimeout(timeout)
- try:
- s.connect((self.host, self.port))
- return s
- except socket.error:
- s.close()
- raise
+ return socket.create_connection((self.host, self.port), timeout=timeout)
def createSocket(self):
"""
@@ -524,7 +511,7 @@ class SocketHandler(logging.Handler):
try:
self.sock = self.makeSocket()
self.retryTime = None # next time, no delay before trying
- except socket.error:
+ except OSError:
#Creation failed, so set the retry time and return.
if self.retryTime is None:
self.retryPeriod = self.retryStart
@@ -548,16 +535,8 @@ class SocketHandler(logging.Handler):
#but are still unable to connect.
if self.sock:
try:
- if hasattr(self.sock, "sendall"):
- self.sock.sendall(s)
- else: #pragma: no cover
- sentsofar = 0
- left = len(s)
- while left > 0:
- sent = self.sock.send(s[sentsofar:])
- sentsofar = sentsofar + sent
- left = left - sent
- except socket.error: #pragma: no cover
+ self.sock.sendall(s)
+ except OSError: #pragma: no cover
self.sock.close()
self.sock = None # so we can call createSocket next time
@@ -607,9 +586,7 @@ class SocketHandler(logging.Handler):
try:
s = self.makePickle(record)
self.send(s)
- except (KeyboardInterrupt, SystemExit): #pragma: no cover
- raise
- except:
+ except Exception:
self.handleError(record)
def close(self):
@@ -795,7 +772,7 @@ class SysLogHandler(logging.Handler):
self.socket = socket.socket(socket.AF_UNIX, self.socktype)
try:
self.socket.connect(address)
- except socket.error:
+ except OSError:
self.socket.close()
raise
@@ -862,16 +839,14 @@ class SysLogHandler(logging.Handler):
if self.unixsocket:
try:
self.socket.send(msg)
- except socket.error:
+ except OSError:
self._connect_unixsocket(self.address)
self.socket.send(msg)
elif self.socktype == socket.SOCK_DGRAM:
self.socket.sendto(msg, self.address)
else:
self.socket.sendall(msg)
- except (KeyboardInterrupt, SystemExit): #pragma: no cover
- raise
- except:
+ except Exception:
self.handleError(record)
class SMTPHandler(logging.Handler):
@@ -949,9 +924,7 @@ class SMTPHandler(logging.Handler):
smtp.login(self.username, self.password)
smtp.sendmail(self.fromaddr, self.toaddrs, msg)
smtp.quit()
- except (KeyboardInterrupt, SystemExit): #pragma: no cover
- raise
- except:
+ except Exception:
self.handleError(record)
class NTEventLogHandler(logging.Handler):
@@ -1036,9 +1009,7 @@ class NTEventLogHandler(logging.Handler):
type = self.getEventType(record)
msg = self.format(record)
self._welu.ReportEvent(self.appname, id, cat, type, [msg])
- except (KeyboardInterrupt, SystemExit): #pragma: no cover
- raise
- except:
+ except Exception:
self.handleError(record)
def close(self):
@@ -1123,9 +1094,7 @@ class HTTPHandler(logging.Handler):
if self.method == "POST":
h.send(data.encode('utf-8'))
h.getresponse() #can't do anything with the result
- except (KeyboardInterrupt, SystemExit): #pragma: no cover
- raise
- except:
+ except Exception:
self.handleError(record)
class BufferingHandler(logging.Handler):
@@ -1305,9 +1274,7 @@ class QueueHandler(logging.Handler):
"""
try:
self.enqueue(self.prepare(record))
- except (KeyboardInterrupt, SystemExit): #pragma: no cover
- raise
- except:
+ except Exception:
self.handleError(record)
if threading:
diff --git a/Lib/lzma.py b/Lib/lzma.py
index 1a1b065..b2e2f7e 100644
--- a/Lib/lzma.py
+++ b/Lib/lzma.py
@@ -55,7 +55,7 @@ class LZMAFile(io.BufferedIOBase):
be an existing file object to read from or write to.
mode can be "r" for reading (default), "w" for (over)writing, or
- "a" for appending. These can equivalently be given as "rb", "wb",
+ "a" for appending. These can equivalently be given as "rb", "wb"
and "ab" respectively.
format specifies the container format to use for the file.
@@ -110,7 +110,8 @@ class LZMAFile(io.BufferedIOBase):
# stream will need a separate decompressor object.
self._init_args = {"format":format, "filters":filters}
self._decompressor = LZMADecompressor(**self._init_args)
- self._buffer = None
+ self._buffer = b""
+ self._buffer_offset = 0
elif mode in ("w", "wb", "a", "ab"):
if format is None:
format = FORMAT_XZ
@@ -143,7 +144,7 @@ class LZMAFile(io.BufferedIOBase):
try:
if self._mode in (_MODE_READ, _MODE_READ_EOF):
self._decompressor = None
- self._buffer = None
+ self._buffer = b""
elif self._mode == _MODE_WRITE:
self._fp.write(self._compressor.flush())
self._compressor = None
@@ -187,15 +188,18 @@ class LZMAFile(io.BufferedIOBase):
raise ValueError("I/O operation on closed file")
def _check_can_read(self):
- if not self.readable():
+ if self._mode not in (_MODE_READ, _MODE_READ_EOF):
+ self._check_not_closed()
raise io.UnsupportedOperation("File not open for reading")
def _check_can_write(self):
- if not self.writable():
+ if self._mode != _MODE_WRITE:
+ self._check_not_closed()
raise io.UnsupportedOperation("File not open for writing")
def _check_can_seek(self):
- if not self.readable():
+ if self._mode not in (_MODE_READ, _MODE_READ_EOF):
+ self._check_not_closed()
raise io.UnsupportedOperation("Seeking is only supported "
"on files open for reading")
if not self._fp.seekable():
@@ -204,16 +208,13 @@ class LZMAFile(io.BufferedIOBase):
# Fill the readahead buffer if it is empty. Returns False on EOF.
def _fill_buffer(self):
+ if self._mode == _MODE_READ_EOF:
+ return False
# Depending on the input data, our call to the decompressor may not
# return any data. In this case, try again after reading another block.
- while True:
- if self._buffer:
- return True
-
- if self._decompressor.unused_data:
- rawblock = self._decompressor.unused_data
- else:
- rawblock = self._fp.read(_BUFFER_SIZE)
+ while self._buffer_offset == len(self._buffer):
+ rawblock = (self._decompressor.unused_data or
+ self._fp.read(_BUFFER_SIZE))
if not rawblock:
if self._decompressor.eof:
@@ -229,30 +230,48 @@ class LZMAFile(io.BufferedIOBase):
self._decompressor = LZMADecompressor(**self._init_args)
self._buffer = self._decompressor.decompress(rawblock)
+ self._buffer_offset = 0
+ return True
# Read data until EOF.
# If return_data is false, consume the data without returning it.
def _read_all(self, return_data=True):
+ # The loop assumes that _buffer_offset is 0. Ensure that this is true.
+ self._buffer = self._buffer[self._buffer_offset:]
+ self._buffer_offset = 0
+
blocks = []
while self._fill_buffer():
if return_data:
blocks.append(self._buffer)
self._pos += len(self._buffer)
- self._buffer = None
+ self._buffer = b""
if return_data:
return b"".join(blocks)
# Read a block of up to n bytes.
# If return_data is false, consume the data without returning it.
def _read_block(self, n, return_data=True):
+ # If we have enough data buffered, return immediately.
+ end = self._buffer_offset + n
+ if end <= len(self._buffer):
+ data = self._buffer[self._buffer_offset : end]
+ self._buffer_offset = end
+ self._pos += len(data)
+ return data if return_data else None
+
+ # The loop assumes that _buffer_offset is 0. Ensure that this is true.
+ self._buffer = self._buffer[self._buffer_offset:]
+ self._buffer_offset = 0
+
blocks = []
while n > 0 and self._fill_buffer():
if n < len(self._buffer):
data = self._buffer[:n]
- self._buffer = self._buffer[n:]
+ self._buffer_offset = n
else:
data = self._buffer
- self._buffer = None
+ self._buffer = b""
if return_data:
blocks.append(data)
self._pos += len(data)
@@ -267,9 +286,9 @@ class LZMAFile(io.BufferedIOBase):
The exact number of bytes returned is unspecified.
"""
self._check_can_read()
- if self._mode == _MODE_READ_EOF or not self._fill_buffer():
+ if not self._fill_buffer():
return b""
- return self._buffer
+ return self._buffer[self._buffer_offset:]
def read(self, size=-1):
"""Read up to size uncompressed bytes from the file.
@@ -278,7 +297,7 @@ class LZMAFile(io.BufferedIOBase):
Returns b"" if the file is already at EOF.
"""
self._check_can_read()
- if self._mode == _MODE_READ_EOF or size == 0:
+ if size == 0:
return b""
elif size < 0:
return self._read_all()
@@ -295,18 +314,40 @@ class LZMAFile(io.BufferedIOBase):
# this does not give enough data for the decompressor to make progress.
# In this case we make multiple reads, to avoid returning b"".
self._check_can_read()
- if (size == 0 or self._mode == _MODE_READ_EOF or
- not self._fill_buffer()):
+ if (size == 0 or
+ # Only call _fill_buffer() if the buffer is actually empty.
+ # This gives a significant speedup if *size* is small.
+ (self._buffer_offset == len(self._buffer) and not self._fill_buffer())):
return b""
- if 0 < size < len(self._buffer):
- data = self._buffer[:size]
- self._buffer = self._buffer[size:]
+ if size > 0:
+ data = self._buffer[self._buffer_offset :
+ self._buffer_offset + size]
+ self._buffer_offset += len(data)
else:
- data = self._buffer
- self._buffer = None
+ data = self._buffer[self._buffer_offset:]
+ self._buffer = b""
+ self._buffer_offset = 0
self._pos += len(data)
return data
+ def readline(self, size=-1):
+ """Read a line of uncompressed bytes from the file.
+
+ The terminating newline (if present) is retained. If size is
+ non-negative, no more than size bytes will be read (in which
+ case the line may be incomplete). Returns b'' if already at EOF.
+ """
+ self._check_can_read()
+ # Shortcut for the common case - the whole line is in the buffer.
+ if size < 0:
+ end = self._buffer.find(b"\n", self._buffer_offset) + 1
+ if end > 0:
+ line = self._buffer[self._buffer_offset : end]
+ self._buffer_offset = end
+ self._pos += len(line)
+ return line
+ return io.BufferedIOBase.readline(self, size)
+
def write(self, data):
"""Write a bytes object to the file.
@@ -326,7 +367,8 @@ class LZMAFile(io.BufferedIOBase):
self._mode = _MODE_READ
self._pos = 0
self._decompressor = LZMADecompressor(**self._init_args)
- self._buffer = None
+ self._buffer = b""
+ self._buffer_offset = 0
def seek(self, offset, whence=0):
"""Change the file position.
@@ -365,8 +407,7 @@ class LZMAFile(io.BufferedIOBase):
offset -= self._pos
# Read and discard data until we reach the desired position.
- if self._mode != _MODE_READ_EOF:
- self._read_block(offset, return_data=False)
+ self._read_block(offset, return_data=False)
return self._pos
@@ -381,23 +422,24 @@ def open(filename, mode="rb", *,
encoding=None, errors=None, newline=None):
"""Open an LZMA-compressed file in binary or text mode.
- filename can be either an actual file name (given as a str or bytes object),
- in which case the named file is opened, or it can be an existing file object
- to read from or write to.
+ filename can be either an actual file name (given as a str or bytes
+ object), in which case the named file is opened, or it can be an
+ existing file object to read from or write to.
- The mode argument can be "r", "rb" (default), "w", "wb", "a", or "ab" for
- binary mode, or "rt", "wt" or "at" for text mode.
+ The mode argument can be "r", "rb" (default), "w", "wb", "a" or "ab"
+ for binary mode, or "rt", "wt" or "at" for text mode.
- The format, check, preset and filters arguments specify the compression
- settings, as for LZMACompressor, LZMADecompressor and LZMAFile.
+ The format, check, preset and filters arguments specify the
+ compression settings, as for LZMACompressor, LZMADecompressor and
+ LZMAFile.
- For binary mode, this function is equivalent to the LZMAFile constructor:
- LZMAFile(filename, mode, ...). In this case, the encoding, errors and
- newline arguments must not be provided.
+ For binary mode, this function is equivalent to the LZMAFile
+ constructor: LZMAFile(filename, mode, ...). In this case, the
+ encoding, errors and newline arguments must not be provided.
For text mode, a LZMAFile object is created, and wrapped in an
- io.TextIOWrapper instance with the specified encoding, error handling
- behavior, and line ending(s).
+ io.TextIOWrapper instance with the specified encoding, error
+ handling behavior, and line ending(s).
"""
if "t" in mode:
@@ -427,7 +469,7 @@ def compress(data, format=FORMAT_XZ, check=-1, preset=None, filters=None):
Refer to LZMACompressor's docstring for a description of the
optional arguments *format*, *check*, *preset* and *filters*.
- For incremental compression, use an LZMACompressor object instead.
+ For incremental compression, use an LZMACompressor instead.
"""
comp = LZMACompressor(format, check, preset, filters)
return comp.compress(data) + comp.flush()
@@ -439,7 +481,7 @@ def decompress(data, format=FORMAT_AUTO, memlimit=None, filters=None):
Refer to LZMADecompressor's docstring for a description of the
optional arguments *format*, *check* and *filters*.
- For incremental decompression, use a LZMADecompressor object instead.
+ For incremental decompression, use an LZMADecompressor instead.
"""
results = []
while True:
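The buffer-offset bookkeeping above is what makes the new readline() cheap, which in turn makes iteration and text-mode wrapping via lzma.open practical. A small usage sketch (the file name is illustrative):

    import lzma

    with lzma.open('notes.txt.xz', 'wt', encoding='utf-8') as f:
        f.write('first line\nsecond line\n')

    with lzma.open('notes.txt.xz', 'rt', encoding='utf-8') as f:
        for line in f:                            # uses LZMAFile.readline() underneath
            print(line.rstrip())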
diff --git a/Lib/macpath.py b/Lib/macpath.py
index 1615d91..d34f9e94 100644
--- a/Lib/macpath.py
+++ b/Lib/macpath.py
@@ -127,7 +127,7 @@ def lexists(path):
try:
st = os.lstat(path)
- except os.error:
+ except OSError:
return False
return True
diff --git a/Lib/mailbox.py b/Lib/mailbox.py
index d3bf3fd..2049516 100644
--- a/Lib/mailbox.py
+++ b/Lib/mailbox.py
@@ -22,9 +22,6 @@ import email.generator
import io
import contextlib
try:
- if sys.platform == 'os2emx':
- # OS/2 EMX fcntl() not adequate
- raise ImportError
import fcntl
except ImportError:
fcntl = None
@@ -337,11 +334,8 @@ class Maildir(Mailbox):
# This overrides an inapplicable implementation in the superclass.
try:
self.remove(key)
- except KeyError:
+ except (KeyError, FileNotFoundError):
pass
- except OSError as e:
- if e.errno != errno.ENOENT:
- raise
def __setitem__(self, key, message):
"""Replace the keyed message; raise KeyError if it doesn't exist."""
@@ -369,14 +363,11 @@ class Maildir(Mailbox):
def get_message(self, key):
"""Return a Message representation or raise a KeyError."""
subpath = self._lookup(key)
- f = open(os.path.join(self._path, subpath), 'rb')
- try:
+ with open(os.path.join(self._path, subpath), 'rb') as f:
if self._factory:
msg = self._factory(f)
else:
msg = MaildirMessage(f)
- finally:
- f.close()
subdir, name = os.path.split(subpath)
msg.set_subdir(subdir)
if self.colon in name:
@@ -386,11 +377,8 @@ class Maildir(Mailbox):
def get_bytes(self, key):
"""Return a bytes representation or raise a KeyError."""
- f = open(os.path.join(self._path, self._lookup(key)), 'rb')
- try:
+ with open(os.path.join(self._path, self._lookup(key)), 'rb') as f:
return f.read().replace(linesep, b'\n')
- finally:
- f.close()
def get_file(self, key):
"""Return a file-like representation or raise a KeyError."""
@@ -502,16 +490,12 @@ class Maildir(Mailbox):
path = os.path.join(self._path, 'tmp', uniq)
try:
os.stat(path)
- except OSError as e:
- if e.errno == errno.ENOENT:
- Maildir._count += 1
- try:
- return _create_carefully(path)
- except OSError as e:
- if e.errno != errno.EEXIST:
- raise
- else:
- raise
+ except FileNotFoundError:
+ Maildir._count += 1
+ try:
+ return _create_carefully(path)
+ except FileExistsError:
+ pass
# Fall through to here if stat succeeded or open raised EEXIST.
raise ExternalClashError('Name clash prevented file creation: %s' %
@@ -588,7 +572,7 @@ class _singlefileMailbox(Mailbox):
Mailbox.__init__(self, path, factory, create)
try:
f = open(self._path, 'rb+')
- except IOError as e:
+ except OSError as e:
if e.errno == errno.ENOENT:
if create:
f = open(self._path, 'wb+')
@@ -631,8 +615,7 @@ class _singlefileMailbox(Mailbox):
def iterkeys(self):
"""Return an iterator over keys."""
self._lookup()
- for key in self._toc.keys():
- yield key
+ yield from self._toc.keys()
def __contains__(self, key):
"""Return True if the keyed message exists, False otherwise."""
@@ -710,13 +693,9 @@ class _singlefileMailbox(Mailbox):
os.chmod(new_file.name, mode)
try:
os.rename(new_file.name, self._path)
- except OSError as e:
- if e.errno == errno.EEXIST or \
- (os.name == 'os2' and e.errno == errno.EACCES):
- os.remove(self._path)
- os.rename(new_file.name, self._path)
- else:
- raise
+ except FileExistsError:
+ os.remove(self._path)
+ os.rename(new_file.name, self._path)
self._file = open(self._path, 'rb+')
self._toc = new_toc
self._pending = False
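These mailbox rewrites lean on the PEP 3151 OSError subclasses, so the manual errno checks disappear. The general pattern, with an illustrative path:

    import errno
    import os

    path = 'probably-missing.tmp'

    # Old style: inspect errno by hand.
    try:
        os.stat(path)
    except OSError as e:
        if e.errno != errno.ENOENT:
            raise
        print('missing (old-style check)')

    # New style: let the OSError subclass do the work.
    try:
        os.stat(path)
    except FileNotFoundError:
        print('missing (new-style check)')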
@@ -993,7 +972,7 @@ class MH(Mailbox):
path = os.path.join(self._path, str(key))
try:
f = open(path, 'rb+')
- except IOError as e:
+ except OSError as e:
if e.errno == errno.ENOENT:
raise KeyError('No message with key: %s' % key)
else:
@@ -1007,7 +986,7 @@ class MH(Mailbox):
path = os.path.join(self._path, str(key))
try:
f = open(path, 'rb+')
- except IOError as e:
+ except OSError as e:
if e.errno == errno.ENOENT:
raise KeyError('No message with key: %s' % key)
else:
@@ -1033,12 +1012,12 @@ class MH(Mailbox):
f = open(os.path.join(self._path, str(key)), 'rb+')
else:
f = open(os.path.join(self._path, str(key)), 'rb')
- except IOError as e:
+ except OSError as e:
if e.errno == errno.ENOENT:
raise KeyError('No message with key: %s' % key)
else:
raise
- try:
+ with f:
if self._locked:
_lock_file(f)
try:
@@ -1046,8 +1025,6 @@ class MH(Mailbox):
finally:
if self._locked:
_unlock_file(f)
- finally:
- f.close()
for name, key_list in self.get_sequences().items():
if key in key_list:
msg.add_sequence(name)
@@ -1060,12 +1037,12 @@ class MH(Mailbox):
f = open(os.path.join(self._path, str(key)), 'rb+')
else:
f = open(os.path.join(self._path, str(key)), 'rb')
- except IOError as e:
+ except OSError as e:
if e.errno == errno.ENOENT:
raise KeyError('No message with key: %s' % key)
else:
raise
- try:
+ with f:
if self._locked:
_lock_file(f)
try:
@@ -1073,14 +1050,12 @@ class MH(Mailbox):
finally:
if self._locked:
_unlock_file(f)
- finally:
- f.close()
def get_file(self, key):
"""Return a file-like representation or raise a KeyError."""
try:
f = open(os.path.join(self._path, str(key)), 'rb')
- except IOError as e:
+ except OSError as e:
if e.errno == errno.ENOENT:
raise KeyError('No message with key: %s' % key)
else:
@@ -2073,7 +2048,7 @@ def _lock_file(f, dotlock=True):
if fcntl:
try:
fcntl.lockf(f, fcntl.LOCK_EX | fcntl.LOCK_NB)
- except IOError as e:
+ except OSError as e:
if e.errno in (errno.EAGAIN, errno.EACCES, errno.EROFS):
raise ExternalClashError('lockf: lock unavailable: %s' %
f.name)
@@ -2083,7 +2058,7 @@ def _lock_file(f, dotlock=True):
try:
pre_lock = _create_temporary(f.name + '.lock')
pre_lock.close()
- except IOError as e:
+ except OSError as e:
if e.errno in (errno.EACCES, errno.EROFS):
return # Without write access, just skip dotlocking.
else:
@@ -2096,14 +2071,10 @@ def _lock_file(f, dotlock=True):
else:
os.rename(pre_lock.name, f.name + '.lock')
dotlock_done = True
- except OSError as e:
- if e.errno == errno.EEXIST or \
- (os.name == 'os2' and e.errno == errno.EACCES):
- os.remove(pre_lock.name)
- raise ExternalClashError('dot lock unavailable: %s' %
- f.name)
- else:
- raise
+ except FileExistsError:
+ os.remove(pre_lock.name)
+ raise ExternalClashError('dot lock unavailable: %s' %
+ f.name)
except:
if fcntl:
fcntl.lockf(f, fcntl.LOCK_UN)
diff --git a/Lib/mailcap.py b/Lib/mailcap.py
index 99f4958..bd61b0b 100644
--- a/Lib/mailcap.py
+++ b/Lib/mailcap.py
@@ -20,7 +20,7 @@ def getcaps():
for mailcap in listmailcapfiles():
try:
fp = open(mailcap, 'r')
- except IOError:
+ except OSError:
continue
morecaps = readmailcapfile(fp)
fp.close()
diff --git a/Lib/mimetypes.py b/Lib/mimetypes.py
index 3f0bd0e..5aaa908 100644
--- a/Lib/mimetypes.py
+++ b/Lib/mimetypes.py
@@ -243,7 +243,7 @@ class MimeTypes:
while True:
try:
ctype = _winreg.EnumKey(mimedb, i)
- except EnvironmentError:
+ except OSError:
break
else:
yield ctype
@@ -256,7 +256,7 @@ class MimeTypes:
with _winreg.OpenKey(mimedb, ctype) as key:
suffix, datatype = _winreg.QueryValueEx(key,
'Extension')
- except EnvironmentError:
+ except OSError:
continue
if datatype != _winreg.REG_SZ:
continue
@@ -359,7 +359,7 @@ def init(files=None):
def read_mime_types(file):
try:
f = open(file)
- except IOError:
+ except OSError:
return None
db = MimeTypes()
db.readfp(f, True)
@@ -378,12 +378,14 @@ def _default_mime_types():
'.taz': '.tar.gz',
'.tz': '.tar.gz',
'.tbz2': '.tar.bz2',
+ '.txz': '.tar.xz',
}
encodings_map = {
'.gz': 'gzip',
'.Z': 'compress',
'.bz2': 'bzip2',
+ '.xz': 'xz',
}
# Before adding new types, make sure they are either registered with IANA,
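With '.txz' in the suffix map and '.xz' in the encodings map, guess_type() reports xz archives the same way it reports gzip ones; roughly:

    >>> import mimetypes
    >>> mimetypes.guess_type('backup.tar.xz')
    ('application/x-tar', 'xz')
    >>> mimetypes.guess_type('backup.txz')
    ('application/x-tar', 'xz')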
diff --git a/Lib/modulefinder.py b/Lib/modulefinder.py
index f90a432..4996d7a 100644
--- a/Lib/modulefinder.py
+++ b/Lib/modulefinder.py
@@ -229,7 +229,7 @@ class ModuleFinder:
for dir in m.__path__:
try:
names = os.listdir(dir)
- except os.error:
+ except OSError:
self.msg(2, "can't list directory", dir)
continue
for name in names:
diff --git a/Lib/multiprocessing/__init__.py b/Lib/multiprocessing/__init__.py
index 1f3e67c..b5f16d7 100644
--- a/Lib/multiprocessing/__init__.py
+++ b/Lib/multiprocessing/__init__.py
@@ -8,10 +8,6 @@
# subpackage 'multiprocessing.dummy' has the same API but is a simple
# wrapper for 'threading'.
#
-# Try calling `multiprocessing.doc.main()` to read the html
-# documentation in a webbrowser.
-#
-#
# Copyright (c) 2006-2008, R Oudkerk
# Licensed to PSF under a Contributor Agreement.
#
@@ -27,8 +23,6 @@ __all__ = [
'Value', 'Array', 'RawValue', 'RawArray', 'SUBDEBUG', 'SUBWARNING',
]
-__author__ = 'R. Oudkerk (r.m.oudkerk@gmail.com)'
-
#
# Imports
#
@@ -40,6 +34,13 @@ from multiprocessing.process import Process, current_process, active_children
from multiprocessing.util import SUBDEBUG, SUBWARNING
#
+# Alias for main module -- will be reset by bootstrapping child processes
+#
+
+if '__main__' in sys.modules:
+ sys.modules['__mp_main__'] = sys.modules['__main__']
+
+#
# Exceptions
#
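As the comment in the hunk above says, the parent registers its __main__ module under the extra name __mp_main__, which bootstrapping in child processes later resets; observed from the parent in this version:

    >>> import sys, multiprocessing
    >>> sys.modules['__mp_main__'] is sys.modules['__main__']
    True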
diff --git a/Lib/multiprocessing/connection.py b/Lib/multiprocessing/connection.py
index 1d65f46..9a357f6 100644
--- a/Lib/multiprocessing/connection.py
+++ b/Lib/multiprocessing/connection.py
@@ -132,22 +132,22 @@ class _ConnectionBase:
def _check_closed(self):
if self._handle is None:
- raise IOError("handle is closed")
+ raise OSError("handle is closed")
def _check_readable(self):
if not self._readable:
- raise IOError("connection is write-only")
+ raise OSError("connection is write-only")
def _check_writable(self):
if not self._writable:
- raise IOError("connection is read-only")
+ raise OSError("connection is read-only")
def _bad_message_length(self):
if self._writable:
self._readable = False
else:
self.close()
- raise IOError("bad message length")
+ raise OSError("bad message length")
@property
def closed(self):
@@ -317,7 +317,7 @@ if _winapi:
return f
elif err == _winapi.ERROR_MORE_DATA:
return self._get_more_data(ov, maxsize)
- except IOError as e:
+ except OSError as e:
if e.winerror == _winapi.ERROR_BROKEN_PIPE:
raise EOFError
else:
@@ -383,7 +383,7 @@ class Connection(_ConnectionBase):
if remaining == size:
raise EOFError
else:
- raise IOError("got end of file during message")
+ raise OSError("got end of file during message")
buf.write(chunk)
remaining -= n
return buf
@@ -443,7 +443,7 @@ class Listener(object):
Returns a `Connection` object.
'''
if self._listener is None:
- raise IOError('listener is closed')
+ raise OSError('listener is closed')
c = self._listener.accept()
if self._authkey:
deliver_challenge(c, self._authkey)
@@ -676,7 +676,7 @@ if sys.platform == 'win32':
0, _winapi.NULL, _winapi.OPEN_EXISTING,
_winapi.FILE_FLAG_OVERLAPPED, _winapi.NULL
)
- except WindowsError as e:
+ except OSError as e:
if e.winerror not in (_winapi.ERROR_SEM_TIMEOUT,
_winapi.ERROR_PIPE_BUSY) or _check_timeout(t):
raise
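Callers that used to catch IOError from Connection methods can simply catch OSError now; a small sketch:

    from multiprocessing import Pipe

    parent, child = Pipe()
    child.close()
    parent.close()
    try:
        parent.recv()           # using a closed handle
    except OSError as exc:      # formerly reported as IOError("handle is closed")
        print('expected:', exc)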
diff --git a/Lib/multiprocessing/dummy/__init__.py b/Lib/multiprocessing/dummy/__init__.py
index e31fc61..20ae957 100644
--- a/Lib/multiprocessing/dummy/__init__.py
+++ b/Lib/multiprocessing/dummy/__init__.py
@@ -4,32 +4,7 @@
# multiprocessing/dummy/__init__.py
#
# Copyright (c) 2006-2008, R Oudkerk
-# All rights reserved.
-#
-# Redistribution and use in source and binary forms, with or without
-# modification, are permitted provided that the following conditions
-# are met:
-#
-# 1. Redistributions of source code must retain the above copyright
-# notice, this list of conditions and the following disclaimer.
-# 2. Redistributions in binary form must reproduce the above copyright
-# notice, this list of conditions and the following disclaimer in the
-# documentation and/or other materials provided with the distribution.
-# 3. Neither the name of author nor the names of any contributors may be
-# used to endorse or promote products derived from this software
-# without specific prior written permission.
-#
-# THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS "AS IS" AND
-# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
-# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
-# ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE
-# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
-# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
-# OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
-# HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
-# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
-# OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
-# SUCH DAMAGE.
+# Licensed to PSF under a Contributor Agreement.
#
__all__ = [
diff --git a/Lib/multiprocessing/dummy/connection.py b/Lib/multiprocessing/dummy/connection.py
index 874ec8e..694ef96 100644
--- a/Lib/multiprocessing/dummy/connection.py
+++ b/Lib/multiprocessing/dummy/connection.py
@@ -4,32 +4,7 @@
# multiprocessing/dummy/connection.py
#
# Copyright (c) 2006-2008, R Oudkerk
-# All rights reserved.
-#
-# Redistribution and use in source and binary forms, with or without
-# modification, are permitted provided that the following conditions
-# are met:
-#
-# 1. Redistributions of source code must retain the above copyright
-# notice, this list of conditions and the following disclaimer.
-# 2. Redistributions in binary form must reproduce the above copyright
-# notice, this list of conditions and the following disclaimer in the
-# documentation and/or other materials provided with the distribution.
-# 3. Neither the name of author nor the names of any contributors may be
-# used to endorse or promote products derived from this software
-# without specific prior written permission.
-#
-# THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS "AS IS" AND
-# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
-# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
-# ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE
-# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
-# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
-# OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
-# HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
-# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
-# OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
-# SUCH DAMAGE.
+# Licensed to PSF under a Contributor Agreement.
#
__all__ = [ 'Client', 'Listener', 'Pipe' ]
diff --git a/Lib/multiprocessing/forking.py b/Lib/multiprocessing/forking.py
index 0bb21c4..7bda412 100644
--- a/Lib/multiprocessing/forking.py
+++ b/Lib/multiprocessing/forking.py
@@ -113,7 +113,7 @@ if sys.platform != 'win32':
while True:
try:
pid, sts = os.waitpid(self.pid, flag)
- except os.error as e:
+ except OSError as e:
if e.errno == errno.EINTR:
continue
# Child process not yet created. See #1731717
@@ -448,27 +448,17 @@ def prepare(data):
dirs = [os.path.dirname(main_path)]
assert main_name not in sys.modules, main_name
+ sys.modules.pop('__mp_main__', None)
file, path_name, etc = imp.find_module(main_name, dirs)
try:
- # We would like to do "imp.load_module('__main__', ...)"
- # here. However, that would cause 'if __name__ ==
- # "__main__"' clauses to be executed.
+ # We should not do 'imp.load_module("__main__", ...)'
+ # since that would execute 'if __name__ == "__main__"'
+ # clauses, potentially causing a pseudo fork bomb.
main_module = imp.load_module(
- '__parents_main__', file, path_name, etc
+ '__mp_main__', file, path_name, etc
)
finally:
if file:
file.close()
- sys.modules['__main__'] = main_module
- main_module.__name__ = '__main__'
-
- # Try to make the potentially picklable objects in
- # sys.modules['__main__'] realize they are in the main
- # module -- somewhat ugly.
- for obj in list(main_module.__dict__.values()):
- try:
- if obj.__module__ == '__parents_main__':
- obj.__module__ = '__main__'
- except Exception:
- pass
+ sys.modules['__main__'] = sys.modules['__mp_main__'] = main_module
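The practical consequence for user code is unchanged: top-level code still needs the usual guard, since children re-import the main module (now registered as '__mp_main__'). A minimal sketch:

    import multiprocessing as mp

    def square(x):
        return x * x

    if __name__ == '__main__':
        pool = mp.Pool(2)
        try:
            print(pool.map(square, range(5)))   # [0, 1, 4, 9, 16]
        finally:
            pool.close()
            pool.join()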
diff --git a/Lib/multiprocessing/managers.py b/Lib/multiprocessing/managers.py
index 1ab147e..30ef771 100644
--- a/Lib/multiprocessing/managers.py
+++ b/Lib/multiprocessing/managers.py
@@ -167,7 +167,7 @@ class Server(object):
while True:
try:
c = self.listener.accept()
- except (OSError, IOError):
+ except OSError:
continue
t = threading.Thread(target=self.handle_request, args=(c,))
t.daemon = True
diff --git a/Lib/multiprocessing/pool.py b/Lib/multiprocessing/pool.py
index 7f73b44..c0aa717 100644
--- a/Lib/multiprocessing/pool.py
+++ b/Lib/multiprocessing/pool.py
@@ -78,8 +78,8 @@ def worker(inqueue, outqueue, initializer=None, initargs=(), maxtasks=None):
while maxtasks is None or (maxtasks and completed < maxtasks):
try:
task = get()
- except (EOFError, IOError):
- debug('worker got EOFError or IOError -- exiting')
+ except (EOFError, OSError):
+ debug('worker got EOFError or OSError -- exiting')
break
if task is None:
@@ -349,7 +349,7 @@ class Pool(object):
break
try:
put(task)
- except IOError:
+ except OSError:
debug('could not put task on queue')
break
else:
@@ -371,8 +371,8 @@ class Pool(object):
debug('task handler sending sentinel to workers')
for p in pool:
put(None)
- except IOError:
- debug('task handler got IOError when sending sentinels')
+ except OSError:
+ debug('task handler got OSError when sending sentinels')
debug('task handler exiting')
@@ -383,8 +383,8 @@ class Pool(object):
while 1:
try:
task = get()
- except (IOError, EOFError):
- debug('result handler got EOFError/IOError -- exiting')
+ except (OSError, EOFError):
+ debug('result handler got EOFError/OSError -- exiting')
return
if thread._state:
@@ -405,8 +405,8 @@ class Pool(object):
while cache and thread._state != TERMINATE:
try:
task = get()
- except (IOError, EOFError):
- debug('result handler got EOFError/IOError -- exiting')
+ except (OSError, EOFError):
+ debug('result handler got EOFError/OSError -- exiting')
return
if task is None:
@@ -428,7 +428,7 @@ class Pool(object):
if not outqueue._reader.poll():
break
get()
- except (IOError, EOFError):
+ except (OSError, EOFError):
pass
debug('result handler exiting: len(cache)=%s, thread._state=%s',
diff --git a/Lib/multiprocessing/queues.py b/Lib/multiprocessing/queues.py
index 37271fb..f6f02b6 100644
--- a/Lib/multiprocessing/queues.py
+++ b/Lib/multiprocessing/queues.py
@@ -243,10 +243,14 @@ class Queue(object):
if wacquire is None:
send(obj)
+ # Delete references to object. See issue16284
+ del obj
else:
wacquire()
try:
send(obj)
+ # Delete references to object. See issue16284
+ del obj
finally:
wrelease()
except IndexError:
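The added 'del obj' is purely about reference lifetime; the same pattern in user code, sketched:

    def drain(get, send):
        while True:
            obj = get()
            if obj is None:
                break
            send(obj)
            del obj   # drop the reference so a large payload can be freed before the next get()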
diff --git a/Lib/netrc.py b/Lib/netrc.py
index c96db6f..7fe69ee 100644
--- a/Lib/netrc.py
+++ b/Lib/netrc.py
@@ -25,7 +25,7 @@ class netrc:
try:
file = os.path.join(os.environ['HOME'], ".netrc")
except KeyError:
- raise IOError("Could not find .netrc: $HOME is not set")
+ raise OSError("Could not find .netrc: $HOME is not set")
self.hosts = {}
self.macros = {}
with open(file) as fp:
diff --git a/Lib/nntplib.py b/Lib/nntplib.py
index 2de6ebd..01d4303 100644
--- a/Lib/nntplib.py
+++ b/Lib/nntplib.py
@@ -359,7 +359,7 @@ class _NNTPBase:
if is_connected():
try:
self.quit()
- except (socket.error, EOFError):
+ except (OSError, EOFError):
pass
finally:
if is_connected():
@@ -947,7 +947,7 @@ class _NNTPBase:
if auth:
user = auth[0]
password = auth[2]
- except IOError:
+ except OSError:
pass
# Perform NNTP authentication if needed.
if not user:
diff --git a/Lib/ntpath.py b/Lib/ntpath.py
index 826be87..d81f728 100644
--- a/Lib/ntpath.py
+++ b/Lib/ntpath.py
@@ -17,7 +17,7 @@ __all__ = ["normcase","isabs","join","splitdrive","split","splitext",
"ismount", "expanduser","expandvars","normpath","abspath",
"splitunc","curdir","pardir","sep","pathsep","defpath","altsep",
"extsep","devnull","realpath","supports_unicode_filenames","relpath",
- "samefile", "sameopenfile",]
+ "samefile", "sameopenfile", "samestat",]
# strings representing various path-related bits and pieces
# These are primarily for export; internally, they are hardcoded.
@@ -30,9 +30,6 @@ altsep = '/'
defpath = '.;C:\\bin'
if 'ce' in sys.builtin_module_names:
defpath = '\\Windows'
-elif 'os2' in sys.builtin_module_names:
- # OS/2 w/ VACPP
- altsep = '/'
devnull = 'nul'
def _get_empty(path):
@@ -320,12 +317,11 @@ def dirname(p):
def islink(path):
"""Test whether a path is a symbolic link.
- This will always return false for Windows prior to 6.0
- and for OS/2.
+ This will always return false for Windows prior to 6.0.
"""
try:
st = os.lstat(path)
- except (os.error, AttributeError):
+ except (OSError, AttributeError):
return False
return stat.S_ISLNK(st.st_mode)
@@ -335,7 +331,7 @@ def lexists(path):
"""Test whether a path exists. Returns True for broken symbolic links"""
try:
st = os.lstat(path)
- except (os.error, WindowsError):
+ except OSError:
return False
return True
@@ -588,7 +584,7 @@ else: # use native Windows method on Windows
if path: # Empty path must return current working directory.
try:
path = _getfullpathname(path)
- except WindowsError:
+ except OSError:
pass # Bad path - return unchanged.
elif isinstance(path, bytes):
path = os.getcwdb()
@@ -656,23 +652,6 @@ except (AttributeError, ImportError):
def _getfinalpathname(f):
return normcase(abspath(f))
-def samefile(f1, f2):
- "Test whether two pathnames reference the same actual file"
- return _getfinalpathname(f1) == _getfinalpathname(f2)
-
-
-try:
- from nt import _getfileinformation
-except ImportError:
- # On other operating systems, just return the fd and see that
- # it compares equal in sameopenfile.
- def _getfileinformation(fd):
- return fd
-
-def sameopenfile(f1, f2):
- """Test whether two file objects reference the same file"""
- return _getfileinformation(f1) == _getfileinformation(f2)
-
try:
# The genericpath.isdir implementation uses os.stat and checks the mode
diff --git a/Lib/nturl2path.py b/Lib/nturl2path.py
index 511dcec..5a6d44a 100644
--- a/Lib/nturl2path.py
+++ b/Lib/nturl2path.py
@@ -23,7 +23,7 @@ def url2pathname(url):
comp = url.split('|')
if len(comp) != 2 or comp[0][-1] not in string.ascii_letters:
error = 'Bad URL: ' + url
- raise IOError(error)
+ raise OSError(error)
drive = comp[0][-1].upper()
components = comp[1].split('/')
path = drive + ':'
@@ -55,7 +55,7 @@ def pathname2url(p):
comp = p.split(':')
if len(comp) != 2 or len(comp[0]) > 1:
error = 'Bad path: ' + p
- raise IOError(error)
+ raise OSError(error)
drive = urllib.parse.quote(comp[0].upper())
components = comp[1].split('\\')
diff --git a/Lib/os.py b/Lib/os.py
index e4ea142..5f35f4c 100644
--- a/Lib/os.py
+++ b/Lib/os.py
@@ -1,9 +1,9 @@
r"""OS routines for Mac, NT, or Posix depending on what system we're on.
This exports:
- - all functions from posix, nt, os2, or ce, e.g. unlink, stat, etc.
+ - all functions from posix, nt or ce, e.g. unlink, stat, etc.
- os.path is either posixpath or ntpath
- - os.name is either 'posix', 'nt', 'os2' or 'ce'.
+ - os.name is either 'posix', 'nt' or 'ce'.
- os.curdir is a string representing the current directory ('.' or ':')
- os.pardir is a string representing the parent directory ('..' or '::')
- os.sep is the (or a most common) pathname separator ('/' or ':' or '\\')
@@ -81,30 +81,6 @@ elif 'nt' in _names:
except ImportError:
pass
-elif 'os2' in _names:
- name = 'os2'
- linesep = '\r\n'
- from os2 import *
- try:
- from os2 import _exit
- __all__.append('_exit')
- except ImportError:
- pass
- if sys.version.find('EMX GCC') == -1:
- import ntpath as path
- else:
- import os2emxpath as path
- from _emx_link import link
-
- import os2
- __all__.extend(_get_exports_list(os2))
- del os2
-
- try:
- from os2 import _have_functions
- except ImportError:
- pass
-
elif 'ce' in _names:
name = 'ce'
linesep = '\r\n'
@@ -256,10 +232,9 @@ def makedirs(name, mode=0o777, exist_ok=False):
if head and tail and not path.exists(head):
try:
makedirs(head, mode, exist_ok)
- except OSError as e:
+ except FileExistsError:
# be happy if someone already created the path
- if e.errno != errno.EEXIST:
- raise
+ pass
cdir = curdir
if isinstance(tail, bytes):
cdir = bytes(curdir, 'ASCII')
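The same simplification applies in user code: catch FileExistsError instead of inspecting errno. A sketch:

    import os

    def ensure_dir(path):
        try:
            os.makedirs(path)
        except FileExistsError:
            pass   # already there -- that's fine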
@@ -302,7 +277,7 @@ def removedirs(name):
while head and tail:
try:
rmdir(head)
- except error:
+ except OSError:
break
head, tail = path.split(head)
@@ -329,7 +304,7 @@ def renames(old, new):
if head and tail:
try:
removedirs(head)
- except error:
+ except OSError:
pass
__all__.extend(["makedirs", "removedirs", "renames"])
@@ -365,7 +340,7 @@ def walk(top, topdown=True, onerror=None, followlinks=False):
By default errors from the os.listdir() call are ignored. If
optional arg 'onerror' is specified, it should be a function; it
- will be called with one argument, an os.error instance. It can
+ will be called with one argument, an OSError instance. It can
report the error to continue with the walk, or raise the exception
to abort the walk. Note that the filename is available as the
filename attribute of the exception object.
@@ -399,10 +374,10 @@ def walk(top, topdown=True, onerror=None, followlinks=False):
# minor reason when (say) a thousand readable directories are still
# left to visit. That logic is copied here.
try:
- # Note that listdir and error are globals in this module due
+ # Note that listdir is global in this module due
# to earlier import-*.
names = listdir(top)
- except error as err:
+ except OSError as err:
if onerror is not None:
onerror(err)
return
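Usage sketch for the onerror hook as now documented (it receives an OSError; os.error remains an alias):

    import os

    def log_error(err):
        # err is an OSError; err.filename names the directory that could not be listed
        print('skipping %s: %s' % (err.filename, err.strerror))

    for root, dirs, files in os.walk('.', onerror=log_error):
        pass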
@@ -504,7 +479,7 @@ if {open, stat} <= supports_dir_fd and {listdir, stat} <= supports_fd:
try:
orig_st = stat(name, dir_fd=topfd, follow_symlinks=follow_symlinks)
dirfd = open(name, O_RDONLY, dir_fd=topfd)
- except error as err:
+ except OSError as err:
if onerror is not None:
onerror(err)
return
@@ -599,7 +574,7 @@ def _execvpe(file, args, env=None):
fullname = path.join(dir, file)
try:
exec_func(fullname, *argrest)
- except error as e:
+ except OSError as e:
last_exc = e
tb = sys.exc_info()[2]
if (e.errno != errno.ENOENT and e.errno != errno.ENOTDIR
@@ -718,7 +693,7 @@ else:
__all__.append("unsetenv")
def _createenviron():
- if name in ('os2', 'nt'):
+ if name == 'nt':
# Where Env Var Names Must Be UPPERCASE
def check_str(value):
if not isinstance(value, str):
@@ -758,7 +733,7 @@ def getenv(key, default=None):
key, default and the result are str."""
return environ.get(key, default)
-supports_bytes_environ = name not in ('os2', 'nt')
+supports_bytes_environ = (name != 'nt')
__all__.extend(("getenv", "supports_bytes_environ"))
if supports_bytes_environ:
@@ -857,7 +832,7 @@ if _exists("fork") and not _exists("spawnv") and _exists("execv"):
elif WIFEXITED(sts):
return WEXITSTATUS(sts)
else:
- raise error("Not stopped, signaled or exited???")
+ raise OSError("Not stopped, signaled or exited???")
def spawnv(mode, file, args):
"""spawnv(mode, file, args) -> integer
diff --git a/Lib/os2emxpath.py b/Lib/os2emxpath.py
deleted file mode 100644
index 0ccbf8a..0000000
--- a/Lib/os2emxpath.py
+++ /dev/null
@@ -1,158 +0,0 @@
-# Module 'os2emxpath' -- common operations on OS/2 pathnames
-"""Common pathname manipulations, OS/2 EMX version.
-
-Instead of importing this module directly, import os and refer to this
-module as os.path.
-"""
-
-import os
-import stat
-from genericpath import *
-from ntpath import (expanduser, expandvars, isabs, islink, splitdrive,
- splitext, split)
-
-__all__ = ["normcase","isabs","join","splitdrive","split","splitext",
- "basename","dirname","commonprefix","getsize","getmtime",
- "getatime","getctime", "islink","exists","lexists","isdir","isfile",
- "ismount","expanduser","expandvars","normpath","abspath",
- "splitunc","curdir","pardir","sep","pathsep","defpath","altsep",
- "extsep","devnull","realpath","supports_unicode_filenames"]
-
-# strings representing various path-related bits and pieces
-curdir = '.'
-pardir = '..'
-extsep = '.'
-sep = '/'
-altsep = '\\'
-pathsep = ';'
-defpath = '.;C:\\bin'
-devnull = 'nul'
-
-# Normalize the case of a pathname and map slashes to backslashes.
-# Other normalizations (such as optimizing '../' away) are not done
-# (this is done by normpath).
-
-def normcase(s):
- """Normalize case of pathname.
-
- Makes all characters lowercase and all altseps into seps."""
- if not isinstance(s, (bytes, str)):
- raise TypeError("normcase() argument must be str or bytes, "
- "not '{}'".format(s.__class__.__name__))
- return s.replace('\\', '/').lower()
-
-
-# Join two (or more) paths.
-
-def join(a, *p):
- """Join two or more pathname components, inserting sep as needed"""
- path = a
- for b in p:
- if isabs(b):
- path = b
- elif path == '' or path[-1:] in '/\\:':
- path = path + b
- else:
- path = path + '/' + b
- return path
-
-
-# Parse UNC paths
-def splitunc(p):
- """Split a pathname into UNC mount point and relative path specifiers.
-
- Return a 2-tuple (unc, rest); either part may be empty.
- If unc is not empty, it has the form '//host/mount' (or similar
- using backslashes). unc+rest is always the input path.
- Paths containing drive letters never have an UNC part.
- """
- if p[1:2] == ':':
- return '', p # Drive letter present
- firstTwo = p[0:2]
- if firstTwo == '/' * 2 or firstTwo == '\\' * 2:
- # is a UNC path:
- # vvvvvvvvvvvvvvvvvvvv equivalent to drive letter
- # \\machine\mountpoint\directories...
- # directory ^^^^^^^^^^^^^^^
- normp = normcase(p)
- index = normp.find('/', 2)
- if index == -1:
- ##raise RuntimeError, 'illegal UNC path: "' + p + '"'
- return ("", p)
- index = normp.find('/', index + 1)
- if index == -1:
- index = len(p)
- return p[:index], p[index:]
- return '', p
-
-
-# Return the tail (basename) part of a path.
-
-def basename(p):
- """Returns the final component of a pathname"""
- return split(p)[1]
-
-
-# Return the head (dirname) part of a path.
-
-def dirname(p):
- """Returns the directory component of a pathname"""
- return split(p)[0]
-
-
-# alias exists to lexists
-lexists = exists
-
-
-# Is a path a directory?
-
-# Is a path a mount point? Either a root (with or without drive letter)
-# or an UNC path with at most a / or \ after the mount point.
-
-def ismount(path):
- """Test whether a path is a mount point (defined as root of drive)"""
- unc, rest = splitunc(path)
- if unc:
- return rest in ("", "/", "\\")
- p = splitdrive(path)[1]
- return len(p) == 1 and p[0] in '/\\'
-
-
-# Normalize a path, e.g. A//B, A/./B and A/foo/../B all become A/B.
-
-def normpath(path):
- """Normalize path, eliminating double slashes, etc."""
- path = path.replace('\\', '/')
- prefix, path = splitdrive(path)
- while path[:1] == '/':
- prefix = prefix + '/'
- path = path[1:]
- comps = path.split('/')
- i = 0
- while i < len(comps):
- if comps[i] == '.':
- del comps[i]
- elif comps[i] == '..' and i > 0 and comps[i-1] not in ('', '..'):
- del comps[i-1:i+1]
- i = i - 1
- elif comps[i] == '' and i > 0 and comps[i-1] != '':
- del comps[i]
- else:
- i = i + 1
- # If the path is now empty, substitute '.'
- if not prefix and not comps:
- comps.append('.')
- return prefix + '/'.join(comps)
-
-
-# Return an absolute path.
-def abspath(path):
- """Return the absolute version of a path"""
- if not isabs(path):
- path = join(os.getcwd(), path)
- return normpath(path)
-
-# realpath is a no-op on systems without islink support
-realpath = abspath
-
-supports_unicode_filenames = False
diff --git a/Lib/pdb.py b/Lib/pdb.py
index e6d7f8f..e5596e5 100755
--- a/Lib/pdb.py
+++ b/Lib/pdb.py
@@ -92,7 +92,7 @@ def find_function(funcname, filename):
cre = re.compile(r'def\s+%s\s*[(]' % re.escape(funcname))
try:
fp = open(filename)
- except IOError:
+ except OSError:
return None
# consumer of this info expects the first line to be 1
lineno = 1
@@ -170,12 +170,12 @@ class Pdb(bdb.Bdb, cmd.Cmd):
try:
with open(os.path.join(envHome, ".pdbrc")) as rcFile:
self.rcLines.extend(rcFile)
- except IOError:
+ except OSError:
pass
try:
with open(".pdbrc") as rcFile:
self.rcLines.extend(rcFile)
- except IOError:
+ except OSError:
pass
self.commands = {} # associates a command list to breakpoint numbers
@@ -1241,7 +1241,7 @@ class Pdb(bdb.Bdb, cmd.Cmd):
breaklist = self.get_file_breaks(filename)
try:
lines, lineno = getsourcelines(self.curframe)
- except IOError as err:
+ except OSError as err:
self.error(err)
return
self._print_lines(lines, lineno, breaklist, self.curframe)
@@ -1257,7 +1257,7 @@ class Pdb(bdb.Bdb, cmd.Cmd):
return
try:
lines, lineno = getsourcelines(obj)
- except (IOError, TypeError) as err:
+ except (OSError, TypeError) as err:
self.error(err)
return
self._print_lines(lines, lineno)
diff --git a/Lib/pkgutil.py b/Lib/pkgutil.py
index a5de04d..8fa8405 100644
--- a/Lib/pkgutil.py
+++ b/Lib/pkgutil.py
@@ -121,8 +121,7 @@ def walk_packages(path=None, prefix='', onerror=None):
# don't traverse path items we've seen before
path = [p for p in path if not seen(p)]
- for item in walk_packages(path, name+'.', onerror):
- yield item
+ yield from walk_packages(path, name+'.', onerror)
def iter_modules(path=None, prefix=''):
@@ -350,9 +349,8 @@ class ImpLoader:
self.file.close()
elif mod_type==imp.PY_COMPILED:
if os.path.exists(self.filename[:-1]):
- f = open(self.filename[:-1], 'r')
- self.source = f.read()
- f.close()
+ with open(self.filename[:-1], 'r') as f:
+ self.source = f.read()
elif mod_type==imp.PKG_DIRECTORY:
self.source = self._get_delegate().get_source()
return self.source
@@ -456,8 +454,7 @@ def iter_importers(fullname=""):
if path is None:
return
else:
- for importer in sys.meta_path:
- yield importer
+ yield from sys.meta_path
path = sys.path
for item in path:
yield get_importer(item)
@@ -589,16 +586,16 @@ def extend_path(path, name):
if os.path.isfile(pkgfile):
try:
f = open(pkgfile)
- except IOError as msg:
+ except OSError as msg:
sys.stderr.write("Can't open %s: %s\n" %
(pkgfile, msg))
else:
- for line in f:
- line = line.rstrip('\n')
- if not line or line.startswith('#'):
- continue
- path.append(line) # Don't check for existence!
- f.close()
+ with f:
+ for line in f:
+ line = line.rstrip('\n')
+ if not line or line.startswith('#'):
+ continue
+ path.append(line) # Don't check for existence!
return path
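The 'yield from' rewrites above are behaviour-preserving; the general shape, as a standalone sketch:

    def chain(*iterables):
        for it in iterables:
            yield from it   # replaces: for item in it: yield item

    print(list(chain('ab', [1, 2])))   # ['a', 'b', 1, 2]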
diff --git a/Lib/plat-os2emx/IN.py b/Lib/plat-os2emx/IN.py
deleted file mode 100644
index 753ae24..0000000
--- a/Lib/plat-os2emx/IN.py
+++ /dev/null
@@ -1,82 +0,0 @@
-# Generated by h2py from f:/emx/include/netinet/in.h
-
-# Included from sys/param.h
-PAGE_SIZE = 0x1000
-HZ = 100
-MAXNAMLEN = 260
-MAXPATHLEN = 260
-def htonl(X): return _swapl(X)
-
-def ntohl(X): return _swapl(X)
-
-def htons(X): return _swaps(X)
-
-def ntohs(X): return _swaps(X)
-
-IPPROTO_IP = 0
-IPPROTO_ICMP = 1
-IPPROTO_IGMP = 2
-IPPROTO_GGP = 3
-IPPROTO_TCP = 6
-IPPROTO_EGP = 8
-IPPROTO_PUP = 12
-IPPROTO_UDP = 17
-IPPROTO_IDP = 22
-IPPROTO_TP = 29
-IPPROTO_EON = 80
-IPPROTO_RAW = 255
-IPPROTO_MAX = 256
-IPPORT_RESERVED = 1024
-IPPORT_USERRESERVED = 5000
-def IN_CLASSA(i): return (((int)(i) & 0x80000000) == 0)
-
-IN_CLASSA_NET = 0xff000000
-IN_CLASSA_NSHIFT = 24
-IN_CLASSA_HOST = 0x00ffffff
-IN_CLASSA_MAX = 128
-def IN_CLASSB(i): return (((int)(i) & 0xc0000000) == 0x80000000)
-
-IN_CLASSB_NET = 0xffff0000
-IN_CLASSB_NSHIFT = 16
-IN_CLASSB_HOST = 0x0000ffff
-IN_CLASSB_MAX = 65536
-def IN_CLASSC(i): return (((int)(i) & 0xe0000000) == 0xc0000000)
-
-IN_CLASSC_NET = 0xffffff00
-IN_CLASSC_NSHIFT = 8
-IN_CLASSC_HOST = 0x000000ff
-def IN_CLASSD(i): return (((int)(i) & 0xf0000000) == 0xe0000000)
-
-IN_CLASSD_NET = 0xf0000000
-IN_CLASSD_NSHIFT = 28
-IN_CLASSD_HOST = 0x0fffffff
-def IN_MULTICAST(i): return IN_CLASSD(i)
-
-def IN_EXPERIMENTAL(i): return (((int)(i) & 0xe0000000) == 0xe0000000)
-
-def IN_BADCLASS(i): return (((int)(i) & 0xf0000000) == 0xf0000000)
-
-INADDR_ANY = 0x00000000
-INADDR_LOOPBACK = 0x7f000001
-INADDR_BROADCAST = 0xffffffff
-INADDR_NONE = 0xffffffff
-INADDR_UNSPEC_GROUP = 0xe0000000
-INADDR_ALLHOSTS_GROUP = 0xe0000001
-INADDR_MAX_LOCAL_GROUP = 0xe00000ff
-IN_LOOPBACKNET = 127
-IP_OPTIONS = 1
-IP_MULTICAST_IF = 2
-IP_MULTICAST_TTL = 3
-IP_MULTICAST_LOOP = 4
-IP_ADD_MEMBERSHIP = 5
-IP_DROP_MEMBERSHIP = 6
-IP_HDRINCL = 2
-IP_TOS = 3
-IP_TTL = 4
-IP_RECVOPTS = 5
-IP_RECVRETOPTS = 6
-IP_RECVDSTADDR = 7
-IP_RETOPTS = 8
-IP_DEFAULT_MULTICAST_TTL = 1
-IP_DEFAULT_MULTICAST_LOOP = 1
-IP_MAX_MEMBERSHIPS = 20
diff --git a/Lib/plat-os2emx/SOCKET.py b/Lib/plat-os2emx/SOCKET.py
deleted file mode 100644
index dac594a..0000000
--- a/Lib/plat-os2emx/SOCKET.py
+++ /dev/null
@@ -1,106 +0,0 @@
-# Generated by h2py from f:/emx/include/sys/socket.h
-
-# Included from sys/types.h
-FD_SETSIZE = 256
-
-# Included from sys/uio.h
-FREAD = 1
-FWRITE = 2
-SOCK_STREAM = 1
-SOCK_DGRAM = 2
-SOCK_RAW = 3
-SOCK_RDM = 4
-SOCK_SEQPACKET = 5
-SO_DEBUG = 0x0001
-SO_ACCEPTCONN = 0x0002
-SO_REUSEADDR = 0x0004
-SO_KEEPALIVE = 0x0008
-SO_DONTROUTE = 0x0010
-SO_BROADCAST = 0x0020
-SO_USELOOPBACK = 0x0040
-SO_LINGER = 0x0080
-SO_OOBINLINE = 0x0100
-SO_L_BROADCAST = 0x0200
-SO_RCV_SHUTDOWN = 0x0400
-SO_SND_SHUTDOWN = 0x0800
-SO_SNDBUF = 0x1001
-SO_RCVBUF = 0x1002
-SO_SNDLOWAT = 0x1003
-SO_RCVLOWAT = 0x1004
-SO_SNDTIMEO = 0x1005
-SO_RCVTIMEO = 0x1006
-SO_ERROR = 0x1007
-SO_TYPE = 0x1008
-SO_OPTIONS = 0x1010
-SOL_SOCKET = 0xffff
-AF_UNSPEC = 0
-AF_UNIX = 1
-AF_INET = 2
-AF_IMPLINK = 3
-AF_PUP = 4
-AF_CHAOS = 5
-AF_NS = 6
-AF_NBS = 7
-AF_ISO = 7
-AF_OSI = AF_ISO
-AF_ECMA = 8
-AF_DATAKIT = 9
-AF_CCITT = 10
-AF_SNA = 11
-AF_DECnet = 12
-AF_DLI = 13
-AF_LAT = 14
-AF_HYLINK = 15
-AF_APPLETALK = 16
-AF_NB = 17
-AF_NETBIOS = AF_NB
-AF_OS2 = AF_UNIX
-AF_MAX = 18
-PF_UNSPEC = AF_UNSPEC
-PF_UNIX = AF_UNIX
-PF_INET = AF_INET
-PF_IMPLINK = AF_IMPLINK
-PF_PUP = AF_PUP
-PF_CHAOS = AF_CHAOS
-PF_NS = AF_NS
-PF_NBS = AF_NBS
-PF_ISO = AF_ISO
-PF_OSI = AF_ISO
-PF_ECMA = AF_ECMA
-PF_DATAKIT = AF_DATAKIT
-PF_CCITT = AF_CCITT
-PF_SNA = AF_SNA
-PF_DECnet = AF_DECnet
-PF_DLI = AF_DLI
-PF_LAT = AF_LAT
-PF_HYLINK = AF_HYLINK
-PF_APPLETALK = AF_APPLETALK
-PF_NB = AF_NB
-PF_NETBIOS = AF_NB
-PF_OS2 = AF_UNIX
-PF_MAX = AF_MAX
-SOMAXCONN = 5
-MSG_OOB = 0x1
-MSG_PEEK = 0x2
-MSG_DONTROUTE = 0x4
-MSG_EOR = 0x8
-MSG_TRUNC = 0x10
-MSG_CTRUNC = 0x20
-MSG_WAITALL = 0x40
-MSG_MAXIOVLEN = 16
-SCM_RIGHTS = 0x01
-MT_FREE = 0
-MT_DATA = 1
-MT_HEADER = 2
-MT_SOCKET = 3
-MT_PCB = 4
-MT_RTABLE = 5
-MT_HTABLE = 6
-MT_ATABLE = 7
-MT_SONAME = 8
-MT_ZOMBIE = 9
-MT_SOOPTS = 10
-MT_FTABLE = 11
-MT_RIGHTS = 12
-MT_IFADDR = 13
-MAXSOCKETS = 2048
diff --git a/Lib/plat-os2emx/_emx_link.py b/Lib/plat-os2emx/_emx_link.py
deleted file mode 100644
index 01e6b54..0000000
--- a/Lib/plat-os2emx/_emx_link.py
+++ /dev/null
@@ -1,79 +0,0 @@
-# _emx_link.py
-
-# Written by Andrew I MacIntyre, December 2002.
-
-"""_emx_link.py is a simplistic emulation of the Unix link(2) library routine
-for creating so-called hard links. It is intended to be imported into
-the os module in place of the unimplemented (on OS/2) Posix link()
-function (os.link()).
-
-We do this on OS/2 by implementing a file copy, with link(2) semantics:-
- - the target cannot already exist;
- - we hope that the actual file open (if successful) is actually
- atomic...
-
-Limitations of this approach/implementation include:-
- - no support for correct link counts (EMX stat(target).st_nlink
- is always 1);
- - thread safety undefined;
- - default file permissions (r+w) used, can't be over-ridden;
- - implemented in Python so comparatively slow, especially for large
- source files;
- - need sufficient free disk space to store the copy.
-
-Behaviour:-
- - any exception should propagate to the caller;
- - want target to be an exact copy of the source, so use binary mode;
- - returns None, same as os.link() which is implemented in posixmodule.c;
- - target removed in the event of a failure where possible;
- - given the motivation to write this emulation came from trying to
- support a Unix resource lock implementation, where minimal overhead
- during creation of the target is desirable and the files are small,
- we read a source block before attempting to create the target so that
- we're ready to immediately write some data into it.
-"""
-
-import os
-import errno
-
-__all__ = ['link']
-
-def link(source, target):
- """link(source, target) -> None
-
- Attempt to hard link the source file to the target file name.
- On OS/2, this creates a complete copy of the source file.
- """
-
- s = os.open(source, os.O_RDONLY | os.O_BINARY)
- if os.isatty(s):
- raise OSError(errno.EXDEV, 'Cross-device link')
- data = os.read(s, 1024)
-
- try:
- t = os.open(target, os.O_WRONLY | os.O_BINARY | os.O_CREAT | os.O_EXCL)
- except OSError:
- os.close(s)
- raise
-
- try:
- while data:
- os.write(t, data)
- data = os.read(s, 1024)
- except OSError:
- os.close(s)
- os.close(t)
- os.unlink(target)
- raise
-
- os.close(s)
- os.close(t)
-
-if __name__ == '__main__':
- import sys
- try:
- link(sys.argv[1], sys.argv[2])
- except IndexError:
- print('Usage: emx_link <source> <target>')
- except OSError:
- print('emx_link: %s' % str(sys.exc_info()[1]))
diff --git a/Lib/plat-os2emx/grp.py b/Lib/plat-os2emx/grp.py
deleted file mode 100644
index ee63ef8..0000000
--- a/Lib/plat-os2emx/grp.py
+++ /dev/null
@@ -1,182 +0,0 @@
-# this module is an OS/2 oriented replacement for the grp standard
-# extension module.
-
-# written by Andrew MacIntyre, April 2001.
-# updated July 2003, adding field accessor support
-
-# note that this implementation checks whether ":" or ";" as used as
-# the field separator character.
-
-"""Replacement for grp standard extension module, intended for use on
-OS/2 and similar systems which don't normally have an /etc/group file.
-
-The standard Unix group database is an ASCII text file with 4 fields per
-record (line), separated by a colon:
- - group name (string)
- - group password (optional encrypted string)
- - group id (integer)
- - group members (comma delimited list of userids, with no spaces)
-
-Note that members are only included in the group file for groups that
-aren't their primary groups.
-(see the section 8.2 of the Python Library Reference)
-
-This implementation differs from the standard Unix implementation by
-allowing use of the platform's native path separator character - ';' on OS/2,
-DOS and MS-Windows - as the field separator in addition to the Unix
-standard ":".
-
-The module looks for the group database at the following locations
-(in order first to last):
- - ${ETC_GROUP} (or %ETC_GROUP%)
- - ${ETC}/group (or %ETC%/group)
- - ${PYTHONHOME}/Etc/group (or %PYTHONHOME%/Etc/group)
-
-Classes
--------
-
-None
-
-Functions
----------
-
-getgrgid(gid) - return the record for group-id gid as a 4-tuple
-
-getgrnam(name) - return the record for group 'name' as a 4-tuple
-
-getgrall() - return a list of 4-tuples, each tuple being one record
- (NOTE: the order is arbitrary)
-
-Attributes
-----------
-
-group_file - the path of the group database file
-
-"""
-
-import os
-
-# try and find the group file
-__group_path = []
-if 'ETC_GROUP' in os.environ:
- __group_path.append(os.environ['ETC_GROUP'])
-if 'ETC' in os.environ:
- __group_path.append('%s/group' % os.environ['ETC'])
-if 'PYTHONHOME' in os.environ:
- __group_path.append('%s/Etc/group' % os.environ['PYTHONHOME'])
-
-group_file = None
-for __i in __group_path:
- try:
- __f = open(__i, 'r')
- __f.close()
- group_file = __i
- break
- except:
- pass
-
-# decide what field separator we can try to use - Unix standard, with
-# the platform's path separator as an option. No special field conversion
-# handlers are required for the group file.
-__field_sep = [':']
-if os.pathsep:
- if os.pathsep != ':':
- __field_sep.append(os.pathsep)
-
-# helper routine to identify which separator character is in use
-def __get_field_sep(record):
- fs = None
- for c in __field_sep:
- # there should be 3 delimiter characters (for 4 fields)
- if record.count(c) == 3:
- fs = c
- break
- if fs:
- return fs
- else:
- raise KeyError('>> group database fields not delimited <<')
-
-# class to match the new record field name accessors.
-# the resulting object is intended to behave like a read-only tuple,
-# with each member also accessible by a field name.
-class Group:
- def __init__(self, name, passwd, gid, mem):
- self.__dict__['gr_name'] = name
- self.__dict__['gr_passwd'] = passwd
- self.__dict__['gr_gid'] = gid
- self.__dict__['gr_mem'] = mem
- self.__dict__['_record'] = (self.gr_name, self.gr_passwd,
- self.gr_gid, self.gr_mem)
-
- def __len__(self):
- return 4
-
- def __getitem__(self, key):
- return self._record[key]
-
- def __setattr__(self, name, value):
- raise AttributeError('attribute read-only: %s' % name)
-
- def __repr__(self):
- return str(self._record)
-
- def __cmp__(self, other):
- this = str(self._record)
- if this == other:
- return 0
- elif this < other:
- return -1
- else:
- return 1
-
-
-# read the whole file, parsing each entry into tuple form
-# with dictionaries to speed recall by GID or group name
-def __read_group_file():
- if group_file:
- group = open(group_file, 'r')
- else:
- raise KeyError('>> no group database <<')
- gidx = {}
- namx = {}
- sep = None
- while 1:
- entry = group.readline().strip()
- if len(entry) > 3:
- if sep is None:
- sep = __get_field_sep(entry)
- fields = entry.split(sep)
- fields[2] = int(fields[2])
- fields[3] = [f.strip() for f in fields[3].split(',')]
- record = Group(*fields)
- if fields[2] not in gidx:
- gidx[fields[2]] = record
- if fields[0] not in namx:
- namx[fields[0]] = record
- elif len(entry) > 0:
- pass # skip empty or malformed records
- else:
- break
- group.close()
- if len(gidx) == 0:
- raise KeyError
- return (gidx, namx)
-
-# return the group database entry by GID
-def getgrgid(gid):
- g, n = __read_group_file()
- return g[gid]
-
-# return the group database entry by group name
-def getgrnam(name):
- g, n = __read_group_file()
- return n[name]
-
-# return all the group database entries
-def getgrall():
- g, n = __read_group_file()
- return g.values()
-
-# test harness
-if __name__ == '__main__':
- getgrall()
diff --git a/Lib/plat-os2emx/pwd.py b/Lib/plat-os2emx/pwd.py
deleted file mode 100644
index 2cb077f..0000000
--- a/Lib/plat-os2emx/pwd.py
+++ /dev/null
@@ -1,208 +0,0 @@
-# this module is an OS/2 oriented replacement for the pwd standard
-# extension module.
-
-# written by Andrew MacIntyre, April 2001.
-# updated July 2003, adding field accessor support
-
-# note that this implementation checks whether ":" or ";" as used as
-# the field separator character. Path conversions are are applied when
-# the database uses ":" as the field separator character.
-
-"""Replacement for pwd standard extension module, intended for use on
-OS/2 and similar systems which don't normally have an /etc/passwd file.
-
-The standard Unix password database is an ASCII text file with 7 fields
-per record (line), separated by a colon:
- - user name (string)
- - password (encrypted string, or "*" or "")
- - user id (integer)
- - group id (integer)
- - description (usually user's name)
- - home directory (path to user's home directory)
- - shell (path to the user's login shell)
-
-(see the section 8.1 of the Python Library Reference)
-
-This implementation differs from the standard Unix implementation by
-allowing use of the platform's native path separator character - ';' on OS/2,
-DOS and MS-Windows - as the field separator in addition to the Unix
-standard ":". Additionally, when ":" is the separator path conversions
-are applied to deal with any munging of the drive letter reference.
-
-The module looks for the password database at the following locations
-(in order first to last):
- - ${ETC_PASSWD} (or %ETC_PASSWD%)
- - ${ETC}/passwd (or %ETC%/passwd)
- - ${PYTHONHOME}/Etc/passwd (or %PYTHONHOME%/Etc/passwd)
-
-Classes
--------
-
-None
-
-Functions
----------
-
-getpwuid(uid) - return the record for user-id uid as a 7-tuple
-
-getpwnam(name) - return the record for user 'name' as a 7-tuple
-
-getpwall() - return a list of 7-tuples, each tuple being one record
- (NOTE: the order is arbitrary)
-
-Attributes
-----------
-
-passwd_file - the path of the password database file
-
-"""
-
-import os
-
-# try and find the passwd file
-__passwd_path = []
-if 'ETC_PASSWD' in os.environ:
- __passwd_path.append(os.environ['ETC_PASSWD'])
-if 'ETC' in os.environ:
- __passwd_path.append('%s/passwd' % os.environ['ETC'])
-if 'PYTHONHOME' in os.environ:
- __passwd_path.append('%s/Etc/passwd' % os.environ['PYTHONHOME'])
-
-passwd_file = None
-for __i in __passwd_path:
- try:
- __f = open(__i, 'r')
- __f.close()
- passwd_file = __i
- break
- except:
- pass
-
-# path conversion handlers
-def __nullpathconv(path):
- return path.replace(os.altsep, os.sep)
-
-def __unixpathconv(path):
- # two known drive letter variations: "x;" and "$x"
- if path[0] == '$':
- conv = path[1] + ':' + path[2:]
- elif path[1] == ';':
- conv = path[0] + ':' + path[2:]
- else:
- conv = path
- return conv.replace(os.altsep, os.sep)
-
-# decide what field separator we can try to use - Unix standard, with
-# the platform's path separator as an option. No special field conversion
-# handler is required when using the platform's path separator as field
-# separator, but are required for the home directory and shell fields when
-# using the standard Unix (":") field separator.
-__field_sep = {':': __unixpathconv}
-if os.pathsep:
- if os.pathsep != ':':
- __field_sep[os.pathsep] = __nullpathconv
-
-# helper routine to identify which separator character is in use
-def __get_field_sep(record):
- fs = None
- for c in __field_sep.keys():
- # there should be 6 delimiter characters (for 7 fields)
- if record.count(c) == 6:
- fs = c
- break
- if fs:
- return fs
- else:
- raise KeyError('>> passwd database fields not delimited <<')
-
-# class to match the new record field name accessors.
-# the resulting object is intended to behave like a read-only tuple,
-# with each member also accessible by a field name.
-class Passwd:
- def __init__(self, name, passwd, uid, gid, gecos, dir, shell):
- self.__dict__['pw_name'] = name
- self.__dict__['pw_passwd'] = passwd
- self.__dict__['pw_uid'] = uid
- self.__dict__['pw_gid'] = gid
- self.__dict__['pw_gecos'] = gecos
- self.__dict__['pw_dir'] = dir
- self.__dict__['pw_shell'] = shell
- self.__dict__['_record'] = (self.pw_name, self.pw_passwd,
- self.pw_uid, self.pw_gid,
- self.pw_gecos, self.pw_dir,
- self.pw_shell)
-
- def __len__(self):
- return 7
-
- def __getitem__(self, key):
- return self._record[key]
-
- def __setattr__(self, name, value):
- raise AttributeError('attribute read-only: %s' % name)
-
- def __repr__(self):
- return str(self._record)
-
- def __cmp__(self, other):
- this = str(self._record)
- if this == other:
- return 0
- elif this < other:
- return -1
- else:
- return 1
-
-
-# read the whole file, parsing each entry into tuple form
-# with dictionaries to speed recall by UID or passwd name
-def __read_passwd_file():
- if passwd_file:
- passwd = open(passwd_file, 'r')
- else:
- raise KeyError('>> no password database <<')
- uidx = {}
- namx = {}
- sep = None
- while True:
- entry = passwd.readline().strip()
- if len(entry) > 6:
- if sep is None:
- sep = __get_field_sep(entry)
- fields = entry.split(sep)
- for i in (2, 3):
- fields[i] = int(fields[i])
- for i in (5, 6):
- fields[i] = __field_sep[sep](fields[i])
- record = Passwd(*fields)
- if fields[2] not in uidx:
- uidx[fields[2]] = record
- if fields[0] not in namx:
- namx[fields[0]] = record
- elif len(entry) > 0:
- pass # skip empty or malformed records
- else:
- break
- passwd.close()
- if len(uidx) == 0:
- raise KeyError
- return (uidx, namx)
-
-# return the passwd database entry by UID
-def getpwuid(uid):
- u, n = __read_passwd_file()
- return u[uid]
-
-# return the passwd database entry by passwd name
-def getpwnam(name):
- u, n = __read_passwd_file()
- return n[name]
-
-# return all the passwd database entries
-def getpwall():
- u, n = __read_passwd_file()
- return n.values()
-
-# test harness
-if __name__ == '__main__':
- getpwall()
diff --git a/Lib/plat-os2emx/regen b/Lib/plat-os2emx/regen
deleted file mode 100755
index 3ecd2a8..0000000
--- a/Lib/plat-os2emx/regen
+++ /dev/null
@@ -1,7 +0,0 @@
-#! /bin/sh
-export INCLUDE=$C_INCLUDE_PATH
-set -v
-python.exe ../../Tools/scripts/h2py.py $C_INCLUDE_PATH/fcntl.h
-python.exe ../../Tools/scripts/h2py.py $C_INCLUDE_PATH/sys/socket.h
-python.exe ../../Tools/scripts/h2py.py -i '(u_long)' $C_INCLUDE_PATH/netinet/in.h
-#python.exe ../../Tools/scripts/h2py.py $C_INCLUDE_PATH/termios.h
diff --git a/Lib/platform.py b/Lib/platform.py
index 2b8a24a..c54f768 100755
--- a/Lib/platform.py
+++ b/Lib/platform.py
@@ -122,7 +122,7 @@ try:
except AttributeError:
# os.devnull was added in Python 2.4, so emulate it for earlier
# Python versions
- if sys.platform in ('dos','win32','win16','os2'):
+ if sys.platform in ('dos','win32','win16'):
# Use the old CP/M NUL as device name
DEV_NULL = 'NUL'
else:
@@ -316,7 +316,7 @@ def linux_distribution(distname='', version='', id='',
"""
try:
etc = os.listdir('/etc')
- except os.error:
+ except OSError:
# Probably not a Unix system
return distname,version,id
etc.sort()
@@ -403,13 +403,13 @@ _ver_output = re.compile(r'(?:([\w ]+) ([\w.]+) '
def _syscmd_ver(system='', release='', version='',
- supported_platforms=('win32','win16','dos','os2')):
+ supported_platforms=('win32','win16','dos')):
""" Tries to figure out the OS version used and returns
a tuple (system,release,version).
It uses the "ver" shell command for this which is known
- to exists on Windows, DOS and OS/2. XXX Others too ?
+ to exist on Windows and DOS. XXX Others too ?
In case this fails, the given parameters are used as
defaults.
@@ -424,13 +424,10 @@ def _syscmd_ver(system='', release='', version='',
pipe = popen(cmd)
info = pipe.read()
if pipe.close():
- raise os.error('command failed')
+ raise OSError('command failed')
# XXX How can I suppress shell errors from being written
# to stderr ?
- except os.error as why:
- #print 'Command %s failed: %s' % (cmd,why)
- continue
- except IOError as why:
+ except OSError as why:
#print 'Command %s failed: %s' % (cmd,why)
continue
else:
@@ -581,7 +578,7 @@ def win32_ver(release='',version='',csd='',ptype=''):
# Discard any type that isn't REG_SZ
if type == REG_SZ and name.find("Server") != -1:
product_type = VER_NT_SERVER
- except WindowsError:
+ except OSError:
# Use default of VER_NT_WORKSTATION
pass
@@ -882,7 +879,7 @@ def _node(default=''):
return default
try:
return socket.gethostname()
- except socket.error:
+ except OSError:
# Still not working...
return default
@@ -901,12 +898,12 @@ def _syscmd_uname(option,default=''):
""" Interface to the system's uname command.
"""
- if sys.platform in ('dos','win32','win16','os2'):
+ if sys.platform in ('dos','win32','win16'):
# XXX Others too ?
return default
try:
f = os.popen('uname %s 2> %s' % (option, DEV_NULL))
- except (AttributeError,os.error):
+ except (AttributeError, OSError):
return default
output = f.read().strip()
rc = f.close()
@@ -924,7 +921,7 @@ def _syscmd_file(target,default=''):
default in case the command should fail.
"""
- if sys.platform in ('dos','win32','win16','os2'):
+ if sys.platform in ('dos','win32','win16'):
# XXX Others too ?
return default
target = _follow_symlinks(target)
@@ -932,7 +929,7 @@ def _syscmd_file(target,default=''):
proc = subprocess.Popen(['file', target],
stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
- except (AttributeError,os.error):
+ except (AttributeError, OSError):
return default
output = proc.communicate()[0].decode('latin-1')
rc = proc.wait()
diff --git a/Lib/poplib.py b/Lib/poplib.py
index d42d9dd..0f12ae2 100644
--- a/Lib/poplib.py
+++ b/Lib/poplib.py
@@ -13,7 +13,15 @@ Based on the J. Myers POP3 draft, Jan. 96
# Imports
-import re, socket
+import errno
+import re
+import socket
+
+try:
+ import ssl
+ HAVE_SSL = True
+except ImportError:
+ HAVE_SSL = False
__all__ = ["POP3","error_proto"]
@@ -55,6 +63,8 @@ class POP3:
APOP name digest apop(name, digest)
TOP msg n top(msg, n)
UIDL [msg] uidl(msg = None)
+ CAPA capa()
+ STLS stls()
Raises one exception: 'error_proto'.
@@ -81,6 +91,7 @@ class POP3:
timeout=socket._GLOBAL_DEFAULT_TIMEOUT):
self.host = host
self.port = port
+ self._tls_established = False
self.sock = self._create_socket(timeout)
self.file = self.sock.makefile('rb')
self._debugging = 0
@@ -259,7 +270,14 @@ class POP3:
if self.file is not None:
self.file.close()
if self.sock is not None:
- self.sock.close()
+ try:
+ self.sock.shutdown(socket.SHUT_RDWR)
+ except OSError as e:
+ # The server might already have closed the connection
+ if e.errno != errno.ENOTCONN:
+ raise
+ finally:
+ self.sock.close()
self.file = self.sock = None
#__del__ = quit
@@ -315,21 +333,71 @@ class POP3:
return self._shortcmd('UIDL %s' % which)
return self._longcmd('UIDL')
-try:
- import ssl
-except ImportError:
- pass
-else:
+
+ def capa(self):
+ """Return server capabilities (RFC 2449) as a dictionary
+ >>> c=poplib.POP3('localhost')
+ >>> c.capa()
+ {'IMPLEMENTATION': ['Cyrus', 'POP3', 'server', 'v2.2.12'],
+ 'TOP': [], 'LOGIN-DELAY': ['0'], 'AUTH-RESP-CODE': [],
+ 'EXPIRE': ['NEVER'], 'USER': [], 'STLS': [], 'PIPELINING': [],
+ 'UIDL': [], 'RESP-CODES': []}
+ >>>
+
+ Really, according to RFC 2449, the cyrus folks should avoid
+ having the implementation split into multiple arguments...
+ """
+ def _parsecap(line):
+ lst = line.decode('ascii').split()
+ return lst[0], lst[1:]
+
+ caps = {}
+ try:
+ resp = self._longcmd('CAPA')
+ rawcaps = resp[1]
+ for capline in rawcaps:
+ capnm, capargs = _parsecap(capline)
+ caps[capnm] = capargs
+ except error_proto as _err:
+ raise error_proto('-ERR CAPA not supported by server')
+ return caps
+
+
+ def stls(self, context=None):
+ """Start a TLS session on the active connection as specified in RFC 2595.
+
+ context - a ssl.SSLContext
+ """
+ if not HAVE_SSL:
+ raise error_proto('-ERR TLS support missing')
+ if self._tls_established:
+ raise error_proto('-ERR TLS session already established')
+ caps = self.capa()
+ if 'STLS' not in caps:
+ raise error_proto('-ERR STLS not supported by server')
+ if context is None:
+ context = ssl.SSLContext(ssl.PROTOCOL_SSLv23)
+ context.options |= ssl.OP_NO_SSLv2
+ resp = self._shortcmd('STLS')
+ self.sock = context.wrap_socket(self.sock)
+ self.file = self.sock.makefile('rb')
+ self._tls_established = True
+ return resp
+
+
+if HAVE_SSL:
class POP3_SSL(POP3):
"""POP3 client class over SSL connection
- Instantiate with: POP3_SSL(hostname, port=995, keyfile=None, certfile=None)
+ Instantiate with: POP3_SSL(hostname, port=995, keyfile=None, certfile=None,
+ context=None)
hostname - the hostname of the pop3 over ssl server
port - port number
keyfile - PEM formatted file that contains your private key
certfile - PEM formatted certificate chain file
+ context - a ssl.SSLContext
See the methods of the parent class POP3 for more documentation.
"""
@@ -355,6 +423,13 @@ else:
sock = ssl.wrap_socket(sock, self.keyfile, self.certfile)
return sock
+ def stls(self, keyfile=None, certfile=None, context=None):
+ """The method unconditionally raises an exception since the
+ STLS command doesn't make any sense on an already established
+ SSL/TLS session.
+ """
+ raise error_proto('-ERR TLS session already established')
+
__all__.append("POP3_SSL")
if __name__ == "__main__":
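Hypothetical use of the new CAPA/STLS support (host name and credentials are placeholders, and the server is assumed to advertise STLS):

    import poplib

    conn = poplib.POP3('mail.example.com', timeout=10)
    caps = conn.capa()            # dict mapping capability name to its arguments
    if 'STLS' in caps:
        conn.stls()               # upgrade the cleartext connection to TLS (RFC 2595)
    conn.user('alice')
    conn.pass_('secret')
    print(conn.stat())            # (message count, mailbox size in octets)
    conn.quit()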
diff --git a/Lib/posixpath.py b/Lib/posixpath.py
index b1e1a92..e1d59b6 100644
--- a/Lib/posixpath.py
+++ b/Lib/posixpath.py
@@ -162,7 +162,7 @@ def islink(path):
"""Test whether a path is a symbolic link"""
try:
st = os.lstat(path)
- except (os.error, AttributeError):
+ except (OSError, AttributeError):
return False
return stat.S_ISLNK(st.st_mode)
@@ -172,39 +172,11 @@ def lexists(path):
"""Test whether a path exists. Returns True for broken symbolic links"""
try:
os.lstat(path)
- except os.error:
+ except OSError:
return False
return True
-# Are two filenames really pointing to the same file?
-
-def samefile(f1, f2):
- """Test whether two pathnames reference the same actual file"""
- s1 = os.stat(f1)
- s2 = os.stat(f2)
- return samestat(s1, s2)
-
-
-# Are two open files really referencing the same file?
-# (Not necessarily the same file descriptor!)
-
-def sameopenfile(fp1, fp2):
- """Test whether two open file objects reference the same file"""
- s1 = os.fstat(fp1)
- s2 = os.fstat(fp2)
- return samestat(s1, s2)
-
-
-# Are two stat buffers (obtained from stat, fstat or lstat)
-# describing the same file?
-
-def samestat(s1, s2):
- """Test whether two stat buffers reference the same file"""
- return s1.st_ino == s2.st_ino and \
- s1.st_dev == s2.st_dev
-
-
# Is a path a mount point?
# (Does this work for all UNIXes? Is it even guaranteed to work by Posix?)
@@ -220,7 +192,7 @@ def ismount(path):
else:
parent = join(path, '..')
s2 = os.lstat(parent)
- except os.error:
+ except OSError:
return False # It doesn't exist -- so not a mount point :-)
dev1 = s1.st_dev
dev2 = s2.st_dev
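The public os.path API is unchanged by this removal (the helpers are presumably shared through genericpath now); a quick check with a throwaway file name:

    import os

    with open('demo.txt', 'w') as f:      # 'demo.txt' is a placeholder
        f.write('x')
    print(os.path.samefile('demo.txt', './demo.txt'))                    # True
    print(os.path.samestat(os.stat('demo.txt'), os.lstat('demo.txt')))   # True (not a symlink)
    os.remove('demo.txt')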
diff --git a/Lib/pprint.py b/Lib/pprint.py
index ae96dde..1f98f5c 100644
--- a/Lib/pprint.py
+++ b/Lib/pprint.py
@@ -34,6 +34,7 @@ saferepr()
"""
+import re
import sys as _sys
from collections import OrderedDict as _OrderedDict
from io import StringIO as _StringIO
@@ -158,13 +159,10 @@ class PrettyPrinter:
return
rep = self._repr(object, context, level - 1)
typ = _type(object)
- sepLines = _len(rep) > (self._width - 1 - indent - allowance)
+ max_width = self._width - 1 - indent - allowance
+ sepLines = _len(rep) > max_width
write = stream.write
- if self._depth and level > self._depth:
- write(rep)
- return
-
if sepLines:
r = getattr(typ, "__repr__", None)
if issubclass(typ, dict):
@@ -242,6 +240,37 @@ class PrettyPrinter:
write(endchar)
return
+ if issubclass(typ, str) and len(object) > 0 and r is str.__repr__:
+ def _str_parts(s):
+ """
+ Return a list of string literals comprising the repr()
+ of the given string using literal concatenation.
+ """
+ lines = s.splitlines(True)
+ for i, line in enumerate(lines):
+ rep = repr(line)
+ if _len(rep) <= max_width:
+ yield rep
+ else:
+ # A list of alternating (non-space, space) strings
+ parts = re.split(r'(\s+)', line) + ['']
+ current = ''
+ for i in range(0, len(parts), 2):
+ part = parts[i] + parts[i+1]
+ candidate = current + part
+ if len(repr(candidate)) > max_width:
+ if current:
+ yield repr(current)
+ current = part
+ else:
+ current = candidate
+ if current:
+ yield repr(current)
+ for i, rep in enumerate(_str_parts(object)):
+ if i > 0:
+ write('\n' + ' '*indent)
+ write(rep)
+ return
write(rep)
def _repr(self, object, context, level):
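A quick demonstration of the new long-string handling (exact line breaks depend on the requested width):

    import pprint

    text = 'the quick brown fox jumps over the lazy dog ' * 4
    pprint.pprint(text, width=40)   # printed as several concatenated string literals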
diff --git a/Lib/profile.py b/Lib/profile.py
index 743e77d..5d0e968 100755
--- a/Lib/profile.py
+++ b/Lib/profile.py
@@ -40,6 +40,40 @@ __all__ = ["run", "runctx", "Profile"]
# return i_count
#itimes = integer_timer # replace with C coded timer returning integers
+class _Utils:
+ """Support class for utility functions which are shared by
+ profile.py and cProfile.py modules.
+ Not supposed to be used directly.
+ """
+
+ def __init__(self, profiler):
+ self.profiler = profiler
+
+ def run(self, statement, filename, sort):
+ prof = self.profiler()
+ try:
+ prof.run(statement)
+ except SystemExit:
+ pass
+ finally:
+ self._show(prof, filename, sort)
+
+ def runctx(self, statement, globals, locals, filename, sort):
+ prof = self.profiler()
+ try:
+ prof.runctx(statement, globals, locals)
+ except SystemExit:
+ pass
+ finally:
+ self._show(prof, filename, sort)
+
+ def _show(self, prof, filename, sort):
+ if filename is not None:
+ prof.dump_stats(filename)
+ else:
+ prof.print_stats(sort)
+
+
#**************************************************************************
# The following are the static member functions for the profiler class
# Note that an instance of Profile() is *not* needed to call them.
@@ -56,15 +90,7 @@ def run(statement, filename=None, sort=-1):
standard name string (file/line/function-name) that is presented in
each line.
"""
- prof = Profile()
- try:
- prof = prof.run(statement)
- except SystemExit:
- pass
- if filename is not None:
- prof.dump_stats(filename)
- else:
- return prof.print_stats(sort)
+ return _Utils(Profile).run(statement, filename, sort)
def runctx(statement, globals, locals, filename=None, sort=-1):
"""Run statement under profiler, supplying your own globals and locals,
@@ -72,16 +98,8 @@ def runctx(statement, globals, locals, filename=None, sort=-1):
statement and filename have the same semantics as profile.run
"""
- prof = Profile()
- try:
- prof = prof.runctx(statement, globals, locals)
- except SystemExit:
- pass
-
- if filename is not None:
- prof.dump_stats(filename)
- else:
- return prof.print_stats(sort)
+ return _Utils(Profile).runctx(statement, globals, locals, filename, sort)
+
class Profile:
"""Profiler class.
@@ -373,10 +391,9 @@ class Profile:
print_stats()
def dump_stats(self, file):
- f = open(file, 'wb')
- self.create_stats()
- marshal.dump(self.stats, f)
- f.close()
+ with open(file, 'wb') as f:
+ self.create_stats()
+ marshal.dump(self.stats, f)
def create_stats(self):
self.simulate_cmd_complete()
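The public entry points keep their signatures; _Utils only factors out plumbing shared with cProfile. Typical use, sketched:

    import profile

    profile.run('sum(range(100000))', sort='cumulative')     # print sorted stats
    profile.run('sum(range(100000))', filename='sum.prof')   # or dump them for pstats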
diff --git a/Lib/pstats.py b/Lib/pstats.py
index 6a77605..e1ec355 100644
--- a/Lib/pstats.py
+++ b/Lib/pstats.py
@@ -93,9 +93,8 @@ class Stats:
self.stats = {}
return
elif isinstance(arg, str):
- f = open(arg, 'rb')
- self.stats = marshal.load(f)
- f.close()
+ with open(arg, 'rb') as f:
+ self.stats = marshal.load(f)
try:
file_stats = os.stat(arg)
arg = time.ctime(file_stats.st_mtime) + " " + arg
@@ -149,11 +148,8 @@ class Stats:
def dump_stats(self, filename):
"""Write the profile data to a file we know how to load back."""
- f = open(filename, 'wb')
- try:
+ with open(filename, 'wb') as f:
marshal.dump(self.stats, f)
- finally:
- f.close()
# list the tuple indices and directions for sorting,
# along with some printable description
@@ -612,7 +608,7 @@ if __name__ == '__main__':
if line:
try:
self.stats = Stats(line)
- except IOError as err:
+ except OSError as err:
print(err.args[1], file=self.stream)
return
except Exception as err:
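Companion to the profile sketch above: load a dumped stats file back in (assumes 'sum.prof' exists):

    import pstats

    stats = pstats.Stats('sum.prof')
    stats.sort_stats('cumulative').print_stats(5)   # show the five most expensive entries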
diff --git a/Lib/pty.py b/Lib/pty.py
index 3ccf619..5fe53c1 100644
--- a/Lib/pty.py
+++ b/Lib/pty.py
@@ -56,18 +56,18 @@ def _open_terminal():
else:
try:
tty_name, master_fd = sgi._getpty(os.O_RDWR, 0o666, 0)
- except IOError as msg:
- raise os.error(msg)
+ except OSError as msg:
+ raise OSError(msg)
return master_fd, tty_name
for x in 'pqrstuvwxyzPQRST':
for y in '0123456789abcdef':
pty_name = '/dev/pty' + x + y
try:
fd = os.open(pty_name, os.O_RDWR)
- except os.error:
+ except OSError:
continue
return (fd, '/dev/tty' + x + y)
- raise os.error('out of pty devices')
+ raise OSError('out of pty devices')
def slave_open(tty_name):
"""slave_open(tty_name) -> slave_fd
@@ -83,7 +83,7 @@ def slave_open(tty_name):
try:
ioctl(result, I_PUSH, "ptem")
ioctl(result, I_PUSH, "ldterm")
- except IOError:
+ except OSError:
pass
return result
@@ -173,8 +173,9 @@ def spawn(argv, master_read=_read, stdin_read=_read):
restore = 0
try:
_copy(master_fd, master_read, stdin_read)
- except (IOError, OSError):
+ except OSError:
if restore:
tty.tcsetattr(STDIN_FILENO, tty.TCSAFLUSH, mode)
os.close(master_fd)
+ return os.waitpid(pid, 0)[1]
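
A small POSIX-only usage sketch for the return value added above: pty.spawn() now reports the raw status from os.waitpid(), which can be decoded with the usual os helpers. The command is an arbitrary stand-in.

    import os
    import pty

    # spawn() now reports how the child exited instead of returning None.
    status = pty.spawn(['true'])
    if os.WIFEXITED(status):
        print('exit code:', os.WEXITSTATUS(status))
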
diff --git a/Lib/py_compile.py b/Lib/py_compile.py
index 62d69ad..d255a2d 100644
--- a/Lib/py_compile.py
+++ b/Lib/py_compile.py
@@ -3,17 +3,13 @@
This module has intimate knowledge of the format of .pyc files.
"""
-import builtins
-import errno
import imp
-import marshal
+import importlib._bootstrap
+import importlib.machinery
import os
import sys
-import tokenize
import traceback
-MAGIC = imp.get_magic()
-
__all__ = ["compile", "main", "PyCompileError"]
@@ -65,13 +61,6 @@ class PyCompileError(Exception):
return self.msg
-def wr_long(f, x):
- """Internal; write a 32-bit int to a file in little-endian order."""
- f.write(bytes([x & 0xff,
- (x >> 8) & 0xff,
- (x >> 16) & 0xff,
- (x >> 24) & 0xff]))
-
def compile(file, cfile=None, dfile=None, doraise=False, optimize=-1):
"""Byte-compile one Python source file to Python bytecode.
@@ -108,17 +97,16 @@ def compile(file, cfile=None, dfile=None, doraise=False, optimize=-1):
byte-compile all installed files (or all files in selected
directories).
"""
- with tokenize.open(file) as f:
- try:
- st = os.fstat(f.fileno())
- except AttributeError:
- st = os.stat(file)
- timestamp = int(st.st_mtime)
- size = st.st_size & 0xFFFFFFFF
- codestring = f.read()
+ if cfile is None:
+ if optimize >= 0:
+ cfile = imp.cache_from_source(file, debug_override=not optimize)
+ else:
+ cfile = imp.cache_from_source(file)
+ loader = importlib.machinery.SourceFileLoader('<py_compile>', file)
+ source_bytes = loader.get_data(file)
try:
- codeobject = builtins.compile(codestring, dfile or file, 'exec',
- optimize=optimize)
+ code = loader.source_to_code(source_bytes, dfile or file,
+ _optimize=optimize)
except Exception as err:
py_exc = PyCompileError(err.__class__, err, dfile or file)
if doraise:
@@ -126,26 +114,16 @@ def compile(file, cfile=None, dfile=None, doraise=False, optimize=-1):
else:
sys.stderr.write(py_exc.msg + '\n')
return
- if cfile is None:
- if optimize >= 0:
- cfile = imp.cache_from_source(file, debug_override=not optimize)
- else:
- cfile = imp.cache_from_source(file)
try:
dirname = os.path.dirname(cfile)
if dirname:
os.makedirs(dirname)
- except OSError as error:
- if error.errno != errno.EEXIST:
- raise
- with open(cfile, 'wb') as fc:
- fc.write(b'\0\0\0\0')
- wr_long(fc, timestamp)
- wr_long(fc, size)
- marshal.dump(codeobject, fc)
- fc.flush()
- fc.seek(0, 0)
- fc.write(MAGIC)
+ except FileExistsError:
+ pass
+ source_stats = loader.path_stats(file)
+ bytecode = importlib._bootstrap._code_to_bytecode(code,
+ source_stats['mtime'], len(source_bytes))
+ loader._cache_bytecode(file, cfile, bytecode)
return cfile
def main(args=None):
@@ -173,7 +151,7 @@ def main(args=None):
except PyCompileError as error:
rv = 1
sys.stderr.write("%s\n" % error.msg)
- except IOError as error:
+ except OSError as error:
rv = 1
sys.stderr.write("%s\n" % error)
else:
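
The compile() rewrite above keeps the public interface unchanged; a minimal calling sketch (the 'spam.py' path is a placeholder):

    import py_compile

    try:
        # Returns the path of the cached bytecode file (PEP 3147 layout).
        cfile = py_compile.compile('spam.py', doraise=True)
    except py_compile.PyCompileError as err:
        print(err.msg)
    else:
        print('wrote', cfile)
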
diff --git a/Lib/pydoc.py b/Lib/pydoc.py
index 9dce079..4d681b0 100755
--- a/Lib/pydoc.py
+++ b/Lib/pydoc.py
@@ -226,7 +226,7 @@ def synopsis(filename, cache={}):
if lastupdate is None or lastupdate < mtime:
try:
file = tokenize.open(filename)
- except IOError:
+ except OSError:
# module can't be opened, so skip it
return None
binary_suffixes = importlib.machinery.BYTECODE_SUFFIXES[:]
@@ -1396,7 +1396,7 @@ def getpager():
return lambda text: pipepager(text, os.environ['PAGER'])
if os.environ.get('TERM') in ('dumb', 'emacs'):
return plainpager
- if sys.platform == 'win32' or sys.platform.startswith('os2'):
+ if sys.platform == 'win32':
return lambda text: tempfilepager(plain(text), 'more <')
if hasattr(os, 'system') and os.system('(less) 2>/dev/null') == 0:
return lambda text: pipepager(text, 'less')
@@ -1422,16 +1422,15 @@ def pipepager(text, cmd):
try:
pipe.write(text)
pipe.close()
- except IOError:
+ except OSError:
pass # Ignore broken pipes caused by quitting the pager program.
def tempfilepager(text, cmd):
"""Page through text by invoking a program on a temporary file."""
import tempfile
filename = tempfile.mktemp()
- file = open(filename, 'w')
- file.write(text)
- file.close()
+ with open(filename, 'w') as file:
+ file.write(text)
try:
os.system(cmd + ' "' + filename + '"')
finally:
@@ -1850,10 +1849,10 @@ Enter the name of any module, keyword, or topic to get help on writing
Python programs and using Python modules. To quit this help utility and
return to the interpreter, just type "quit".
-To get a list of available modules, keywords, or topics, type "modules",
-"keywords", or "topics". Each module also comes with a one-line summary
-of what it does; to list the modules whose summaries contain a given word
-such as "spam", type "modules spam".
+To get a list of available modules, keywords, symbols, or topics, type
+"modules", "keywords", "symbols", or "topics". Each module also comes
+with a one-line summary of what it does; to list the modules whose name
+or summary contain a given string such as "spam", type "modules spam".
''' % tuple([sys.version[:3]]*2))
def list(self, items, columns=4, width=80):
@@ -1958,9 +1957,10 @@ module "pydoc_data.topics" could not be found.
def listmodules(self, key=''):
if key:
self.output.write('''
-Here is a list of matching modules. Enter any module name to get more help.
+Here is a list of modules whose name or summary contains '{}'.
+If there are any, enter a module name to get more help.
-''')
+'''.format(key))
apropos(key)
else:
self.output.write('''
@@ -1979,7 +1979,7 @@ Please wait a moment while I gather a list of all available modules...
self.list(modules.keys())
self.output.write('''
Enter any module name to get more help. Or, type "modules spam" to search
-for modules whose descriptions contain the word "spam".
+for modules whose name or summary contain the string "spam".
''')
help = Helper()
diff --git a/Lib/quopri.py b/Lib/quopri.py
index 3d0f0ac..e5bd010 100755
--- a/Lib/quopri.py
+++ b/Lib/quopri.py
@@ -223,7 +223,7 @@ def main():
else:
try:
fp = open(file, "rb")
- except IOError as msg:
+ except OSError as msg:
sys.stderr.write("%s: can't open (%s)\n" % (file, msg))
sts = 1
continue
diff --git a/Lib/random.py b/Lib/random.py
index 7d8d4f3..b2e9ae1 100644
--- a/Lib/random.py
+++ b/Lib/random.py
@@ -252,10 +252,11 @@ class Random(_random.Random):
return seq[i]
def shuffle(self, x, random=None, int=int):
- """x, random=random.random -> shuffle list x in place; return None.
+ """Shuffle list x in place, and return None.
- Optional arg random is a 0-argument function returning a random
- float in [0.0, 1.0); by default, the standard random.random.
+ Optional argument random is a 0-argument function returning a
+ random float in [0.0, 1.0); if it is the default None, the
+ standard random.random will be used.
"""
randbelow = self._randbelow
diff --git a/Lib/sched.py b/Lib/sched.py
index ccf8ce9..9a82a89 100644
--- a/Lib/sched.py
+++ b/Lib/sched.py
@@ -71,10 +71,10 @@ class scheduler:
"""
if kwargs is _sentinel:
kwargs = {}
+ event = Event(time, priority, action, argument, kwargs)
with self._lock:
- event = Event(time, priority, action, argument, kwargs)
heapq.heappush(self._queue, event)
- return event # The ID
+ return event # The ID
def enter(self, delay, priority, action, argument=(), kwargs=_sentinel):
"""A variant that specifies the time as a relative time.
@@ -82,9 +82,8 @@ class scheduler:
This is actually the more commonly used interface.
"""
- with self._lock:
- time = self.timefunc() + delay
- return self.enterabs(time, priority, action, argument, kwargs)
+ time = self.timefunc() + delay
+ return self.enterabs(time, priority, action, argument, kwargs)
def cancel(self, event):
"""Remove an event from the queue.
@@ -165,4 +164,4 @@ class scheduler:
# the actual order they would be retrieved.
with self._lock:
events = self._queue[:]
- return map(heapq.heappop, [events]*len(events))
+ return map(heapq.heappop, [events]*len(events))
diff --git a/Lib/shelve.py b/Lib/shelve.py
index cc1815e..cef580e 100644
--- a/Lib/shelve.py
+++ b/Lib/shelve.py
@@ -61,7 +61,7 @@ from io import BytesIO
import collections
-__all__ = ["Shelf","BsdDbShelf","DbfilenameShelf","open"]
+__all__ = ["Shelf", "BsdDbShelf", "DbfilenameShelf", "open"]
class _ClosedDict(collections.MutableMapping):
'Marker for a closed dict. Access attempts raise a ValueError.'
@@ -131,6 +131,12 @@ class Shelf(collections.MutableMapping):
except KeyError:
pass
+ def __enter__(self):
+ return self
+
+ def __exit__(self, type, value, traceback):
+ self.close()
+
def close(self):
self.sync()
try:
@@ -147,6 +153,7 @@ class Shelf(collections.MutableMapping):
def __del__(self):
if not hasattr(self, 'writeback'):
# __init__ didn't succeed, so don't bother closing
+ # see http://bugs.python.org/issue1339007 for details
return
self.close()
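
The __enter__/__exit__ pair added above makes Shelf objects usable as context managers; a short sketch with a made-up file name:

    import shelve

    # close() is now called automatically when the block exits.
    with shelve.open('spam.db') as db:
        db['key'] = [1, 2, 3]
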
diff --git a/Lib/shutil.py b/Lib/shutil.py
index a188408..c2f0278 100644
--- a/Lib/shutil.py
+++ b/Lib/shutil.py
@@ -39,17 +39,20 @@ __all__ = ["copyfileobj", "copyfile", "copymode", "copystat", "copy", "copy2",
"ignore_patterns", "chown", "which"]
# disk_usage is added later, if available on the platform
-class Error(EnvironmentError):
+class Error(OSError):
pass
-class SpecialFileError(EnvironmentError):
+class SameFileError(Error):
+ """Raised when source and destination are the same file."""
+
+class SpecialFileError(OSError):
"""Raised when trying to do a kind of operation (e.g. copying) which is
not supported on a special file (e.g. a named pipe)"""
-class ExecError(EnvironmentError):
+class ExecError(OSError):
"""Raised when a command could not be executed"""
-class ReadError(EnvironmentError):
+class ReadError(OSError):
"""Raised when an archive cannot be read"""
class RegistryError(Exception):
@@ -57,11 +60,6 @@ class RegistryError(Exception):
and unpacking registeries fails"""
-try:
- WindowsError
-except NameError:
- WindowsError = None
-
def copyfileobj(fsrc, fdst, length=16*1024):
"""copy data from file-like object fsrc to file-like object fdst"""
while 1:
@@ -90,7 +88,7 @@ def copyfile(src, dst, *, follow_symlinks=True):
"""
if _samefile(src, dst):
- raise Error("`%s` and `%s` are the same file" % (src, dst))
+ raise SameFileError("{!r} and {!r} are the same file".format(src, dst))
for fn in [src, dst]:
try:
@@ -221,6 +219,9 @@ def copy(src, dst, *, follow_symlinks=True):
If follow_symlinks is false, symlinks won't be followed. This
resembles GNU's "cp -P src dst".
+ If source and destination are the same file, a SameFileError will be
+ raised.
+
"""
if os.path.isdir(dst):
dst = os.path.join(dst, os.path.basename(src))
@@ -329,15 +330,13 @@ def copytree(src, dst, symlinks=False, ignore=None, copy_function=copy2,
# continue with other files
except Error as err:
errors.extend(err.args[0])
- except EnvironmentError as why:
+ except OSError as why:
errors.append((srcname, dstname, str(why)))
try:
copystat(src, dst)
except OSError as why:
- if WindowsError is not None and isinstance(why, WindowsError):
- # Copying file access times may fail on Windows
- pass
- else:
+ # Copying file access times may fail on Windows
+ if why.winerror is None:
errors.append((src, dst, str(why)))
if errors:
raise Error(errors)
@@ -356,24 +355,24 @@ def _rmtree_unsafe(path, onerror):
names = []
try:
names = os.listdir(path)
- except os.error:
+ except OSError:
onerror(os.listdir, path, sys.exc_info())
for name in names:
fullname = os.path.join(path, name)
try:
mode = os.lstat(fullname).st_mode
- except os.error:
+ except OSError:
mode = 0
if stat.S_ISDIR(mode):
_rmtree_unsafe(fullname, onerror)
else:
try:
os.unlink(fullname)
- except os.error:
+ except OSError:
onerror(os.unlink, fullname, sys.exc_info())
try:
os.rmdir(path)
- except os.error:
+ except OSError:
onerror(os.rmdir, path, sys.exc_info())
# Version using fd-based APIs to protect against races
@@ -464,7 +463,7 @@ def rmtree(path, ignore_errors=False, onerror=None):
_rmtree_safe_fd(fd, path, onerror)
try:
os.rmdir(path)
- except os.error:
+ except OSError:
onerror(os.rmdir, path, sys.exc_info())
else:
try:
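
With the exception hierarchy reworked above, copying a file onto itself raises the more specific SameFileError, which still derives from shutil.Error (and, transitively, OSError), so existing handlers keep working. A brief sketch with a placeholder path:

    import shutil

    try:
        shutil.copyfile('spam.txt', 'spam.txt')
    except shutil.SameFileError as err:
        print('skipped:', err)
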
diff --git a/Lib/site.py b/Lib/site.py
index b751006..acaeb3e 100644
--- a/Lib/site.py
+++ b/Lib/site.py
@@ -146,14 +146,14 @@ def addpackage(sitedir, name, known_paths):
and add that to known_paths, or execute it if it starts with 'import '.
"""
if known_paths is None:
- _init_pathinfo()
+ known_paths = _init_pathinfo()
reset = 1
else:
reset = 0
fullname = os.path.join(sitedir, name)
try:
f = open(fullname, "r")
- except IOError:
+ except OSError:
return
with f:
for n, line in enumerate(f):
@@ -196,7 +196,7 @@ def addsitedir(sitedir, known_paths=None):
known_paths.add(sitedircase)
try:
names = os.listdir(sitedir)
- except os.error:
+ except OSError:
return
names = [name for name in names if name.endswith(".pth")]
for name in sorted(names):
@@ -300,9 +300,7 @@ def getsitepackages(prefixes=None):
continue
seen.add(prefix)
- if sys.platform in ('os2emx', 'riscos'):
- sitepackages.append(os.path.join(prefix, "Lib", "site-packages"))
- elif os.sep == '/':
+ if os.sep == '/':
sitepackages.append(os.path.join(prefix, "lib",
"python" + sys.version[:3],
"site-packages"))
@@ -329,23 +327,6 @@ def addsitepackages(known_paths, prefixes=None):
return known_paths
-def setBEGINLIBPATH():
- """The OS/2 EMX port has optional extension modules that do double duty
- as DLLs (and must use the .DLL file extension) for other extensions.
- The library search path needs to be amended so these will be found
- during module import. Use BEGINLIBPATH so that these are at the start
- of the library search path.
-
- """
- dllpath = os.path.join(sys.prefix, "Lib", "lib-dynload")
- libpath = os.environ['BEGINLIBPATH'].split(';')
- if libpath[-1]:
- libpath.append(dllpath)
- else:
- libpath[-1] = dllpath
- os.environ['BEGINLIBPATH'] = ';'.join(libpath)
-
-
def setquit():
"""Define new builtins 'quit' and 'exit'.
@@ -403,11 +384,10 @@ class _Printer(object):
for filename in self.__files:
filename = os.path.join(dir, filename)
try:
- fp = open(filename, "r")
- data = fp.read()
- fp.close()
+ with open(filename, "r") as fp:
+ data = fp.read()
break
- except IOError:
+ except OSError:
pass
if data:
break
@@ -593,8 +573,6 @@ def main():
ENABLE_USER_SITE = check_enableusersite()
known_paths = addusersitepackages(known_paths)
known_paths = addsitepackages(known_paths)
- if sys.platform == 'os2emx':
- setBEGINLIBPATH()
setquit()
setcopyright()
sethelper()
diff --git a/Lib/smtpd.py b/Lib/smtpd.py
index 778d6d6..3cab264 100755
--- a/Lib/smtpd.py
+++ b/Lib/smtpd.py
@@ -137,7 +137,7 @@ class SMTPChannel(asynchat.async_chat):
self.num_bytes = 0
try:
self.peer = conn.getpeername()
- except socket.error as err:
+ except OSError as err:
# a race condition may occur if the other end is closing
# before we can get the peername
self.close()
@@ -668,7 +668,7 @@ class PureProxy(SMTPServer):
except smtplib.SMTPRecipientsRefused as e:
print('got SMTPRecipientsRefused', file=DEBUGSTREAM)
refused = e.recipients
- except (socket.error, smtplib.SMTPException) as e:
+ except (OSError, smtplib.SMTPException) as e:
print('got', e.__class__, file=DEBUGSTREAM)
# All recipients were refused. If the exception had an associated
# error code, use it. Otherwise,fake it with a non-triggering
@@ -850,8 +850,7 @@ if __name__ == '__main__':
nobody = pwd.getpwnam('nobody')[2]
try:
os.setuid(nobody)
- except OSError as e:
- if e.errno != errno.EPERM: raise
+ except PermissionError:
print('Cannot setuid "nobody"; try running with -n option.', file=sys.stderr)
sys.exit(1)
try:
diff --git a/Lib/smtplib.py b/Lib/smtplib.py
index 562a182..69c0186 100644
--- a/Lib/smtplib.py
+++ b/Lib/smtplib.py
@@ -309,7 +309,7 @@ class SMTP:
try:
port = int(port)
except ValueError:
- raise socket.error("nonnumeric port")
+ raise OSError("nonnumeric port")
if not port:
port = self.default_port
if self.debuglevel > 0:
@@ -330,7 +330,7 @@ class SMTP:
s = s.encode("ascii")
try:
self.sock.sendall(s)
- except socket.error:
+ except OSError:
self.close()
raise SMTPServerDisconnected('Server not connected')
else:
@@ -363,7 +363,7 @@ class SMTP:
while 1:
try:
line = self.file.readline()
- except socket.error as e:
+ except OSError as e:
self.close()
raise SMTPServerDisconnected("Connection unexpectedly closed: "
+ str(e))
@@ -929,7 +929,7 @@ class LMTP(SMTP):
self.sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
self.file = None
self.sock.connect(host)
- except socket.error:
+ except OSError:
if self.debuglevel > 0:
print('connect fail:', host, file=stderr)
if self.sock:
diff --git a/Lib/sndhdr.py b/Lib/sndhdr.py
index 9f5dcc9..240e507 100644
--- a/Lib/sndhdr.py
+++ b/Lib/sndhdr.py
@@ -11,7 +11,7 @@ The return tuple contains the following items, in this order:
- number of bits/sample, or 'U' for U-LAW, or 'A' for A-LAW
If the file doesn't have a recognizable type, it returns None.
-If the file can't be opened, IOError is raised.
+If the file can't be opened, OSError is raised.
To compute the total time, divide the number of frames by the
sampling rate (a frame contains a sample for each channel).
@@ -137,14 +137,17 @@ tests.append(test_voc)
def test_wav(h, f):
+ import wave
# 'RIFF' <len> 'WAVE' 'fmt ' <len>
if not h.startswith(b'RIFF') or h[8:12] != b'WAVE' or h[12:16] != b'fmt ':
return None
- style = get_short_le(h[20:22])
- nchannels = get_short_le(h[22:24])
- rate = get_long_le(h[24:28])
- sample_bits = get_short_le(h[34:36])
- return 'wav', rate, nchannels, -1, sample_bits
+ f.seek(0)
+ try:
+ w = wave.openfp(f, 'r')
+ except (EOFError, wave.Error):
+ return None
+ return ('wav', w.getframerate(), w.getnchannels(),
+ w.getnframes(), 8*w.getsampwidth())
tests.append(test_wav)
@@ -230,7 +233,7 @@ def testall(list, recursive, toplevel):
sys.stdout.flush()
try:
print(what(filename))
- except IOError:
+ except OSError:
print('*** not found ***')
if __name__ == '__main__':
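
After the test_wav() change above, sndhdr reports the real frame count for WAV files instead of -1. An illustrative call; the file name is a placeholder:

    import sndhdr

    # Returns (type, sampling_rate, channels, frames, bits_per_sample),
    # or None if the format is not recognized.
    info = sndhdr.what('spam.wav')
    print(info)
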
diff --git a/Lib/socket.py b/Lib/socket.py
index d4f1b65..da0c39a 100644
--- a/Lib/socket.py
+++ b/Lib/socket.py
@@ -291,7 +291,7 @@ class SocketIO(io.RawIOBase):
self._checkClosed()
self._checkReadable()
if self._timeout_occurred:
- raise IOError("cannot read from timed out object")
+ raise OSError("cannot read from timed out object")
while True:
try:
return self._sock.recv_into(b)
diff --git a/Lib/socketserver.py b/Lib/socketserver.py
index 8332fdf..e9e4e4e 100644
--- a/Lib/socketserver.py
+++ b/Lib/socketserver.py
@@ -299,7 +299,7 @@ class BaseServer:
"""
try:
request, client_address = self.get_request()
- except socket.error:
+ except OSError:
return
if self.verify_request(request, client_address):
try:
@@ -479,7 +479,7 @@ class TCPServer(BaseServer):
#explicitly shutdown. socket.close() merely releases
#the socket and waits for GC to perform the actual close.
request.shutdown(socket.SHUT_WR)
- except socket.error:
+ except OSError:
pass #some platforms may raise ENOTCONN here
self.close_request(request)
@@ -532,7 +532,7 @@ class ForkingMixIn:
# children.
try:
pid, status = os.waitpid(0, 0)
- except os.error:
+ except OSError:
pid = None
if pid not in self.active_children: continue
self.active_children.remove(pid)
@@ -545,7 +545,7 @@ class ForkingMixIn:
for child in self.active_children:
try:
pid, status = os.waitpid(child, os.WNOHANG)
- except os.error:
+ except OSError:
pid = None
if not pid: continue
try:
diff --git a/Lib/sqlite3/test/dbapi.py b/Lib/sqlite3/test/dbapi.py
index b7ec1ad..04649fc 100644
--- a/Lib/sqlite3/test/dbapi.py
+++ b/Lib/sqlite3/test/dbapi.py
@@ -28,6 +28,9 @@ try:
except ImportError:
threading = None
+from test.support import TESTFN, unlink
+
+
class ModuleTests(unittest.TestCase):
def CheckAPILevel(self):
self.assertEqual(sqlite.apilevel, "2.0",
@@ -163,6 +166,21 @@ class ConnectionTests(unittest.TestCase):
with self.assertRaises(AttributeError):
self.cx.in_transaction = True
+ def CheckOpenUri(self):
+ if sqlite.sqlite_version_info < (3, 7, 7):
+ with self.assertRaises(sqlite.NotSupportedError):
+ sqlite.connect(':memory:', uri=True)
+ return
+ self.addCleanup(unlink, TESTFN)
+ with sqlite.connect(TESTFN) as cx:
+ cx.execute('create table test(id integer)')
+ with sqlite.connect('file:' + TESTFN, uri=True) as cx:
+ cx.execute('insert into test(id) values(0)')
+ with sqlite.connect('file:' + TESTFN + '?mode=ro', uri=True) as cx:
+ with self.assertRaises(sqlite.OperationalError):
+ cx.execute('insert into test(id) values(1)')
+
+
class CursorTests(unittest.TestCase):
def setUp(self):
self.cx = sqlite.connect(":memory:")
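
The new CheckOpenUri test exercises SQLite URI filenames, available when the underlying library is 3.7.7 or newer. A minimal sketch outside the test suite; the database path and table name are placeholders:

    import sqlite3

    # Open an existing database read-only via a URI filename.
    con = sqlite3.connect('file:spam.db?mode=ro', uri=True)
    try:
        for row in con.execute('select * from test'):
            print(row)
    finally:
        con.close()
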
diff --git a/Lib/ssl.py b/Lib/ssl.py
index 6ff5c53..c8311bc 100644
--- a/Lib/ssl.py
+++ b/Lib/ssl.py
@@ -52,6 +52,37 @@ PROTOCOL_SSLv2
PROTOCOL_SSLv3
PROTOCOL_SSLv23
PROTOCOL_TLSv1
+
+The following constants identify various SSL alert message descriptions as per
+http://www.iana.org/assignments/tls-parameters/tls-parameters.xml#tls-parameters-6
+
+ALERT_DESCRIPTION_CLOSE_NOTIFY
+ALERT_DESCRIPTION_UNEXPECTED_MESSAGE
+ALERT_DESCRIPTION_BAD_RECORD_MAC
+ALERT_DESCRIPTION_RECORD_OVERFLOW
+ALERT_DESCRIPTION_DECOMPRESSION_FAILURE
+ALERT_DESCRIPTION_HANDSHAKE_FAILURE
+ALERT_DESCRIPTION_BAD_CERTIFICATE
+ALERT_DESCRIPTION_UNSUPPORTED_CERTIFICATE
+ALERT_DESCRIPTION_CERTIFICATE_REVOKED
+ALERT_DESCRIPTION_CERTIFICATE_EXPIRED
+ALERT_DESCRIPTION_CERTIFICATE_UNKNOWN
+ALERT_DESCRIPTION_ILLEGAL_PARAMETER
+ALERT_DESCRIPTION_UNKNOWN_CA
+ALERT_DESCRIPTION_ACCESS_DENIED
+ALERT_DESCRIPTION_DECODE_ERROR
+ALERT_DESCRIPTION_DECRYPT_ERROR
+ALERT_DESCRIPTION_PROTOCOL_VERSION
+ALERT_DESCRIPTION_INSUFFICIENT_SECURITY
+ALERT_DESCRIPTION_INTERNAL_ERROR
+ALERT_DESCRIPTION_USER_CANCELLED
+ALERT_DESCRIPTION_NO_RENEGOTIATION
+ALERT_DESCRIPTION_UNSUPPORTED_EXTENSION
+ALERT_DESCRIPTION_CERTIFICATE_UNOBTAINABLE
+ALERT_DESCRIPTION_UNRECOGNIZED_NAME
+ALERT_DESCRIPTION_BAD_CERTIFICATE_STATUS_RESPONSE
+ALERT_DESCRIPTION_BAD_CERTIFICATE_HASH_VALUE
+ALERT_DESCRIPTION_UNKNOWN_PSK_IDENTITY
"""
import textwrap
@@ -66,35 +97,24 @@ from _ssl import (
SSLSyscallError, SSLEOFError,
)
from _ssl import CERT_NONE, CERT_OPTIONAL, CERT_REQUIRED
-from _ssl import (
- OP_ALL, OP_NO_SSLv2, OP_NO_SSLv3, OP_NO_TLSv1,
- OP_CIPHER_SERVER_PREFERENCE, OP_SINGLE_DH_USE
- )
-try:
- from _ssl import OP_NO_COMPRESSION
-except ImportError:
- pass
-try:
- from _ssl import OP_SINGLE_ECDH_USE
-except ImportError:
- pass
from _ssl import RAND_status, RAND_egd, RAND_add, RAND_bytes, RAND_pseudo_bytes
-from _ssl import (
- SSL_ERROR_ZERO_RETURN,
- SSL_ERROR_WANT_READ,
- SSL_ERROR_WANT_WRITE,
- SSL_ERROR_WANT_X509_LOOKUP,
- SSL_ERROR_SYSCALL,
- SSL_ERROR_SSL,
- SSL_ERROR_WANT_CONNECT,
- SSL_ERROR_EOF,
- SSL_ERROR_INVALID_ERROR_CODE,
- )
+
+def _import_symbols(prefix):
+ for n in dir(_ssl):
+ if n.startswith(prefix):
+ globals()[n] = getattr(_ssl, n)
+
+_import_symbols('OP_')
+_import_symbols('ALERT_DESCRIPTION_')
+_import_symbols('SSL_ERROR_')
+
from _ssl import HAS_SNI, HAS_ECDH, HAS_NPN
+
from _ssl import (PROTOCOL_SSLv3, PROTOCOL_SSLv23,
PROTOCOL_TLSv1)
from _ssl import _OPENSSL_API_VERSION
+
_PROTOCOL_NAMES = {
PROTOCOL_TLSv1: "TLSv1",
PROTOCOL_SSLv23: "SSLv23",
@@ -109,12 +129,14 @@ else:
_PROTOCOL_NAMES[PROTOCOL_SSLv2] = "SSLv2"
from socket import getnameinfo as _getnameinfo
-from socket import error as socket_error
from socket import socket, AF_INET, SOCK_STREAM, create_connection
import base64 # for DER-to-PEM translation
import traceback
import errno
+
+socket_error = OSError # keep that public name in module namespace
+
if _ssl.HAS_TLS_UNIQUE:
CHANNEL_BINDING_TYPES = ['tls-unique']
else:
@@ -188,7 +210,7 @@ class SSLContext(_SSLContext):
"""An SSLContext holds various SSL-related configuration options and
data, such as certificates and possibly a private key."""
- __slots__ = ('protocol',)
+ __slots__ = ('protocol', '__weakref__')
def __new__(cls, protocol, *args, **kwargs):
self = _SSLContext.__new__(cls, protocol)
@@ -236,7 +258,7 @@ class SSLSocket(socket):
_context=None):
if _context:
- self.context = _context
+ self._context = _context
else:
if server_side and not certfile:
raise ValueError("certfile must be specified for server-side "
@@ -245,16 +267,16 @@ class SSLSocket(socket):
raise ValueError("certfile must be specified")
if certfile and not keyfile:
keyfile = certfile
- self.context = SSLContext(ssl_version)
- self.context.verify_mode = cert_reqs
+ self._context = SSLContext(ssl_version)
+ self._context.verify_mode = cert_reqs
if ca_certs:
- self.context.load_verify_locations(ca_certs)
+ self._context.load_verify_locations(ca_certs)
if certfile:
- self.context.load_cert_chain(certfile, keyfile)
+ self._context.load_cert_chain(certfile, keyfile)
if npn_protocols:
- self.context.set_npn_protocols(npn_protocols)
+ self._context.set_npn_protocols(npn_protocols)
if ciphers:
- self.context.set_ciphers(ciphers)
+ self._context.set_ciphers(ciphers)
self.keyfile = keyfile
self.certfile = certfile
self.cert_reqs = cert_reqs
@@ -279,7 +301,7 @@ class SSLSocket(socket):
# see if it's connected
try:
sock.getpeername()
- except socket_error as e:
+ except OSError as e:
if e.errno != errno.ENOTCONN:
raise
else:
@@ -296,7 +318,7 @@ class SSLSocket(socket):
if connected:
# create the SSL object
try:
- self._sslobj = self.context._wrap_socket(self, server_side,
+ self._sslobj = self._context._wrap_socket(self, server_side,
server_hostname)
if do_handshake_on_connect:
timeout = self.gettimeout()
@@ -305,9 +327,17 @@ class SSLSocket(socket):
raise ValueError("do_handshake_on_connect should not be specified for non-blocking sockets")
self.do_handshake()
- except socket_error as x:
+ except OSError as x:
self.close()
raise x
+ @property
+ def context(self):
+ return self._context
+
+ @context.setter
+ def context(self, ctx):
+ self._context = ctx
+ self._sslobj.context = ctx
def dup(self):
raise NotImplemented("Can't dup() %s instances" %
@@ -533,7 +563,7 @@ class SSLSocket(socket):
self.do_handshake()
self._connected = True
return rc
- except socket_error:
+ except OSError:
self._sslobj = None
raise
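
The new SSLSocket.context property introduced above exposes (and, via the setter, replaces) the SSLContext bound to a wrapped socket. A rough sketch, assuming network access to an arbitrary placeholder host:

    import socket
    import ssl

    ctx = ssl.SSLContext(ssl.PROTOCOL_SSLv23)
    sock = socket.create_connection(('example.com', 443))
    ssock = ctx.wrap_socket(sock)
    # The wrapping SSLContext is now reachable (and swappable) from the socket.
    print(ssock.context is ctx)
    ssock.close()
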
diff --git a/Lib/subprocess.py b/Lib/subprocess.py
index 689046e..f69b42a 100644
--- a/Lib/subprocess.py
+++ b/Lib/subprocess.py
@@ -395,8 +395,6 @@ if mswindows:
hStdOutput = None
hStdError = None
wShowWindow = 0
- class pywintypes:
- error = IOError
else:
import select
_has_poll = hasattr(select, 'poll')
@@ -823,7 +821,7 @@ class Popen(object):
for f in filter(None, (self.stdin, self.stdout, self.stderr)):
try:
f.close()
- except EnvironmentError:
+ except OSError:
pass # Ignore EBADF or other errors.
# Make sure the child pipes are closed as well.
@@ -837,7 +835,7 @@ class Popen(object):
for fd in to_close:
try:
os.close(fd)
- except EnvironmentError:
+ except OSError:
pass
raise
@@ -901,7 +899,7 @@ class Popen(object):
if input:
try:
self.stdin.write(input)
- except IOError as e:
+ except OSError as e:
if e.errno != errno.EPIPE and e.errno != errno.EINVAL:
raise
self.stdin.close()
@@ -1033,23 +1031,6 @@ class Popen(object):
return Handle(h)
- def _find_w9xpopen(self):
- """Find and return absolut path to w9xpopen.exe"""
- w9xpopen = os.path.join(
- os.path.dirname(_winapi.GetModuleFileName(0)),
- "w9xpopen.exe")
- if not os.path.exists(w9xpopen):
- # Eeek - file-not-found - possibly an embedding
- # situation - see if we can locate it in sys.exec_prefix
- w9xpopen = os.path.join(os.path.dirname(sys.base_exec_prefix),
- "w9xpopen.exe")
- if not os.path.exists(w9xpopen):
- raise RuntimeError("Cannot locate w9xpopen.exe, which is "
- "needed for Popen to work with your "
- "shell or platform.")
- return w9xpopen
-
-
def _execute_child(self, args, executable, preexec_fn, close_fds,
pass_fds, cwd, env,
startupinfo, creationflags, shell,
@@ -1078,21 +1059,6 @@ class Popen(object):
startupinfo.wShowWindow = _winapi.SW_HIDE
comspec = os.environ.get("COMSPEC", "cmd.exe")
args = '{} /c "{}"'.format (comspec, args)
- if (_winapi.GetVersion() >= 0x80000000 or
- os.path.basename(comspec).lower() == "command.com"):
- # Win9x, or using command.com on NT. We need to
- # use the w9xpopen intermediate program. For more
- # information, see KB Q150956
- # (http://web.archive.org/web/20011105084002/http://support.microsoft.com/support/kb/articles/Q150/9/56.asp)
- w9xpopen = self._find_w9xpopen()
- args = '"%s" %s' % (w9xpopen, args)
- # Not passing CREATE_NEW_CONSOLE has been known to
- # cause random failures on win9x. Specifically a
- # dialog: "Your program accessed mem currently in
- # use at xxx" and a hopeful warning about the
- # stability of your system. Cost is Ctrl+C won't
- # kill children.
- creationflags |= _winapi.CREATE_NEW_CONSOLE
# Start the process
try:
@@ -1104,12 +1070,6 @@ class Popen(object):
env,
cwd,
startupinfo)
- except pywintypes.error as e:
- # Translate pywintypes.error to WindowsError, which is
- # a subclass of OSError. FIXME: We should really
- # translate errno using _sys_errlist (or similar), but
- # how can this be done from Python?
- raise WindowsError(*e.args)
finally:
# Child is launched. Close the parent's copy of those pipe
# handles that only the child should have open. You need
@@ -1194,7 +1154,7 @@ class Popen(object):
if input is not None:
try:
self.stdin.write(input)
- except IOError as e:
+ except OSError as e:
if e.errno != errno.EPIPE:
raise
self.stdin.close()
@@ -1414,13 +1374,13 @@ class Popen(object):
exception_name, hex_errno, err_msg = (
errpipe_data.split(b':', 2))
except ValueError:
- exception_name = b'RuntimeError'
+ exception_name = b'SubprocessError'
hex_errno = b'0'
err_msg = (b'Bad exception data from child: ' +
repr(errpipe_data))
child_exception_type = getattr(
builtins, exception_name.decode('ascii'),
- RuntimeError)
+ SubprocessError)
err_msg = err_msg.decode(errors="surrogatepass")
if issubclass(child_exception_type, OSError) and hex_errno:
errno_num = int(hex_errno, 16)
@@ -1450,11 +1410,11 @@ class Popen(object):
self.returncode = _WEXITSTATUS(sts)
else:
# Should never happen
- raise RuntimeError("Unknown child exit status!")
+ raise SubprocessError("Unknown child exit status!")
def _internal_poll(self, _deadstate=None, _waitpid=os.waitpid,
- _WNOHANG=os.WNOHANG, _os_error=os.error, _ECHILD=errno.ECHILD):
+ _WNOHANG=os.WNOHANG, _ECHILD=errno.ECHILD):
"""Check if child process has terminated. Returns returncode
attribute.
@@ -1467,7 +1427,7 @@ class Popen(object):
pid, sts = _waitpid(self.pid, _WNOHANG)
if pid == self.pid:
self._handle_exitstatus(sts)
- except _os_error as e:
+ except OSError as e:
if _deadstate is not None:
self.returncode = _deadstate
elif e.errno == _ECHILD:
@@ -1624,7 +1584,7 @@ class Popen(object):
raise TimeoutExpired(self.args, orig_timeout)
try:
ready = poller.poll(timeout)
- except select.error as e:
+ except OSError as e:
if e.args[0] == errno.EINTR:
continue
raise
@@ -1692,7 +1652,7 @@ class Popen(object):
(rlist, wlist, xlist) = \
select.select(self._read_set, self._write_set, [],
timeout)
- except select.error as e:
+ except OSError as e:
if e.args[0] == errno.EINTR:
continue
raise
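
With the hunks above, errors reported from the forked child fall back to SubprocessError rather than RuntimeError; SubprocessError is also the common base of CalledProcessError and TimeoutExpired, so it works as a catch-all. A brief sketch with an arbitrary command:

    import subprocess

    try:
        subprocess.check_call(['sleep', '10'], timeout=1)
    except subprocess.SubprocessError as err:
        # Catches TimeoutExpired, CalledProcessError, and child-setup errors.
        print('subprocess failed:', err)
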
diff --git a/Lib/symbol.py b/Lib/symbol.py
index 34143b5..5cf4a65 100755
--- a/Lib/symbol.py
+++ b/Lib/symbol.py
@@ -100,7 +100,7 @@ for _name, _value in list(globals().items()):
sym_name[_value] = _name
-def main():
+def _main():
import sys
import token
if len(sys.argv) == 1:
@@ -108,4 +108,4 @@ def main():
token._main()
if __name__ == "__main__":
- main()
+ _main()
diff --git a/Lib/symtable.py b/Lib/symtable.py
index 39c1a80..c0e32df 100644
--- a/Lib/symtable.py
+++ b/Lib/symtable.py
@@ -235,7 +235,8 @@ class Symbol(object):
if __name__ == "__main__":
import os, sys
- src = open(sys.argv[0]).read()
+ with open(sys.argv[0]) as f:
+ src = f.read()
mod = symtable(src, os.path.split(sys.argv[0])[1], "exec")
for ident in mod.get_identifiers():
info = mod.lookup(ident)
diff --git a/Lib/sysconfig.py b/Lib/sysconfig.py
index 2f5f740..2095657 100644
--- a/Lib/sysconfig.py
+++ b/Lib/sysconfig.py
@@ -52,25 +52,6 @@ _INSTALL_SCHEMES = {
'scripts': '{base}/Scripts',
'data': '{base}',
},
- 'os2': {
- 'stdlib': '{installed_base}/Lib',
- 'platstdlib': '{base}/Lib',
- 'purelib': '{base}/Lib/site-packages',
- 'platlib': '{base}/Lib/site-packages',
- 'include': '{installed_base}/Include',
- 'platinclude': '{installed_base}/Include',
- 'scripts': '{base}/Scripts',
- 'data': '{base}',
- },
- 'os2_home': {
- 'stdlib': '{userbase}/lib/python{py_version_short}',
- 'platstdlib': '{userbase}/lib/python{py_version_short}',
- 'purelib': '{userbase}/lib/python{py_version_short}/site-packages',
- 'platlib': '{userbase}/lib/python{py_version_short}/site-packages',
- 'include': '{userbase}/include/python{py_version_short}',
- 'scripts': '{userbase}/bin',
- 'data': '{userbase}',
- },
'nt_user': {
'stdlib': '{userbase}/Python{py_version_nodot}',
'platstdlib': '{userbase}/Python{py_version_nodot}',
@@ -210,7 +191,6 @@ def _getuserbase():
def joinuser(*args):
return os.path.expanduser(os.path.join(*args))
- # what about 'os2emx', 'riscos' ?
if os.name == "nt":
base = os.environ.get("APPDATA") or "~"
if env_base:
@@ -369,21 +349,21 @@ def _generate_posix_vars():
makefile = get_makefile_filename()
try:
_parse_makefile(makefile, vars)
- except IOError as e:
+ except OSError as e:
msg = "invalid Python installation: unable to open %s" % makefile
if hasattr(e, "strerror"):
msg = msg + " (%s)" % e.strerror
- raise IOError(msg)
+ raise OSError(msg)
# load the installed pyconfig.h:
config_h = get_config_h_filename()
try:
with open(config_h) as f:
parse_config_h(f, vars)
- except IOError as e:
+ except OSError as e:
msg = "invalid Python installation: unable to open %s" % config_h
if hasattr(e, "strerror"):
msg = msg + " (%s)" % e.strerror
- raise IOError(msg)
+ raise OSError(msg)
# On AIX, there are wrong paths to the linker scripts in the Makefile
# -- these paths are relative to the Python source, but when installed
# the scripts are in another directory.
@@ -436,7 +416,6 @@ def _init_non_posix(vars):
vars['LIBDEST'] = get_path('stdlib')
vars['BINLIBDEST'] = get_path('platstdlib')
vars['INCLUDEPY'] = get_path('include')
- vars['SO'] = '.pyd'
vars['EXT_SUFFIX'] = '.pyd'
vars['EXE'] = '.exe'
vars['VERSION'] = _PY_VERSION_SHORT_NO_DOT
@@ -552,7 +531,7 @@ def get_config_vars(*args):
# sys.abiflags may not be defined on all platforms.
_CONFIG_VARS['abiflags'] = ''
- if os.name in ('nt', 'os2'):
+ if os.name == 'nt':
_init_non_posix(_CONFIG_VARS)
if os.name == 'posix':
_init_posix(_CONFIG_VARS)
diff --git a/Lib/tabnanny.py b/Lib/tabnanny.py
index 5b9b444..46e0f56 100755
--- a/Lib/tabnanny.py
+++ b/Lib/tabnanny.py
@@ -95,7 +95,7 @@ def check(file):
try:
f = tokenize.open(file)
- except IOError as msg:
+ except OSError as msg:
errprint("%r: I/O Error: %s" % (file, msg))
return
diff --git a/Lib/tarfile.py b/Lib/tarfile.py
index 11b4b68..ffa7327 100644
--- a/Lib/tarfile.py
+++ b/Lib/tarfile.py
@@ -56,9 +56,9 @@ except ImportError:
# os.symlink on Windows prior to 6.0 raises NotImplementedError
symlink_exception = (AttributeError, NotImplementedError)
try:
- # WindowsError (1314) will be raised if the caller does not hold the
+ # OSError (winerror=1314) will be raised if the caller does not hold the
# SeCreateSymbolicLinkPrivilege privilege
- symlink_exception += (WindowsError,)
+ symlink_exception += (OSError,)
except NameError:
pass
@@ -264,13 +264,13 @@ def copyfileobj(src, dst, length=None):
for b in range(blocks):
buf = src.read(BUFSIZE)
if len(buf) < BUFSIZE:
- raise IOError("end of file reached")
+ raise OSError("end of file reached")
dst.write(buf)
if remainder != 0:
buf = src.read(remainder)
if len(buf) < remainder:
- raise IOError("end of file reached")
+ raise OSError("end of file reached")
dst.write(buf)
return
@@ -399,7 +399,7 @@ class _Stream:
if mode == "r":
self.dbuf = b""
self.cmp = bz2.BZ2Decompressor()
- self.exception = IOError
+ self.exception = OSError
else:
self.cmp = bz2.BZ2Compressor()
@@ -1631,7 +1631,7 @@ class TarFile(object):
try:
fileobj = gzip.GzipFile(name, mode + "b", compresslevel, fileobj)
t = cls.taropen(name, mode, fileobj, **kwargs)
- except IOError:
+ except OSError:
if not extfileobj and fileobj is not None:
fileobj.close()
if fileobj is None:
@@ -1662,7 +1662,7 @@ class TarFile(object):
try:
t = cls.taropen(name, mode, fileobj, **kwargs)
- except (IOError, EOFError):
+ except (OSError, EOFError):
fileobj.close()
raise ReadError("not a bzip2 file")
t._extfileobj = False
@@ -2022,7 +2022,7 @@ class TarFile(object):
try:
self._extract_member(tarinfo, os.path.join(path, tarinfo.name),
set_attrs=set_attrs)
- except EnvironmentError as e:
+ except OSError as e:
if self.errorlevel > 0:
raise
else:
@@ -2211,9 +2211,8 @@ class TarFile(object):
if tarinfo.issym() and hasattr(os, "lchown"):
os.lchown(targetpath, u, g)
else:
- if sys.platform != "os2emx":
- os.chown(targetpath, u, g)
- except EnvironmentError as e:
+ os.chown(targetpath, u, g)
+ except OSError as e:
raise ExtractError("could not change owner")
def chmod(self, tarinfo, targetpath):
@@ -2222,7 +2221,7 @@ class TarFile(object):
if hasattr(os, 'chmod'):
try:
os.chmod(targetpath, tarinfo.mode)
- except EnvironmentError as e:
+ except OSError as e:
raise ExtractError("could not change mode")
def utime(self, tarinfo, targetpath):
@@ -2232,7 +2231,7 @@ class TarFile(object):
return
try:
os.utime(targetpath, (tarinfo.mtime, tarinfo.mtime))
- except EnvironmentError as e:
+ except OSError as e:
raise ExtractError("could not change modification time")
#--------------------------------------------------------------------------
@@ -2323,9 +2322,9 @@ class TarFile(object):
corresponds to TarFile's mode.
"""
if self.closed:
- raise IOError("%s is closed" % self.__class__.__name__)
+ raise OSError("%s is closed" % self.__class__.__name__)
if mode is not None and self.mode not in mode:
- raise IOError("bad operation for mode %r" % self.mode)
+ raise OSError("bad operation for mode %r" % self.mode)
def _find_link_target(self, tarinfo):
"""Find the target member of a symlink or hardlink member in the
diff --git a/Lib/telnetlib.py b/Lib/telnetlib.py
index a59693e..b4b7cd4 100644
--- a/Lib/telnetlib.py
+++ b/Lib/telnetlib.py
@@ -273,7 +273,7 @@ class Telnet:
"""Write a string to the socket, doubling any IAC characters.
Can block if the connection is blocked. May raise
- socket.error if the connection is closed.
+ OSError if the connection is closed.
"""
if IAC in buffer:
@@ -313,7 +313,7 @@ class Telnet:
while i < 0 and not self.eof:
try:
ready = poller.poll(call_timeout)
- except select.error as e:
+ except OSError as e:
if e.errno == errno.EINTR:
if timeout is not None:
elapsed = time() - time_start
@@ -683,7 +683,7 @@ class Telnet:
while not m and not self.eof:
try:
ready = poller.poll(call_timeout)
- except select.error as e:
+ except OSError as e:
if e.errno == errno.EINTR:
if timeout is not None:
elapsed = time() - time_start
diff --git a/Lib/tempfile.py b/Lib/tempfile.py
index 4aad7b5..10b6a1b 100644
--- a/Lib/tempfile.py
+++ b/Lib/tempfile.py
@@ -58,6 +58,8 @@ except ImportError:
_allocate_lock = _thread.allocate_lock
_text_openflags = _os.O_RDWR | _os.O_CREAT | _os.O_EXCL
+if hasattr(_os, 'O_CLOEXEC'):
+ _text_openflags |= _os.O_CLOEXEC
if hasattr(_os, 'O_NOINHERIT'):
_text_openflags |= _os.O_NOINHERIT
if hasattr(_os, 'O_NOFOLLOW'):
@@ -684,7 +686,6 @@ class TemporaryDirectory(object):
_islink = staticmethod(_os.path.islink)
_remove = staticmethod(_os.remove)
_rmdir = staticmethod(_os.rmdir)
- _os_error = OSError
_warn = _warnings.warn
def _rmtree(self, path):
@@ -694,16 +695,16 @@ class TemporaryDirectory(object):
fullname = self._path_join(path, name)
try:
isdir = self._isdir(fullname) and not self._islink(fullname)
- except self._os_error:
+ except OSError:
isdir = False
if isdir:
self._rmtree(fullname)
else:
try:
self._remove(fullname)
- except self._os_error:
+ except OSError:
pass
try:
self._rmdir(path)
- except self._os_error:
+ except OSError:
pass
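
Because _bin_openflags is derived from _text_openflags, the O_CLOEXEC addition above should make descriptors from mkstemp() close-on-exec wherever the platform provides the flag. A quick POSIX-only check, illustrative rather than normative:

    import fcntl
    import os
    import tempfile

    fd, path = tempfile.mkstemp()
    try:
        flags = fcntl.fcntl(fd, fcntl.F_GETFD)
        print('close-on-exec:', bool(flags & fcntl.FD_CLOEXEC))
    finally:
        os.close(fd)
        os.unlink(path)
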
diff --git a/Lib/test/__main__.py b/Lib/test/__main__.py
index ce5615b..d5fbe15 100644
--- a/Lib/test/__main__.py
+++ b/Lib/test/__main__.py
@@ -1,13 +1,3 @@
-from test import regrtest, support
+from test import regrtest
-
-TEMPDIR, TESTCWD = regrtest._make_temp_dir_for_build(regrtest.TEMPDIR)
-regrtest.TEMPDIR = TEMPDIR
-regrtest.TESTCWD = TESTCWD
-
-# Run the tests in a context manager that temporary changes the CWD to a
-# temporary and writable directory. If it's not possible to create or
-# change the CWD, the original CWD will be used. The original CWD is
-# available from support.SAVEDCWD.
-with support.temp_cwd(TESTCWD, quiet=True):
- regrtest.main()
+regrtest.main_in_temp_cwd()
diff --git a/Lib/test/badsyntax_future10.py b/Lib/test/badsyntax_future10.py
new file mode 100644
index 0000000..fa5ab67
--- /dev/null
+++ b/Lib/test/badsyntax_future10.py
@@ -0,0 +1,3 @@
+from __future__ import absolute_import
+"spam, bar, blah"
+from __future__ import print_function
diff --git a/Lib/test/fork_wait.py b/Lib/test/fork_wait.py
index 88527df..19b54ec 100644
--- a/Lib/test/fork_wait.py
+++ b/Lib/test/fork_wait.py
@@ -28,7 +28,7 @@ class ForkWait(unittest.TestCase):
self.alive[id] = os.getpid()
try:
time.sleep(SHORTSLEEP)
- except IOError:
+ except OSError:
pass
def wait_impl(self, cpid):
diff --git a/Lib/test/json_tests/test_fail.py b/Lib/test/json_tests/test_fail.py
index 7809056..5b652e8 100644
--- a/Lib/test/json_tests/test_fail.py
+++ b/Lib/test/json_tests/test_fail.py
@@ -1,4 +1,5 @@
from test.json_tests import PyTest, CTest
+import re
# 2007-10-05
JSONDOCS = [
@@ -100,6 +101,94 @@ class TestFail:
#This is for python encoder
self.assertRaises(TypeError, self.dumps, data, indent=True)
+ def test_truncated_input(self):
+ test_cases = [
+ ('', 'Expecting value', 0),
+ ('[', 'Expecting value', 1),
+ ('[42', "Expecting ',' delimiter", 3),
+ ('[42,', 'Expecting value', 4),
+ ('["', 'Unterminated string starting at', 1),
+ ('["spam', 'Unterminated string starting at', 1),
+ ('["spam"', "Expecting ',' delimiter", 7),
+ ('["spam",', 'Expecting value', 8),
+ ('{', 'Expecting property name enclosed in double quotes', 1),
+ ('{"', 'Unterminated string starting at', 1),
+ ('{"spam', 'Unterminated string starting at', 1),
+ ('{"spam"', "Expecting ':' delimiter", 7),
+ ('{"spam":', 'Expecting value', 8),
+ ('{"spam":42', "Expecting ',' delimiter", 10),
+ ('{"spam":42,', 'Expecting property name enclosed in double quotes', 11),
+ ]
+ test_cases += [
+ ('"', 'Unterminated string starting at', 0),
+ ('"spam', 'Unterminated string starting at', 0),
+ ]
+ for data, msg, idx in test_cases:
+ self.assertRaisesRegex(ValueError,
+ r'^{0}: line 1 column {1} \(char {2}\)'.format(
+ re.escape(msg), idx + 1, idx),
+ self.loads, data)
+
+ def test_unexpected_data(self):
+ test_cases = [
+ ('[,', 'Expecting value', 1),
+ ('{"spam":[}', 'Expecting value', 9),
+ ('[42:', "Expecting ',' delimiter", 3),
+ ('[42 "spam"', "Expecting ',' delimiter", 4),
+ ('[42,]', 'Expecting value', 4),
+ ('{"spam":[42}', "Expecting ',' delimiter", 11),
+ ('["]', 'Unterminated string starting at', 1),
+ ('["spam":', "Expecting ',' delimiter", 7),
+ ('["spam",]', 'Expecting value', 8),
+ ('{:', 'Expecting property name enclosed in double quotes', 1),
+ ('{,', 'Expecting property name enclosed in double quotes', 1),
+ ('{42', 'Expecting property name enclosed in double quotes', 1),
+ ('[{]', 'Expecting property name enclosed in double quotes', 2),
+ ('{"spam",', "Expecting ':' delimiter", 7),
+ ('{"spam"}', "Expecting ':' delimiter", 7),
+ ('[{"spam"]', "Expecting ':' delimiter", 8),
+ ('{"spam":}', 'Expecting value', 8),
+ ('[{"spam":]', 'Expecting value', 9),
+ ('{"spam":42 "ham"', "Expecting ',' delimiter", 11),
+ ('[{"spam":42]', "Expecting ',' delimiter", 11),
+ ('{"spam":42,}', 'Expecting property name enclosed in double quotes', 11),
+ ]
+ for data, msg, idx in test_cases:
+ self.assertRaisesRegex(ValueError,
+ r'^{0}: line 1 column {1} \(char {2}\)'.format(
+ re.escape(msg), idx + 1, idx),
+ self.loads, data)
+
+ def test_extra_data(self):
+ test_cases = [
+ ('[]]', 'Extra data', 2),
+ ('{}}', 'Extra data', 2),
+ ('[],[]', 'Extra data', 2),
+ ('{},{}', 'Extra data', 2),
+ ]
+ test_cases += [
+ ('42,"spam"', 'Extra data', 2),
+ ('"spam",42', 'Extra data', 6),
+ ]
+ for data, msg, idx in test_cases:
+ self.assertRaisesRegex(ValueError,
+ r'^{0}: line 1 column {1} - line 1 column {2}'
+ r' \(char {3} - {4}\)'.format(
+ re.escape(msg), idx + 1, len(data) + 1, idx, len(data)),
+ self.loads, data)
+
+ def test_linecol(self):
+ test_cases = [
+ ('!', 1, 1, 0),
+ (' !', 1, 2, 1),
+ ('\n!', 2, 1, 1),
+ ('\n \n\n !', 4, 6, 10),
+ ]
+ for data, line, col, idx in test_cases:
+ self.assertRaisesRegex(ValueError,
+ r'^Expecting value: line {0} column {1}'
+ r' \(char {2}\)$'.format(line, col, idx),
+ self.loads, data)
class TestPyFail(TestFail, PyTest): pass
class TestCFail(TestFail, CTest): pass
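
The new failure tests above pin down the line/column/char positions carried by json error messages; a short illustration of the kind of message they expect for truncated input:

    import json

    try:
        json.loads('{"spam":42,')
    except ValueError as err:
        # e.g. "Expecting property name enclosed in double quotes:
        #       line 1 column 12 (char 11)"
        print(err)
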
diff --git a/Lib/test/json_tests/test_indent.py b/Lib/test/json_tests/test_indent.py
index 4c70646..4eb4f89 100644
--- a/Lib/test/json_tests/test_indent.py
+++ b/Lib/test/json_tests/test_indent.py
@@ -32,6 +32,8 @@ class TestIndent:
d1 = self.dumps(h)
d2 = self.dumps(h, indent=2, sort_keys=True, separators=(',', ': '))
d3 = self.dumps(h, indent='\t', sort_keys=True, separators=(',', ': '))
+ d4 = self.dumps(h, indent=2, sort_keys=True)
+ d5 = self.dumps(h, indent='\t', sort_keys=True)
h1 = self.loads(d1)
h2 = self.loads(d2)
@@ -42,6 +44,8 @@ class TestIndent:
self.assertEqual(h3, h)
self.assertEqual(d2, expect.expandtabs(2))
self.assertEqual(d3, expect)
+ self.assertEqual(d4, d2)
+ self.assertEqual(d5, d3)
def test_indent0(self):
h = {3: 1}
diff --git a/Lib/test/keycert3.pem b/Lib/test/keycert3.pem
new file mode 100644
index 0000000..5bfa62c
--- /dev/null
+++ b/Lib/test/keycert3.pem
@@ -0,0 +1,73 @@
+-----BEGIN PRIVATE KEY-----
+MIICdgIBADANBgkqhkiG9w0BAQEFAASCAmAwggJcAgEAAoGBAMLgD0kAKDb5cFyP
+jbwNfR5CtewdXC+kMXAWD8DLxiTTvhMW7qVnlwOm36mZlszHKvsRf05lT4pegiFM
+9z2j1OlaN+ci/X7NU22TNN6crYSiN77FjYJP464j876ndSxyD+rzys386T+1r1aZ
+aggEdkj1TsSsv1zWIYKlPIjlvhuxAgMBAAECgYA0aH+T2Vf3WOPv8KdkcJg6gCRe
+yJKXOWgWRcicx/CUzOEsTxmFIDPLxqAWA3k7v0B+3vjGw5Y9lycV/5XqXNoQI14j
+y09iNsumds13u5AKkGdTJnZhQ7UKdoVHfuP44ZdOv/rJ5/VD6F4zWywpe90pcbK+
+AWDVtusgGQBSieEl1QJBAOyVrUG5l2yoUBtd2zr/kiGm/DYyXlIthQO/A3/LngDW
+5/ydGxVsT7lAVOgCsoT+0L4efTh90PjzW8LPQrPBWVMCQQDS3h/FtYYd5lfz+FNL
+9CEe1F1w9l8P749uNUD0g317zv1tatIqVCsQWHfVHNdVvfQ+vSFw38OORO00Xqs9
+1GJrAkBkoXXEkxCZoy4PteheO/8IWWLGGr6L7di6MzFl1lIqwT6D8L9oaV2vynFT
+DnKop0pa09Unhjyw57KMNmSE2SUJAkEArloTEzpgRmCq4IK2/NpCeGdHS5uqRlbh
+1VIa/xGps7EWQl5Mn8swQDel/YP3WGHTjfx7pgSegQfkyaRtGpZ9OQJAa9Vumj8m
+JAAtI0Bnga8hgQx7BhTQY4CadDxyiRGOGYhwUzYVCqkb2sbVRH9HnwUaJT7cWBY3
+RnJdHOMXWem7/w==
+-----END PRIVATE KEY-----
+Certificate:
+ Data:
+ Version: 1 (0x0)
+ Serial Number: 12723342612721443281 (0xb09264b1f2da21d1)
+ Signature Algorithm: sha1WithRSAEncryption
+ Issuer: C=XY, O=Python Software Foundation CA, CN=our-ca-server
+ Validity
+ Not Before: Jan 4 19:47:07 2013 GMT
+ Not After : Nov 13 19:47:07 2022 GMT
+ Subject: C=XY, L=Castle Anthrax, O=Python Software Foundation, CN=localhost
+ Subject Public Key Info:
+ Public Key Algorithm: rsaEncryption
+ Public-Key: (1024 bit)
+ Modulus:
+ 00:c2:e0:0f:49:00:28:36:f9:70:5c:8f:8d:bc:0d:
+ 7d:1e:42:b5:ec:1d:5c:2f:a4:31:70:16:0f:c0:cb:
+ c6:24:d3:be:13:16:ee:a5:67:97:03:a6:df:a9:99:
+ 96:cc:c7:2a:fb:11:7f:4e:65:4f:8a:5e:82:21:4c:
+ f7:3d:a3:d4:e9:5a:37:e7:22:fd:7e:cd:53:6d:93:
+ 34:de:9c:ad:84:a2:37:be:c5:8d:82:4f:e3:ae:23:
+ f3:be:a7:75:2c:72:0f:ea:f3:ca:cd:fc:e9:3f:b5:
+ af:56:99:6a:08:04:76:48:f5:4e:c4:ac:bf:5c:d6:
+ 21:82:a5:3c:88:e5:be:1b:b1
+ Exponent: 65537 (0x10001)
+ Signature Algorithm: sha1WithRSAEncryption
+ 2f:42:5f:a3:09:2c:fa:51:88:c7:37:7f:ea:0e:63:f0:a2:9a:
+ e5:5a:e2:c8:20:f0:3f:60:bc:c8:0f:b6:c6:76:ce:db:83:93:
+ f5:a3:33:67:01:8e:04:cd:00:9a:73:fd:f3:35:86:fa:d7:13:
+ e2:46:c6:9d:c0:29:53:d4:a9:90:b8:77:4b:e6:83:76:e4:92:
+ d6:9c:50:cf:43:d0:c6:01:77:61:9a:de:9b:70:f7:72:cd:59:
+ 00:31:69:d9:b4:ca:06:9c:6d:c3:c7:80:8c:68:e6:b5:a2:f8:
+ ef:1d:bb:16:9f:77:77:ef:87:62:22:9b:4d:69:a4:3a:1a:f1:
+ 21:5e:8c:32:ac:92:fd:15:6b:18:c2:7f:15:0d:98:30:ca:75:
+ 8f:1a:71:df:da:1d:b2:ef:9a:e8:2d:2e:02:fd:4a:3c:aa:96:
+ 0b:06:5d:35:b3:3d:24:87:4b:e0:b0:58:60:2f:45:ac:2e:48:
+ 8a:b0:99:10:65:27:ff:cc:b1:d8:fd:bd:26:6b:b9:0c:05:2a:
+ f4:45:63:35:51:07:ed:83:85:fe:6f:69:cb:bb:40:a8:ae:b6:
+ 3b:56:4a:2d:a4:ed:6d:11:2c:4d:ed:17:24:fd:47:bc:d3:41:
+ a2:d3:06:fe:0c:90:d8:d8:94:26:c4:ff:cc:a1:d8:42:77:eb:
+ fc:a9:94:71
+-----BEGIN CERTIFICATE-----
+MIICpDCCAYwCCQCwkmSx8toh0TANBgkqhkiG9w0BAQUFADBNMQswCQYDVQQGEwJY
+WTEmMCQGA1UECgwdUHl0aG9uIFNvZnR3YXJlIEZvdW5kYXRpb24gQ0ExFjAUBgNV
+BAMMDW91ci1jYS1zZXJ2ZXIwHhcNMTMwMTA0MTk0NzA3WhcNMjIxMTEzMTk0NzA3
+WjBfMQswCQYDVQQGEwJYWTEXMBUGA1UEBxMOQ2FzdGxlIEFudGhyYXgxIzAhBgNV
+BAoTGlB5dGhvbiBTb2Z0d2FyZSBGb3VuZGF0aW9uMRIwEAYDVQQDEwlsb2NhbGhv
+c3QwgZ8wDQYJKoZIhvcNAQEBBQADgY0AMIGJAoGBAMLgD0kAKDb5cFyPjbwNfR5C
+tewdXC+kMXAWD8DLxiTTvhMW7qVnlwOm36mZlszHKvsRf05lT4pegiFM9z2j1Ola
+N+ci/X7NU22TNN6crYSiN77FjYJP464j876ndSxyD+rzys386T+1r1aZaggEdkj1
+TsSsv1zWIYKlPIjlvhuxAgMBAAEwDQYJKoZIhvcNAQEFBQADggEBAC9CX6MJLPpR
+iMc3f+oOY/CimuVa4sgg8D9gvMgPtsZ2ztuDk/WjM2cBjgTNAJpz/fM1hvrXE+JG
+xp3AKVPUqZC4d0vmg3bkktacUM9D0MYBd2Ga3ptw93LNWQAxadm0ygacbcPHgIxo
+5rWi+O8duxafd3fvh2Iim01ppDoa8SFejDKskv0VaxjCfxUNmDDKdY8acd/aHbLv
+mugtLgL9SjyqlgsGXTWzPSSHS+CwWGAvRawuSIqwmRBlJ//Msdj9vSZruQwFKvRF
+YzVRB+2Dhf5vacu7QKiutjtWSi2k7W0RLE3tFyT9R7zTQaLTBv4MkNjYlCbE/8yh
+2EJ36/yplHE=
+-----END CERTIFICATE-----
diff --git a/Lib/test/keycert4.pem b/Lib/test/keycert4.pem
new file mode 100644
index 0000000..53355c8
--- /dev/null
+++ b/Lib/test/keycert4.pem
@@ -0,0 +1,73 @@
+-----BEGIN PRIVATE KEY-----
+MIICdgIBADANBgkqhkiG9w0BAQEFAASCAmAwggJcAgEAAoGBAK5UQiMI5VkNs2Qv
+L7gUaiDdFevNUXRjU4DHAe3ZzzYLZNE69h9gO9VCSS16tJ5fT5VEu0EZyGr0e3V2
+NkX0ZoU0Hc/UaY4qx7LHmn5SYZpIxhJnkf7SyHJK1zUaGlU0/LxYqIuGCtF5dqx1
+L2OQhEx1GM6RydHdgX69G64LXcY5AgMBAAECgYAhsRMfJkb9ERLMl/oG/5sLQu9L
+pWDKt6+ZwdxzlZbggQ85CMYshjLKIod2DLL/sLf2x1PRXyRG131M1E3k8zkkz6de
+R1uDrIN/x91iuYzfLQZGh8bMY7Yjd2eoroa6R/7DjpElGejLxOAaDWO0ST2IFQy9
+myTGS2jSM97wcXfsSQJBANP3jelJoS5X6BRjTSneY21wcocxVuQh8pXpErALVNsT
+drrFTeaBuZp7KvbtnIM5g2WRNvaxLZlAY/hXPJvi6ncCQQDSix1cebml6EmPlEZS
+Mm8gwI2F9ufUunwJmBJcz826Do0ZNGByWDAM/JQZH4FX4GfAFNuj8PUb+GQfadkx
+i1DPAkEA0lVsNHojvuDsIo8HGuzarNZQT2beWjJ1jdxh9t7HrTx7LIps6rb/fhOK
+Zs0R6gVAJaEbcWAPZ2tFyECInAdnsQJAUjaeXXjuxFkjOFym5PvqpvhpivEx78Bu
+JPTr3rAKXmfGMxxfuOa0xK1wSyshP6ZR/RBn/+lcXPKubhHQDOegwwJAJF1DBQnN
++/tLmOPULtDwfP4Zixn+/8GmGOahFoRcu6VIGHmRilJTn6MOButw7Glv2YdeC6l/
+e83Gq6ffLVfKNQ==
+-----END PRIVATE KEY-----
+Certificate:
+ Data:
+ Version: 1 (0x0)
+ Serial Number: 12723342612721443282 (0xb09264b1f2da21d2)
+ Signature Algorithm: sha1WithRSAEncryption
+ Issuer: C=XY, O=Python Software Foundation CA, CN=our-ca-server
+ Validity
+ Not Before: Jan 4 19:47:07 2013 GMT
+ Not After : Nov 13 19:47:07 2022 GMT
+ Subject: C=XY, L=Castle Anthrax, O=Python Software Foundation, CN=fakehostname
+ Subject Public Key Info:
+ Public Key Algorithm: rsaEncryption
+ Public-Key: (1024 bit)
+ Modulus:
+ 00:ae:54:42:23:08:e5:59:0d:b3:64:2f:2f:b8:14:
+ 6a:20:dd:15:eb:cd:51:74:63:53:80:c7:01:ed:d9:
+ cf:36:0b:64:d1:3a:f6:1f:60:3b:d5:42:49:2d:7a:
+ b4:9e:5f:4f:95:44:bb:41:19:c8:6a:f4:7b:75:76:
+ 36:45:f4:66:85:34:1d:cf:d4:69:8e:2a:c7:b2:c7:
+ 9a:7e:52:61:9a:48:c6:12:67:91:fe:d2:c8:72:4a:
+ d7:35:1a:1a:55:34:fc:bc:58:a8:8b:86:0a:d1:79:
+ 76:ac:75:2f:63:90:84:4c:75:18:ce:91:c9:d1:dd:
+ 81:7e:bd:1b:ae:0b:5d:c6:39
+ Exponent: 65537 (0x10001)
+ Signature Algorithm: sha1WithRSAEncryption
+ ad:45:8a:8e:ef:c6:ef:04:41:5c:2c:4a:84:dc:02:76:0c:d0:
+ 66:0f:f0:16:04:58:4d:fd:68:b7:b8:d3:a8:41:a5:5c:3c:6f:
+ 65:3c:d1:f8:ce:43:35:e7:41:5f:53:3d:c9:2c:c3:7d:fc:56:
+ 4a:fa:47:77:38:9d:bb:97:28:0a:3b:91:19:7f:bc:74:ae:15:
+ 6b:bd:20:36:67:45:a5:1e:79:d7:75:e6:89:5c:6d:54:84:d1:
+ 95:d7:a7:b4:33:3c:af:37:c4:79:8f:5e:75:dc:75:c2:18:fb:
+ 61:6f:2d:dc:38:65:5b:ba:67:28:d0:88:d7:8d:b9:23:5a:8e:
+ e8:c6:bb:db:ce:d5:b8:41:2a:ce:93:08:b6:95:ad:34:20:18:
+ d5:3b:37:52:74:50:0b:07:2c:b0:6d:a4:4c:7b:f4:e0:fd:d1:
+ af:17:aa:20:cd:62:e3:f0:9d:37:69:db:41:bd:d4:1c:fb:53:
+ 20:da:88:9d:76:26:67:ce:01:90:a7:80:1d:a9:5b:39:73:68:
+ 54:0a:d1:2a:03:1b:8f:3c:43:5d:5d:c4:51:f1:a7:e7:11:da:
+ 31:2c:49:06:af:04:f4:b8:3c:99:c4:20:b9:06:36:a2:00:92:
+ 61:1d:0c:6d:24:05:e2:82:e1:47:db:a0:5f:ba:b9:fb:ba:fa:
+ 49:12:1e:ce
+-----BEGIN CERTIFICATE-----
+MIICpzCCAY8CCQCwkmSx8toh0jANBgkqhkiG9w0BAQUFADBNMQswCQYDVQQGEwJY
+WTEmMCQGA1UECgwdUHl0aG9uIFNvZnR3YXJlIEZvdW5kYXRpb24gQ0ExFjAUBgNV
+BAMMDW91ci1jYS1zZXJ2ZXIwHhcNMTMwMTA0MTk0NzA3WhcNMjIxMTEzMTk0NzA3
+WjBiMQswCQYDVQQGEwJYWTEXMBUGA1UEBxMOQ2FzdGxlIEFudGhyYXgxIzAhBgNV
+BAoTGlB5dGhvbiBTb2Z0d2FyZSBGb3VuZGF0aW9uMRUwEwYDVQQDEwxmYWtlaG9z
+dG5hbWUwgZ8wDQYJKoZIhvcNAQEBBQADgY0AMIGJAoGBAK5UQiMI5VkNs2QvL7gU
+aiDdFevNUXRjU4DHAe3ZzzYLZNE69h9gO9VCSS16tJ5fT5VEu0EZyGr0e3V2NkX0
+ZoU0Hc/UaY4qx7LHmn5SYZpIxhJnkf7SyHJK1zUaGlU0/LxYqIuGCtF5dqx1L2OQ
+hEx1GM6RydHdgX69G64LXcY5AgMBAAEwDQYJKoZIhvcNAQEFBQADggEBAK1Fio7v
+xu8EQVwsSoTcAnYM0GYP8BYEWE39aLe406hBpVw8b2U80fjOQzXnQV9TPcksw338
+Vkr6R3c4nbuXKAo7kRl/vHSuFWu9IDZnRaUeedd15olcbVSE0ZXXp7QzPK83xHmP
+XnXcdcIY+2FvLdw4ZVu6ZyjQiNeNuSNajujGu9vO1bhBKs6TCLaVrTQgGNU7N1J0
+UAsHLLBtpEx79OD90a8XqiDNYuPwnTdp20G91Bz7UyDaiJ12JmfOAZCngB2pWzlz
+aFQK0SoDG488Q11dxFHxp+cR2jEsSQavBPS4PJnEILkGNqIAkmEdDG0kBeKC4Ufb
+oF+6ufu6+kkSHs4=
+-----END CERTIFICATE-----
diff --git a/Lib/test/make_ssl_certs.py b/Lib/test/make_ssl_certs.py
index 48d2e57..f630813 100644
--- a/Lib/test/make_ssl_certs.py
+++ b/Lib/test/make_ssl_certs.py
@@ -2,6 +2,7 @@
and friends."""
import os
+import shutil
import sys
import tempfile
from subprocess import *
@@ -20,11 +21,52 @@ req_template = """
[req_x509_extensions]
subjectAltName = DNS:{hostname}
+
+ [ ca ]
+ default_ca = CA_default
+
+ [ CA_default ]
+ dir = cadir
+ database = $dir/index.txt
+ default_md = sha1
+ default_days = 3600
+ certificate = pycacert.pem
+ private_key = pycakey.pem
+ serial = $dir/serial
+ RANDFILE = $dir/.rand
+
+ policy = policy_match
+
+ [ policy_match ]
+ countryName = match
+ stateOrProvinceName = optional
+ organizationName = match
+ organizationalUnitName = optional
+ commonName = supplied
+ emailAddress = optional
+
+ [ policy_anything ]
+ countryName = optional
+ stateOrProvinceName = optional
+ localityName = optional
+ organizationName = optional
+ organizationalUnitName = optional
+ commonName = supplied
+ emailAddress = optional
+
+
+ [ v3_ca ]
+
+ subjectKeyIdentifier=hash
+ authorityKeyIdentifier=keyid:always,issuer
+ basicConstraints = CA:true
+
"""
here = os.path.abspath(os.path.dirname(__file__))
-def make_cert_key(hostname):
+def make_cert_key(hostname, sign=False):
+ print("creating cert for " + hostname)
tempnames = []
for i in range(3):
with tempfile.NamedTemporaryFile(delete=False) as f:
@@ -33,10 +75,25 @@ def make_cert_key(hostname):
try:
with open(req_file, 'w') as f:
f.write(req_template.format(hostname=hostname))
- args = ['req', '-new', '-days', '3650', '-nodes', '-x509',
+ args = ['req', '-new', '-days', '3650', '-nodes',
'-newkey', 'rsa:1024', '-keyout', key_file,
- '-out', cert_file, '-config', req_file]
+ '-config', req_file]
+ if sign:
+ with tempfile.NamedTemporaryFile(delete=False) as f:
+ tempnames.append(f.name)
+ reqfile = f.name
+ args += ['-out', reqfile ]
+
+ else:
+ args += ['-x509', '-out', cert_file ]
check_call(['openssl'] + args)
+
+ if sign:
+ args = ['ca', '-config', req_file, '-out', cert_file, '-outdir', 'cadir',
+ '-policy', 'policy_anything', '-batch', '-infiles', reqfile ]
+ check_call(['openssl'] + args)
+
+
with open(cert_file, 'r') as f:
cert = f.read()
with open(key_file, 'r') as f:
@@ -46,6 +103,32 @@ def make_cert_key(hostname):
for name in tempnames:
os.remove(name)
+TMP_CADIR = 'cadir'
+
+def unmake_ca():
+ shutil.rmtree(TMP_CADIR)
+
+def make_ca():
+ os.mkdir(TMP_CADIR)
+ with open(os.path.join('cadir','index.txt'),'a+') as f:
+ pass # empty file
+ with open(os.path.join('cadir','index.txt.attr'),'w+') as f:
+ f.write('unique_subject = no')
+
+ with tempfile.NamedTemporaryFile("w") as t:
+ t.write(req_template.format(hostname='our-ca-server'))
+ t.flush()
+ with tempfile.NamedTemporaryFile() as f:
+ args = ['req', '-new', '-days', '3650', '-extensions', 'v3_ca', '-nodes',
+ '-newkey', 'rsa:2048', '-keyout', 'pycakey.pem',
+ '-out', f.name,
+ '-subj', '/C=XY/L=Castle Anthrax/O=Python Software Foundation CA/CN=our-ca-server']
+ check_call(['openssl'] + args)
+ args = ['ca', '-config', t.name, '-create_serial',
+ '-out', 'pycacert.pem', '-batch', '-outdir', TMP_CADIR,
+ '-keyfile', 'pycakey.pem', '-days', '3650',
+ '-selfsign', '-extensions', 'v3_ca', '-infiles', f.name ]
+ check_call(['openssl'] + args)
if __name__ == '__main__':
os.chdir(here)
@@ -54,11 +137,34 @@ if __name__ == '__main__':
f.write(cert)
with open('ssl_key.pem', 'w') as f:
f.write(key)
+ print("password protecting ssl_key.pem in ssl_key.passwd.pem")
+ check_call(['openssl','rsa','-in','ssl_key.pem','-out','ssl_key.passwd.pem','-des3','-passout','pass:somepass'])
+ check_call(['openssl','rsa','-in','ssl_key.pem','-out','keycert.passwd.pem','-des3','-passout','pass:somepass'])
+
with open('keycert.pem', 'w') as f:
f.write(key)
f.write(cert)
+
+ with open('keycert.passwd.pem', 'a+') as f:
+ f.write(cert)
+
# For certificate matching tests
+ make_ca()
cert, key = make_cert_key('fakehostname')
with open('keycert2.pem', 'w') as f:
f.write(key)
f.write(cert)
+
+ cert, key = make_cert_key('localhost', True)
+ with open('keycert3.pem', 'w') as f:
+ f.write(key)
+ f.write(cert)
+
+ cert, key = make_cert_key('fakehostname', True)
+ with open('keycert4.pem', 'w') as f:
+ f.write(key)
+ f.write(cert)
+
+ unmake_ca()
+ print("\n\nPlease change the values in test_ssl.py, test_parse_cert function related to notAfter,notBefore and serialNumber")
+ check_call(['openssl','x509','-in','keycert.pem','-dates','-serial','-noout'])
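
# --- Illustrative sketch, not from the patch itself ---------------------------
# The new CA flow added to make_ssl_certs.py above boils down to: build a
# throwaway CA directory, issue a CSR, and have the CA sign it.  Assuming the
# patched script is importable (e.g. as test.make_ssl_certs) and is run from
# Lib/test with openssl on PATH, a minimal run looks roughly like this:
from test import make_ssl_certs as msc     # hypothetical import of the script

msc.make_ca()                               # creates cadir/, pycacert.pem, pycakey.pem
cert, key = msc.make_cert_key('localhost', sign=True)   # CSR signed by the CA
with open('keycert3.pem', 'w') as f:        # same layout the script writes
    f.write(key)
    f.write(cert)
msc.unmake_ca()                             # removes the temporary cadir/
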
diff --git a/Lib/test/mock_socket.py b/Lib/test/mock_socket.py
index d09e78c..8ef0ec8 100644
--- a/Lib/test/mock_socket.py
+++ b/Lib/test/mock_socket.py
@@ -140,12 +140,8 @@ def gethostbyname(name):
return ""
-class gaierror(Exception):
- pass
-
-
-class error(Exception):
- pass
+gaierror = socket_module.gaierror
+error = socket_module.error
# Constants
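
# Illustrative note, not from the patch itself: mock_socket now re-exports the
# real exception classes instead of defining stand-ins, so code under test
# that catches socket.error / socket.gaierror behaves identically with the
# mock.  A minimal check of that property:
import socket
from test import mock_socket

assert mock_socket.error is socket.error        # same class object as the real module
assert mock_socket.gaierror is socket.gaierror  # likewise for gaierror
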
diff --git a/Lib/test/multibytecodec_support.py b/Lib/test/multibytecodec_support.py
index 26bac7b..dcaae7b 100644
--- a/Lib/test/multibytecodec_support.py
+++ b/Lib/test/multibytecodec_support.py
@@ -282,7 +282,7 @@ class TestBase_Mapping(unittest.TestCase):
unittest.TestCase.__init__(self, *args, **kw)
try:
self.open_mapping_file().close() # test it to report the error early
- except (IOError, HTTPException):
+ except (OSError, HTTPException):
self.skipTest("Could not retrieve "+self.mapfileurl)
def open_mapping_file(self):
diff --git a/Lib/test/pycacert.pem b/Lib/test/pycacert.pem
new file mode 100644
index 0000000..09b1f3e
--- /dev/null
+++ b/Lib/test/pycacert.pem
@@ -0,0 +1,78 @@
+Certificate:
+ Data:
+ Version: 3 (0x2)
+ Serial Number: 12723342612721443280 (0xb09264b1f2da21d0)
+ Signature Algorithm: sha1WithRSAEncryption
+ Issuer: C=XY, O=Python Software Foundation CA, CN=our-ca-server
+ Validity
+ Not Before: Jan 4 19:47:07 2013 GMT
+ Not After : Jan 2 19:47:07 2023 GMT
+ Subject: C=XY, O=Python Software Foundation CA, CN=our-ca-server
+ Subject Public Key Info:
+ Public Key Algorithm: rsaEncryption
+ Public-Key: (2048 bit)
+ Modulus:
+ 00:e7:de:e9:e3:0c:9f:00:b6:a1:fd:2b:5b:96:d2:
+ 6f:cc:e0:be:86:b9:20:5e:ec:03:7a:55:ab:ea:a4:
+ e9:f9:49:85:d2:66:d5:ed:c7:7a:ea:56:8e:2d:8f:
+ e7:42:e2:62:28:a9:9f:d6:1b:8e:eb:b5:b4:9c:9f:
+ 14:ab:df:e6:94:8b:76:1d:3e:6d:24:61:ed:0c:bf:
+ 00:8a:61:0c:df:5c:c8:36:73:16:00:cd:47:ba:6d:
+ a4:a4:74:88:83:23:0a:19:fc:09:a7:3c:4a:4b:d3:
+ e7:1d:2d:e4:ea:4c:54:21:f3:26:db:89:37:18:d4:
+ 02:bb:40:32:5f:a4:ff:2d:1c:f7:d4:bb:ec:8e:cf:
+ 5c:82:ac:e6:7c:08:6c:48:85:61:07:7f:25:e0:5c:
+ e0:bc:34:5f:e0:b9:04:47:75:c8:47:0b:8d:bc:d6:
+ c8:68:5f:33:83:62:d2:20:44:35:b1:ad:81:1a:8a:
+ cd:bc:35:b0:5c:8b:47:d6:18:e9:9c:18:97:cc:01:
+ 3c:29:cc:e8:1e:e4:e4:c1:b8:de:e7:c2:11:18:87:
+ 5a:93:34:d8:a6:25:f7:14:71:eb:e4:21:a2:d2:0f:
+ 2e:2e:d4:62:00:35:d3:d6:ef:5c:60:4b:4c:a9:14:
+ e2:dd:15:58:46:37:33:26:b7:e7:2e:5d:ed:42:e4:
+ c5:4d
+ Exponent: 65537 (0x10001)
+ X509v3 extensions:
+ X509v3 Subject Key Identifier:
+ BC:DD:62:D9:76:DA:1B:D2:54:6B:CF:E0:66:9B:1E:1E:7B:56:0C:0B
+ X509v3 Authority Key Identifier:
+ keyid:BC:DD:62:D9:76:DA:1B:D2:54:6B:CF:E0:66:9B:1E:1E:7B:56:0C:0B
+
+ X509v3 Basic Constraints:
+ CA:TRUE
+ Signature Algorithm: sha1WithRSAEncryption
+ 7d:0a:f5:cb:8d:d3:5d:bd:99:8e:f8:2b:0f:ba:eb:c2:d9:a6:
+ 27:4f:2e:7b:2f:0e:64:d8:1c:35:50:4e:ee:fc:90:b9:8d:6d:
+ a8:c5:c6:06:b0:af:f3:2d:bf:3b:b8:42:07:dd:18:7d:6d:95:
+ 54:57:85:18:60:47:2f:eb:78:1b:f9:e8:17:fd:5a:0d:87:17:
+ 28:ac:4c:6a:e6:bc:29:f4:f4:55:70:29:42:de:85:ea:ab:6c:
+ 23:06:64:30:75:02:8e:53:bc:5e:01:33:37:cc:1e:cd:b8:a4:
+ fd:ca:e4:5f:65:3b:83:1c:86:f1:55:02:a0:3a:8f:db:91:b7:
+ 40:14:b4:e7:8d:d2:ee:73:ba:e3:e5:34:2d:bc:94:6f:4e:24:
+ 06:f7:5f:8b:0e:a7:8e:6b:de:5e:75:f4:32:9a:50:b1:44:33:
+ 9a:d0:05:e2:78:82:ff:db:da:8a:63:eb:a9:dd:d1:bf:a0:61:
+ ad:e3:9e:8a:24:5d:62:0e:e7:4c:91:7f:ef:df:34:36:3b:2f:
+ 5d:f5:84:b2:2f:c4:6d:93:96:1a:6f:30:28:f1:da:12:9a:64:
+ b4:40:33:1d:bd:de:2b:53:a8:ea:be:d6:bc:4e:96:f5:44:fb:
+ 32:18:ae:d5:1f:f6:69:af:b6:4e:7b:1d:58:ec:3b:a9:53:a3:
+ 5e:58:c8:9e
+-----BEGIN CERTIFICATE-----
+MIIDbTCCAlWgAwIBAgIJALCSZLHy2iHQMA0GCSqGSIb3DQEBBQUAME0xCzAJBgNV
+BAYTAlhZMSYwJAYDVQQKDB1QeXRob24gU29mdHdhcmUgRm91bmRhdGlvbiBDQTEW
+MBQGA1UEAwwNb3VyLWNhLXNlcnZlcjAeFw0xMzAxMDQxOTQ3MDdaFw0yMzAxMDIx
+OTQ3MDdaME0xCzAJBgNVBAYTAlhZMSYwJAYDVQQKDB1QeXRob24gU29mdHdhcmUg
+Rm91bmRhdGlvbiBDQTEWMBQGA1UEAwwNb3VyLWNhLXNlcnZlcjCCASIwDQYJKoZI
+hvcNAQEBBQADggEPADCCAQoCggEBAOfe6eMMnwC2of0rW5bSb8zgvoa5IF7sA3pV
+q+qk6flJhdJm1e3HeupWji2P50LiYiipn9Ybjuu1tJyfFKvf5pSLdh0+bSRh7Qy/
+AIphDN9cyDZzFgDNR7ptpKR0iIMjChn8Cac8SkvT5x0t5OpMVCHzJtuJNxjUArtA
+Ml+k/y0c99S77I7PXIKs5nwIbEiFYQd/JeBc4Lw0X+C5BEd1yEcLjbzWyGhfM4Ni
+0iBENbGtgRqKzbw1sFyLR9YY6ZwYl8wBPCnM6B7k5MG43ufCERiHWpM02KYl9xRx
+6+QhotIPLi7UYgA109bvXGBLTKkU4t0VWEY3Mya35y5d7ULkxU0CAwEAAaNQME4w
+HQYDVR0OBBYEFLzdYtl22hvSVGvP4GabHh57VgwLMB8GA1UdIwQYMBaAFLzdYtl2
+2hvSVGvP4GabHh57VgwLMAwGA1UdEwQFMAMBAf8wDQYJKoZIhvcNAQEFBQADggEB
+AH0K9cuN0129mY74Kw+668LZpidPLnsvDmTYHDVQTu78kLmNbajFxgawr/Mtvzu4
+QgfdGH1tlVRXhRhgRy/reBv56Bf9Wg2HFyisTGrmvCn09FVwKULeheqrbCMGZDB1
+Ao5TvF4BMzfMHs24pP3K5F9lO4MchvFVAqA6j9uRt0AUtOeN0u5zuuPlNC28lG9O
+JAb3X4sOp45r3l519DKaULFEM5rQBeJ4gv/b2opj66nd0b+gYa3jnookXWIO50yR
+f+/fNDY7L131hLIvxG2TlhpvMCjx2hKaZLRAMx293itTqOq+1rxOlvVE+zIYrtUf
+9mmvtk57HVjsO6lTo15YyJ4=
+-----END CERTIFICATE-----
diff --git a/Lib/test/pycakey.pem b/Lib/test/pycakey.pem
new file mode 100644
index 0000000..fc6effe
--- /dev/null
+++ b/Lib/test/pycakey.pem
@@ -0,0 +1,28 @@
+-----BEGIN PRIVATE KEY-----
+MIIEvgIBADANBgkqhkiG9w0BAQEFAASCBKgwggSkAgEAAoIBAQDn3unjDJ8AtqH9
+K1uW0m/M4L6GuSBe7AN6VavqpOn5SYXSZtXtx3rqVo4tj+dC4mIoqZ/WG47rtbSc
+nxSr3+aUi3YdPm0kYe0MvwCKYQzfXMg2cxYAzUe6baSkdIiDIwoZ/AmnPEpL0+cd
+LeTqTFQh8ybbiTcY1AK7QDJfpP8tHPfUu+yOz1yCrOZ8CGxIhWEHfyXgXOC8NF/g
+uQRHdchHC4281shoXzODYtIgRDWxrYEais28NbBci0fWGOmcGJfMATwpzOge5OTB
+uN7nwhEYh1qTNNimJfcUcevkIaLSDy4u1GIANdPW71xgS0ypFOLdFVhGNzMmt+cu
+Xe1C5MVNAgMBAAECggEBAJPM7QuUrPn4cLN/Ysd15lwTWn9oHDFFgkYFvCs66gXE
+ju/6Kx2BjWE4wTJby09AHM/MqB0DvguT7Mf1Q2j3tPQ1HZowg8OwRDleuwp6KIls
+jBbhL0Jdl/5HC67ktWvZ9wNvO/wFG1rQfT6FVajf9LUbWEaSZbOG2SLhHfsHorzu
+xjTJaI3bQ/0+79B1exwk5ruwhzFRd/XpY8hls7D/RfPIuHDlBghkW3N59KFWrf5h
+6bNEh2THm0+IyGcGqs0FD+QCOXyvsjwSUswqrr2ctLREOeDcd5ReUjSxYgjcJRrm
+J7ceIY/+uwDJxw/OlnmBvF6pQMkKwYW2gFztu+g2t4UCgYEA/9yo01Exz4crxXsy
+tAlnDJM++nZcm07rtFjTKHUfKY/cCgNTa8udM0svnfwlid/dpgLsI38gx04HHC1i
+EZ4acz+ToIWedLxM0nq73//xeRWEazOvCz1mMTZaMldahTWAyzN8qVK2B/625Yy4
+wNYWyweBBwEB8MzaCs73spksXOsCgYEA5/7wvhiofYGFAfMuANeJIwDL2OtBnoOv
+mVNfCmi3GC38fzwyi5ZpskWDiS2woJ+LQfs9Qu4EcZbUFLd7gbeOvb5gmFUtYope
+LitUUKunIR18MkQ+mQDBpQPQPhk4QJP5reCbWkrfTu7b5o/iS41s6fBTFmuzhLcT
+C71vFdCyeKcCgYAiCCqYeOtELDmBOeLDmaCQRqGQ1N96dOPbCBmF/xYXBCCDYG/f
+HaUaJnz96YTgstsbcrYP/p/Qgqtlbw/lQf9IpwMuzbcG1ejt8g89OyDWNyt2ytgU
+iaUnFJCos3/Byh0Iah/BsdOueo2/OJl2ZMOBW80orlSgv86cs2y037TL4wKBgQDm
+OOyW+MlbowhnIvfoBfwlLEkefnej4nKD6WRLZBcue5Qyf355X06Mhsc9foXlH+6G
+D9h/bswiHNdhp6N82rdgPGiHQx/CxiUoE/+b/nvgNO5mw6qLE2EXbG1e8pAMJcyE
+bHw+YkawggDfELI036fRj5gki8SeUz8nS1nNgElbyQKBgCRDX9Jh+MwSLu4QBWdt
+/fi+lv3K6kun/fI7EOV1vCV/j871tICu7pu5BrOLxAHqoVfU9AUX299/2KjCb5pv
+kjogiUK6qWCWBlfuqDNWGCoUGt1rhznUva0nNjSMy5rinBhhjpROZC2pw48lOluP
+UuvXsaPph7GTqPuy4Kab12YC
+-----END PRIVATE KEY-----
diff --git a/Lib/test/regrtest.py b/Lib/test/regrtest.py
index 636282e..45b4541 100755
--- a/Lib/test/regrtest.py
+++ b/Lib/test/regrtest.py
@@ -1,11 +1,18 @@
#! /usr/bin/env python3
"""
-Usage:
+Script to run Python regression tests.
+Run this script with -h or --help for documentation.
+"""
+
+USAGE = """\
python -m test [options] [test_name1 [test_name2 ...]]
python path/to/Lib/test/regrtest.py [options] [test_name1 [test_name2 ...]]
+"""
+DESCRIPTION = """\
+Run Python regression tests.
If no arguments or options are provided, finds all files matching
the pattern "test_*" in the Lib/test subdirectory and runs
@@ -15,63 +22,10 @@ For more rigorous testing, it is useful to use the following
command line:
python -E -Wd -m test [options] [test_name1 ...]
+"""
-
-Options:
-
--h/--help -- print this text and exit
---timeout TIMEOUT
- -- dump the traceback and exit if a test takes more
- than TIMEOUT seconds; disabled if TIMEOUT is negative
- or equals to zero
---wait -- wait for user input, e.g., allow a debugger to be attached
-
-Verbosity
-
--v/--verbose -- run tests in verbose mode with output to stdout
--w/--verbose2 -- re-run failed tests in verbose mode
--W/--verbose3 -- display test output on failure
--d/--debug -- print traceback for failed tests
--q/--quiet -- no output unless one or more tests fail
--o/--slow -- print the slowest 10 tests
- --header -- print header with interpreter info
-
-Selecting tests
-
--r/--randomize -- randomize test execution order (see below)
- --randseed -- pass a random seed to reproduce a previous random run
--f/--fromfile -- read names of tests to run from a file (see below)
--x/--exclude -- arguments are tests to *exclude*
--s/--single -- single step through a set of tests (see below)
--m/--match PAT -- match test cases and methods with glob pattern PAT
--G/--failfast -- fail as soon as a test fails (only with -v or -W)
--u/--use RES1,RES2,...
- -- specify which special resource intensive tests to run
--M/--memlimit LIMIT
- -- run very large memory-consuming tests
- --testdir DIR
- -- execute test files in the specified directory (instead
- of the Python stdlib test suite)
-
-Special runs
-
--l/--findleaks -- if GC is available detect tests that leak memory
--L/--runleaks -- run the leaks(1) command just before exit
--R/--huntrleaks RUNCOUNTS
- -- search for reference leaks (needs debug build, v. slow)
--j/--multiprocess PROCESSES
- -- run PROCESSES processes at once
--T/--coverage -- turn on code coverage tracing using the trace module
--D/--coverdir DIRECTORY
- -- Directory where coverage files are put
--N/--nocoverdir -- Put coverage files alongside modules
--t/--threshold THRESHOLD
- -- call gc.set_threshold(THRESHOLD)
--n/--nowindows -- suppress error message boxes on Windows
--F/--forever -- run the specified tests in a loop, until an error happens
-
-
-Additional Option Details:
+EPILOG = """\
+Additional option details:
-r randomizes test execution order. You can use --randseed=int to provide an
int seed value for the randomizer; this is useful for reproducing troublesome
@@ -168,9 +122,9 @@ option '-uall,-gui'.
# We import importlib *ASAP* in order to test #15386
import importlib
+import argparse
import builtins
import faulthandler
-import getopt
import io
import json
import logging
@@ -246,12 +200,147 @@ from test import support
RESOURCE_NAMES = ('audio', 'curses', 'largefile', 'network',
'decimal', 'cpu', 'subprocess', 'urlfetch', 'gui')
-TEMPDIR = os.path.abspath(tempfile.gettempdir())
-
-def usage(msg):
- print(msg, file=sys.stderr)
- print("Use --help for usage", file=sys.stderr)
- sys.exit(2)
+# When tests are run from the Python build directory, it is best practice
+# to keep the test files in a subfolder. This eases the cleanup of leftover
+# files using the "make distclean" command.
+if sysconfig.is_python_build():
+ TEMPDIR = os.path.join(sysconfig.get_config_var('srcdir'), 'build')
+else:
+ TEMPDIR = tempfile.gettempdir()
+TEMPDIR = os.path.abspath(TEMPDIR)
+
+class _ArgParser(argparse.ArgumentParser):
+
+ def error(self, message):
+ super().error(message + "\nPass -h or --help for complete help.")
+
+def _create_parser():
+ # Set prog to prevent the uninformative "__main__.py" from displaying in
+ # error messages when using "python -m test ...".
+ parser = _ArgParser(prog='regrtest.py',
+ usage=USAGE,
+ description=DESCRIPTION,
+ epilog=EPILOG,
+ add_help=False,
+ formatter_class=argparse.RawDescriptionHelpFormatter)
+
+    # Arguments whose help string has this clause appended are described
+    # further in the epilog's "Additional option details" section.
+ more_details = ' See the section at bottom for more details.'
+
+ group = parser.add_argument_group('General options')
+ # We add help explicitly to control what argument group it renders under.
+ group.add_argument('-h', '--help', action='help',
+ help='show this help message and exit')
+ group.add_argument('--timeout', metavar='TIMEOUT',
+ help='dump the traceback and exit if a test takes '
+ 'more than TIMEOUT seconds; disabled if TIMEOUT '
+                            'is negative or equal to zero')
+ group.add_argument('--wait', action='store_true', help='wait for user '
+ 'input, e.g., allow a debugger to be attached')
+ group.add_argument('--slaveargs', metavar='ARGS')
+ group.add_argument('-S', '--start', metavar='START', help='the name of '
+ 'the test at which to start.' + more_details)
+
+ group = parser.add_argument_group('Verbosity')
+ group.add_argument('-v', '--verbose', action='store_true',
+ help='run tests in verbose mode with output to stdout')
+ group.add_argument('-w', '--verbose2', action='store_true',
+ help='re-run failed tests in verbose mode')
+ group.add_argument('-W', '--verbose3', action='store_true',
+ help='display test output on failure')
+ group.add_argument('-d', '--debug', action='store_true',
+ help='print traceback for failed tests')
+ group.add_argument('-q', '--quiet', action='store_true',
+ help='no output unless one or more tests fail')
+ group.add_argument('-o', '--slow', action='store_true',
+ help='print the slowest 10 tests')
+ group.add_argument('--header', action='store_true',
+ help='print header with interpreter info')
+
+ group = parser.add_argument_group('Selecting tests')
+ group.add_argument('-r', '--randomize', action='store_true',
+ help='randomize test execution order.' + more_details)
+ group.add_argument('--randseed', metavar='SEED', help='pass a random seed '
+ 'to reproduce a previous random run')
+ group.add_argument('-f', '--fromfile', metavar='FILE', help='read names '
+ 'of tests to run from a file.' + more_details)
+ group.add_argument('-x', '--exclude', action='store_true',
+ help='arguments are tests to *exclude*')
+ group.add_argument('-s', '--single', action='store_true', help='single '
+ 'step through a set of tests.' + more_details)
+ group.add_argument('-m', '--match', metavar='PAT', help='match test cases '
+ 'and methods with glob pattern PAT')
+ group.add_argument('-G', '--failfast', action='store_true', help='fail as '
+ 'soon as a test fails (only with -v or -W)')
+ group.add_argument('-u', '--use', metavar='RES1,RES2,...', help='specify '
+ 'which special resource intensive tests to run.' +
+ more_details)
+ group.add_argument('-M', '--memlimit', metavar='LIMIT', help='run very '
+ 'large memory-consuming tests.' + more_details)
+ group.add_argument('--testdir', metavar='DIR',
+ help='execute test files in the specified directory '
+ '(instead of the Python stdlib test suite)')
+
+ group = parser.add_argument_group('Special runs')
+ group.add_argument('-l', '--findleaks', action='store_true', help='if GC '
+ 'is available detect tests that leak memory')
+ group.add_argument('-L', '--runleaks', action='store_true',
+ help='run the leaks(1) command just before exit.' +
+ more_details)
+ group.add_argument('-R', '--huntrleaks', metavar='RUNCOUNTS',
+ help='search for reference leaks (needs debug build, '
+ 'very slow).' + more_details)
+ group.add_argument('-j', '--multiprocess', metavar='PROCESSES',
+ help='run PROCESSES processes at once')
+ group.add_argument('-T', '--coverage', action='store_true', help='turn on '
+ 'code coverage tracing using the trace module')
+ group.add_argument('-D', '--coverdir', metavar='DIR',
+ help='directory where coverage files are put')
+ group.add_argument('-N', '--nocoverdir', action='store_true',
+ help='put coverage files alongside modules')
+ group.add_argument('-t', '--threshold', metavar='THRESHOLD',
+ help='call gc.set_threshold(THRESHOLD)')
+ group.add_argument('-n', '--nowindows', action='store_true',
+ help='suppress error message boxes on Windows')
+ group.add_argument('-F', '--forever', action='store_true',
+ help='run the specified tests in a loop, until an '
+ 'error happens')
+
+ parser.add_argument('args', nargs=argparse.REMAINDER,
+ help=argparse.SUPPRESS)
+
+ return parser
+
+# TODO: remove this function as described in issue #16799, for example.
+# We use this function since regrtest.main() was originally written to use
+# getopt for parsing.
+def _convert_namespace_to_getopt(ns):
+ """Convert an argparse.Namespace object to a getopt-style opts list.
+
+ The return value of this function mimics the first element of
+ getopt.getopt()'s (opts, args) return value. In addition, the (option,
+ value) pairs in the opts list are sorted by option and use the long
+ option string. The args part of (opts, args) can be mimicked by the
+ args attribute of the Namespace object we are using in regrtest.
+ """
+ opts = []
+ args_dict = vars(ns)
+ for key in sorted(args_dict.keys()):
+ if key == 'args':
+ continue
+ val = args_dict[key]
+ # Don't continue if val equals '' because this means an option
+ # accepting a value was provided the empty string. Such values should
+ # show up in the returned opts list.
+ if val is None or val is False:
+ continue
+ if val is True:
+ # Then an option with action store_true was passed. getopt
+ # includes these with value '' in the opts list.
+ val = ''
+ opts.append(('--' + key, val))
+ return opts
def main(tests=None, testdir=None, verbose=0, quiet=False,
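
# Illustrative sketch, not from the patch itself: what
# _convert_namespace_to_getopt() above produces for a parsed command line.
# store_true options become ('--name', '') pairs, valued options keep their
# string, unset options are dropped, and the pairs come out sorted by option
# name -- mimicking getopt.getopt()'s opts list.
import argparse

ns = argparse.Namespace(verbose=True, timeout='60', quiet=False, args=['test_os'])
opts = []
for key in sorted(vars(ns)):
    if key == 'args':
        continue
    val = getattr(ns, key)
    if val is None or val is False:
        continue
    if val is True:
        val = ''                     # store_true options map to an empty value
    opts.append(('--' + key, val))
assert opts == [('--timeout', '60'), ('--verbose', '')]
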
@@ -298,17 +387,12 @@ def main(tests=None, testdir=None, verbose=0, quiet=False,
replace_stdout()
support.record_original_stdout(sys.stdout)
- try:
- opts, args = getopt.getopt(sys.argv[1:], 'hvqxsoS:rf:lu:t:TD:NLR:FdwWM:nj:Gm:',
- ['help', 'verbose', 'verbose2', 'verbose3', 'quiet',
- 'exclude', 'single', 'slow', 'randomize', 'fromfile=', 'findleaks',
- 'use=', 'threshold=', 'coverdir=', 'nocoverdir',
- 'runleaks', 'huntrleaks=', 'memlimit=', 'randseed=',
- 'multiprocess=', 'coverage', 'slaveargs=', 'forever', 'debug',
- 'start=', 'nowindows', 'header', 'testdir=', 'timeout=', 'wait',
- 'failfast', 'match='])
- except getopt.error as msg:
- usage(msg)
+
+ parser = _create_parser()
+ ns = parser.parse_args()
+ opts = _convert_namespace_to_getopt(ns)
+ args = ns.args
+ usage = parser.error
# Defaults
if random_seed is None:
@@ -319,10 +403,7 @@ def main(tests=None, testdir=None, verbose=0, quiet=False,
start = None
timeout = None
for o, a in opts:
- if o in ('-h', '--help'):
- print(__doc__)
- return
- elif o in ('-v', '--verbose'):
+ if o in ('-v', '--verbose'):
verbose += 1
elif o in ('-w', '--verbose2'):
verbose2 = True
@@ -506,7 +587,7 @@ def main(tests=None, testdir=None, verbose=0, quiet=False,
next_test = fp.read().strip()
tests = [next_test]
fp.close()
- except IOError:
+ except OSError:
pass
if fromfile:
@@ -615,7 +696,7 @@ def main(tests=None, testdir=None, verbose=0, quiet=False,
sys.exit(2)
from queue import Queue
from subprocess import Popen, PIPE
- debug_output_pat = re.compile(r"\[\d+ refs\]$")
+ debug_output_pat = re.compile(r"\[\d+ refs, \d+ blocks\]$")
output = Queue()
pending = MultiprocessTests(tests)
opt_args = support.args_from_interpreter_flags()
@@ -763,20 +844,6 @@ def main(tests=None, testdir=None, verbose=0, quiet=False,
print(count(len(skipped), "test"), "skipped:")
printlist(skipped)
- e = _ExpectedSkips()
- plat = sys.platform
- if e.isvalid():
- surprise = set(skipped) - e.getexpected() - set(resource_denieds)
- if surprise:
- print(count(len(surprise), "skip"), \
- "unexpected on", plat + ":")
- printlist(surprise)
- else:
- print("Those skips are all expected on", plat + ".")
- else:
- print("Ask someone to teach regrtest.py about which tests are")
- print("expected to get skipped on", plat + ".")
-
if verbose2 and bad:
print("Re-running failed tests in verbose mode")
for test in bad:
@@ -1210,8 +1277,7 @@ def runtest_inner(test, verbose, quiet,
abstest = 'test.' + test
with saved_test_environment(test, verbose, quiet) as environment:
start_time = time.time()
- the_package = __import__(abstest, globals(), locals(), [])
- the_module = getattr(the_package, test)
+ the_module = importlib.import_module(abstest)
# If the test has a test_main, that will run the appropriate
# tests. If not, use normal unittest test loading.
test_runner = getattr(the_module, "test_main", None)
@@ -1327,41 +1393,50 @@ def dash_R(the_module, test, indirect_test, huntrleaks):
for obj in abc.__subclasses__() + [abc]:
abcs[obj] = obj._abc_registry.copy()
- if indirect_test:
- def run_the_test():
- indirect_test()
- else:
- def run_the_test():
- del sys.modules[the_module.__name__]
- exec('import ' + the_module.__name__)
-
- deltas = []
nwarmup, ntracked, fname = huntrleaks
fname = os.path.join(support.SAVEDCWD, fname)
repcount = nwarmup + ntracked
+ rc_deltas = [0] * repcount
+ alloc_deltas = [0] * repcount
+
print("beginning", repcount, "repetitions", file=sys.stderr)
print(("1234567890"*(repcount//10 + 1))[:repcount], file=sys.stderr)
sys.stderr.flush()
- dash_R_cleanup(fs, ps, pic, zdc, abcs)
for i in range(repcount):
- rc_before = sys.gettotalrefcount()
- run_the_test()
+ indirect_test()
+ alloc_after, rc_after = dash_R_cleanup(fs, ps, pic, zdc, abcs)
sys.stderr.write('.')
sys.stderr.flush()
- dash_R_cleanup(fs, ps, pic, zdc, abcs)
- rc_after = sys.gettotalrefcount()
if i >= nwarmup:
- deltas.append(rc_after - rc_before)
+ rc_deltas[i] = rc_after - rc_before
+ alloc_deltas[i] = alloc_after - alloc_before
+ alloc_before, rc_before = alloc_after, rc_after
print(file=sys.stderr)
- if any(deltas):
- msg = '%s leaked %s references, sum=%s' % (test, deltas, sum(deltas))
- print(msg, file=sys.stderr)
- sys.stderr.flush()
- with open(fname, "a") as refrep:
- print(msg, file=refrep)
- refrep.flush()
- return True
- return False
+ # These checkers return False on success, True on failure
+ def check_rc_deltas(deltas):
+ return any(deltas)
+ def check_alloc_deltas(deltas):
+ # At least 1/3rd of 0s
+ if 3 * deltas.count(0) < len(deltas):
+ return True
+        # Nothing other than 1s, 0s and -1s
+ if not set(deltas) <= {1,0,-1}:
+ return True
+ return False
+ failed = False
+ for deltas, item_name, checker in [
+ (rc_deltas, 'references', check_rc_deltas),
+ (alloc_deltas, 'memory blocks', check_alloc_deltas)]:
+ if checker(deltas):
+ msg = '%s leaked %s %s, sum=%s' % (
+ test, deltas[nwarmup:], item_name, sum(deltas))
+ print(msg, file=sys.stderr)
+ sys.stderr.flush()
+ with open(fname, "a") as refrep:
+ print(msg, file=refrep)
+ refrep.flush()
+ failed = True
+ return failed
def dash_R_cleanup(fs, ps, pic, zdc, abcs):
import gc, copyreg
@@ -1427,8 +1502,11 @@ def dash_R_cleanup(fs, ps, pic, zdc, abcs):
else:
ctypes._reset_cache()
- # Collect cyclic trash.
+ # Collect cyclic trash and read memory statistics immediately after.
+ func1 = sys.getallocatedblocks
+ func2 = sys.gettotalrefcount
gc.collect()
+ return func1(), func2()
def warm_caches():
# char cache
@@ -1471,307 +1549,10 @@ def printlist(x, width=70, indent=4):
print(fill(' '.join(str(elt) for elt in sorted(x)), width,
initial_indent=blanks, subsequent_indent=blanks))
-# Map sys.platform to a string containing the basenames of tests
-# expected to be skipped on that platform.
-#
-# Special cases:
-# test_pep277
-# The _ExpectedSkips constructor adds this to the set of expected
-# skips if not os.path.supports_unicode_filenames.
-# test_timeout
-# Controlled by test_timeout.skip_expected. Requires the network
-# resource and a socket module.
-#
-# Tests that are expected to be skipped everywhere except on one platform
-# are also handled separately.
-
-_expectations = (
- ('win32',
- """
- test__locale
- test_crypt
- test_curses
- test_dbm
- test_devpoll
- test_fcntl
- test_fork1
- test_epoll
- test_dbm_gnu
- test_dbm_ndbm
- test_grp
- test_ioctl
- test_largefile
- test_kqueue
- test_openpty
- test_ossaudiodev
- test_pipes
- test_poll
- test_posix
- test_pty
- test_pwd
- test_resource
- test_signal
- test_syslog
- test_threadsignals
- test_wait3
- test_wait4
- """),
- ('linux',
- """
- test_curses
- test_devpoll
- test_largefile
- test_kqueue
- test_ossaudiodev
- """),
- ('unixware',
- """
- test_epoll
- test_largefile
- test_kqueue
- test_minidom
- test_openpty
- test_pyexpat
- test_sax
- test_sundry
- """),
- ('openunix',
- """
- test_epoll
- test_largefile
- test_kqueue
- test_minidom
- test_openpty
- test_pyexpat
- test_sax
- test_sundry
- """),
- ('sco_sv',
- """
- test_asynchat
- test_fork1
- test_epoll
- test_gettext
- test_largefile
- test_locale
- test_kqueue
- test_minidom
- test_openpty
- test_pyexpat
- test_queue
- test_sax
- test_sundry
- test_thread
- test_threaded_import
- test_threadedtempfile
- test_threading
- """),
- ('darwin',
- """
- test__locale
- test_curses
- test_devpoll
- test_epoll
- test_dbm_gnu
- test_gdb
- test_largefile
- test_locale
- test_minidom
- test_ossaudiodev
- test_poll
- """),
- ('sunos',
- """
- test_curses
- test_dbm
- test_epoll
- test_kqueue
- test_dbm_gnu
- test_gzip
- test_openpty
- test_zipfile
- test_zlib
- """),
- ('hp-ux',
- """
- test_curses
- test_epoll
- test_dbm_gnu
- test_gzip
- test_largefile
- test_locale
- test_kqueue
- test_minidom
- test_openpty
- test_pyexpat
- test_sax
- test_zipfile
- test_zlib
- """),
- ('cygwin',
- """
- test_curses
- test_dbm
- test_devpoll
- test_epoll
- test_ioctl
- test_kqueue
- test_largefile
- test_locale
- test_ossaudiodev
- test_socketserver
- """),
- ('os2emx',
- """
- test_audioop
- test_curses
- test_epoll
- test_kqueue
- test_largefile
- test_mmap
- test_openpty
- test_ossaudiodev
- test_pty
- test_resource
- test_signal
- """),
- ('freebsd',
- """
- test_devpoll
- test_epoll
- test_dbm_gnu
- test_locale
- test_ossaudiodev
- test_pep277
- test_pty
- test_socketserver
- test_tcl
- test_tk
- test_ttk_guionly
- test_ttk_textonly
- test_timeout
- test_urllibnet
- test_multiprocessing
- """),
- ('aix',
- """
- test_bz2
- test_epoll
- test_dbm_gnu
- test_gzip
- test_kqueue
- test_ossaudiodev
- test_tcl
- test_tk
- test_ttk_guionly
- test_ttk_textonly
- test_zipimport
- test_zlib
- """),
- ('openbsd',
- """
- test_ctypes
- test_devpoll
- test_epoll
- test_dbm_gnu
- test_locale
- test_normalization
- test_ossaudiodev
- test_pep277
- test_tcl
- test_tk
- test_ttk_guionly
- test_ttk_textonly
- test_multiprocessing
- """),
- ('netbsd',
- """
- test_ctypes
- test_curses
- test_devpoll
- test_epoll
- test_dbm_gnu
- test_locale
- test_ossaudiodev
- test_pep277
- test_tcl
- test_tk
- test_ttk_guionly
- test_ttk_textonly
- test_multiprocessing
- """),
-)
-
-class _ExpectedSkips:
- def __init__(self):
- import os.path
- from test import test_timeout
-
- self.valid = False
- expected = None
- for item in _expectations:
- if sys.platform.startswith(item[0]):
- expected = item[1]
- break
- if expected is not None:
- self.expected = set(expected.split())
-
- # These are broken tests, for now skipped on every platform.
- # XXX Fix these!
- self.expected.add('test_nis')
-
- # expected to be skipped on every platform, even Linux
- if not os.path.supports_unicode_filenames:
- self.expected.add('test_pep277')
-
- # doctest, profile and cProfile tests fail when the codec for the
- # fs encoding isn't built in because PyUnicode_Decode() adds two
- # calls into Python.
- encs = ("utf-8", "latin-1", "ascii", "mbcs", "utf-16", "utf-32")
- if sys.getfilesystemencoding().lower() not in encs:
- self.expected.add('test_profile')
- self.expected.add('test_cProfile')
- self.expected.add('test_doctest')
-
- if test_timeout.skip_expected:
- self.expected.add('test_timeout')
-
- if sys.platform != "win32":
- # test_sqlite is only reliable on Windows where the library
- # is distributed with Python
- WIN_ONLY = {"test_unicode_file", "test_winreg",
- "test_winsound", "test_startfile",
- "test_sqlite", "test_msilib"}
- self.expected |= WIN_ONLY
-
- if sys.platform != 'sunos5':
- self.expected.add('test_nis')
-
- if support.python_is_optimized():
- self.expected.add("test_gdb")
-
- self.valid = True
-
- def isvalid(self):
- "Return true iff _ExpectedSkips knows about the current platform."
- return self.valid
-
- def getexpected(self):
- """Return set of test names we expect to skip on current platform.
-
- self.isvalid() must be true.
- """
-
- assert self.isvalid()
- return self.expected
-
-def _make_temp_dir_for_build(TEMPDIR):
- # When tests are run from the Python build directory, it is best practice
- # to keep the test files in a subfolder. It eases the cleanup of leftover
- # files using command "make distclean".
+
+def main_in_temp_cwd():
+ """Run main() in a temporary working directory."""
if sysconfig.is_python_build():
- TEMPDIR = os.path.join(sysconfig.get_config_var('srcdir'), 'build')
- TEMPDIR = os.path.abspath(TEMPDIR)
try:
os.mkdir(TEMPDIR)
except FileExistsError:
@@ -1780,10 +1561,16 @@ def _make_temp_dir_for_build(TEMPDIR):
# Define a writable temp dir that will be used as cwd while running
# the tests. The name of the dir includes the pid to allow parallel
# testing (see the -j option).
- TESTCWD = 'test_python_{}'.format(os.getpid())
+ test_cwd = 'test_python_{}'.format(os.getpid())
+ test_cwd = os.path.join(TEMPDIR, test_cwd)
+
+ # Run the tests in a context manager that temporarily changes the CWD to a
+ # temporary and writable directory. If it's not possible to create or
+ # change the CWD, the original CWD will be used. The original CWD is
+ # available from support.SAVEDCWD.
+ with support.temp_cwd(test_cwd, quiet=True):
+ main()
- TESTCWD = os.path.join(TEMPDIR, TESTCWD)
- return TEMPDIR, TESTCWD
if __name__ == '__main__':
# Remove regrtest.py's own directory from the module search path. Despite
@@ -1807,11 +1594,4 @@ if __name__ == '__main__':
# sanity check
assert __file__ == os.path.abspath(sys.argv[0])
- TEMPDIR, TESTCWD = _make_temp_dir_for_build(TEMPDIR)
-
- # Run the tests in a context manager that temporary changes the CWD to a
- # temporary and writable directory. If it's not possible to create or
- # change the CWD, the original CWD will be used. The original CWD is
- # available from support.SAVEDCWD.
- with support.temp_cwd(TESTCWD, quiet=True):
- main()
+ main_in_temp_cwd()
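
# Illustrative sketch, not from the patch itself: the allocation-delta
# heuristic that regrtest's -R mode now applies (see check_alloc_deltas
# above).  A test is reported as leaking memory blocks only if fewer than a
# third of the tracked deltas are zero, or if any delta falls outside
# {-1, 0, 1}.
def alloc_deltas_look_leaky(deltas):
    if 3 * deltas.count(0) < len(deltas):
        return True                  # too few stable (zero) runs
    if not set(deltas) <= {1, 0, -1}:
        return True                  # genuine growth in allocated blocks
    return False

assert not alloc_deltas_look_leaky([0, 0, 0, 1])   # occasional +/-1 jitter is tolerated
assert alloc_deltas_look_leaky([2, 0, 0, 0])       # a 2-block delta is reported
assert alloc_deltas_look_leaky([1, 1, 0, 1])       # mostly nonzero deltas are reported
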
diff --git a/Lib/test/sortperf.py b/Lib/test/sortperf.py
index af7c0b4..90722f7 100644
--- a/Lib/test/sortperf.py
+++ b/Lib/test/sortperf.py
@@ -22,7 +22,7 @@ def randfloats(n):
fn = os.path.join(td, "rr%06d" % n)
try:
fp = open(fn, "rb")
- except IOError:
+ except OSError:
r = random.random
result = [r() for i in range(n)]
try:
@@ -35,9 +35,9 @@ def randfloats(n):
if fp:
try:
os.unlink(fn)
- except os.error:
+ except OSError:
pass
- except IOError as msg:
+ except OSError as msg:
print("can't write", fn, ":", msg)
else:
result = marshal.load(fp)
diff --git a/Lib/test/ssl_servers.py b/Lib/test/ssl_servers.py
index 8686153..759b3f4 100644
--- a/Lib/test/ssl_servers.py
+++ b/Lib/test/ssl_servers.py
@@ -35,7 +35,7 @@ class HTTPSServer(_HTTPServer):
try:
sock, addr = self.socket.accept()
sslconn = self.context.wrap_socket(sock, server_side=True)
- except socket.error as e:
+ except OSError as e:
# socket errors are silenced by the caller, print them here
if support.verbose:
sys.stderr.write("Got an error:\n%s\n" % e)
@@ -147,9 +147,11 @@ class HTTPSServerThread(threading.Thread):
self.server.shutdown()
-def make_https_server(case, certfile=CERTFILE, host=HOST, handler_class=None):
- # we assume the certfile contains both private key and certificate
- context = ssl.SSLContext(ssl.PROTOCOL_SSLv23)
+def make_https_server(case, *, context=None, certfile=CERTFILE,
+ host=HOST, handler_class=None):
+ if context is None:
+ context = ssl.SSLContext(ssl.PROTOCOL_SSLv23)
+ # We assume the certfile contains both private key and certificate
context.load_cert_chain(certfile)
server = HTTPSServerThread(context, host, handler_class)
flag = threading.Event()
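
# Illustrative sketch, not from the patch itself: with the new keyword-only
# `context` argument a test can hand make_https_server() a pre-built
# SSLContext; the certfile is loaded into whichever context ends up being
# used.  CERTFILE is the module's default test certificate.
import ssl
import unittest
from test.ssl_servers import make_https_server, CERTFILE

class CustomContextTest(unittest.TestCase):          # hypothetical test case
    def test_custom_context(self):
        context = ssl.SSLContext(ssl.PROTOCOL_SSLv23)
        server = make_https_server(self, context=context, certfile=CERTFILE)
        # `server` is the HTTPSServerThread started by the helper; the helper
        # uses the passed-in test case to register its cleanup.
        self.assertIsNotNone(server)

if __name__ == '__main__':
    unittest.main()
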
diff --git a/Lib/test/support.py b/Lib/test/support.py
index f2c3434..d089767 100644
--- a/Lib/test/support.py
+++ b/Lib/test/support.py
@@ -93,7 +93,8 @@ def _ignore_deprecated_imports(ignore=True):
"""Context manager to suppress package and module deprecation
warnings when importing them.
- If ignore is False, this context manager has no effect."""
+ If ignore is False, this context manager has no effect.
+ """
if ignore:
with warnings.catch_warnings():
warnings.filterwarnings("ignore", ".+ (module|package)",
@@ -103,23 +104,29 @@ def _ignore_deprecated_imports(ignore=True):
yield
-def import_module(name, deprecated=False):
+def import_module(name, deprecated=False, *, required_on=()):
"""Import and return the module to be tested, raising SkipTest if
it is not available.
If deprecated is True, any module or package deprecation messages
- will be suppressed."""
+ will be suppressed. If a module is required on a platform but optional for
+ others, set required_on to an iterable of platform prefixes which will be
+ compared against sys.platform.
+ """
with _ignore_deprecated_imports(deprecated):
try:
return importlib.import_module(name)
except ImportError as msg:
+ if sys.platform.startswith(tuple(required_on)):
+ raise
raise unittest.SkipTest(str(msg))
def _save_and_remove_module(name, orig_modules):
"""Helper function to save and remove a module from sys.modules
- Raise ImportError if the module can't be imported."""
+ Raise ImportError if the module can't be imported.
+ """
# try to import the module and raise an error if it can't be imported
if name not in sys.modules:
__import__(name)
@@ -284,25 +291,20 @@ else:
def unlink(filename):
try:
_unlink(filename)
- except OSError as error:
- # The filename need not exist.
- if error.errno not in (errno.ENOENT, errno.ENOTDIR):
- raise
+ except (FileNotFoundError, NotADirectoryError):
+ pass
def rmdir(dirname):
try:
_rmdir(dirname)
- except OSError as error:
- # The directory need not exist.
- if error.errno != errno.ENOENT:
- raise
+ except FileNotFoundError:
+ pass
def rmtree(path):
try:
_rmtree(path)
- except OSError as error:
- if error.errno != errno.ENOENT:
- raise
+ except FileNotFoundError:
+ pass
def make_legacy_pyc(source):
"""Move a PEP 3147 pyc/pyo file to its legacy pyc/pyo location.
@@ -489,7 +491,7 @@ def find_unused_port(family=socket.AF_INET, socktype=socket.SOCK_STREAM):
the SO_REUSEADDR socket option having different semantics on Windows versus
Unix/Linux. On Unix, you can't have two AF_INET SOCK_STREAM sockets bind,
listen and then accept connections on identical host/ports. An EADDRINUSE
- socket.error will be raised at some point (depending on the platform and
+ OSError will be raised at some point (depending on the platform and
the order bind and listen were called on each socket).
However, on Windows, if SO_REUSEADDR is set on the sockets, no EADDRINUSE
@@ -563,7 +565,7 @@ def _is_ipv6_enabled():
sock = socket.socket(socket.AF_INET6, socket.SOCK_STREAM)
sock.bind(('::1', 0))
return True
- except (socket.error, socket.gaierror):
+ except OSError:
pass
finally:
if sock:
@@ -1090,9 +1092,9 @@ class TransientResource(object):
# Context managers that raise ResourceDenied when various issues
# with the Internet connection manifest themselves as exceptions.
# XXX deprecate these and use transient_internet() instead
-time_out = TransientResource(IOError, errno=errno.ETIMEDOUT)
-socket_peer_reset = TransientResource(socket.error, errno=errno.ECONNRESET)
-ioerror_peer_reset = TransientResource(IOError, errno=errno.ECONNRESET)
+time_out = TransientResource(OSError, errno=errno.ETIMEDOUT)
+socket_peer_reset = TransientResource(OSError, errno=errno.ECONNRESET)
+ioerror_peer_reset = TransientResource(OSError, errno=errno.ECONNRESET)
@contextlib.contextmanager
@@ -1138,17 +1140,17 @@ def transient_internet(resource_name, *, timeout=30.0, errnos=()):
if timeout is not None:
socket.setdefaulttimeout(timeout)
yield
- except IOError as err:
+ except OSError as err:
# urllib can wrap original socket errors multiple times (!), we must
# unwrap to get at the original error.
while True:
a = err.args
- if len(a) >= 1 and isinstance(a[0], IOError):
+ if len(a) >= 1 and isinstance(a[0], OSError):
err = a[0]
# The error can also be wrapped as args[1]:
# except socket.error as msg:
- # raise IOError('socket error', msg).with_traceback(sys.exc_info()[2])
- elif len(a) >= 2 and isinstance(a[1], IOError):
+ # raise OSError('socket error', msg).with_traceback(sys.exc_info()[2])
+ elif len(a) >= 2 and isinstance(a[1], OSError):
err = a[1]
else:
break
@@ -1775,7 +1777,7 @@ def strip_python_stderr(stderr):
This will typically be run on the result of the communicate() method
of a subprocess.Popen object.
"""
- stderr = re.sub(br"\[\d+ refs\]\r?\n?", b"", stderr).strip()
+ stderr = re.sub(br"\[\d+ refs, \d+ blocks\]\r?\n?", b"", stderr).strip()
return stderr
def args_from_interpreter_flags():
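
# Illustrative sketch, not from the patch itself: the new required_on keyword
# makes a module mandatory on the named sys.platform prefixes while staying
# skippable elsewhere.  Inside a test module one might write:
from test import support

# On Windows (sys.platform 'win32') a missing winreg raises ImportError and
# fails the run; on other platforms the test module is skipped instead.
winreg = support.import_module('winreg', required_on=['win'])
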
diff --git a/Lib/test/test_abc.py b/Lib/test/test_abc.py
index 653c957..3498524 100644
--- a/Lib/test/test_abc.py
+++ b/Lib/test/test_abc.py
@@ -96,6 +96,19 @@ class TestLegacyAPI(unittest.TestCase):
class TestABC(unittest.TestCase):
+ def test_ABC_helper(self):
+ # create an ABC using the helper class and perform basic checks
+ class C(abc.ABC):
+ @classmethod
+ @abc.abstractmethod
+ def foo(cls): return cls.__name__
+ self.assertEqual(type(C), abc.ABCMeta)
+ self.assertRaises(TypeError, C)
+ class D(C):
+ @classmethod
+ def foo(cls): return super().foo()
+ self.assertEqual(D.foo(), 'D')
+
def test_abstractmethod_basics(self):
@abc.abstractmethod
def foo(self): pass
diff --git a/Lib/test/test_aifc.py b/Lib/test/test_aifc.py
index 9c0e7b9..98d43e4 100644
--- a/Lib/test/test_aifc.py
+++ b/Lib/test/test_aifc.py
@@ -43,6 +43,18 @@ class AIFCTest(unittest.TestCase):
(2, 2, 48000, 14400, b'NONE', b'not compressed'),
)
+ def test_context_manager(self):
+ with open(self.sndfilepath, 'rb') as testfile:
+ with aifc.open(testfile) as f:
+ pass
+ self.assertEqual(testfile.closed, True)
+ with open(TESTFN, 'wb') as testfile:
+ with self.assertRaises(aifc.Error):
+ with aifc.open(testfile, 'wb') as fout:
+ pass
+ self.assertEqual(testfile.closed, True)
+ fout.close() # do nothing
+
def test_read(self):
f = self.f = aifc.open(self.sndfilepath)
self.assertEqual(f.readframes(0), b'')
@@ -319,12 +331,14 @@ class AIFCLowLevelTest(unittest.TestCase):
def test_write_aiff_by_extension(self):
sampwidth = 2
- fout = self.fout = aifc.open(TESTFN + '.aiff', 'wb')
+ filename = TESTFN + '.aiff'
+ fout = self.fout = aifc.open(filename, 'wb')
+ self.addCleanup(unlink, filename)
fout.setparams((1, sampwidth, 1, 1, b'ULAW', b''))
frames = b'\x00' * fout.getnchannels() * sampwidth
fout.writeframes(frames)
fout.close()
- f = self.f = aifc.open(TESTFN + '.aiff', 'rb')
+ f = self.f = aifc.open(filename, 'rb')
self.assertEqual(f.getcomptype(), b'NONE')
f.close()
diff --git a/Lib/test/test_argparse.py b/Lib/test/test_argparse.py
index c06c940..00cde2e 100644
--- a/Lib/test/test_argparse.py
+++ b/Lib/test/test_argparse.py
@@ -14,6 +14,7 @@ import argparse
from io import StringIO
from test import support
+from unittest import mock
class StdIOBuffer(StringIO):
pass
@@ -1421,6 +1422,19 @@ class TestFileTypeRepr(TestCase):
type = argparse.FileType('wb', 1)
self.assertEqual("FileType('wb', 1)", repr(type))
+ def test_r_latin(self):
+ type = argparse.FileType('r', encoding='latin_1')
+ self.assertEqual("FileType('r', encoding='latin_1')", repr(type))
+
+ def test_w_big5_ignore(self):
+ type = argparse.FileType('w', encoding='big5', errors='ignore')
+ self.assertEqual("FileType('w', encoding='big5', errors='ignore')",
+ repr(type))
+
+ def test_r_1_replace(self):
+ type = argparse.FileType('r', 1, errors='replace')
+ self.assertEqual("FileType('r', 1, errors='replace')", repr(type))
+
class RFile(object):
seen = {}
@@ -1557,6 +1571,24 @@ class TestFileTypeWB(TempDirMixin, ParserTestCase):
]
+class TestFileTypeOpenArgs(TestCase):
+ """Test that open (the builtin) is correctly called"""
+
+ def test_open_args(self):
+ FT = argparse.FileType
+ cases = [
+ (FT('rb'), ('rb', -1, None, None)),
+ (FT('w', 1), ('w', 1, None, None)),
+ (FT('w', errors='replace'), ('w', -1, None, 'replace')),
+ (FT('wb', encoding='big5'), ('wb', -1, 'big5', None)),
+ (FT('w', 0, 'l1', 'strict'), ('w', 0, 'l1', 'strict')),
+ ]
+ with mock.patch('builtins.open') as m:
+ for type, args in cases:
+ type('foo')
+ m.assert_called_with('foo', *args)
+
+
class TestTypeCallable(ParserTestCase):
"""Test some callables as option/argument types"""
diff --git a/Lib/test/test_array.py b/Lib/test/test_array.py
index f21b69f..a44ee07 100755
--- a/Lib/test/test_array.py
+++ b/Lib/test/test_array.py
@@ -353,12 +353,12 @@ class BaseTest:
support.unlink(support.TESTFN)
def test_fromfile_ioerror(self):
- # Issue #5395: Check if fromfile raises a proper IOError
+ # Issue #5395: Check if fromfile raises a proper OSError
# instead of EOFError.
a = array.array(self.typecode)
f = open(support.TESTFN, 'wb')
try:
- self.assertRaises(IOError, a.fromfile, f, len(self.example))
+ self.assertRaises(OSError, a.fromfile, f, len(self.example))
finally:
f.close()
support.unlink(support.TESTFN)
diff --git a/Lib/test/test_ast.py b/Lib/test/test_ast.py
index dc24126..c45326f 100644
--- a/Lib/test/test_ast.py
+++ b/Lib/test/test_ast.py
@@ -180,20 +180,36 @@ eval_tests = [
class AST_Tests(unittest.TestCase):
- def _assertTrueorder(self, ast_node, parent_pos):
+ def _assertTrueorder(self, ast_node, parent_pos, reverse_check = False):
+ def should_reverse_check(parent, child):
+ # In some situations, the children of nodes occur before
+ # their parents, for example in a.b.c, a occurs before b
+ # but a is a child of b.
+ if isinstance(parent, ast.Call):
+ if parent.func == child:
+ return True
+ if isinstance(parent, (ast.Attribute, ast.Subscript)):
+ return True
+ return False
+
if not isinstance(ast_node, ast.AST) or ast_node._fields is None:
return
if isinstance(ast_node, (ast.expr, ast.stmt, ast.excepthandler)):
node_pos = (ast_node.lineno, ast_node.col_offset)
- self.assertTrue(node_pos >= parent_pos)
+ if reverse_check:
+ self.assertTrue(node_pos <= parent_pos)
+ else:
+ self.assertTrue(node_pos >= parent_pos)
parent_pos = (ast_node.lineno, ast_node.col_offset)
for name in ast_node._fields:
value = getattr(ast_node, name)
if isinstance(value, list):
for child in value:
- self._assertTrueorder(child, parent_pos)
+ self._assertTrueorder(child, parent_pos,
+ should_reverse_check(ast_node, child))
elif value is not None:
- self._assertTrueorder(value, parent_pos)
+ self._assertTrueorder(value, parent_pos,
+ should_reverse_check(ast_node, value))
def test_AST_objects(self):
x = ast.AST()
@@ -262,14 +278,14 @@ class AST_Tests(unittest.TestCase):
def test_arguments(self):
x = ast.arguments()
- self.assertEqual(x._fields, ('args', 'vararg', 'varargannotation',
- 'kwonlyargs', 'kwarg', 'kwargannotation',
- 'defaults', 'kw_defaults'))
+ self.assertEqual(x._fields, ('args', 'vararg',
+ 'kwonlyargs', 'kw_defaults',
+ 'kwarg', 'defaults'))
with self.assertRaises(AttributeError):
x.vararg
- x = ast.arguments(*range(1, 9))
+ x = ast.arguments(*range(1, 7))
self.assertEqual(x.vararg, 2)
def test_field_attr_writable(self):
@@ -439,7 +455,7 @@ class ASTHelpers_Test(unittest.TestCase):
"lineno=1, col_offset=0), args=[Name(id='eggs', ctx=Load(), "
"lineno=1, col_offset=5), Str(s='and cheese', lineno=1, "
"col_offset=11)], keywords=[], starargs=None, kwargs=None, "
- "lineno=1, col_offset=0), lineno=1, col_offset=0)])"
+ "lineno=1, col_offset=4), lineno=1, col_offset=0)])"
)
def test_copy_location(self):
@@ -460,7 +476,7 @@ class ASTHelpers_Test(unittest.TestCase):
"Module(body=[Expr(value=Call(func=Name(id='write', ctx=Load(), "
"lineno=1, col_offset=0), args=[Str(s='spam', lineno=1, "
"col_offset=6)], keywords=[], starargs=None, kwargs=None, "
- "lineno=1, col_offset=0), lineno=1, col_offset=0), "
+ "lineno=1, col_offset=5), lineno=1, col_offset=0), "
"Expr(value=Call(func=Name(id='spam', ctx=Load(), lineno=1, "
"col_offset=0), args=[Str(s='eggs', lineno=1, col_offset=0)], "
"keywords=[], starargs=None, kwargs=None, lineno=1, "
@@ -560,8 +576,8 @@ class ASTValidatorTests(unittest.TestCase):
self.mod(m, "must have Load context", "eval")
def _check_arguments(self, fac, check):
- def arguments(args=None, vararg=None, varargannotation=None,
- kwonlyargs=None, kwarg=None, kwargannotation=None,
+ def arguments(args=None, vararg=None,
+ kwonlyargs=None, kwarg=None,
defaults=None, kw_defaults=None):
if args is None:
args = []
@@ -571,20 +587,12 @@ class ASTValidatorTests(unittest.TestCase):
defaults = []
if kw_defaults is None:
kw_defaults = []
- args = ast.arguments(args, vararg, varargannotation, kwonlyargs,
- kwarg, kwargannotation, defaults, kw_defaults)
+ args = ast.arguments(args, vararg, kwonlyargs, kw_defaults,
+ kwarg, defaults)
return fac(args)
args = [ast.arg("x", ast.Name("x", ast.Store()))]
check(arguments(args=args), "must have Load context")
- check(arguments(varargannotation=ast.Num(3)),
- "varargannotation but no vararg")
- check(arguments(varargannotation=ast.Name("x", ast.Store()), vararg="x"),
- "must have Load context")
check(arguments(kwonlyargs=args), "must have Load context")
- check(arguments(kwargannotation=ast.Num(42)),
- "kwargannotation but no kwarg")
- check(arguments(kwargannotation=ast.Name("x", ast.Store()),
- kwarg="x"), "must have Load context")
check(arguments(defaults=[ast.Num(3)]),
"more positional defaults than args")
check(arguments(kw_defaults=[ast.Num(4)]),
@@ -599,7 +607,7 @@ class ASTValidatorTests(unittest.TestCase):
"must have Load context")
def test_funcdef(self):
- a = ast.arguments([], None, None, [], None, None, [], [])
+ a = ast.arguments([], None, [], [], None, [])
f = ast.FunctionDef("x", a, [], [], None)
self.stmt(f, "empty body on FunctionDef")
f = ast.FunctionDef("x", a, [ast.Pass()], [ast.Name("x", ast.Store())],
@@ -770,7 +778,7 @@ class ASTValidatorTests(unittest.TestCase):
self.expr(u, "must have Load context")
def test_lambda(self):
- a = ast.arguments([], None, None, [], None, None, [], [])
+ a = ast.arguments([], None, [], [], None, [])
self.expr(ast.Lambda(a, ast.Name("x", ast.Store())),
"must have Load context")
def fac(args):
@@ -928,6 +936,9 @@ class ASTValidatorTests(unittest.TestCase):
def test_tuple(self):
self._sequence(ast.Tuple)
+ def test_nameconstant(self):
+ self.expr(ast.NameConstant(4), "singleton must be True, False, or None")
+
def test_stdlib_validates(self):
stdlib = os.path.dirname(ast.__file__)
tests = [fn for fn in os.listdir(stdlib) if fn.endswith(".py")]
@@ -936,7 +947,7 @@ class ASTValidatorTests(unittest.TestCase):
fn = os.path.join(stdlib, module)
with open(fn, "r", encoding="utf-8") as fp:
source = fp.read()
- mod = ast.parse(source)
+ mod = ast.parse(source, fn)
compile(mod, fn, "exec")
@@ -959,16 +970,16 @@ def main():
#### EVERYTHING BELOW IS GENERATED #####
exec_results = [
-('Module', [('Expr', (1, 0), ('Name', (1, 0), 'None', ('Load',)))]),
-('Module', [('FunctionDef', (1, 0), 'f', ('arguments', [], None, None, [], None, None, [], []), [('Pass', (1, 9))], [], None)]),
-('Module', [('FunctionDef', (1, 0), 'f', ('arguments', [('arg', 'a', None)], None, None, [], None, None, [], []), [('Pass', (1, 10))], [], None)]),
-('Module', [('FunctionDef', (1, 0), 'f', ('arguments', [('arg', 'a', None)], None, None, [], None, None, [('Num', (1, 8), 0)], []), [('Pass', (1, 12))], [], None)]),
-('Module', [('FunctionDef', (1, 0), 'f', ('arguments', [], 'args', None, [], None, None, [], []), [('Pass', (1, 14))], [], None)]),
-('Module', [('FunctionDef', (1, 0), 'f', ('arguments', [], None, None, [], 'kwargs', None, [], []), [('Pass', (1, 17))], [], None)]),
-('Module', [('FunctionDef', (1, 0), 'f', ('arguments', [('arg', 'a', None), ('arg', 'b', None), ('arg', 'c', None), ('arg', 'd', None), ('arg', 'e', None)], 'args', None, [], 'kwargs', None, [('Num', (1, 11), 1), ('Name', (1, 16), 'None', ('Load',)), ('List', (1, 24), [], ('Load',)), ('Dict', (1, 30), [], [])], []), [('Pass', (1, 52))], [], None)]),
+('Module', [('Expr', (1, 0), ('NameConstant', (1, 0), None))]),
+('Module', [('FunctionDef', (1, 0), 'f', ('arguments', [], None, [], [], None, []), [('Pass', (1, 9))], [], None)]),
+('Module', [('FunctionDef', (1, 0), 'f', ('arguments', [('arg', (1, 6), 'a', None)], None, [], [], None, []), [('Pass', (1, 10))], [], None)]),
+('Module', [('FunctionDef', (1, 0), 'f', ('arguments', [('arg', (1, 6), 'a', None)], None, [], [], None, [('Num', (1, 8), 0)]), [('Pass', (1, 12))], [], None)]),
+('Module', [('FunctionDef', (1, 0), 'f', ('arguments', [], ('arg', (1, 7), 'args', None), [], [], None, []), [('Pass', (1, 14))], [], None)]),
+('Module', [('FunctionDef', (1, 0), 'f', ('arguments', [], None, [], [], ('arg', (1, 8), 'kwargs', None), []), [('Pass', (1, 17))], [], None)]),
+('Module', [('FunctionDef', (1, 0), 'f', ('arguments', [('arg', (1, 6), 'a', None), ('arg', (1, 9), 'b', None), ('arg', (1, 14), 'c', None), ('arg', (1, 22), 'd', None), ('arg', (1, 28), 'e', None)], ('arg', (1, 35), 'args', None), [], [], ('arg', (1, 43), 'kwargs', None), [('Num', (1, 11), 1), ('NameConstant', (1, 16), None), ('List', (1, 24), [], ('Load',)), ('Dict', (1, 30), [], [])]), [('Pass', (1, 52))], [], None)]),
('Module', [('ClassDef', (1, 0), 'C', [], [], None, None, [('Pass', (1, 8))], [])]),
('Module', [('ClassDef', (1, 0), 'C', [('Name', (1, 8), 'object', ('Load',))], [], None, None, [('Pass', (1, 17))], [])]),
-('Module', [('FunctionDef', (1, 0), 'f', ('arguments', [], None, None, [], None, None, [], []), [('Return', (1, 8), ('Num', (1, 15), 1))], [], None)]),
+('Module', [('FunctionDef', (1, 0), 'f', ('arguments', [], None, [], [], None, []), [('Return', (1, 8), ('Num', (1, 15), 1))], [], None)]),
('Module', [('Delete', (1, 0), [('Name', (1, 4), 'v', ('Del',))])]),
('Module', [('Assign', (1, 0), [('Name', (1, 0), 'v', ('Store',))], ('Num', (1, 4), 1))]),
('Module', [('AugAssign', (1, 0), ('Name', (1, 0), 'v', ('Store',)), ('Add',), ('Num', (1, 5), 1))]),
@@ -977,7 +988,7 @@ exec_results = [
('Module', [('If', (1, 0), ('Name', (1, 3), 'v', ('Load',)), [('Pass', (1, 5))], [])]),
('Module', [('With', (1, 0), [('withitem', ('Name', (1, 5), 'x', ('Load',)), ('Name', (1, 10), 'y', ('Store',)))], [('Pass', (1, 13))])]),
('Module', [('With', (1, 0), [('withitem', ('Name', (1, 5), 'x', ('Load',)), ('Name', (1, 10), 'y', ('Store',))), ('withitem', ('Name', (1, 13), 'z', ('Load',)), ('Name', (1, 18), 'q', ('Store',)))], [('Pass', (1, 21))])]),
-('Module', [('Raise', (1, 0), ('Call', (1, 6), ('Name', (1, 6), 'Exception', ('Load',)), [('Str', (1, 16), 'string')], [], None, None), None)]),
+('Module', [('Raise', (1, 0), ('Call', (1, 15), ('Name', (1, 6), 'Exception', ('Load',)), [('Str', (1, 16), 'string')], [], None, None), None)]),
('Module', [('Try', (1, 0), [('Pass', (2, 2))], [('ExceptHandler', (3, 0), ('Name', (3, 7), 'Exception', ('Load',)), None, [('Pass', (4, 2))])], [], [])]),
('Module', [('Try', (1, 0), [('Pass', (2, 2))], [], [], [('Pass', (4, 2))])]),
('Module', [('Assert', (1, 0), ('Name', (1, 7), 'v', ('Load',)), None)]),
@@ -1002,29 +1013,29 @@ single_results = [
('Interactive', [('Expr', (1, 0), ('BinOp', (1, 0), ('Num', (1, 0), 1), ('Add',), ('Num', (1, 2), 2)))]),
]
eval_results = [
-('Expression', ('Name', (1, 0), 'None', ('Load',))),
+('Expression', ('NameConstant', (1, 0), None)),
('Expression', ('BoolOp', (1, 0), ('And',), [('Name', (1, 0), 'a', ('Load',)), ('Name', (1, 6), 'b', ('Load',))])),
('Expression', ('BinOp', (1, 0), ('Name', (1, 0), 'a', ('Load',)), ('Add',), ('Name', (1, 4), 'b', ('Load',)))),
('Expression', ('UnaryOp', (1, 0), ('Not',), ('Name', (1, 4), 'v', ('Load',)))),
-('Expression', ('Lambda', (1, 0), ('arguments', [], None, None, [], None, None, [], []), ('Name', (1, 7), 'None', ('Load',)))),
+('Expression', ('Lambda', (1, 0), ('arguments', [], None, [], [], None, []), ('NameConstant', (1, 7), None))),
('Expression', ('Dict', (1, 0), [('Num', (1, 2), 1)], [('Num', (1, 4), 2)])),
('Expression', ('Dict', (1, 0), [], [])),
-('Expression', ('Set', (1, 0), [('Name', (1, 1), 'None', ('Load',))])),
+('Expression', ('Set', (1, 0), [('NameConstant', (1, 1), None)])),
('Expression', ('Dict', (1, 0), [('Num', (2, 6), 1)], [('Num', (4, 10), 2)])),
('Expression', ('ListComp', (1, 1), ('Name', (1, 1), 'a', ('Load',)), [('comprehension', ('Name', (1, 7), 'b', ('Store',)), ('Name', (1, 12), 'c', ('Load',)), [('Name', (1, 17), 'd', ('Load',))])])),
('Expression', ('GeneratorExp', (1, 1), ('Name', (1, 1), 'a', ('Load',)), [('comprehension', ('Name', (1, 7), 'b', ('Store',)), ('Name', (1, 12), 'c', ('Load',)), [('Name', (1, 17), 'd', ('Load',))])])),
('Expression', ('Compare', (1, 0), ('Num', (1, 0), 1), [('Lt',), ('Lt',)], [('Num', (1, 4), 2), ('Num', (1, 8), 3)])),
-('Expression', ('Call', (1, 0), ('Name', (1, 0), 'f', ('Load',)), [('Num', (1, 2), 1), ('Num', (1, 4), 2)], [('keyword', 'c', ('Num', (1, 8), 3))], ('Name', (1, 11), 'd', ('Load',)), ('Name', (1, 15), 'e', ('Load',)))),
+('Expression', ('Call', (1, 1), ('Name', (1, 0), 'f', ('Load',)), [('Num', (1, 2), 1), ('Num', (1, 4), 2)], [('keyword', 'c', ('Num', (1, 8), 3))], ('Name', (1, 11), 'd', ('Load',)), ('Name', (1, 15), 'e', ('Load',)))),
('Expression', ('Num', (1, 0), 10)),
('Expression', ('Str', (1, 0), 'string')),
-('Expression', ('Attribute', (1, 0), ('Name', (1, 0), 'a', ('Load',)), 'b', ('Load',))),
-('Expression', ('Subscript', (1, 0), ('Name', (1, 0), 'a', ('Load',)), ('Slice', ('Name', (1, 2), 'b', ('Load',)), ('Name', (1, 4), 'c', ('Load',)), None), ('Load',))),
+('Expression', ('Attribute', (1, 2), ('Name', (1, 0), 'a', ('Load',)), 'b', ('Load',))),
+('Expression', ('Subscript', (1, 2), ('Name', (1, 0), 'a', ('Load',)), ('Slice', ('Name', (1, 2), 'b', ('Load',)), ('Name', (1, 4), 'c', ('Load',)), None), ('Load',))),
('Expression', ('Name', (1, 0), 'v', ('Load',))),
('Expression', ('List', (1, 0), [('Num', (1, 1), 1), ('Num', (1, 3), 2), ('Num', (1, 5), 3)], ('Load',))),
('Expression', ('List', (1, 0), [], ('Load',))),
('Expression', ('Tuple', (1, 0), [('Num', (1, 0), 1), ('Num', (1, 2), 2), ('Num', (1, 4), 3)], ('Load',))),
('Expression', ('Tuple', (1, 1), [('Num', (1, 1), 1), ('Num', (1, 3), 2), ('Num', (1, 5), 3)], ('Load',))),
('Expression', ('Tuple', (1, 0), [], ('Load',))),
-('Expression', ('Call', (1, 0), ('Attribute', (1, 0), ('Attribute', (1, 0), ('Attribute', (1, 0), ('Name', (1, 0), 'a', ('Load',)), 'b', ('Load',)), 'c', ('Load',)), 'd', ('Load',)), [('Subscript', (1, 8), ('Attribute', (1, 8), ('Name', (1, 8), 'a', ('Load',)), 'b', ('Load',)), ('Slice', ('Num', (1, 12), 1), ('Num', (1, 14), 2), None), ('Load',))], [], None, None)),
+('Expression', ('Call', (1, 7), ('Attribute', (1, 6), ('Attribute', (1, 4), ('Attribute', (1, 2), ('Name', (1, 0), 'a', ('Load',)), 'b', ('Load',)), 'c', ('Load',)), 'd', ('Load',)), [('Subscript', (1, 12), ('Attribute', (1, 10), ('Name', (1, 8), 'a', ('Load',)), 'b', ('Load',)), ('Slice', ('Num', (1, 12), 1), ('Num', (1, 14), 2), None), ('Load',))], [], None, None)),
]
main()
diff --git a/Lib/test/test_asyncore.py b/Lib/test/test_asyncore.py
index 878b26c..57eb4fa 100644
--- a/Lib/test/test_asyncore.py
+++ b/Lib/test/test_asyncore.py
@@ -756,7 +756,7 @@ class BaseTestAPI:
s2 = asyncore.dispatcher()
s2.create_socket(self.family)
# EADDRINUSE indicates the socket was correctly bound
- self.assertRaises(socket.error, s2.bind, (self.addr[0], port))
+ self.assertRaises(OSError, s2.bind, (self.addr[0], port))
def test_set_reuse_addr(self):
if HAS_UNIX_SOCKETS and self.family == socket.AF_UNIX:
@@ -764,7 +764,7 @@ class BaseTestAPI:
sock = socket.socket(self.family)
try:
sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
- except socket.error:
+ except OSError:
unittest.skip("SO_REUSEADDR not supported on this platform")
else:
# if SO_REUSEADDR succeeded for sock we expect asyncore
@@ -797,7 +797,7 @@ class BaseTestAPI:
struct.pack('ii', 1, 0))
try:
s.connect(server.address)
- except socket.error:
+ except OSError:
pass
finally:
s.close()
diff --git a/Lib/test/test_bisect.py b/Lib/test/test_bisect.py
index 7b9bd19..e5cb3d0 100644
--- a/Lib/test/test_bisect.py
+++ b/Lib/test/test_bisect.py
@@ -7,7 +7,7 @@ py_bisect = support.import_fresh_module('bisect', blocked=['_bisect'])
c_bisect = support.import_fresh_module('bisect', fresh=['_bisect'])
class Range(object):
- """A trivial range()-like object without any integer width limitations."""
+ """A trivial range()-like object that has an insert() method."""
def __init__(self, start, stop):
self.start = start
self.stop = stop
diff --git a/Lib/test/test_builtin.py b/Lib/test/test_builtin.py
index c342a43..f46f8d5 100644
--- a/Lib/test/test_builtin.py
+++ b/Lib/test/test_builtin.py
@@ -464,6 +464,11 @@ class BuiltinTest(unittest.TestCase):
self.assertRaises(TypeError, eval, ())
self.assertRaises(SyntaxError, eval, bom[:2] + b'a')
+ class X:
+ def __getitem__(self, key):
+ raise ValueError
+ self.assertRaises(ValueError, eval, "foo", {}, X())
+
def test_general_eval(self):
# Tests that general mappings can be used for the locals argument
@@ -1476,17 +1481,11 @@ class BuiltinTest(unittest.TestCase):
# --------------------------------------------------------------------
# Issue #7994: object.__format__ with a non-empty format string is
# deprecated
- def test_deprecated_format_string(obj, fmt_str, should_raise_warning):
- with warnings.catch_warnings(record=True) as w:
- warnings.simplefilter("always", DeprecationWarning)
- format(obj, fmt_str)
- if should_raise_warning:
- self.assertEqual(len(w), 1)
- self.assertIsInstance(w[0].message, DeprecationWarning)
- self.assertIn('object.__format__ with a non-empty format '
- 'string', str(w[0].message))
+ def test_deprecated_format_string(obj, fmt_str, should_raise):
+ if should_raise:
+ self.assertRaises(TypeError, format, obj, fmt_str)
else:
- self.assertEqual(len(w), 0)
+ format(obj, fmt_str)
fmt_strs = ['', 's']
diff --git a/Lib/test/test_bytes.py b/Lib/test/test_bytes.py
index 3520e83..f12f911 100644
--- a/Lib/test/test_bytes.py
+++ b/Lib/test/test_bytes.py
@@ -288,8 +288,22 @@ class BaseBytesTest:
self.assertEqual(self.type2test(b"").join(lst), b"abc")
self.assertEqual(self.type2test(b"").join(tuple(lst)), b"abc")
self.assertEqual(self.type2test(b"").join(iter(lst)), b"abc")
- self.assertEqual(self.type2test(b".").join([b"ab", b"cd"]), b"ab.cd")
- # XXX more...
+ dot_join = self.type2test(b".:").join
+ self.assertEqual(dot_join([b"ab", b"cd"]), b"ab.:cd")
+ self.assertEqual(dot_join([memoryview(b"ab"), b"cd"]), b"ab.:cd")
+ self.assertEqual(dot_join([b"ab", memoryview(b"cd")]), b"ab.:cd")
+ self.assertEqual(dot_join([bytearray(b"ab"), b"cd"]), b"ab.:cd")
+ self.assertEqual(dot_join([b"ab", bytearray(b"cd")]), b"ab.:cd")
+ # Stress it with many items
+ seq = [b"abc"] * 1000
+ expected = b"abc" + b".:abc" * 999
+ self.assertEqual(dot_join(seq), expected)
+ # Error handling and cleanup when some item in the middle of the
+ # sequence has the wrong type.
+ with self.assertRaises(TypeError):
+ dot_join([bytearray(b"ab"), "cd", b"ef"])
+ with self.assertRaises(TypeError):
+ dot_join([memoryview(b"ab"), "cd", b"ef"])
def test_count(self):
b = self.type2test(b'mississippi')
@@ -759,7 +773,7 @@ class ByteArrayTest(BaseBytesTest, unittest.TestCase):
finally:
try:
os.remove(tfn)
- except os.error:
+ except OSError:
pass
def test_reverse(self):
@@ -1274,6 +1288,11 @@ class BytearrayPEP3137Test(unittest.TestCase,
self.assertEqual(val, newval)
self.assertTrue(val is not newval,
expr+' returned val on a mutable object')
+ sep = self.marshal(b'')
+ newval = sep.join([val])
+ self.assertEqual(val, newval)
+ self.assertIsNot(val, newval)
+
class FixedStringTest(test.string_tests.BaseTest):
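Note: the extended join() tests above exercise acceptance of arbitrary bytes-like items. A minimal standalone sketch of that behaviour, outside the patch and assuming any Python 3 with memoryview support:

    # Sketch only: bytes.join accepts bytes, bytearray and memoryview items alike.
    sep = b".:"
    parts = [b"ab", bytearray(b"cd"), memoryview(b"ef")]
    assert sep.join(parts) == b"ab.:cd.:ef"
    # A str item anywhere in the sequence is rejected with TypeError.
    try:
        sep.join([b"ab", "cd", b"ef"])
    except TypeError:
        pass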
diff --git a/Lib/test/test_bz2.py b/Lib/test/test_bz2.py
index df7e18c..7090cd6 100644
--- a/Lib/test/test_bz2.py
+++ b/Lib/test/test_bz2.py
@@ -1,6 +1,6 @@
#!/usr/bin/env python3
from test import support
-from test.support import TESTFN, bigmemtest, _4G
+from test.support import bigmemtest, _4G
import unittest
from io import BytesIO
@@ -18,10 +18,10 @@ except ImportError:
bz2 = support.import_module('bz2')
from bz2 import BZ2File, BZ2Compressor, BZ2Decompressor
-has_cmdline_bunzip2 = sys.platform not in ("win32", "os2emx")
class BaseTest(unittest.TestCase):
"Base for other testcases."
+
TEXT_LINES = [
b'root:x:0:0:root:/root:/bin/bash\n',
b'bin:x:1:1:bin:/bin:\n',
@@ -50,13 +50,17 @@ class BaseTest(unittest.TestCase):
EMPTY_DATA = b'BZh9\x17rE8P\x90\x00\x00\x00\x00'
def setUp(self):
- self.filename = TESTFN
+ self.filename = support.TESTFN
def tearDown(self):
if os.path.isfile(self.filename):
os.unlink(self.filename)
- if has_cmdline_bunzip2:
+ if sys.platform == "win32":
+ # bunzip2 isn't available to run on Windows.
+ def decompress(self, data):
+ return bz2.decompress(data)
+ else:
def decompress(self, data):
pop = subprocess.Popen("bunzip2", shell=True,
stdin=subprocess.PIPE,
@@ -70,31 +74,21 @@ class BaseTest(unittest.TestCase):
ret = bz2.decompress(data)
return ret
- else:
- # bunzip2 isn't available to run on Windows.
- def decompress(self, data):
- return bz2.decompress(data)
class BZ2FileTest(BaseTest):
- "Test BZ2File type miscellaneous methods."
+ "Test the BZ2File class."
def createTempFile(self, streams=1):
with open(self.filename, "wb") as f:
f.write(self.DATA * streams)
def testBadArgs(self):
- with self.assertRaises(TypeError):
- BZ2File(123.456)
- with self.assertRaises(ValueError):
- BZ2File("/dev/null", "z")
- with self.assertRaises(ValueError):
- BZ2File("/dev/null", "rx")
- with self.assertRaises(ValueError):
- BZ2File("/dev/null", "rbt")
- with self.assertRaises(ValueError):
- BZ2File("/dev/null", compresslevel=0)
- with self.assertRaises(ValueError):
- BZ2File("/dev/null", compresslevel=10)
+ self.assertRaises(TypeError, BZ2File, 123.456)
+ self.assertRaises(ValueError, BZ2File, "/dev/null", "z")
+ self.assertRaises(ValueError, BZ2File, "/dev/null", "rx")
+ self.assertRaises(ValueError, BZ2File, "/dev/null", "rbt")
+ self.assertRaises(ValueError, BZ2File, "/dev/null", compresslevel=0)
+ self.assertRaises(ValueError, BZ2File, "/dev/null", compresslevel=10)
def testRead(self):
self.createTempFile()
@@ -215,9 +209,8 @@ class BZ2FileTest(BaseTest):
self.createTempFile()
bz2f = BZ2File(self.filename)
bz2f.close()
- self.assertRaises(ValueError, bz2f.__next__)
- # This call will deadlock if the above .__next__ call failed to
- # release the lock.
+ self.assertRaises(ValueError, next, bz2f)
+ # This call will deadlock if the above call failed to release the lock.
self.assertRaises(ValueError, bz2f.readlines)
def testWrite(self):
@@ -261,8 +254,8 @@ class BZ2FileTest(BaseTest):
bz2f.write(b"abc")
with BZ2File(self.filename, "r") as bz2f:
- self.assertRaises(IOError, bz2f.write, b"a")
- self.assertRaises(IOError, bz2f.writelines, [b"a"])
+ self.assertRaises(OSError, bz2f.write, b"a")
+ self.assertRaises(OSError, bz2f.writelines, [b"a"])
def testAppend(self):
with BZ2File(self.filename, "w") as bz2f:
@@ -380,7 +373,7 @@ class BZ2FileTest(BaseTest):
bz2f.close()
self.assertRaises(ValueError, bz2f.seekable)
- bz2f = BZ2File(BytesIO(), mode="w")
+ bz2f = BZ2File(BytesIO(), "w")
try:
self.assertFalse(bz2f.seekable())
finally:
@@ -406,7 +399,7 @@ class BZ2FileTest(BaseTest):
bz2f.close()
self.assertRaises(ValueError, bz2f.readable)
- bz2f = BZ2File(BytesIO(), mode="w")
+ bz2f = BZ2File(BytesIO(), "w")
try:
self.assertFalse(bz2f.readable())
finally:
@@ -423,7 +416,7 @@ class BZ2FileTest(BaseTest):
bz2f.close()
self.assertRaises(ValueError, bz2f.writable)
- bz2f = BZ2File(BytesIO(), mode="w")
+ bz2f = BZ2File(BytesIO(), "w")
try:
self.assertTrue(bz2f.writable())
finally:
@@ -437,7 +430,7 @@ class BZ2FileTest(BaseTest):
del o
def testOpenNonexistent(self):
- self.assertRaises(IOError, BZ2File, "/non/existent")
+ self.assertRaises(OSError, BZ2File, "/non/existent")
def testReadlinesNoNewline(self):
# Issue #1191043: readlines() fails on a file containing no newline.
@@ -477,7 +470,7 @@ class BZ2FileTest(BaseTest):
# Issue #7205: Using a BZ2File from several threads shouldn't deadlock.
data = b"1" * 2**20
nthreads = 10
- with bz2.BZ2File(self.filename, 'wb') as f:
+ with BZ2File(self.filename, 'wb') as f:
def comp():
for i in range(5):
f.write(data)
@@ -488,28 +481,27 @@ class BZ2FileTest(BaseTest):
t.join()
def testWithoutThreading(self):
- bz2 = support.import_fresh_module("bz2", blocked=("threading",))
- with bz2.BZ2File(self.filename, "wb") as f:
+ module = support.import_fresh_module("bz2", blocked=("threading",))
+ with module.BZ2File(self.filename, "wb") as f:
f.write(b"abc")
- with bz2.BZ2File(self.filename, "rb") as f:
+ with module.BZ2File(self.filename, "rb") as f:
self.assertEqual(f.read(), b"abc")
def testMixedIterationAndReads(self):
self.createTempFile()
linelen = len(self.TEXT_LINES[0])
halflen = linelen // 2
- with bz2.BZ2File(self.filename) as bz2f:
+ with BZ2File(self.filename) as bz2f:
bz2f.read(halflen)
self.assertEqual(next(bz2f), self.TEXT_LINES[0][halflen:])
self.assertEqual(bz2f.read(), self.TEXT[linelen:])
- with bz2.BZ2File(self.filename) as bz2f:
+ with BZ2File(self.filename) as bz2f:
bz2f.readline()
self.assertEqual(next(bz2f), self.TEXT_LINES[1])
self.assertEqual(bz2f.readline(), self.TEXT_LINES[2])
- with bz2.BZ2File(self.filename) as bz2f:
+ with BZ2File(self.filename) as bz2f:
bz2f.readlines()
- with self.assertRaises(StopIteration):
- next(bz2f)
+ self.assertRaises(StopIteration, next, bz2f)
self.assertEqual(bz2f.readlines(), [])
def testMultiStreamOrdering(self):
@@ -717,97 +709,102 @@ class CompressDecompressTest(BaseTest):
class OpenTest(BaseTest):
+ "Test the open function."
+
+ def open(self, *args, **kwargs):
+ return bz2.open(*args, **kwargs)
+
def test_binary_modes(self):
- with bz2.open(self.filename, "wb") as f:
+ with self.open(self.filename, "wb") as f:
f.write(self.TEXT)
with open(self.filename, "rb") as f:
- file_data = bz2.decompress(f.read())
+ file_data = self.decompress(f.read())
self.assertEqual(file_data, self.TEXT)
- with bz2.open(self.filename, "rb") as f:
+ with self.open(self.filename, "rb") as f:
self.assertEqual(f.read(), self.TEXT)
- with bz2.open(self.filename, "ab") as f:
+ with self.open(self.filename, "ab") as f:
f.write(self.TEXT)
with open(self.filename, "rb") as f:
- file_data = bz2.decompress(f.read())
+ file_data = self.decompress(f.read())
self.assertEqual(file_data, self.TEXT * 2)
def test_implicit_binary_modes(self):
# Test implicit binary modes (no "b" or "t" in mode string).
- with bz2.open(self.filename, "w") as f:
+ with self.open(self.filename, "w") as f:
f.write(self.TEXT)
with open(self.filename, "rb") as f:
- file_data = bz2.decompress(f.read())
+ file_data = self.decompress(f.read())
self.assertEqual(file_data, self.TEXT)
- with bz2.open(self.filename, "r") as f:
+ with self.open(self.filename, "r") as f:
self.assertEqual(f.read(), self.TEXT)
- with bz2.open(self.filename, "a") as f:
+ with self.open(self.filename, "a") as f:
f.write(self.TEXT)
with open(self.filename, "rb") as f:
- file_data = bz2.decompress(f.read())
+ file_data = self.decompress(f.read())
self.assertEqual(file_data, self.TEXT * 2)
def test_text_modes(self):
text = self.TEXT.decode("ascii")
text_native_eol = text.replace("\n", os.linesep)
- with bz2.open(self.filename, "wt") as f:
+ with self.open(self.filename, "wt") as f:
f.write(text)
with open(self.filename, "rb") as f:
- file_data = bz2.decompress(f.read()).decode("ascii")
+ file_data = self.decompress(f.read()).decode("ascii")
self.assertEqual(file_data, text_native_eol)
- with bz2.open(self.filename, "rt") as f:
+ with self.open(self.filename, "rt") as f:
self.assertEqual(f.read(), text)
- with bz2.open(self.filename, "at") as f:
+ with self.open(self.filename, "at") as f:
f.write(text)
with open(self.filename, "rb") as f:
- file_data = bz2.decompress(f.read()).decode("ascii")
+ file_data = self.decompress(f.read()).decode("ascii")
self.assertEqual(file_data, text_native_eol * 2)
def test_fileobj(self):
- with bz2.open(BytesIO(self.DATA), "r") as f:
+ with self.open(BytesIO(self.DATA), "r") as f:
self.assertEqual(f.read(), self.TEXT)
- with bz2.open(BytesIO(self.DATA), "rb") as f:
+ with self.open(BytesIO(self.DATA), "rb") as f:
self.assertEqual(f.read(), self.TEXT)
text = self.TEXT.decode("ascii")
- with bz2.open(BytesIO(self.DATA), "rt") as f:
+ with self.open(BytesIO(self.DATA), "rt") as f:
self.assertEqual(f.read(), text)
def test_bad_params(self):
# Test invalid parameter combinations.
- with self.assertRaises(ValueError):
- bz2.open(self.filename, "wbt")
- with self.assertRaises(ValueError):
- bz2.open(self.filename, "rb", encoding="utf-8")
- with self.assertRaises(ValueError):
- bz2.open(self.filename, "rb", errors="ignore")
- with self.assertRaises(ValueError):
- bz2.open(self.filename, "rb", newline="\n")
+ self.assertRaises(ValueError,
+ self.open, self.filename, "wbt")
+ self.assertRaises(ValueError,
+ self.open, self.filename, "rb", encoding="utf-8")
+ self.assertRaises(ValueError,
+ self.open, self.filename, "rb", errors="ignore")
+ self.assertRaises(ValueError,
+ self.open, self.filename, "rb", newline="\n")
def test_encoding(self):
# Test non-default encoding.
text = self.TEXT.decode("ascii")
text_native_eol = text.replace("\n", os.linesep)
- with bz2.open(self.filename, "wt", encoding="utf-16-le") as f:
+ with self.open(self.filename, "wt", encoding="utf-16-le") as f:
f.write(text)
with open(self.filename, "rb") as f:
- file_data = bz2.decompress(f.read()).decode("utf-16-le")
+ file_data = self.decompress(f.read()).decode("utf-16-le")
self.assertEqual(file_data, text_native_eol)
- with bz2.open(self.filename, "rt", encoding="utf-16-le") as f:
+ with self.open(self.filename, "rt", encoding="utf-16-le") as f:
self.assertEqual(f.read(), text)
def test_encoding_error_handler(self):
# Test with non-default encoding error handler.
- with bz2.open(self.filename, "wb") as f:
+ with self.open(self.filename, "wb") as f:
f.write(b"foo\xffbar")
- with bz2.open(self.filename, "rt", encoding="ascii", errors="ignore") \
+ with self.open(self.filename, "rt", encoding="ascii", errors="ignore") \
as f:
self.assertEqual(f.read(), "foobar")
def test_newline(self):
# Test with explicit newline (universal newline mode disabled).
text = self.TEXT.decode("ascii")
- with bz2.open(self.filename, "wt", newline="\n") as f:
+ with self.open(self.filename, "wt", newline="\n") as f:
f.write(text)
- with bz2.open(self.filename, "rt", newline="\r") as f:
+ with self.open(self.filename, "rt", newline="\r") as f:
self.assertEqual(f.readlines(), [text])
diff --git a/Lib/test/test_cmd_line.py b/Lib/test/test_cmd_line.py
index a89d7e4..6684e51 100644
--- a/Lib/test/test_cmd_line.py
+++ b/Lib/test/test_cmd_line.py
@@ -213,6 +213,23 @@ class CmdLineTest(unittest.TestCase):
self.assertIn(path1.encode('ascii'), out)
self.assertIn(path2.encode('ascii'), out)
+ def test_empty_PYTHONPATH_issue16309(self):
+ # On Posix, it is documented that setting PATH to the
+ # empty string is equivalent to not setting PATH at all,
+ # which is an exception to the rule that in a string like
+ # "/bin::/usr/bin" the empty string in the middle gets
+ # interpreted as '.'
+ code = """if 1:
+ import sys
+ path = ":".join(sys.path)
+ path = path.encode("ascii", "backslashreplace")
+ sys.stdout.buffer.write(path)"""
+ rc1, out1, err1 = assert_python_ok('-c', code, PYTHONPATH="")
+ rc2, out2, err2 = assert_python_ok('-c', code)
+        # According to the Posix specification, the output should be the same
+        # for an empty and for an unset PYTHONPATH
+ self.assertEqual(out1, out2)
+
def test_displayhook_unencodable(self):
for encoding in ('ascii', 'latin-1', 'utf-8'):
env = os.environ.copy()
@@ -290,7 +307,7 @@ class CmdLineTest(unittest.TestCase):
rc, out, err = assert_python_ok('-c', code)
self.assertEqual(b'', out)
self.assertRegex(err.decode('ascii', 'ignore'),
- 'Exception OSError: .* ignored')
+ 'Exception ignored in.*\nOSError: .*')
def test_closed_stdout(self):
# Issue #13444: if stdout has been explicitly closed, we should
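The new PYTHONPATH test above runs the check through the test harness; a rough standalone sketch of the same comparison (a hypothetical snippet using sys.executable directly rather than the assert_python_ok helper):

    import os
    import subprocess
    import sys

    code = "import sys; print(sys.path)"
    with_empty = dict(os.environ, PYTHONPATH="")
    without = {k: v for k, v in os.environ.items() if k != "PYTHONPATH"}
    out_empty = subprocess.check_output([sys.executable, "-c", code], env=with_empty)
    out_unset = subprocess.check_output([sys.executable, "-c", code], env=without)
    # With the bug-16309 fix applied, an empty PYTHONPATH behaves like an unset one.
    assert out_empty == out_unset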
diff --git a/Lib/test/test_codecs.py b/Lib/test/test_codecs.py
index c68088e..2f3cf4d 100644
--- a/Lib/test/test_codecs.py
+++ b/Lib/test/test_codecs.py
@@ -2295,8 +2295,8 @@ class CodePageTest(unittest.TestCase):
def test_invalid_code_page(self):
self.assertRaises(ValueError, codecs.code_page_encode, -1, 'a')
self.assertRaises(ValueError, codecs.code_page_decode, -1, b'a')
- self.assertRaises(WindowsError, codecs.code_page_encode, 123, 'a')
- self.assertRaises(WindowsError, codecs.code_page_decode, 123, b'a')
+ self.assertRaises(OSError, codecs.code_page_encode, 123, 'a')
+ self.assertRaises(OSError, codecs.code_page_decode, 123, b'a')
def test_code_page_name(self):
self.assertRaisesRegex(UnicodeEncodeError, 'cp932',
diff --git a/Lib/test/test_collections.py b/Lib/test/test_collections.py
index 8850e8b..dfad78e 100644
--- a/Lib/test/test_collections.py
+++ b/Lib/test/test_collections.py
@@ -112,6 +112,38 @@ class TestChainMap(unittest.TestCase):
self.assertEqual(dict(d), dict(a=1, b=2, c=30))
self.assertEqual(dict(d.items()), dict(a=1, b=2, c=30))
+ def test_new_child(self):
+ 'Tests for changes for issue #16613.'
+ c = ChainMap()
+ c['a'] = 1
+ c['b'] = 2
+ m = {'b':20, 'c': 30}
+ d = c.new_child(m)
+ self.assertEqual(d.maps, [{'b':20, 'c':30}, {'a':1, 'b':2}]) # check internal state
+ self.assertIs(m, d.maps[0])
+
+ # Use a different map than a dict
+ class lowerdict(dict):
+ def __getitem__(self, key):
+ if isinstance(key, str):
+ key = key.lower()
+ return dict.__getitem__(self, key)
+ def __contains__(self, key):
+ if isinstance(key, str):
+ key = key.lower()
+ return dict.__contains__(self, key)
+
+ c = ChainMap()
+ c['a'] = 1
+ c['b'] = 2
+ m = lowerdict(b=20, c=30)
+ d = c.new_child(m)
+ self.assertIs(m, d.maps[0])
+ for key in 'abc': # check contains
+ self.assertIn(key, d)
+ for k, v in dict(a=1, B=20, C=30, z=100).items(): # check get
+ self.assertEqual(d.get(k, 100), v)
+
################################################################################
### Named Tuples
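For reference, a small sketch of the new_child(m) behaviour checked above, assuming a ChainMap whose new_child accepts an explicit child map as added for issue #16613:

    from collections import ChainMap

    parent = ChainMap({'a': 1, 'b': 2})
    child_map = {'b': 20, 'c': 30}
    child = parent.new_child(child_map)   # the passed map becomes maps[0]
    assert child.maps[0] is child_map
    assert child['b'] == 20               # shadowed by the child map
    assert child['a'] == 1                # still found in the parent map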
diff --git a/Lib/test/test_complex.py b/Lib/test/test_complex.py
index 6b34ddc..2a85bf4 100644
--- a/Lib/test/test_complex.py
+++ b/Lib/test/test_complex.py
@@ -221,6 +221,8 @@ class ComplexTest(unittest.TestCase):
self.assertRaises(TypeError, complex, OS(None))
self.assertRaises(TypeError, complex, NS(None))
self.assertRaises(TypeError, complex, {})
+ self.assertRaises(TypeError, complex, NS(1.5))
+ self.assertRaises(TypeError, complex, NS(1))
self.assertAlmostEqual(complex("1+10j"), 1+10j)
self.assertAlmostEqual(complex(10), 10+0j)
diff --git a/Lib/test/test_concurrent_futures.py b/Lib/test/test_concurrent_futures.py
index 6ae450d..4ad3309 100644
--- a/Lib/test/test_concurrent_futures.py
+++ b/Lib/test/test_concurrent_futures.py
@@ -15,6 +15,7 @@ import sys
import threading
import time
import unittest
+import weakref
from concurrent import futures
from concurrent.futures._base import (
@@ -52,6 +53,11 @@ def sleep_and_print(t, msg):
sys.stdout.flush()
+class MyObject(object):
+ def my_method(self):
+ pass
+
+
class ExecutorMixin:
worker_count = 5
@@ -396,6 +402,22 @@ class ExecutorTest(unittest.TestCase):
self.executor.map(str, [2] * (self.worker_count + 1))
self.executor.shutdown()
+ @test.support.cpython_only
+ def test_no_stale_references(self):
+ # Issue #16284: check that the executors don't unnecessarily hang onto
+ # references.
+ my_object = MyObject()
+ my_object_collected = threading.Event()
+ my_object_callback = weakref.ref(
+ my_object, lambda obj: my_object_collected.set())
+ # Deliberately discarding the future.
+ self.executor.submit(my_object.my_method)
+ del my_object
+
+ collected = my_object_collected.wait(timeout=5.0)
+ self.assertTrue(collected,
+ "Stale reference not collected within timeout.")
+
class ThreadPoolExecutorTest(ThreadPoolMixin, ExecutorTest):
def test_map_submits_without_iteration(self):
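The stale-reference test above relies on a weakref callback to observe collection; a minimal standalone sketch of that detection pattern (the names here are illustrative only):

    import threading
    import weakref

    class Work:
        def run(self):
            pass

    obj = Work()
    collected = threading.Event()
    ref = weakref.ref(obj, lambda _ref: collected.set())  # fires when obj is collected
    del obj
    # On CPython the refcount hitting zero collects obj immediately; other
    # implementations may need an explicit gc.collect() first.
    assert collected.wait(timeout=1.0)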
diff --git a/Lib/test/test_contextlib.py b/Lib/test/test_contextlib.py
index e52ed91..d13659d 100644
--- a/Lib/test/test_contextlib.py
+++ b/Lib/test/test_contextlib.py
@@ -594,6 +594,28 @@ class TestExitStack(unittest.TestCase):
stack.push(cm)
self.assertIs(stack._exit_callbacks[-1], cm)
+class TestIgnored(unittest.TestCase):
+
+ def test_no_exception(self):
+
+ with ignored(ValueError):
+ self.assertEqual(pow(2, 5), 32)
+
+ def test_exact_exception(self):
+
+ with ignored(TypeError):
+ len(5)
+
+ def test_multiple_exception_args(self):
+
+ with ignored(ZeroDivisionError, TypeError):
+ len(5)
+
+ def test_exception_hierarchy(self):
+
+ with ignored(LookupError):
+ 'Hello'[50]
+
# This is needed to make the test actually run under regrtest.py!
def test_main():
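The TestIgnored cases above target a context manager that swallows the listed exception types. A hedged sketch of how such a manager can be written with contextlib.contextmanager (the name ignored is illustrative; the same idea later ships in the stdlib as contextlib.suppress):

    from contextlib import contextmanager

    @contextmanager
    def ignored(*exceptions):
        # Suppress only the listed exception types; anything else propagates.
        try:
            yield
        except exceptions:
            pass

    with ignored(ZeroDivisionError):
        1 / 0              # silently ignored
    with ignored(LookupError):
        'Hello'[50]        # IndexError is a LookupError, so it is ignored too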
diff --git a/Lib/test/test_cprofile.py b/Lib/test/test_cprofile.py
index 5676668..c3eb7fa 100644
--- a/Lib/test/test_cprofile.py
+++ b/Lib/test/test_cprofile.py
@@ -6,9 +6,11 @@ from test.support import run_unittest, TESTFN, unlink
# rip off all interesting stuff from test_profile
import cProfile
from test.test_profile import ProfileTest, regenerate_expected_output
+from test.profilee import testfunc
class CProfileTest(ProfileTest):
profilerclass = cProfile.Profile
+ profilermodule = cProfile
expected_max_output = "{built-in method max}"
def get_expected_output(self):
diff --git a/Lib/test/test_crypt.py b/Lib/test/test_crypt.py
index cfb7341..624d702 100644
--- a/Lib/test/test_crypt.py
+++ b/Lib/test/test_crypt.py
@@ -1,11 +1,7 @@
from test import support
import unittest
-def setUpModule():
- # this import will raise unittest.SkipTest if _crypt doesn't exist,
- # so it has to be done in setUpModule for test discovery to work
- global crypt
- crypt = support.import_module('crypt')
+crypt = support.import_module('crypt')
class CryptTestCase(unittest.TestCase):
diff --git a/Lib/test/test_csv.py b/Lib/test/test_csv.py
index 96f8aa7..974d73d 100644
--- a/Lib/test/test_csv.py
+++ b/Lib/test/test_csv.py
@@ -136,12 +136,12 @@ class Test_Csv(unittest.TestCase):
return 10;
def __getitem__(self, i):
if i > 2:
- raise IOError
- self.assertRaises(IOError, self._write_test, BadList(), '')
+ raise OSError
+ self.assertRaises(OSError, self._write_test, BadList(), '')
class BadItem:
def __str__(self):
- raise IOError
- self.assertRaises(IOError, self._write_test, [BadItem()], '')
+ raise OSError
+ self.assertRaises(OSError, self._write_test, [BadItem()], '')
def test_write_bigfield(self):
# This exercises the buffer realloc functionality
@@ -186,9 +186,9 @@ class Test_Csv(unittest.TestCase):
def test_writerows(self):
class BrokenFile:
def write(self, buf):
- raise IOError
+ raise OSError
writer = csv.writer(BrokenFile())
- self.assertRaises(IOError, writer.writerows, [['a']])
+ self.assertRaises(OSError, writer.writerows, [['a']])
with TemporaryFile("w+", newline='') as fileobj:
writer = csv.writer(fileobj)
@@ -308,6 +308,15 @@ class Test_Csv(unittest.TestCase):
for i, row in enumerate(csv.reader(fileobj)):
self.assertEqual(row, rows[i])
+ def test_roundtrip_escaped_unquoted_newlines(self):
+ with TemporaryFile("w+", newline='') as fileobj:
+            writer = csv.writer(fileobj, quoting=csv.QUOTE_NONE, escapechar="\\")
+            rows = [['a\nb', 'b'], ['c', 'x\r\nd']]
+            writer.writerows(rows)
+            fileobj.seek(0)
+            for i, row in enumerate(csv.reader(fileobj, quoting=csv.QUOTE_NONE, escapechar="\\")):
+                self.assertEqual(row, rows[i])
+
class TestDialectRegistry(unittest.TestCase):
def test_registry_badargs(self):
self.assertRaises(TypeError, csv.list_dialects, None)
diff --git a/Lib/test/test_dbm.py b/Lib/test/test_dbm.py
index 752e0b7..e02fa75 100644
--- a/Lib/test/test_dbm.py
+++ b/Lib/test/test_dbm.py
@@ -57,7 +57,7 @@ class AnyDBMTestCase:
return keys
def test_error(self):
- self.assertTrue(issubclass(self.module.error, IOError))
+ self.assertTrue(issubclass(self.module.error, OSError))
def test_anydbm_not_existing(self):
self.assertRaises(dbm.error, dbm.open, _fname)
diff --git a/Lib/test/test_devpoll.py b/Lib/test/test_devpoll.py
index bef4e18..ec350cd 100644
--- a/Lib/test/test_devpoll.py
+++ b/Lib/test/test_devpoll.py
@@ -8,7 +8,7 @@ from test.support import TESTFN, run_unittest
try:
select.devpoll
except AttributeError:
- raise unittest.SkipTest("select.devpoll not defined -- skipping test_devpoll")
+ raise unittest.SkipTest("select.devpoll not defined")
def find_ready_matching(ready, flag):
diff --git a/Lib/test/test_doctest.py b/Lib/test/test_doctest.py
index 8f8c7c7..3e36f19 100644
--- a/Lib/test/test_doctest.py
+++ b/Lib/test/test_doctest.py
@@ -1409,8 +1409,40 @@ However, output from `report_start` is not suppressed:
2
TestResults(failed=3, attempted=5)
-For the purposes of REPORT_ONLY_FIRST_FAILURE, unexpected exceptions
-count as failures:
+The FAIL_FAST flag causes the runner to exit after the first failing example,
+so subsequent examples are not even attempted:
+
+ >>> flags = doctest.FAIL_FAST
+ >>> doctest.DocTestRunner(verbose=False, optionflags=flags).run(test)
+ ... # doctest: +ELLIPSIS
+ **********************************************************************
+ File ..., line 5, in f
+ Failed example:
+ print(2) # first failure
+ Expected:
+ 200
+ Got:
+ 2
+ TestResults(failed=1, attempted=2)
+
+Specifying both FAIL_FAST and REPORT_ONLY_FIRST_FAILURE is equivalent to
+FAIL_FAST only:
+
+ >>> flags = doctest.FAIL_FAST | doctest.REPORT_ONLY_FIRST_FAILURE
+ >>> doctest.DocTestRunner(verbose=False, optionflags=flags).run(test)
+ ... # doctest: +ELLIPSIS
+ **********************************************************************
+ File ..., line 5, in f
+ Failed example:
+ print(2) # first failure
+ Expected:
+ 200
+ Got:
+ 2
+ TestResults(failed=1, attempted=2)
+
+For the purposes of both REPORT_ONLY_FIRST_FAILURE and FAIL_FAST, unexpected
+exceptions count as failures:
>>> def f(x):
... r'''
@@ -1437,6 +1469,17 @@ count as failures:
...
ValueError: 2
TestResults(failed=3, attempted=5)
+ >>> flags = doctest.FAIL_FAST
+ >>> doctest.DocTestRunner(verbose=False, optionflags=flags).run(test)
+ ... # doctest: +ELLIPSIS
+ **********************************************************************
+ File ..., line 5, in f
+ Failed example:
+ raise ValueError(2) # first failure
+ Exception raised:
+ ...
+ ValueError: 2
+ TestResults(failed=1, attempted=2)
New option flags can also be registered, via register_optionflag(). Here
we reach into doctest's internals a bit.
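A short usage sketch for the FAIL_FAST flag exercised by the doctests above (assumes a doctest module that defines FAIL_FAST; the sample function is illustrative):

    import doctest

    def sample():
        """
        >>> 1 + 1
        3
        >>> print("never attempted under FAIL_FAST")
        never attempted under FAIL_FAST
        """

    runner = doctest.DocTestRunner(verbose=False,
                                   optionflags=doctest.FAIL_FAST)
    for test in doctest.DocTestFinder().find(sample):
        # The first example fails, so the runner stops right away.
        print(runner.run(test))   # TestResults(failed=1, attempted=1)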
diff --git a/Lib/test/test_email/torture_test.py b/Lib/test/test_email/torture_test.py
index 544b1bb..19cf64f 100644
--- a/Lib/test/test_email/torture_test.py
+++ b/Lib/test/test_email/torture_test.py
@@ -27,7 +27,7 @@ def openfile(filename):
# Prevent this test from running in the Python distro
try:
openfile('crispin-torture.txt')
-except IOError:
+except OSError:
raise TestSkipped
diff --git a/Lib/test/test_enumerate.py b/Lib/test/test_enumerate.py
index 2e904cf..a2d18d0 100644
--- a/Lib/test/test_enumerate.py
+++ b/Lib/test/test_enumerate.py
@@ -1,4 +1,5 @@
import unittest
+import operator
import sys
import pickle
@@ -168,15 +169,12 @@ class TestReversed(unittest.TestCase, PickleTest):
x = range(1)
self.assertEqual(type(reversed(x)), type(iter(x)))
- @support.cpython_only
def test_len(self):
- # This is an implementation detail, not an interface requirement
- from test.test_iterlen import len
for s in ('hello', tuple('hello'), list('hello'), range(5)):
- self.assertEqual(len(reversed(s)), len(s))
+ self.assertEqual(operator.length_hint(reversed(s)), len(s))
r = reversed(s)
list(r)
- self.assertEqual(len(r), 0)
+ self.assertEqual(operator.length_hint(r), 0)
class SeqWithWeirdLen:
called = False
def __len__(self):
@@ -187,7 +185,7 @@ class TestReversed(unittest.TestCase, PickleTest):
def __getitem__(self, index):
return index
r = reversed(SeqWithWeirdLen())
- self.assertRaises(ZeroDivisionError, len, r)
+ self.assertRaises(ZeroDivisionError, operator.length_hint, r)
def test_gc(self):
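The rewritten test above drops the len()-on-iterator implementation detail in favour of operator.length_hint(); a quick sketch of what that function reports, assuming a Python that provides operator.length_hint (PEP 424):

    import operator

    it = reversed(range(5))
    assert operator.length_hint(it) == 5     # estimate before consumption
    list(it)                                 # exhaust the iterator
    assert operator.length_hint(it) == 0
    # Objects with no __len__ or __length_hint__ fall back to the default.
    assert operator.length_hint(object(), 42) == 42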
diff --git a/Lib/test/test_epoll.py b/Lib/test/test_epoll.py
index 7f9547f..7077a70 100644
--- a/Lib/test/test_epoll.py
+++ b/Lib/test/test_epoll.py
@@ -33,7 +33,7 @@ if not hasattr(select, "epoll"):
try:
select.epoll()
-except IOError as e:
+except OSError as e:
if e.errno == errno.ENOSYS:
raise unittest.SkipTest("kernel doesn't support epoll()")
raise
@@ -56,7 +56,7 @@ class TestEPoll(unittest.TestCase):
client.setblocking(False)
try:
client.connect(('127.0.0.1', self.serverSocket.getsockname()[1]))
- except socket.error as e:
+ except OSError as e:
self.assertEqual(e.args[0], errno.EINPROGRESS)
else:
raise AssertionError("Connect should have raised EINPROGRESS")
@@ -87,6 +87,13 @@ class TestEPoll(unittest.TestCase):
self.assertRaises(TypeError, select.epoll, ['foo'])
self.assertRaises(TypeError, select.epoll, {})
+ def test_context_manager(self):
+ with select.epoll(16) as ep:
+ self.assertGreater(ep.fileno(), 0)
+ self.assertFalse(ep.closed)
+ self.assertTrue(ep.closed)
+ self.assertRaises(ValueError, ep.fileno)
+
def test_add(self):
server, client = self._connected_pair()
@@ -115,12 +122,12 @@ class TestEPoll(unittest.TestCase):
# ValueError: file descriptor cannot be a negative integer (-1)
self.assertRaises(ValueError, ep.register, -1,
select.EPOLLIN | select.EPOLLOUT)
- # IOError: [Errno 9] Bad file descriptor
- self.assertRaises(IOError, ep.register, 10000,
+ # OSError: [Errno 9] Bad file descriptor
+ self.assertRaises(OSError, ep.register, 10000,
select.EPOLLIN | select.EPOLLOUT)
# registering twice also raises an exception
ep.register(server, select.EPOLLIN | select.EPOLLOUT)
- self.assertRaises(IOError, ep.register, server,
+ self.assertRaises(OSError, ep.register, server,
select.EPOLLIN | select.EPOLLOUT)
finally:
ep.close()
@@ -142,7 +149,7 @@ class TestEPoll(unittest.TestCase):
ep.close()
try:
ep2.poll(1, 4)
- except IOError as e:
+ except OSError as e:
self.assertEqual(e.args[0], errno.EBADF, e)
else:
self.fail("epoll on closed fd didn't raise EBADF")
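The new context-manager test above assumes select.epoll objects implement __enter__/__exit__; a Linux-only usage sketch under that assumption:

    import select

    with select.epoll() as ep:        # the epoll fd is closed on exit
        assert ep.fileno() > 0
        assert not ep.closed
    assert ep.closed                  # fileno() would now raise ValueError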
diff --git a/Lib/test/test_exceptions.py b/Lib/test/test_exceptions.py
index 1ad7f97..2f39d4b 100644
--- a/Lib/test/test_exceptions.py
+++ b/Lib/test/test_exceptions.py
@@ -8,8 +8,8 @@ import weakref
import errno
from test.support import (TESTFN, captured_output, check_impl_detail,
- cpython_only, gc_collect, run_unittest, no_tracing,
- unlink)
+ check_warnings, cpython_only, gc_collect, run_unittest,
+ no_tracing, unlink)
class NaiveException(Exception):
def __init__(self, x):
@@ -244,22 +244,22 @@ class ExceptionTests(unittest.TestCase):
{'args' : ('foo', 1)}),
(SystemExit, ('foo',),
{'args' : ('foo',), 'code' : 'foo'}),
- (IOError, ('foo',),
+ (OSError, ('foo',),
{'args' : ('foo',), 'filename' : None,
'errno' : None, 'strerror' : None}),
- (IOError, ('foo', 'bar'),
+ (OSError, ('foo', 'bar'),
{'args' : ('foo', 'bar'), 'filename' : None,
'errno' : 'foo', 'strerror' : 'bar'}),
- (IOError, ('foo', 'bar', 'baz'),
+ (OSError, ('foo', 'bar', 'baz'),
{'args' : ('foo', 'bar'), 'filename' : 'baz',
'errno' : 'foo', 'strerror' : 'bar'}),
- (IOError, ('foo', 'bar', 'baz', 'quux'),
+ (OSError, ('foo', 'bar', 'baz', 'quux'),
{'args' : ('foo', 'bar', 'baz', 'quux')}),
- (EnvironmentError, ('errnoStr', 'strErrorStr', 'filenameStr'),
+ (OSError, ('errnoStr', 'strErrorStr', 'filenameStr'),
{'args' : ('errnoStr', 'strErrorStr'),
'strerror' : 'strErrorStr', 'errno' : 'errnoStr',
'filename' : 'filenameStr'}),
- (EnvironmentError, (1, 'strErrorStr', 'filenameStr'),
+ (OSError, (1, 'strErrorStr', 'filenameStr'),
{'args' : (1, 'strErrorStr'), 'errno' : 1,
'strerror' : 'strErrorStr', 'filename' : 'filenameStr'}),
(SyntaxError, (), {'msg' : None, 'text' : None,
@@ -409,7 +409,7 @@ class ExceptionTests(unittest.TestCase):
self.assertIsNone(e.__context__)
self.assertIsNone(e.__cause__)
- class MyException(EnvironmentError):
+ class MyException(OSError):
pass
e = MyException()
@@ -947,9 +947,10 @@ class ImportErrorTests(unittest.TestCase):
def test_non_str_argument(self):
# Issue #15778
- arg = b'abc'
- exc = ImportError(arg)
- self.assertEqual(str(arg), str(exc))
+ with check_warnings(('', BytesWarning), quiet=True):
+ arg = b'abc'
+ exc = ImportError(arg)
+ self.assertEqual(str(arg), str(exc))
def test_main():
diff --git a/Lib/test/test_fcntl.py b/Lib/test/test_fcntl.py
index 0ce3a5d..f977187 100644
--- a/Lib/test/test_fcntl.py
+++ b/Lib/test/test_fcntl.py
@@ -1,7 +1,4 @@
"""Test program for the fcntl C module.
-
-OS/2+EMX doesn't support the file locking operations.
-
"""
import os
import struct
@@ -36,8 +33,6 @@ def get_lockdata():
fcntl.F_WRLCK, 0)
elif sys.platform in ['aix3', 'aix4', 'hp-uxB', 'unixware7']:
lockdata = struct.pack('hhlllii', fcntl.F_WRLCK, 0, 0, 0, 0, 0, 0)
- elif sys.platform in ['os2emx']:
- lockdata = None
else:
lockdata = struct.pack('hh'+start_len+'hh', fcntl.F_WRLCK, 0, 0, 0, 0, 0)
if lockdata:
@@ -63,18 +58,16 @@ class TestFcntl(unittest.TestCase):
rv = fcntl.fcntl(self.f.fileno(), fcntl.F_SETFL, os.O_NONBLOCK)
if verbose:
print('Status from fcntl with O_NONBLOCK: ', rv)
- if sys.platform not in ['os2emx']:
- rv = fcntl.fcntl(self.f.fileno(), fcntl.F_SETLKW, lockdata)
- if verbose:
- print('String from fcntl with F_SETLKW: ', repr(rv))
+ rv = fcntl.fcntl(self.f.fileno(), fcntl.F_SETLKW, lockdata)
+ if verbose:
+ print('String from fcntl with F_SETLKW: ', repr(rv))
self.f.close()
def test_fcntl_file_descriptor(self):
# again, but pass the file rather than numeric descriptor
self.f = open(TESTFN, 'wb')
rv = fcntl.fcntl(self.f, fcntl.F_SETFL, os.O_NONBLOCK)
- if sys.platform not in ['os2emx']:
- rv = fcntl.fcntl(self.f, fcntl.F_SETLKW, lockdata)
+ rv = fcntl.fcntl(self.f, fcntl.F_SETLKW, lockdata)
self.f.close()
def test_fcntl_bad_file(self):
diff --git a/Lib/test/test_file.py b/Lib/test/test_file.py
index a78ddf3..1daffe4 100644
--- a/Lib/test/test_file.py
+++ b/Lib/test/test_file.py
@@ -87,7 +87,7 @@ class AutoFileTests:
self.assertTrue(not f.closed)
if hasattr(f, "readinto"):
- self.assertRaises((IOError, TypeError), f.readinto, "")
+ self.assertRaises((OSError, TypeError), f.readinto, "")
f.close()
self.assertTrue(f.closed)
@@ -126,7 +126,7 @@ class AutoFileTests:
self.assertEqual(self.f.__exit__(*sys.exc_info()), None)
def testReadWhenWriting(self):
- self.assertRaises(IOError, self.f.read)
+ self.assertRaises(OSError, self.f.read)
class CAutoFileTests(AutoFileTests, unittest.TestCase):
open = io.open
@@ -151,12 +151,12 @@ class OtherFileTests:
def testStdin(self):
# This causes the interpreter to exit on OSF1 v5.1.
if sys.platform != 'osf1V5':
- self.assertRaises((IOError, ValueError), sys.stdin.seek, -1)
+ self.assertRaises((OSError, ValueError), sys.stdin.seek, -1)
else:
print((
' Skipping sys.stdin.seek(-1), it may crash the interpreter.'
' Test manually.'), file=sys.__stdout__)
- self.assertRaises((IOError, ValueError), sys.stdin.truncate)
+ self.assertRaises((OSError, ValueError), sys.stdin.truncate)
def testBadModeArgument(self):
# verify that we get a sensible error message for bad mode argument
@@ -187,7 +187,7 @@ class OtherFileTests:
d = int(f.read().decode("ascii"))
f.close()
f.close()
- except IOError as msg:
+ except OSError as msg:
self.fail('error setting buffer size %d: %s' % (s, str(msg)))
self.assertEqual(d, s)
diff --git a/Lib/test/test_filecmp.py b/Lib/test/test_filecmp.py
index 0959979..f760147 100644
--- a/Lib/test/test_filecmp.py
+++ b/Lib/test/test_filecmp.py
@@ -1,4 +1,3 @@
-
import os, filecmp, shutil, tempfile
import unittest
from test import support
@@ -46,9 +45,14 @@ class DirCompareTestCase(unittest.TestCase):
self.dir = os.path.join(tmpdir, 'dir')
self.dir_same = os.path.join(tmpdir, 'dir-same')
self.dir_diff = os.path.join(tmpdir, 'dir-diff')
+
+ # Another dir is created under dir_same, but it has a name from the
+ # ignored list so it should not affect testing results.
+ self.dir_ignored = os.path.join(self.dir_same, '.hg')
+
self.caseinsensitive = os.path.normcase('A') == os.path.normcase('a')
data = 'Contents of file go here.\n'
- for dir in [self.dir, self.dir_same, self.dir_diff]:
+ for dir in (self.dir, self.dir_same, self.dir_diff, self.dir_ignored):
shutil.rmtree(dir, True)
os.mkdir(dir)
if self.caseinsensitive and dir is self.dir_same:
@@ -64,9 +68,11 @@ class DirCompareTestCase(unittest.TestCase):
output.close()
def tearDown(self):
- shutil.rmtree(self.dir)
- shutil.rmtree(self.dir_same)
- shutil.rmtree(self.dir_diff)
+ for dir in (self.dir, self.dir_same, self.dir_diff):
+ shutil.rmtree(dir)
+
+ def test_default_ignores(self):
+ self.assertIn('.hg', filecmp.DEFAULT_IGNORES)
def test_cmpfiles(self):
self.assertTrue(filecmp.cmpfiles(self.dir, self.dir, ['file']) ==
diff --git a/Lib/test/test_fileinput.py b/Lib/test/test_fileinput.py
index 1e70641..c42e3e8 100644
--- a/Lib/test/test_fileinput.py
+++ b/Lib/test/test_fileinput.py
@@ -275,8 +275,8 @@ class FileInputTests(unittest.TestCase):
try:
t1 = writeTmp(1, [""])
with FileInput(files=t1) as fi:
- raise IOError
- except IOError:
+ raise OSError
+ except OSError:
self.assertEqual(fi._files, ())
finally:
remove_tempfiles(t1)
diff --git a/Lib/test/test_fileio.py b/Lib/test/test_fileio.py
index 19737d9..0ccbda2 100644
--- a/Lib/test/test_fileio.py
+++ b/Lib/test/test_fileio.py
@@ -145,16 +145,16 @@ class AutoFileTests(unittest.TestCase):
# Unix calls dircheck() and returns "[Errno 21]: Is a directory"
try:
_FileIO('.', 'r')
- except IOError as e:
+ except OSError as e:
self.assertNotEqual(e.errno, 0)
self.assertEqual(e.filename, ".")
else:
- self.fail("Should have raised IOError")
+ self.fail("Should have raised OSError")
@unittest.skipIf(os.name == 'nt', "test only works on a POSIX-like system")
def testOpenDirFD(self):
fd = os.open('.', os.O_RDONLY)
- with self.assertRaises(IOError) as cm:
+ with self.assertRaises(OSError) as cm:
_FileIO(fd, 'r')
os.close(fd)
self.assertEqual(cm.exception.errno, errno.EISDIR)
@@ -172,7 +172,7 @@ class AutoFileTests(unittest.TestCase):
finally:
try:
self.f.close()
- except IOError:
+ except OSError:
pass
return wrapper
@@ -184,14 +184,14 @@ class AutoFileTests(unittest.TestCase):
os.close(f.fileno())
try:
func(self, f)
- except IOError as e:
+ except OSError as e:
self.assertEqual(e.errno, errno.EBADF)
else:
- self.fail("Should have raised IOError")
+ self.fail("Should have raised OSError")
finally:
try:
self.f.close()
- except IOError:
+ except OSError:
pass
return wrapper
@@ -238,7 +238,7 @@ class AutoFileTests(unittest.TestCase):
def ReopenForRead(self):
try:
self.f.close()
- except IOError:
+ except OSError:
pass
self.f = _FileIO(TESTFN, 'r')
os.close(self.f.fileno())
@@ -286,7 +286,7 @@ class OtherFileTests(unittest.TestCase):
if sys.platform != "win32":
try:
f = _FileIO("/dev/tty", "a")
- except EnvironmentError:
+ except OSError:
# When run in a cron job there just aren't any
# ttys, so skip the test. This also handles other
# OS'es that don't support /dev/tty.
@@ -347,7 +347,7 @@ class OtherFileTests(unittest.TestCase):
self.assertRaises(OSError, _FileIO, make_bad_fd())
if sys.platform == 'win32':
import msvcrt
- self.assertRaises(IOError, msvcrt.get_osfhandle, make_bad_fd())
+ self.assertRaises(OSError, msvcrt.get_osfhandle, make_bad_fd())
# Issue 15989
self.assertRaises(TypeError, _FileIO, _testcapi.INT_MAX + 1)
self.assertRaises(TypeError, _FileIO, _testcapi.INT_MIN - 1)
diff --git a/Lib/test/test_format.py b/Lib/test/test_format.py
index f8f5420..bc56fbc 100644
--- a/Lib/test/test_format.py
+++ b/Lib/test/test_format.py
@@ -307,6 +307,22 @@ class FormatTest(unittest.TestCase):
finally:
locale.setlocale(locale.LC_ALL, oldloc)
+ @support.cpython_only
+ def test_optimisations(self):
+ text = "abcde" # 5 characters
+
+ self.assertIs("%s" % text, text)
+ self.assertIs("%.5s" % text, text)
+ self.assertIs("%.10s" % text, text)
+ self.assertIs("%1s" % text, text)
+ self.assertIs("%5s" % text, text)
+
+ self.assertIs("{0}".format(text), text)
+ self.assertIs("{0:s}".format(text), text)
+ self.assertIs("{0:.5s}".format(text), text)
+ self.assertIs("{0:.10s}".format(text), text)
+ self.assertIs("{0:1s}".format(text), text)
+ self.assertIs("{0:5s}".format(text), text)
def test_main():
diff --git a/Lib/test/test_fractions.py b/Lib/test/test_fractions.py
index 1fad921..3336532 100644
--- a/Lib/test/test_fractions.py
+++ b/Lib/test/test_fractions.py
@@ -146,9 +146,10 @@ class FractionTest(unittest.TestCase):
self.assertEqual((0, 1), _components(F(-0.0)))
self.assertEqual((3602879701896397, 36028797018963968),
_components(F(0.1)))
- self.assertRaises(TypeError, F, float('nan'))
- self.assertRaises(TypeError, F, float('inf'))
- self.assertRaises(TypeError, F, float('-inf'))
+ # bug 16469: error types should be consistent with float -> int
+ self.assertRaises(ValueError, F, float('nan'))
+ self.assertRaises(OverflowError, F, float('inf'))
+ self.assertRaises(OverflowError, F, float('-inf'))
def testInitFromDecimal(self):
self.assertEqual((11, 10),
@@ -157,10 +158,11 @@ class FractionTest(unittest.TestCase):
_components(F(Decimal('3.5e-2'))))
self.assertEqual((0, 1),
_components(F(Decimal('.000e20'))))
- self.assertRaises(TypeError, F, Decimal('nan'))
- self.assertRaises(TypeError, F, Decimal('snan'))
- self.assertRaises(TypeError, F, Decimal('inf'))
- self.assertRaises(TypeError, F, Decimal('-inf'))
+ # bug 16469: error types should be consistent with decimal -> int
+ self.assertRaises(ValueError, F, Decimal('nan'))
+ self.assertRaises(ValueError, F, Decimal('snan'))
+ self.assertRaises(OverflowError, F, Decimal('inf'))
+ self.assertRaises(OverflowError, F, Decimal('-inf'))
def testFromString(self):
self.assertEqual((5, 1), _components(F("5")))
@@ -248,14 +250,15 @@ class FractionTest(unittest.TestCase):
inf = 1e1000
nan = inf - inf
+ # bug 16469: error types should be consistent with float -> int
self.assertRaisesMessage(
- TypeError, "Cannot convert inf to Fraction.",
+ OverflowError, "Cannot convert inf to Fraction.",
F.from_float, inf)
self.assertRaisesMessage(
- TypeError, "Cannot convert -inf to Fraction.",
+ OverflowError, "Cannot convert -inf to Fraction.",
F.from_float, -inf)
self.assertRaisesMessage(
- TypeError, "Cannot convert nan to Fraction.",
+ ValueError, "Cannot convert nan to Fraction.",
F.from_float, nan)
def testFromDecimal(self):
@@ -268,17 +271,18 @@ class FractionTest(unittest.TestCase):
self.assertEqual(1 - F(1, 10**30),
F.from_decimal(Decimal("0." + "9" * 30)))
+ # bug 16469: error types should be consistent with decimal -> int
self.assertRaisesMessage(
- TypeError, "Cannot convert Infinity to Fraction.",
+ OverflowError, "Cannot convert Infinity to Fraction.",
F.from_decimal, Decimal("inf"))
self.assertRaisesMessage(
- TypeError, "Cannot convert -Infinity to Fraction.",
+ OverflowError, "Cannot convert -Infinity to Fraction.",
F.from_decimal, Decimal("-inf"))
self.assertRaisesMessage(
- TypeError, "Cannot convert NaN to Fraction.",
+ ValueError, "Cannot convert NaN to Fraction.",
F.from_decimal, Decimal("nan"))
self.assertRaisesMessage(
- TypeError, "Cannot convert sNaN to Fraction.",
+ ValueError, "Cannot convert sNaN to Fraction.",
F.from_decimal, Decimal("snan"))
def testLimitDenominator(self):
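A sketch of the exception behaviour pinned down above for bug 16469, assuming a Fraction implementation with the fix so the types match float-to-int conversion:

    from fractions import Fraction

    cases = [(float('nan'), ValueError),
             (float('inf'), OverflowError),
             (float('-inf'), OverflowError)]
    for value, expected in cases:
        try:
            Fraction(value)
        except expected:
            pass                      # same exception type as int(value) raises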
diff --git a/Lib/test/test_ftplib.py b/Lib/test/test_ftplib.py
index 824b7c1..2e89973 100644
--- a/Lib/test/test_ftplib.py
+++ b/Lib/test/test_ftplib.py
@@ -321,7 +321,7 @@ if ssl is not None:
elif err.args[0] == ssl.SSL_ERROR_EOF:
return self.handle_close()
raise
- except socket.error as err:
+ except OSError as err:
if err.args[0] == errno.ECONNABORTED:
return self.handle_close()
else:
@@ -335,7 +335,7 @@ if ssl is not None:
if err.args[0] in (ssl.SSL_ERROR_WANT_READ,
ssl.SSL_ERROR_WANT_WRITE):
return
- except socket.error as err:
+ except OSError as err:
# Any "socket error" corresponds to a SSL_ERROR_SYSCALL return
# from OpenSSL's SSL_shutdown(), corresponding to a
# closed socket condition. See also:
@@ -482,7 +482,7 @@ class TestFTPClass(TestCase):
def test_all_errors(self):
exceptions = (ftplib.error_reply, ftplib.error_temp, ftplib.error_perm,
- ftplib.error_proto, ftplib.Error, IOError, EOFError)
+ ftplib.error_proto, ftplib.Error, OSError, EOFError)
for x in exceptions:
try:
raise x('exception not included in all_errors set')
@@ -676,7 +676,7 @@ class TestFTPClass(TestCase):
return False
try:
self.client.sendcmd('noop')
- except (socket.error, EOFError):
+ except (OSError, EOFError):
return False
return True
@@ -721,7 +721,7 @@ class TestFTPClass(TestCase):
source_address=(HOST, port))
self.assertEqual(self.client.sock.getsockname()[1], port)
self.client.quit()
- except IOError as e:
+ except OSError as e:
if e.errno == errno.EADDRINUSE:
self.skipTest("couldn't bind to port %d" % port)
raise
@@ -732,7 +732,7 @@ class TestFTPClass(TestCase):
try:
with self.client.transfercmd('list') as sock:
self.assertEqual(sock.getsockname()[1], port)
- except IOError as e:
+ except OSError as e:
if e.errno == errno.EADDRINUSE:
self.skipTest("couldn't bind to port %d" % port)
raise
@@ -985,8 +985,19 @@ class TestTimeouts(TestCase):
ftp.close()
+class TestNetrcDeprecation(TestCase):
+
+ def test_deprecation(self):
+ with support.temp_cwd(), support.EnvironmentVarGuard() as env:
+ env['HOME'] = os.getcwd()
+ open('.netrc', 'w').close()
+ with self.assertWarns(DeprecationWarning):
+ ftplib.Netrc()
+
+
def test_main():
- tests = [TestFTPClass, TestTimeouts]
+ tests = [TestFTPClass, TestTimeouts, TestNetrcDeprecation]
if support.IPV6_ENABLED:
tests.append(TestIPv6Environment)
diff --git a/Lib/test/test_functools.py b/Lib/test/test_functools.py
index db1e934..9b3c31e 100644
--- a/Lib/test/test_functools.py
+++ b/Lib/test/test_functools.py
@@ -1,4 +1,3 @@
-import functools
import collections
import sys
import unittest
@@ -7,17 +6,10 @@ from weakref import proxy
import pickle
from random import choice
-@staticmethod
-def PythonPartial(func, *args, **keywords):
- 'Pure Python approximation of partial()'
- def newfunc(*fargs, **fkeywords):
- newkeywords = keywords.copy()
- newkeywords.update(fkeywords)
- return func(*(args + fargs), **newkeywords)
- newfunc.func = func
- newfunc.args = args
- newfunc.keywords = keywords
- return newfunc
+import functools
+
+py_functools = support.import_fresh_module('functools', blocked=['_functools'])
+c_functools = support.import_fresh_module('functools', fresh=['_functools'])
def capture(*args, **kw):
"""capture all positional and keyword arguments"""
@@ -27,31 +19,30 @@ def signature(part):
""" return the signature of a partial object """
return (part.func, part.args, part.keywords, part.__dict__)
-class TestPartial(unittest.TestCase):
-
- thetype = functools.partial
+class TestPartial:
def test_basic_examples(self):
- p = self.thetype(capture, 1, 2, a=10, b=20)
+ p = self.partial(capture, 1, 2, a=10, b=20)
+ self.assertTrue(callable(p))
self.assertEqual(p(3, 4, b=30, c=40),
((1, 2, 3, 4), dict(a=10, b=30, c=40)))
- p = self.thetype(map, lambda x: x*10)
+ p = self.partial(map, lambda x: x*10)
self.assertEqual(list(p([1,2,3,4])), [10, 20, 30, 40])
def test_attributes(self):
- p = self.thetype(capture, 1, 2, a=10, b=20)
+ p = self.partial(capture, 1, 2, a=10, b=20)
# attributes should be readable
self.assertEqual(p.func, capture)
self.assertEqual(p.args, (1, 2))
self.assertEqual(p.keywords, dict(a=10, b=20))
# attributes should not be writable
- if not isinstance(self.thetype, type):
+ if not isinstance(self.partial, type):
return
self.assertRaises(AttributeError, setattr, p, 'func', map)
self.assertRaises(AttributeError, setattr, p, 'args', (1, 2))
self.assertRaises(AttributeError, setattr, p, 'keywords', dict(a=1, b=2))
- p = self.thetype(hex)
+ p = self.partial(hex)
try:
del p.__dict__
except TypeError:
@@ -60,9 +51,9 @@ class TestPartial(unittest.TestCase):
self.fail('partial object allowed __dict__ to be deleted')
def test_argument_checking(self):
- self.assertRaises(TypeError, self.thetype) # need at least a func arg
+ self.assertRaises(TypeError, self.partial) # need at least a func arg
try:
- self.thetype(2)()
+ self.partial(2)()
except TypeError:
pass
else:
@@ -73,7 +64,7 @@ class TestPartial(unittest.TestCase):
def func(a=10, b=20):
return a
d = {'a':3}
- p = self.thetype(func, a=5)
+ p = self.partial(func, a=5)
self.assertEqual(p(**d), 3)
self.assertEqual(d, {'a':3})
p(b=7)
@@ -82,20 +73,20 @@ class TestPartial(unittest.TestCase):
def test_arg_combinations(self):
# exercise special code paths for zero args in either partial
# object or the caller
- p = self.thetype(capture)
+ p = self.partial(capture)
self.assertEqual(p(), ((), {}))
self.assertEqual(p(1,2), ((1,2), {}))
- p = self.thetype(capture, 1, 2)
+ p = self.partial(capture, 1, 2)
self.assertEqual(p(), ((1,2), {}))
self.assertEqual(p(3,4), ((1,2,3,4), {}))
def test_kw_combinations(self):
# exercise special code paths for no keyword args in
# either the partial object or the caller
- p = self.thetype(capture)
+ p = self.partial(capture)
self.assertEqual(p(), ((), {}))
self.assertEqual(p(a=1), ((), {'a':1}))
- p = self.thetype(capture, a=1)
+ p = self.partial(capture, a=1)
self.assertEqual(p(), ((), {'a':1}))
self.assertEqual(p(b=2), ((), {'a':1, 'b':2}))
# keyword args in the call override those in the partial object
@@ -104,7 +95,7 @@ class TestPartial(unittest.TestCase):
def test_positional(self):
# make sure positional arguments are captured correctly
for args in [(), (0,), (0,1), (0,1,2), (0,1,2,3)]:
- p = self.thetype(capture, *args)
+ p = self.partial(capture, *args)
expected = args + ('x',)
got, empty = p('x')
self.assertTrue(expected == got and empty == {})
@@ -112,14 +103,14 @@ class TestPartial(unittest.TestCase):
def test_keyword(self):
# make sure keyword arguments are captured correctly
for a in ['a', 0, None, 3.5]:
- p = self.thetype(capture, a=a)
+ p = self.partial(capture, a=a)
expected = {'a':a,'x':None}
empty, got = p(x=None)
self.assertTrue(expected == got and empty == ())
def test_no_side_effects(self):
# make sure there are no side effects that affect subsequent calls
- p = self.thetype(capture, 0, a=1)
+ p = self.partial(capture, 0, a=1)
args1, kw1 = p(1, b=2)
self.assertTrue(args1 == (0,1) and kw1 == {'a':1,'b':2})
args2, kw2 = p()
@@ -128,13 +119,13 @@ class TestPartial(unittest.TestCase):
def test_error_propagation(self):
def f(x, y):
x / y
- self.assertRaises(ZeroDivisionError, self.thetype(f, 1, 0))
- self.assertRaises(ZeroDivisionError, self.thetype(f, 1), 0)
- self.assertRaises(ZeroDivisionError, self.thetype(f), 1, 0)
- self.assertRaises(ZeroDivisionError, self.thetype(f, y=0), 1)
+ self.assertRaises(ZeroDivisionError, self.partial(f, 1, 0))
+ self.assertRaises(ZeroDivisionError, self.partial(f, 1), 0)
+ self.assertRaises(ZeroDivisionError, self.partial(f), 1, 0)
+ self.assertRaises(ZeroDivisionError, self.partial(f, y=0), 1)
def test_weakref(self):
- f = self.thetype(int, base=16)
+ f = self.partial(int, base=16)
p = proxy(f)
self.assertEqual(f.func, p.func)
f = None
@@ -142,39 +133,44 @@ class TestPartial(unittest.TestCase):
def test_with_bound_and_unbound_methods(self):
data = list(map(str, range(10)))
- join = self.thetype(str.join, '')
+ join = self.partial(str.join, '')
self.assertEqual(join(data), '0123456789')
- join = self.thetype(''.join)
+ join = self.partial(''.join)
self.assertEqual(join(data), '0123456789')
+@unittest.skipUnless(c_functools, 'requires the C _functools module')
+class TestPartialC(TestPartial, unittest.TestCase):
+ if c_functools:
+ partial = c_functools.partial
+
def test_repr(self):
args = (object(), object())
args_repr = ', '.join(repr(a) for a in args)
kwargs = {'a': object(), 'b': object()}
kwargs_repr = ', '.join("%s=%r" % (k, v) for k, v in kwargs.items())
- if self.thetype is functools.partial:
+ if self.partial is c_functools.partial:
name = 'functools.partial'
else:
- name = self.thetype.__name__
+ name = self.partial.__name__
- f = self.thetype(capture)
+ f = self.partial(capture)
self.assertEqual('{}({!r})'.format(name, capture),
repr(f))
- f = self.thetype(capture, *args)
+ f = self.partial(capture, *args)
self.assertEqual('{}({!r}, {})'.format(name, capture, args_repr),
repr(f))
- f = self.thetype(capture, **kwargs)
+ f = self.partial(capture, **kwargs)
self.assertEqual('{}({!r}, {})'.format(name, capture, kwargs_repr),
repr(f))
- f = self.thetype(capture, *args, **kwargs)
+ f = self.partial(capture, *args, **kwargs)
self.assertEqual('{}({!r}, {}, {})'.format(name, capture, args_repr, kwargs_repr),
repr(f))
def test_pickle(self):
- f = self.thetype(signature, 'asdf', bar=True)
+ f = self.partial(signature, 'asdf', bar=True)
f.add_something_to__dict__ = True
f_copy = pickle.loads(pickle.dumps(f))
self.assertEqual(signature(f), signature(f_copy))
@@ -193,28 +189,22 @@ class TestPartial(unittest.TestCase):
return {}
raise IndexError
- f = self.thetype(object)
+ f = self.partial(object)
self.assertRaisesRegex(SystemError,
"new style getargs format but argument is not a tuple",
f.__setstate__, BadSequence())
-class PartialSubclass(functools.partial):
- pass
-
-class TestPartialSubclass(TestPartial):
-
- thetype = PartialSubclass
-
-class TestPythonPartial(TestPartial):
+class TestPartialPy(TestPartial, unittest.TestCase):
+ partial = staticmethod(py_functools.partial)
- thetype = PythonPartial
+if c_functools:
+ class PartialSubclass(c_functools.partial):
+ pass
- # the python version hasn't a nice repr
- def test_repr(self): pass
-
- # the python version isn't picklable
- def test_pickle(self): pass
- def test_setstate_refcount(self): pass
+@unittest.skipUnless(c_functools, 'requires the C _functools module')
+class TestPartialCSubclass(TestPartialC):
+ if c_functools:
+ partial = PartialSubclass
class TestUpdateWrapper(unittest.TestCase):
@@ -462,24 +452,28 @@ class TestReduce(unittest.TestCase):
d = {"one": 1, "two": 2, "three": 3}
self.assertEqual(self.func(add, d), "".join(d.keys()))
-class TestCmpToKey(unittest.TestCase):
+class TestCmpToKey:
def test_cmp_to_key(self):
def cmp1(x, y):
return (x > y) - (x < y)
- key = functools.cmp_to_key(cmp1)
+ key = self.cmp_to_key(cmp1)
self.assertEqual(key(3), key(3))
self.assertGreater(key(3), key(1))
+ self.assertGreaterEqual(key(3), key(3))
+
def cmp2(x, y):
return int(x) - int(y)
- key = functools.cmp_to_key(cmp2)
+ key = self.cmp_to_key(cmp2)
self.assertEqual(key(4.0), key('4'))
self.assertLess(key(2), key('35'))
+ self.assertLessEqual(key(2), key('35'))
+ self.assertNotEqual(key(2), key('35'))
def test_cmp_to_key_arguments(self):
def cmp1(x, y):
return (x > y) - (x < y)
- key = functools.cmp_to_key(mycmp=cmp1)
+ key = self.cmp_to_key(mycmp=cmp1)
self.assertEqual(key(obj=3), key(obj=3))
self.assertGreater(key(obj=3), key(obj=1))
with self.assertRaises((TypeError, AttributeError)):
@@ -487,10 +481,10 @@ class TestCmpToKey(unittest.TestCase):
with self.assertRaises((TypeError, AttributeError)):
1 < key(3) # lhs is not a K object
with self.assertRaises(TypeError):
- key = functools.cmp_to_key() # too few args
+ key = self.cmp_to_key() # too few args
with self.assertRaises(TypeError):
- key = functools.cmp_to_key(cmp1, None) # too many args
- key = functools.cmp_to_key(cmp1)
+ key = self.cmp_to_key(cmp1, None) # too many args
+ key = self.cmp_to_key(cmp1)
with self.assertRaises(TypeError):
key() # too few args
with self.assertRaises(TypeError):
@@ -499,7 +493,7 @@ class TestCmpToKey(unittest.TestCase):
def test_bad_cmp(self):
def cmp1(x, y):
raise ZeroDivisionError
- key = functools.cmp_to_key(cmp1)
+ key = self.cmp_to_key(cmp1)
with self.assertRaises(ZeroDivisionError):
key(3) > key(1)
@@ -514,13 +508,13 @@ class TestCmpToKey(unittest.TestCase):
def test_obj_field(self):
def cmp1(x, y):
return (x > y) - (x < y)
- key = functools.cmp_to_key(mycmp=cmp1)
+ key = self.cmp_to_key(mycmp=cmp1)
self.assertEqual(key(50).obj, 50)
def test_sort_int(self):
def mycmp(x, y):
return y - x
- self.assertEqual(sorted(range(5), key=functools.cmp_to_key(mycmp)),
+ self.assertEqual(sorted(range(5), key=self.cmp_to_key(mycmp)),
[4, 3, 2, 1, 0])
def test_sort_int_str(self):
@@ -528,18 +522,26 @@ class TestCmpToKey(unittest.TestCase):
x, y = int(x), int(y)
return (x > y) - (x < y)
values = [5, '3', 7, 2, '0', '1', 4, '10', 1]
- values = sorted(values, key=functools.cmp_to_key(mycmp))
+ values = sorted(values, key=self.cmp_to_key(mycmp))
self.assertEqual([int(value) for value in values],
[0, 1, 1, 2, 3, 4, 5, 7, 10])
def test_hash(self):
def mycmp(x, y):
return y - x
- key = functools.cmp_to_key(mycmp)
+ key = self.cmp_to_key(mycmp)
k = key(10)
self.assertRaises(TypeError, hash, k)
self.assertNotIsInstance(k, collections.Hashable)
+@unittest.skipUnless(c_functools, 'requires the C _functools module')
+class TestCmpToKeyC(TestCmpToKey, unittest.TestCase):
+ if c_functools:
+ cmp_to_key = c_functools.cmp_to_key
+
+class TestCmpToKeyPy(TestCmpToKey, unittest.TestCase):
+ cmp_to_key = staticmethod(py_functools.cmp_to_key)
+
class TestTotalOrdering(unittest.TestCase):
def test_total_ordering_lt(self):
@@ -644,7 +646,7 @@ class TestLRU(unittest.TestCase):
def test_lru(self):
def orig(x, y):
- return 3*x+y
+ return 3 * x + y
f = functools.lru_cache(maxsize=20)(orig)
hits, misses, maxsize, currsize = f.cache_info()
self.assertEqual(maxsize, 20)
@@ -749,7 +751,7 @@ class TestLRU(unittest.TestCase):
# Verify that user_function exceptions get passed through without
# creating a hard-to-read chained exception.
# http://bugs.python.org/issue13177
- for maxsize in (None, 100):
+ for maxsize in (None, 128):
@functools.lru_cache(maxsize)
def func(i):
return 'abc'[i]
@@ -762,7 +764,7 @@ class TestLRU(unittest.TestCase):
func(15)
def test_lru_with_types(self):
- for maxsize in (None, 100):
+ for maxsize in (None, 128):
@functools.lru_cache(maxsize=maxsize, typed=True)
def square(x):
return x * x
@@ -777,6 +779,36 @@ class TestLRU(unittest.TestCase):
self.assertEqual(square.cache_info().hits, 4)
self.assertEqual(square.cache_info().misses, 4)
+ def test_lru_with_keyword_args(self):
+ @functools.lru_cache()
+ def fib(n):
+ if n < 2:
+ return n
+ return fib(n=n-1) + fib(n=n-2)
+ self.assertEqual(
+ [fib(n=number) for number in range(16)],
+ [0, 1, 1, 2, 3, 5, 8, 13, 21, 34, 55, 89, 144, 233, 377, 610]
+ )
+ self.assertEqual(fib.cache_info(),
+ functools._CacheInfo(hits=28, misses=16, maxsize=128, currsize=16))
+ fib.cache_clear()
+ self.assertEqual(fib.cache_info(),
+ functools._CacheInfo(hits=0, misses=0, maxsize=128, currsize=0))
+
+ def test_lru_with_keyword_args_maxsize_none(self):
+ @functools.lru_cache(maxsize=None)
+ def fib(n):
+ if n < 2:
+ return n
+ return fib(n=n-1) + fib(n=n-2)
+ self.assertEqual([fib(n=number) for number in range(16)],
+ [0, 1, 1, 2, 3, 5, 8, 13, 21, 34, 55, 89, 144, 233, 377, 610])
+ self.assertEqual(fib.cache_info(),
+ functools._CacheInfo(hits=28, misses=16, maxsize=None, currsize=16))
+ fib.cache_clear()
+ self.assertEqual(fib.cache_info(),
+ functools._CacheInfo(hits=0, misses=0, maxsize=None, currsize=0))
+
def test_need_for_rlock(self):
# This will deadlock on an LRU cache that uses a regular lock
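
The keyword-argument tests added above lean on the documented cache-statistics interface; a short sketch of cache_info()/cache_clear() behaviour with the default maxsize of 128 that the tests assume:

    import functools

    @functools.lru_cache(maxsize=128)
    def fib(n):
        return n if n < 2 else fib(n - 1) + fib(n - 2)

    fib(10)
    print(fib.cache_info())   # CacheInfo(hits=8, misses=11, maxsize=128, currsize=11)
    fib.cache_clear()
    print(fib.cache_info())   # CacheInfo(hits=0, misses=0, maxsize=128, currsize=0)
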
@@ -804,12 +836,13 @@ class TestLRU(unittest.TestCase):
def test_main(verbose=None):
test_classes = (
- TestPartial,
- TestPartialSubclass,
- TestPythonPartial,
+ TestPartialC,
+ TestPartialPy,
+ TestPartialCSubclass,
TestUpdateWrapper,
TestTotalOrdering,
- TestCmpToKey,
+ TestCmpToKeyC,
+ TestCmpToKeyPy,
TestWraps,
TestReduce,
TestLRU,
diff --git a/Lib/test/test_future.py b/Lib/test/test_future.py
index a0c156f..beac993 100644
--- a/Lib/test/test_future.py
+++ b/Lib/test/test_future.py
@@ -82,6 +82,14 @@ class FutureTest(unittest.TestCase):
else:
self.fail("expected exception didn't occur")
+ def test_badfuture10(self):
+ try:
+ from test import badsyntax_future10
+ except SyntaxError as msg:
+ self.assertEqual(get_error_location(msg), ("badsyntax_future10", '3'))
+ else:
+ self.fail("expected exception didn't occur")
+
def test_parserhack(self):
# test that the parser.c::future_hack function works as expected
# Note: although this test must pass, it's not testing the original
diff --git a/Lib/test/test_gc.py b/Lib/test/test_gc.py
index c59b72e..85dbc97 100644
--- a/Lib/test/test_gc.py
+++ b/Lib/test/test_gc.py
@@ -610,6 +610,32 @@ class GCTests(unittest.TestCase):
stderr = run_command(code % "gc.DEBUG_SAVEALL")
self.assertNotIn(b"uncollectable objects at shutdown", stderr)
+ def test_get_stats(self):
+ stats = gc.get_stats()
+ self.assertEqual(len(stats), 3)
+ for st in stats:
+ self.assertIsInstance(st, dict)
+ self.assertEqual(set(st),
+ {"collected", "collections", "uncollectable"})
+ self.assertGreaterEqual(st["collected"], 0)
+ self.assertGreaterEqual(st["collections"], 0)
+ self.assertGreaterEqual(st["uncollectable"], 0)
+ # Check that collection counts are incremented correctly
+ if gc.isenabled():
+ self.addCleanup(gc.enable)
+ gc.disable()
+ old = gc.get_stats()
+ gc.collect(0)
+ new = gc.get_stats()
+ self.assertEqual(new[0]["collections"], old[0]["collections"] + 1)
+ self.assertEqual(new[1]["collections"], old[1]["collections"])
+ self.assertEqual(new[2]["collections"], old[2]["collections"])
+ gc.collect(2)
+ new = gc.get_stats()
+ self.assertEqual(new[0]["collections"], old[0]["collections"] + 1)
+ self.assertEqual(new[1]["collections"], old[1]["collections"])
+ self.assertEqual(new[2]["collections"], old[2]["collections"] + 1)
+
class GCCallbackTests(unittest.TestCase):
def setUp(self):
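
gc.get_stats() is the new API under test here: one dict per generation, holding per-process counters. A rough sketch of its shape (the numbers will differ from run to run):

    import gc
    import pprint

    stats = gc.get_stats()        # list of three per-generation dicts
    pprint.pprint(stats[0])
    # e.g. {'collected': 123, 'collections': 42, 'uncollectable': 0}
    gc.collect(0)                 # a generation-0 collection bumps stats[0]['collections']
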
diff --git a/Lib/test/test_generators.py b/Lib/test/test_generators.py
index 958054a..edf3443 100644
--- a/Lib/test/test_generators.py
+++ b/Lib/test/test_generators.py
@@ -1729,9 +1729,7 @@ Our ill-behaved code should be invoked during GC:
>>> g = f()
>>> next(g)
>>> del g
->>> sys.stderr.getvalue().startswith(
-... "Exception RuntimeError: 'generator ignored GeneratorExit' in "
-... )
+>>> "RuntimeError: generator ignored GeneratorExit" in sys.stderr.getvalue()
True
>>> sys.stderr = old
@@ -1841,22 +1839,23 @@ to test.
... sys.stderr = io.StringIO()
... class Leaker:
... def __del__(self):
-... raise RuntimeError
+... def invoke(message):
+... raise RuntimeError(message)
+... invoke("test")
...
... l = Leaker()
... del l
... err = sys.stderr.getvalue().strip()
-... err.startswith(
-... "Exception RuntimeError: RuntimeError() in <"
-... )
-... err.endswith("> ignored")
-... len(err.splitlines())
+... "Exception ignored in" in err
+... "RuntimeError: test" in err
+... "Traceback" in err
+... "in invoke" in err
... finally:
... sys.stderr = old
True
True
-1
-
+True
+True
These refleak tests should perhaps be in a testfile of their own,
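
The doctest rewrites above track the new "Exception ignored in" wording used for errors raised during finalization. The underlying generator behaviour can be reproduced directly (a sketch, not part of the patch):

    def stubborn():
        try:
            yield 1
        except GeneratorExit:
            yield 2                       # refusing to stop is an error at close time

    g = stubborn()
    next(g)
    try:
        g.close()                         # RuntimeError: generator ignored GeneratorExit
    except RuntimeError as exc:
        print(exc)
    # When the same failure happens during garbage collection the error cannot
    # propagate, so it is written to stderr -- the text the doctests now match.
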
diff --git a/Lib/test/test_genericpath.py b/Lib/test/test_genericpath.py
index fd8bc57..e967897 100644
--- a/Lib/test/test_genericpath.py
+++ b/Lib/test/test_genericpath.py
@@ -188,12 +188,93 @@ class GenericTest:
support.unlink(support.TESTFN)
safe_rmdir(support.TESTFN)
+ @staticmethod
+ def _create_file(filename):
+ with open(filename, 'wb') as f:
+ f.write(b'foo')
+
+ def test_samefile(self):
+ try:
+ test_fn = support.TESTFN + "1"
+ self._create_file(test_fn)
+ self.assertTrue(self.pathmodule.samefile(test_fn, test_fn))
+ self.assertRaises(TypeError, self.pathmodule.samefile)
+ finally:
+ os.remove(test_fn)
+
+ @support.skip_unless_symlink
+ def test_samefile_on_symlink(self):
+ self._test_samefile_on_link_func(os.symlink)
+
+ def test_samefile_on_link(self):
+ self._test_samefile_on_link_func(os.link)
+
+ def _test_samefile_on_link_func(self, func):
+ try:
+ test_fn1 = support.TESTFN + "1"
+ test_fn2 = support.TESTFN + "2"
+ self._create_file(test_fn1)
+
+ func(test_fn1, test_fn2)
+ self.assertTrue(self.pathmodule.samefile(test_fn1, test_fn2))
+ os.remove(test_fn2)
+
+ self._create_file(test_fn2)
+ self.assertFalse(self.pathmodule.samefile(test_fn1, test_fn2))
+ finally:
+ os.remove(test_fn1)
+ os.remove(test_fn2)
+
+ def test_samestat(self):
+ try:
+ test_fn = support.TESTFN + "1"
+ self._create_file(test_fn)
+ test_fns = [test_fn]*2
+ stats = map(os.stat, test_fns)
+ self.assertTrue(self.pathmodule.samestat(*stats))
+ finally:
+ os.remove(test_fn)
+
+ @support.skip_unless_symlink
+ def test_samestat_on_symlink(self):
+ self._test_samestat_on_link_func(os.symlink)
+
+ def test_samestat_on_link(self):
+ self._test_samestat_on_link_func(os.link)
+
+ def _test_samestat_on_link_func(self, func):
+ try:
+ test_fn1 = support.TESTFN + "1"
+ test_fn2 = support.TESTFN + "2"
+ self._create_file(test_fn1)
+ test_fns = (test_fn1, test_fn2)
+ func(*test_fns)
+ stats = map(os.stat, test_fns)
+ self.assertTrue(self.pathmodule.samestat(*stats))
+ os.remove(test_fn2)
+
+ self._create_file(test_fn2)
+ stats = map(os.stat, test_fns)
+ self.assertFalse(self.pathmodule.samestat(*stats))
+
+ self.assertRaises(TypeError, self.pathmodule.samestat)
+ finally:
+ os.remove(test_fn1)
+ os.remove(test_fn2)
+
+ def test_sameopenfile(self):
+ fname = support.TESTFN + "1"
+ with open(fname, "wb") as a, open(fname, "wb") as b:
+ self.assertTrue(self.pathmodule.sameopenfile(
+ a.fileno(), b.fileno()))
+
class TestGenericTest(GenericTest, unittest.TestCase):
# Issue 16852: GenericTest can't inherit from unittest.TestCase
# for test discovery purposes; CommonTest inherits from GenericTest
# and is only meant to be inherited by others.
pathmodule = genericpath
+
# Following TestCase is not supposed to be run from test_genericpath.
# It is inherited by other test modules (macpath, ntpath, posixpath).
@@ -322,7 +403,6 @@ class CommonTest(GenericTest):
else:
self.skipTest("need support.TESTFN_NONASCII")
- # Test non-ASCII, non-UTF8 bytes in the path.
with warnings.catch_warnings():
warnings.simplefilter("ignore", DeprecationWarning)
with support.temp_cwd(name):
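
The new samefile()/samestat()/sameopenfile() tests exercise helpers that now live in genericpath and are reachable from every path module; a brief os.path sketch (file names are illustrative):

    import os
    import os.path

    with open('a.txt', 'wb') as f:
        f.write(b'foo')
    os.link('a.txt', 'b.txt')                      # hard link to the same inode

    print(os.path.samefile('a.txt', 'b.txt'))      # True
    print(os.path.samestat(os.stat('a.txt'), os.stat('b.txt')))   # True
    with open('a.txt', 'rb') as fa, open('b.txt', 'rb') as fb:
        print(os.path.sameopenfile(fa.fileno(), fb.fileno()))     # True

    for name in ('a.txt', 'b.txt'):
        os.remove(name)
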
diff --git a/Lib/test/test_hashlib.py b/Lib/test/test_hashlib.py
index 32f85e9..54201a1 100644
--- a/Lib/test/test_hashlib.py
+++ b/Lib/test/test_hashlib.py
@@ -36,7 +36,10 @@ def hexstr(s):
class HashLibTestCase(unittest.TestCase):
supported_hash_names = ( 'md5', 'MD5', 'sha1', 'SHA1',
'sha224', 'SHA224', 'sha256', 'SHA256',
- 'sha384', 'SHA384', 'sha512', 'SHA512' )
+ 'sha384', 'SHA384', 'sha512', 'SHA512',
+ 'sha3_224', 'sha3_256', 'sha3_384',
+ 'sha3_512', 'SHA3_224', 'SHA3_256',
+ 'SHA3_384', 'SHA3_512' )
# Issue #14693: fallback modules are always compiled under POSIX
_warn_on_extension_import = os.name == 'posix' or COMPILED_WITH_PYDEBUG
@@ -93,6 +96,12 @@ class HashLibTestCase(unittest.TestCase):
if _sha512:
self.constructors_to_test['sha384'].add(_sha512.sha384)
self.constructors_to_test['sha512'].add(_sha512.sha512)
+ _sha3 = self._conditional_import_module('_sha3')
+ if _sha3:
+ self.constructors_to_test['sha3_224'].add(_sha3.sha3_224)
+ self.constructors_to_test['sha3_256'].add(_sha3.sha3_256)
+ self.constructors_to_test['sha3_384'].add(_sha3.sha3_384)
+ self.constructors_to_test['sha3_512'].add(_sha3.sha3_512)
super(HashLibTestCase, self).__init__(*args, **kwargs)
@@ -158,6 +167,7 @@ class HashLibTestCase(unittest.TestCase):
self.assertEqual(m1.digest(), m2.digest())
def check(self, name, data, digest):
+ digest = digest.lower()
constructors = self.constructors_to_test[name]
# 2 is for hashlib.name(...) and hashlib.new(name, ...)
self.assertGreaterEqual(len(constructors), 2)
@@ -183,6 +193,10 @@ class HashLibTestCase(unittest.TestCase):
self.check_no_unicode('sha256')
self.check_no_unicode('sha384')
self.check_no_unicode('sha512')
+ self.check_no_unicode('sha3_224')
+ self.check_no_unicode('sha3_256')
+ self.check_no_unicode('sha3_384')
+ self.check_no_unicode('sha3_512')
def test_case_md5_0(self):
self.check('md5', b'', 'd41d8cd98f00b204e9800998ecf8427e')
@@ -318,11 +332,122 @@ class HashLibTestCase(unittest.TestCase):
"e718483d0ce769644e2e42c7bc15b4638e1f98b13b2044285632a803afa973eb"+
"de0ff244877ea60a4cb0432ce577c31beb009c5c2c49aa2e4eadb217ad8cc09b")
+ # SHA-3 family
+ def test_case_sha3_224_0(self):
+ self.check('sha3_224', b"",
+ "F71837502BA8E10837BDD8D365ADB85591895602FC552B48B7390ABD")
+
+ def test_case_sha3_224_1(self):
+ self.check('sha3_224', bytes.fromhex("CC"),
+ "A9CAB59EB40A10B246290F2D6086E32E3689FAF1D26B470C899F2802")
+
+ def test_case_sha3_224_2(self):
+ self.check('sha3_224', bytes.fromhex("41FB"),
+ "615BA367AFDC35AAC397BC7EB5D58D106A734B24986D5D978FEFD62C")
+
+ def test_case_sha3_224_3(self):
+ self.check('sha3_224', bytes.fromhex(
+ "433C5303131624C0021D868A30825475E8D0BD3052A022180398F4CA4423B9"+
+ "8214B6BEAAC21C8807A2C33F8C93BD42B092CC1B06CEDF3224D5ED1EC29784"+
+ "444F22E08A55AA58542B524B02CD3D5D5F6907AFE71C5D7462224A3F9D9E53"+
+ "E7E0846DCBB4CE"),
+ "62B10F1B6236EBC2DA72957742A8D4E48E213B5F8934604BFD4D2C3A")
+
+ @bigmemtest(size=_4G + 5, memuse=1)
+ def test_case_sha3_224_huge(self, size):
+ if size == _4G + 5:
+ try:
+ self.check('sha3_224', b'A'*size,
+ '58ef60057c9dddb6a87477e9ace5a26f0d9db01881cf9b10a9f8c224')
+ except OverflowError:
+ pass # 32-bit arch
+
+
+ def test_case_sha3_256_0(self):
+ self.check('sha3_256', b"",
+ "C5D2460186F7233C927E7DB2DCC703C0E500B653CA82273B7BFAD8045D85A470")
+
+ def test_case_sha3_256_1(self):
+ self.check('sha3_256', bytes.fromhex("CC"),
+ "EEAD6DBFC7340A56CAEDC044696A168870549A6A7F6F56961E84A54BD9970B8A")
+
+ def test_case_sha3_256_2(self):
+ self.check('sha3_256', bytes.fromhex("41FB"),
+ "A8EACEDA4D47B3281A795AD9E1EA2122B407BAF9AABCB9E18B5717B7873537D2")
+
+ def test_case_sha3_256_3(self):
+ self.check('sha3_256', bytes.fromhex(
+ "433C5303131624C0021D868A30825475E8D0BD3052A022180398F4CA4423B9"+
+ "8214B6BEAAC21C8807A2C33F8C93BD42B092CC1B06CEDF3224D5ED1EC29784"+
+ "444F22E08A55AA58542B524B02CD3D5D5F6907AFE71C5D7462224A3F9D9E53"+
+ "E7E0846DCBB4CE"),
+ "CE87A5173BFFD92399221658F801D45C294D9006EE9F3F9D419C8D427748DC41")
+
+
+ def test_case_sha3_384_0(self):
+ self.check('sha3_384', b"",
+ "2C23146A63A29ACF99E73B88F8C24EAA7DC60AA771780CCC006AFBFA8FE2479B"+
+ "2DD2B21362337441AC12B515911957FF")
+
+ def test_case_sha3_384_1(self):
+ self.check('sha3_384', bytes.fromhex("CC"),
+ "1B84E62A46E5A201861754AF5DC95C4A1A69CAF4A796AE405680161E29572641"+
+ "F5FA1E8641D7958336EE7B11C58F73E9")
+
+ def test_case_sha3_384_2(self):
+ self.check('sha3_384', bytes.fromhex("41FB"),
+ "495CCE2714CD72C8C53C3363D22C58B55960FE26BE0BF3BBC7A3316DD563AD1D"+
+ "B8410E75EEFEA655E39D4670EC0B1792")
+
+ def test_case_sha3_384_3(self):
+ self.check('sha3_384', bytes.fromhex(
+ "433C5303131624C0021D868A30825475E8D0BD3052A022180398F4CA4423B9"+
+ "8214B6BEAAC21C8807A2C33F8C93BD42B092CC1B06CEDF3224D5ED1EC29784"+
+ "444F22E08A55AA58542B524B02CD3D5D5F6907AFE71C5D7462224A3F9D9E53"+
+ "E7E0846DCBB4CE"),
+ "135114508DD63E279E709C26F7817C0482766CDE49132E3EDF2EEDD8996F4E35"+
+ "96D184100B384868249F1D8B8FDAA2C9")
+
+
+ def test_case_sha3_512_0(self):
+ self.check('sha3_512', b"",
+ "0EAB42DE4C3CEB9235FC91ACFFE746B29C29A8C366B7C60E4E67C466F36A4304"+
+ "C00FA9CAF9D87976BA469BCBE06713B435F091EF2769FB160CDAB33D3670680E")
+
+ def test_case_sha3_512_1(self):
+ self.check('sha3_512', bytes.fromhex("CC"),
+ "8630C13CBD066EA74BBE7FE468FEC1DEE10EDC1254FB4C1B7C5FD69B646E4416"+
+ "0B8CE01D05A0908CA790DFB080F4B513BC3B6225ECE7A810371441A5AC666EB9")
+
+ def test_case_sha3_512_2(self):
+ self.check('sha3_512', bytes.fromhex("41FB"),
+ "551DA6236F8B96FCE9F97F1190E901324F0B45E06DBBB5CDB8355D6ED1DC34B3"+
+ "F0EAE7DCB68622FF232FA3CECE0D4616CDEB3931F93803662A28DF1CD535B731")
+
+ def test_case_sha3_512_3(self):
+ self.check('sha3_512', bytes.fromhex(
+ "433C5303131624C0021D868A30825475E8D0BD3052A022180398F4CA4423B9"+
+ "8214B6BEAAC21C8807A2C33F8C93BD42B092CC1B06CEDF3224D5ED1EC29784"+
+ "444F22E08A55AA58542B524B02CD3D5D5F6907AFE71C5D7462224A3F9D9E53"+
+ "E7E0846DCBB4CE"),
+ "527D28E341E6B14F4684ADB4B824C496C6482E51149565D3D17226828884306B"+
+ "51D6148A72622C2B75F5D3510B799D8BDC03EAEDE453676A6EC8FE03A1AD0EAB")
+
+
def test_gil(self):
# Check things work fine with an input larger than the size required
# for multithreaded operation (which is hardwired to 2048).
gil_minsize = 2048
+ for name in self.supported_hash_names:
+ m = hashlib.new(name)
+ m.update(b'1')
+ m.update(b'#' * gil_minsize)
+ m.update(b'1')
+
+ m = hashlib.new(name, b'x' * gil_minsize)
+ m.update(b'1')
+
m = hashlib.md5()
m.update(b'1')
m.update(b'#' * gil_minsize)
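
The SHA-3 constructors are only available when the interpreter is built with the _sha3 module this change introduces; assuming such a build, usage mirrors the existing hashes:

    import hashlib

    h = hashlib.new('sha3_256')
    h.update(b'')
    print(h.hexdigest())
    # c5d2460186f7233c927e7db2dcc703c0e500b653ca82273b7bfad8045d85a470
    # (the empty-input vector from test_case_sha3_256_0 above, lowercased by check())
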
diff --git a/Lib/test/test_httplib.py b/Lib/test/test_httplib.py
index db123dc..a2f141e 100644
--- a/Lib/test/test_httplib.py
+++ b/Lib/test/test_httplib.py
@@ -27,8 +27,10 @@ class FakeSocket:
self.text = text
self.fileclass = fileclass
self.data = b''
+ self.sendall_calls = 0
def sendall(self, data):
+ self.sendall_calls += 1
self.data += data
def makefile(self, mode, bufsize=None):
@@ -45,7 +47,7 @@ class EPipeSocket(FakeSocket):
def sendall(self, data):
if self.pipe_trigger in data:
- raise socket.error(errno.EPIPE, "gotcha")
+ raise OSError(errno.EPIPE, "gotcha")
self.data += data
def close(self):
@@ -567,7 +569,7 @@ class BasicTest(TestCase):
b"Content-Length")
conn = client.HTTPConnection("example.com")
conn.sock = sock
- self.assertRaises(socket.error,
+ self.assertRaises(OSError,
lambda: conn.request("PUT", "/url", "body"))
resp = conn.getresponse()
self.assertEqual(401, resp.status)
@@ -613,6 +615,28 @@ class BasicTest(TestCase):
resp.close()
self.assertTrue(resp.closed)
+ def test_delayed_ack_opt(self):
+ # Test that the Nagle/delayed_ack optimisation works correctly.
+
+ # For small payloads, it should coalesce the body with
+ # headers, resulting in a single sendall() call
+ conn = client.HTTPConnection('example.com')
+ sock = FakeSocket(None)
+ conn.sock = sock
+ body = b'x' * (conn.mss - 1)
+ conn.request('POST', '/', body)
+ self.assertEqual(sock.sendall_calls, 1)
+
+ # For large payloads, it should send the headers and
+ # then the body, resulting in more than one sendall()
+ # call
+ conn = client.HTTPConnection('example.com')
+ sock = FakeSocket(None)
+ conn.sock = sock
+ body = b'x' * conn.mss
+ conn.request('POST', '/', body)
+ self.assertGreater(sock.sendall_calls, 1)
+
class OfflineTest(TestCase):
def test_responses(self):
self.assertEqual(client.responses[client.NOT_FOUND], "Not Found")
@@ -703,7 +727,7 @@ class HTTPSTest(TestCase):
def make_server(self, certfile):
from test.ssl_servers import make_https_server
- return make_https_server(self, certfile)
+ return make_https_server(self, certfile=certfile)
def test_attributes(self):
# simple test to check it's storing the timeout
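
test_delayed_ack_opt assumes this patch gives HTTPConnection an mss attribute and coalesces the headers with a small body into one sendall() call; the attribute name comes from the patch, not the released API. The effect can be observed with a counting fake socket much like the test's:

    from http import client

    class CountingSocket:
        def __init__(self):
            self.sendall_calls = 0
            self.data = b''
        def sendall(self, data):
            self.sendall_calls += 1
            self.data += data

    conn = client.HTTPConnection('example.com')
    conn.sock = CountingSocket()          # bypass the real connect()
    conn.request('POST', '/', b'x' * 16)  # small body
    print(conn.sock.sendall_calls)        # 1 with the coalescing change applied
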
diff --git a/Lib/test/test_httpservers.py b/Lib/test/test_httpservers.py
index 03c0776..ec751cc 100644
--- a/Lib/test/test_httpservers.py
+++ b/Lib/test/test_httpservers.py
@@ -92,6 +92,13 @@ class BaseHTTPServerTestCase(BaseTestCase):
def do_KEYERROR(self):
self.send_error(999)
+ def do_NOTFOUND(self):
+ self.send_error(404)
+
+ def do_EXPLAINERROR(self):
+ self.send_error(999, "Short Message",
+ "This is a long \n explaination")
+
def do_CUSTOM(self):
self.send_response(999)
self.send_header('Content-Type', 'text/html')
@@ -203,6 +210,12 @@ class BaseHTTPServerTestCase(BaseTestCase):
res = self.con.getresponse()
self.assertEqual(res.status, 999)
+ def test_return_explain_error(self):
+ self.con.request('EXPLAINERROR', '/')
+ res = self.con.getresponse()
+ self.assertEqual(res.status, 999)
+ self.assertTrue(int(res.getheader('Content-Length')))
+
def test_latin1_header(self):
self.con.request('LATINONEHEADER', '/', headers={
'X-Special-Incoming': 'Ärger mit Unicode'
@@ -211,6 +224,14 @@ class BaseHTTPServerTestCase(BaseTestCase):
self.assertEqual(res.getheader('X-Special'), 'Dängerous Mind')
self.assertEqual(res.read(), 'Ärger mit Unicode'.encode('utf-8'))
+ def test_error_content_length(self):
+ # Issue #16088: standard error responses should have a content-length
+ self.con.request('NOTFOUND', '/')
+ res = self.con.getresponse()
+ self.assertEqual(res.status, 404)
+ data = res.read()
+ self.assertEqual(int(res.getheader('Content-Length')), len(data))
+
class SimpleHTTPServerTestCase(BaseTestCase):
class request_handler(NoLogRequestHandler, SimpleHTTPRequestHandler):
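
do_EXPLAINERROR above relies on send_error() accepting a third, longer explanation and on error responses carrying a Content-Length header. A minimal handler sketch (handler class and port are illustrative):

    from http.server import BaseHTTPRequestHandler, HTTPServer

    class Handler(BaseHTTPRequestHandler):
        def do_GET(self):
            # the message appears on the status line, the explanation in the body
            self.send_error(404, 'Not Found', 'There is no resource at this path')

    HTTPServer(('127.0.0.1', 8000), Handler).serve_forever()
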
diff --git a/Lib/test/test_imaplib.py b/Lib/test/test_imaplib.py
index 7db3f7d..c37ea1d 100644
--- a/Lib/test/test_imaplib.py
+++ b/Lib/test/test_imaplib.py
@@ -125,7 +125,7 @@ class SimpleIMAPHandler(socketserver.StreamRequestHandler):
# Naked sockets return empty strings..
return
line += part
- except IOError:
+ except OSError:
# ..but SSLSockets raise exceptions.
return
if line.endswith(b'\r\n'):
diff --git a/Lib/test/test_imp.py b/Lib/test/test_imp.py
index 72ae145..2d75724 100644
--- a/Lib/test/test_imp.py
+++ b/Lib/test/test_imp.py
@@ -219,6 +219,20 @@ class ImportTests(unittest.TestCase):
mod = imp.load_module(example, *x)
self.assertEqual(mod.__name__, example)
+ def test_issue16421_multiple_modules_in_one_dll(self):
+ # Issue 16421: loading several modules from the same compiled file fails
+ m = '_testimportmultiple'
+ fileobj, pathname, description = imp.find_module(m)
+ fileobj.close()
+ mod0 = imp.load_dynamic(m, pathname)
+ mod1 = imp.load_dynamic('_testimportmultiple_foo', pathname)
+ mod2 = imp.load_dynamic('_testimportmultiple_bar', pathname)
+ self.assertEqual(mod0.__name__, m)
+ self.assertEqual(mod1.__name__, '_testimportmultiple_foo')
+ self.assertEqual(mod2.__name__, '_testimportmultiple_bar')
+ with self.assertRaises(ImportError):
+ imp.load_dynamic('nonexistent', pathname)
+
def test_load_dynamic_ImportError_path(self):
# Issue #1559549 added `name` and `path` attributes to ImportError
# in order to provide better detail. Issue #10854 implemented those
diff --git a/Lib/test/test_importlib/import_/test___loader__.py b/Lib/test/test_importlib/import_/test___loader__.py
new file mode 100644
index 0000000..3e0d3dd
--- /dev/null
+++ b/Lib/test/test_importlib/import_/test___loader__.py
@@ -0,0 +1,44 @@
+import imp
+import sys
+import unittest
+
+from .. import util
+from . import util as import_util
+
+
+class LoaderMock:
+
+ def find_module(self, fullname, path=None):
+ return self
+
+ def load_module(self, fullname):
+ sys.modules[fullname] = self.module
+ return self.module
+
+
+class LoaderAttributeTests(unittest.TestCase):
+
+ def test___loader___missing(self):
+ module = imp.new_module('blah')
+ try:
+ del module.__loader__
+ except AttributeError:
+ pass
+ loader = LoaderMock()
+ loader.module = module
+ with util.uncache('blah'), util.import_state(meta_path=[loader]):
+ module = import_util.import_('blah')
+ self.assertEqual(loader, module.__loader__)
+
+ def test___loader___is_None(self):
+ module = imp.new_module('blah')
+ module.__loader__ = None
+ loader = LoaderMock()
+ loader.module = module
+ with util.uncache('blah'), util.import_state(meta_path=[loader]):
+ returned_module = import_util.import_('blah')
+ self.assertEqual(loader, module.__loader__)
+
+
+if __name__ == '__main__':
+ unittest.main()
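
LoaderMock above is a complete, if minimal, legacy finder/loader; outside the test harness the same shape plugs straight into sys.meta_path (the module name 'blah' is made up for illustration):

    import imp
    import sys

    class InMemoryLoader:
        def find_module(self, fullname, path=None):
            return self if fullname == 'blah' else None
        def load_module(self, fullname):
            module = sys.modules.setdefault(fullname, imp.new_module(fullname))
            # Loaders are expected to set __loader__ themselves; the new tests above
            # check that import fills it in when a loader forgets or leaves it None.
            module.__loader__ = self
            return module

    sys.meta_path.insert(0, InMemoryLoader())
    import blah                          # served entirely by the loader
    print(blah.__loader__)               # the InMemoryLoader instance
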
diff --git a/Lib/test/test_importlib/source/test_abc_loader.py b/Lib/test/test_importlib/source/test_abc_loader.py
index 0d912b6..718a548 100644
--- a/Lib/test/test_importlib/source/test_abc_loader.py
+++ b/Lib/test/test_importlib/source/test_abc_loader.py
@@ -59,7 +59,7 @@ class SourceLoaderMock(SourceOnlyLoaderMock):
elif path == self.bytecode_path:
return self.bytecode
else:
- raise IOError
+ raise OSError
def path_stats(self, path):
assert path == self.path
@@ -70,483 +70,9 @@ class SourceLoaderMock(SourceOnlyLoaderMock):
return path == self.bytecode_path
-class PyLoaderMock(abc.PyLoader):
-
- # Globals that should be defined for all modules.
- source = (b"_ = '::'.join([__name__, __file__, __package__, "
- b"repr(__loader__)])")
-
- def __init__(self, data):
- """Take a dict of 'module_name: path' pairings.
-
- Paths should have no file extension, allowing packages to be denoted by
- ending in '__init__'.
-
- """
- self.module_paths = data
- self.path_to_module = {val:key for key,val in data.items()}
-
- def get_data(self, path):
- if path not in self.path_to_module:
- raise IOError
- return self.source
-
- def is_package(self, name):
- filename = os.path.basename(self.get_filename(name))
- return os.path.splitext(filename)[0] == '__init__'
-
- def source_path(self, name):
- try:
- return self.module_paths[name]
- except KeyError:
- raise ImportError
-
- def get_filename(self, name):
- """Silence deprecation warning."""
- with warnings.catch_warnings(record=True) as w:
- warnings.simplefilter("always")
- path = super().get_filename(name)
- assert len(w) == 1
- assert issubclass(w[0].category, DeprecationWarning)
- return path
-
- def module_repr(self):
- return '<module>'
-
-
-class PyLoaderCompatMock(PyLoaderMock):
-
- """Mock that matches what is suggested to have a loader that is compatible
- from Python 3.1 onwards."""
-
- def get_filename(self, fullname):
- try:
- return self.module_paths[fullname]
- except KeyError:
- raise ImportError
-
- def source_path(self, fullname):
- try:
- return self.get_filename(fullname)
- except ImportError:
- return None
-
-
-class PyPycLoaderMock(abc.PyPycLoader, PyLoaderMock):
-
- default_mtime = 1
-
- def __init__(self, source, bc={}):
- """Initialize mock.
-
- 'bc' is a dict keyed on a module's name. The value is dict with
- possible keys of 'path', 'mtime', 'magic', and 'bc'. Except for 'path',
- each of those keys control if any part of created bytecode is to
- deviate from default values.
-
- """
- super().__init__(source)
- self.module_bytecode = {}
- self.path_to_bytecode = {}
- self.bytecode_to_path = {}
- for name, data in bc.items():
- self.path_to_bytecode[data['path']] = name
- self.bytecode_to_path[name] = data['path']
- magic = data.get('magic', imp.get_magic())
- mtime = importlib._w_long(data.get('mtime', self.default_mtime))
- source_size = importlib._w_long(len(self.source) & 0xFFFFFFFF)
- if 'bc' in data:
- bc = data['bc']
- else:
- bc = self.compile_bc(name)
- self.module_bytecode[name] = magic + mtime + source_size + bc
-
- def compile_bc(self, name):
- source_path = self.module_paths.get(name, '<test>') or '<test>'
- code = compile(self.source, source_path, 'exec')
- return marshal.dumps(code)
-
- def source_mtime(self, name):
- if name in self.module_paths:
- return self.default_mtime
- elif name in self.module_bytecode:
- return None
- else:
- raise ImportError
-
- def bytecode_path(self, name):
- try:
- return self.bytecode_to_path[name]
- except KeyError:
- if name in self.module_paths:
- return None
- else:
- raise ImportError
-
- def write_bytecode(self, name, bytecode):
- self.module_bytecode[name] = bytecode
- return True
-
- def get_data(self, path):
- if path in self.path_to_module:
- return super().get_data(path)
- elif path in self.path_to_bytecode:
- name = self.path_to_bytecode[path]
- return self.module_bytecode[name]
- else:
- raise IOError
-
- def is_package(self, name):
- try:
- return super().is_package(name)
- except TypeError:
- return '__init__' in self.bytecode_to_path[name]
-
- def get_code(self, name):
- with warnings.catch_warnings(record=True) as w:
- warnings.simplefilter("always")
- code_object = super().get_code(name)
- assert len(w) == 1
- assert issubclass(w[0].category, DeprecationWarning)
- return code_object
-
-class PyLoaderTests(testing_abc.LoaderTests):
-
- """Tests for importlib.abc.PyLoader."""
-
- mocker = PyLoaderMock
-
- def eq_attrs(self, ob, **kwargs):
- for attr, val in kwargs.items():
- found = getattr(ob, attr)
- self.assertEqual(found, val,
- "{} attribute: {} != {}".format(attr, found, val))
-
- def test_module(self):
- name = '<module>'
- path = os.path.join('', 'path', 'to', 'module')
- mock = self.mocker({name: path})
- with util.uncache(name):
- module = mock.load_module(name)
- self.assertIn(name, sys.modules)
- self.eq_attrs(module, __name__=name, __file__=path, __package__='',
- __loader__=mock)
- self.assertTrue(not hasattr(module, '__path__'))
- return mock, name
-
- def test_package(self):
- name = '<pkg>'
- path = os.path.join('path', 'to', name, '__init__')
- mock = self.mocker({name: path})
- with util.uncache(name):
- module = mock.load_module(name)
- self.assertIn(name, sys.modules)
- self.eq_attrs(module, __name__=name, __file__=path,
- __path__=[os.path.dirname(path)], __package__=name,
- __loader__=mock)
- return mock, name
-
- def test_lacking_parent(self):
- name = 'pkg.mod'
- path = os.path.join('path', 'to', 'pkg', 'mod')
- mock = self.mocker({name: path})
- with util.uncache(name):
- module = mock.load_module(name)
- self.assertIn(name, sys.modules)
- self.eq_attrs(module, __name__=name, __file__=path, __package__='pkg',
- __loader__=mock)
- self.assertFalse(hasattr(module, '__path__'))
- return mock, name
-
- def test_module_reuse(self):
- name = 'mod'
- path = os.path.join('path', 'to', 'mod')
- module = imp.new_module(name)
- mock = self.mocker({name: path})
- with util.uncache(name):
- sys.modules[name] = module
- loaded_module = mock.load_module(name)
- self.assertIs(loaded_module, module)
- self.assertIs(sys.modules[name], module)
- return mock, name
-
- def test_state_after_failure(self):
- name = "mod"
- module = imp.new_module(name)
- module.blah = None
- mock = self.mocker({name: os.path.join('path', 'to', 'mod')})
- mock.source = b"1/0"
- with util.uncache(name):
- sys.modules[name] = module
- with self.assertRaises(ZeroDivisionError):
- mock.load_module(name)
- self.assertIs(sys.modules[name], module)
- self.assertTrue(hasattr(module, 'blah'))
- return mock
-
- def test_unloadable(self):
- name = "mod"
- mock = self.mocker({name: os.path.join('path', 'to', 'mod')})
- mock.source = b"1/0"
- with util.uncache(name):
- with self.assertRaises(ZeroDivisionError):
- mock.load_module(name)
- self.assertNotIn(name, sys.modules)
- return mock
-
-
-class PyLoaderCompatTests(PyLoaderTests):
-
- """Test that the suggested code to make a loader that is compatible from
- Python 3.1 forward works."""
-
- mocker = PyLoaderCompatMock
-
-
-class PyLoaderInterfaceTests(unittest.TestCase):
-
- """Tests for importlib.abc.PyLoader to make sure that when source_path()
- doesn't return a path everything works as expected."""
-
- def test_no_source_path(self):
- # No source path should lead to ImportError.
- name = 'mod'
- mock = PyLoaderMock({})
- with util.uncache(name), self.assertRaises(ImportError):
- mock.load_module(name)
-
- def test_source_path_is_None(self):
- name = 'mod'
- mock = PyLoaderMock({name: None})
- with util.uncache(name), self.assertRaises(ImportError):
- mock.load_module(name)
-
- def test_get_filename_with_source_path(self):
- # get_filename() should return what source_path() returns.
- name = 'mod'
- path = os.path.join('path', 'to', 'source')
- mock = PyLoaderMock({name: path})
- with util.uncache(name):
- self.assertEqual(mock.get_filename(name), path)
-
- def test_get_filename_no_source_path(self):
- # get_filename() should raise ImportError if source_path returns None.
- name = 'mod'
- mock = PyLoaderMock({name: None})
- with util.uncache(name), self.assertRaises(ImportError):
- mock.get_filename(name)
-
-
-class PyPycLoaderTests(PyLoaderTests):
-
- """Tests for importlib.abc.PyPycLoader."""
-
- mocker = PyPycLoaderMock
-
- @source_util.writes_bytecode_files
- def verify_bytecode(self, mock, name):
- assert name in mock.module_paths
- self.assertIn(name, mock.module_bytecode)
- magic = mock.module_bytecode[name][:4]
- self.assertEqual(magic, imp.get_magic())
- mtime = importlib._r_long(mock.module_bytecode[name][4:8])
- self.assertEqual(mtime, 1)
- source_size = mock.module_bytecode[name][8:12]
- self.assertEqual(len(mock.source) & 0xFFFFFFFF,
- importlib._r_long(source_size))
- bc = mock.module_bytecode[name][12:]
- self.assertEqual(bc, mock.compile_bc(name))
-
- def test_module(self):
- mock, name = super().test_module()
- self.verify_bytecode(mock, name)
-
- def test_package(self):
- mock, name = super().test_package()
- self.verify_bytecode(mock, name)
-
- def test_lacking_parent(self):
- mock, name = super().test_lacking_parent()
- self.verify_bytecode(mock, name)
-
- def test_module_reuse(self):
- mock, name = super().test_module_reuse()
- self.verify_bytecode(mock, name)
-
- def test_state_after_failure(self):
- super().test_state_after_failure()
-
- def test_unloadable(self):
- super().test_unloadable()
-
-
-class PyPycLoaderInterfaceTests(unittest.TestCase):
-
- """Test for the interface of importlib.abc.PyPycLoader."""
-
- def get_filename_check(self, src_path, bc_path, expect):
- name = 'mod'
- mock = PyPycLoaderMock({name: src_path}, {name: {'path': bc_path}})
- with util.uncache(name):
- assert mock.source_path(name) == src_path
- assert mock.bytecode_path(name) == bc_path
- self.assertEqual(mock.get_filename(name), expect)
-
- def test_filename_with_source_bc(self):
- # When source and bytecode paths present, return the source path.
- self.get_filename_check('source_path', 'bc_path', 'source_path')
-
- def test_filename_with_source_no_bc(self):
- # With source but no bc, return source path.
- self.get_filename_check('source_path', None, 'source_path')
-
- def test_filename_with_no_source_bc(self):
- # With not source but bc, return the bc path.
- self.get_filename_check(None, 'bc_path', 'bc_path')
-
- def test_filename_with_no_source_or_bc(self):
- # With no source or bc, raise ImportError.
- name = 'mod'
- mock = PyPycLoaderMock({name: None}, {name: {'path': None}})
- with util.uncache(name), self.assertRaises(ImportError):
- mock.get_filename(name)
-
-
-class SkipWritingBytecodeTests(unittest.TestCase):
-
- """Test that bytecode is properly handled based on
- sys.dont_write_bytecode."""
-
- @source_util.writes_bytecode_files
- def run_test(self, dont_write_bytecode):
- name = 'mod'
- mock = PyPycLoaderMock({name: os.path.join('path', 'to', 'mod')})
- sys.dont_write_bytecode = dont_write_bytecode
- with util.uncache(name):
- mock.load_module(name)
- self.assertIsNot(name in mock.module_bytecode, dont_write_bytecode)
-
- def test_no_bytecode_written(self):
- self.run_test(True)
-
- def test_bytecode_written(self):
- self.run_test(False)
-
-
-class RegeneratedBytecodeTests(unittest.TestCase):
-
- """Test that bytecode is regenerated as expected."""
-
- @source_util.writes_bytecode_files
- def test_different_magic(self):
- # A different magic number should lead to new bytecode.
- name = 'mod'
- bad_magic = b'\x00\x00\x00\x00'
- assert bad_magic != imp.get_magic()
- mock = PyPycLoaderMock({name: os.path.join('path', 'to', 'mod')},
- {name: {'path': os.path.join('path', 'to',
- 'mod.bytecode'),
- 'magic': bad_magic}})
- with util.uncache(name):
- mock.load_module(name)
- self.assertIn(name, mock.module_bytecode)
- magic = mock.module_bytecode[name][:4]
- self.assertEqual(magic, imp.get_magic())
-
- @source_util.writes_bytecode_files
- def test_old_mtime(self):
- # Bytecode with an older mtime should be regenerated.
- name = 'mod'
- old_mtime = PyPycLoaderMock.default_mtime - 1
- mock = PyPycLoaderMock({name: os.path.join('path', 'to', 'mod')},
- {name: {'path': 'path/to/mod.bytecode', 'mtime': old_mtime}})
- with util.uncache(name):
- mock.load_module(name)
- self.assertIn(name, mock.module_bytecode)
- mtime = importlib._r_long(mock.module_bytecode[name][4:8])
- self.assertEqual(mtime, PyPycLoaderMock.default_mtime)
-
-
-class BadBytecodeFailureTests(unittest.TestCase):
-
- """Test import failures when there is no source and parts of the bytecode
- is bad."""
-
- def test_bad_magic(self):
- # A bad magic number should lead to an ImportError.
- name = 'mod'
- bad_magic = b'\x00\x00\x00\x00'
- bc = {name:
- {'path': os.path.join('path', 'to', 'mod'),
- 'magic': bad_magic}}
- mock = PyPycLoaderMock({name: None}, bc)
- with util.uncache(name), self.assertRaises(ImportError) as cm:
- mock.load_module(name)
- self.assertEqual(cm.exception.name, name)
-
- def test_no_bytecode(self):
- # Missing code object bytecode should lead to an EOFError.
- name = 'mod'
- bc = {name: {'path': os.path.join('path', 'to', 'mod'), 'bc': b''}}
- mock = PyPycLoaderMock({name: None}, bc)
- with util.uncache(name), self.assertRaises(EOFError):
- mock.load_module(name)
-
- def test_bad_bytecode(self):
- # Malformed code object bytecode should lead to a ValueError.
- name = 'mod'
- bc = {name: {'path': os.path.join('path', 'to', 'mod'), 'bc': b'1234'}}
- mock = PyPycLoaderMock({name: None}, bc)
- with util.uncache(name), self.assertRaises(ValueError):
- mock.load_module(name)
-
-
def raise_ImportError(*args, **kwargs):
raise ImportError
-class MissingPathsTests(unittest.TestCase):
-
- """Test what happens when a source or bytecode path does not exist (either
- from *_path returning None or raising ImportError)."""
-
- def test_source_path_None(self):
- # Bytecode should be used when source_path returns None, along with
- # __file__ being set to the bytecode path.
- name = 'mod'
- bytecode_path = 'path/to/mod'
- mock = PyPycLoaderMock({name: None}, {name: {'path': bytecode_path}})
- with util.uncache(name):
- module = mock.load_module(name)
- self.assertEqual(module.__file__, bytecode_path)
-
- # Testing for bytecode_path returning None handled by all tests where no
- # bytecode initially exists.
-
- def test_all_paths_None(self):
- # If all *_path methods return None, raise ImportError.
- name = 'mod'
- mock = PyPycLoaderMock({name: None})
- with util.uncache(name), self.assertRaises(ImportError) as cm:
- mock.load_module(name)
- self.assertEqual(cm.exception.name, name)
-
- def test_source_path_ImportError(self):
- # An ImportError from source_path should trigger an ImportError.
- name = 'mod'
- mock = PyPycLoaderMock({}, {name: {'path': os.path.join('path', 'to',
- 'mod')}})
- with util.uncache(name), self.assertRaises(ImportError):
- mock.load_module(name)
-
- def test_bytecode_path_ImportError(self):
- # An ImportError from bytecode_path should trigger an ImportError.
- name = 'mod'
- mock = PyPycLoaderMock({name: os.path.join('path', 'to', 'mod')})
- bad_meth = types.MethodType(raise_ImportError, mock)
- mock.bytecode_path = bad_meth
- with util.uncache(name), self.assertRaises(ImportError) as cm:
- mock.load_module(name)
-
class SourceLoaderTestHarness(unittest.TestCase):
@@ -599,12 +125,12 @@ class SourceOnlyLoaderTests(SourceLoaderTestHarness):
def test_get_source(self):
# Verify the source code is returned as a string.
- # If an IOError is raised by get_data then raise ImportError.
+ # If an OSError is raised by get_data then raise ImportError.
expected_source = self.loader.source.decode('utf-8')
self.assertEqual(self.loader.get_source(self.name), expected_source)
- def raise_IOError(path):
- raise IOError
- self.loader.get_data = raise_IOError
+ def raise_OSError(path):
+ raise OSError
+ self.loader.get_data = raise_OSError
with self.assertRaises(ImportError) as cm:
self.loader.get_source(self.name)
self.assertEqual(cm.exception.name, self.name)
@@ -622,6 +148,11 @@ class SourceOnlyLoaderTests(SourceLoaderTestHarness):
code_object = self.loader.get_code(self.name)
self.verify_code(code_object)
+ def test_source_to_code(self):
+ # Verify the compiled code object.
+ code = self.loader.source_to_code(self.loader.source, self.path)
+ self.verify_code(code)
+
def test_load_module(self):
# Loading a module should set __name__, __loader__, __package__,
# __path__ (for packages), __file__, and __cached__.
@@ -685,7 +216,7 @@ class SourceLoaderBytecodeTests(SourceLoaderTestHarness):
# If no bytecode exists then move on to the source.
self.loader.bytecode_path = "<does not exist>"
# Sanity check
- with self.assertRaises(IOError):
+ with self.assertRaises(OSError):
bytecode_path = imp.cache_from_source(self.path)
self.loader.get_data(bytecode_path)
code_object = self.loader.get_code(self.name)
@@ -734,7 +265,7 @@ class SourceLoaderBytecodeTests(SourceLoaderTestHarness):
self.loader.__class__.set_data = original_set_data
def test_set_data_raises_exceptions(self):
- # Raising NotImplementedError or IOError is okay for set_data.
+ # Raising NotImplementedError or OSError is okay for set_data.
def raise_exception(exc):
def closure(*args, **kwargs):
raise exc
@@ -801,6 +332,7 @@ class AbstractMethodImplTests(unittest.TestCase):
class Loader(abc.Loader):
def load_module(self, fullname):
super().load_module(fullname)
+
def module_repr(self, module):
super().module_repr(module)
@@ -825,20 +357,6 @@ class AbstractMethodImplTests(unittest.TestCase):
class SourceLoader(ResourceLoader, ExecutionLoader, abc.SourceLoader):
pass
- class PyLoader(ResourceLoader, InspectLoader, abc.PyLoader):
- def source_path(self, _):
- super().source_path(_)
-
- class PyPycLoader(PyLoader, abc.PyPycLoader):
- def bytecode_path(self, _):
- super().bytecode_path(_)
-
- def source_mtime(self, _):
- super().source_mtime(_)
-
- def write_bytecode(self, _, _2):
- super().write_bytecode(_, _2)
-
def raises_NotImplementedError(self, ins, *args):
for method_name in args:
method = getattr(ins, method_name)
@@ -877,29 +395,15 @@ class AbstractMethodImplTests(unittest.TestCase):
# Required abstractmethods.
self.raises_NotImplementedError(ins, 'get_filename', 'get_data')
# Optional abstractmethods.
- self.raises_NotImplementedError(ins,'path_stats', 'set_data')
-
- def test_PyLoader(self):
- self.raises_NotImplementedError(self.PyLoader(), 'source_path',
- 'get_data', 'is_package')
-
- def test_PyPycLoader(self):
- self.raises_NotImplementedError(self.PyPycLoader(), 'source_path',
- 'source_mtime', 'bytecode_path',
- 'write_bytecode')
+ self.raises_NotImplementedError(ins, 'path_stats', 'set_data')
def test_main():
from test.support import run_unittest
- run_unittest(PyLoaderTests, PyLoaderCompatTests,
- PyLoaderInterfaceTests,
- PyPycLoaderTests, PyPycLoaderInterfaceTests,
- SkipWritingBytecodeTests, RegeneratedBytecodeTests,
- BadBytecodeFailureTests, MissingPathsTests,
- SourceOnlyLoaderTests,
- SourceLoaderBytecodeTests,
- SourceLoaderGetSourceTests,
- AbstractMethodImplTests)
+ run_unittest(SourceOnlyLoaderTests,
+ SourceLoaderBytecodeTests,
+ SourceLoaderGetSourceTests,
+ AbstractMethodImplTests)
if __name__ == '__main__':
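
With the PyLoader/PyPycLoader scaffolding removed, the remaining tests target importlib.abc.SourceLoader, whose required surface is just get_filename() and get_data(); get_source() converts an OSError from get_data() into ImportError, and source_to_code() compiles the source. A sketch under those assumptions:

    import importlib.abc

    class StringSourceLoader(importlib.abc.SourceLoader):
        """Loader whose 'files' live in a dict; only the two required
        abstract methods are implemented."""

        def __init__(self, sources):
            self.sources = sources                  # {module name: source text}

        def get_filename(self, fullname):
            if fullname not in self.sources:
                raise ImportError(fullname)
            return '<memory>/{}.py'.format(fullname)

        def get_data(self, path):
            name = path.rsplit('/', 1)[-1][:-3]
            if name not in self.sources:
                raise OSError(path)                 # reported as ImportError by get_source()
            return self.sources[name].encode('utf-8')

    loader = StringSourceLoader({'demo': 'answer = 42\n'})
    print(loader.get_source('demo'))                # 'answer = 42\n'
    code = loader.source_to_code(b'answer = 42\n', '<memory>/demo.py')
    ns = {}
    exec(code, ns)
    print(ns['answer'])                             # 42
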
diff --git a/Lib/test/test_importlib/source/test_file_loader.py b/Lib/test/test_importlib/source/test_file_loader.py
index d59969a..80999e8 100644
--- a/Lib/test/test_importlib/source/test_file_loader.py
+++ b/Lib/test/test_importlib/source/test_file_loader.py
@@ -408,7 +408,7 @@ class SourceLoaderBadBytecodeTest(BadBytecodeTest):
os.chmod(bytecode_path,
stat.S_IRUSR | stat.S_IRGRP | stat.S_IROTH)
try:
- # Should not raise IOError!
+ # Should not raise OSError!
self.import_(mapping['_temp'], '_temp')
finally:
# Make writable for eventual clean-up.
diff --git a/Lib/test/test_importlib/test_abc.py b/Lib/test/test_importlib/test_abc.py
index c620c37..a8d8c2e 100644
--- a/Lib/test/test_importlib/test_abc.py
+++ b/Lib/test/test_importlib/test_abc.py
@@ -43,11 +43,6 @@ class PathEntryFinder(InheritanceTests, unittest.TestCase):
subclasses = [machinery.FileFinder]
-class Loader(InheritanceTests, unittest.TestCase):
-
- subclasses = [abc.PyLoader]
-
-
class ResourceLoader(InheritanceTests, unittest.TestCase):
superclasses = [abc.Loader]
@@ -56,14 +51,13 @@ class ResourceLoader(InheritanceTests, unittest.TestCase):
class InspectLoader(InheritanceTests, unittest.TestCase):
superclasses = [abc.Loader]
- subclasses = [abc.PyLoader, machinery.BuiltinImporter,
+ subclasses = [machinery.BuiltinImporter,
machinery.FrozenImporter, machinery.ExtensionFileLoader]
class ExecutionLoader(InheritanceTests, unittest.TestCase):
superclasses = [abc.InspectLoader]
- subclasses = [abc.PyLoader]
class FileLoader(InheritanceTests, unittest.TestCase):
@@ -78,16 +72,6 @@ class SourceLoader(InheritanceTests, unittest.TestCase):
subclasses = [machinery.SourceFileLoader]
-class PyLoader(InheritanceTests, unittest.TestCase):
-
- superclasses = [abc.Loader, abc.ResourceLoader, abc.ExecutionLoader]
-
-
-class PyPycLoader(InheritanceTests, unittest.TestCase):
-
- superclasses = [abc.PyLoader]
-
-
def test_main():
from test.support import run_unittest
classes = []
diff --git a/Lib/test/test_importlib/test_api.py b/Lib/test/test_importlib/test_api.py
index b1a5894..ac88b2b 100644
--- a/Lib/test/test_importlib/test_api.py
+++ b/Lib/test/test_importlib/test_api.py
@@ -1,6 +1,7 @@
from . import util
import imp
import importlib
+from importlib import _bootstrap
from importlib import machinery
import sys
from test import support
@@ -115,6 +116,20 @@ class FindLoaderTests(unittest.TestCase):
with self.assertRaises(ValueError):
importlib.find_loader(name)
+ def test_sys_modules_loader_is_not_set(self):
+ # Should raise ValueError
+ # Issue #17099
+ name = 'some_mod'
+ with util.uncache(name):
+ module = imp.new_module(name)
+ try:
+ del module.__loader__
+ except AttributeError:
+ pass
+ sys.modules[name] = module
+ with self.assertRaises(ValueError):
+ importlib.find_loader(name)
+
def test_success(self):
# Return the loader found on sys.meta_path.
name = 'some_mod'
@@ -183,20 +198,14 @@ class StartupTests(unittest.TestCase):
for name, module in sys.modules.items():
if isinstance(module, types.ModuleType):
if name in sys.builtin_module_names:
- self.assertEqual(importlib.machinery.BuiltinImporter,
- module.__loader__)
+ self.assertIn(module.__loader__,
+ (importlib.machinery.BuiltinImporter,
+ importlib._bootstrap.BuiltinImporter))
elif imp.is_frozen(name):
- self.assertEqual(importlib.machinery.FrozenImporter,
- module.__loader__)
-
-def test_main():
- from test.support import run_unittest
- run_unittest(ImportModuleTests,
- FindLoaderTests,
- InvalidateCacheTests,
- FrozenImportlibTests,
- StartupTests)
+ self.assertIn(module.__loader__,
+ (importlib.machinery.FrozenImporter,
+ importlib._bootstrap.FrozenImporter))
if __name__ == '__main__':
- test_main()
+ unittest.main()
diff --git a/Lib/test/test_importlib/test_util.py b/Lib/test/test_importlib/test_util.py
index efc8977..5f08719 100644
--- a/Lib/test/test_importlib/test_util.py
+++ b/Lib/test/test_importlib/test_util.py
@@ -162,6 +162,37 @@ class SetPackageTests(unittest.TestCase):
self.assertEqual(wrapped.__qualname__, fxn.__qualname__)
+class SetLoaderTests(unittest.TestCase):
+
+ """Tests importlib.util.set_loader()."""
+
+ class DummyLoader:
+ @util.set_loader
+ def load_module(self, module):
+ return self.module
+
+ def test_no_attribute(self):
+ loader = self.DummyLoader()
+ loader.module = imp.new_module('blah')
+ try:
+ del loader.module.__loader__
+ except AttributeError:
+ pass
+ self.assertEqual(loader, loader.load_module('blah').__loader__)
+
+ def test_attribute_is_None(self):
+ loader = self.DummyLoader()
+ loader.module = imp.new_module('blah')
+ loader.module.__loader__ = None
+ self.assertEqual(loader, loader.load_module('blah').__loader__)
+
+ def test_not_reset(self):
+ loader = self.DummyLoader()
+ loader.module = imp.new_module('blah')
+ loader.module.__loader__ = 42
+ self.assertEqual(42, loader.load_module('blah').__loader__)
+
+
class ResolveNameTests(unittest.TestCase):
"""Tests importlib.util.resolve_name()."""
@@ -195,14 +226,5 @@ class ResolveNameTests(unittest.TestCase):
util.resolve_name('..bacon', 'spam')
-def test_main():
- from test import support
- support.run_unittest(
- ModuleForLoaderTests,
- SetPackageTests,
- ResolveNameTests
- )
-
-
if __name__ == '__main__':
- test_main()
+ unittest.main()
diff --git a/Lib/test/test_inspect.py b/Lib/test/test_inspect.py
index 6e3f04e..66ffe91 100644
--- a/Lib/test/test_inspect.py
+++ b/Lib/test/test_inspect.py
@@ -401,14 +401,14 @@ class TestBuggyCases(GetSourceBase):
unicodedata.__file__[-4:] in (".pyc", ".pyo"),
"unicodedata is not an external binary module")
def test_findsource_binary(self):
- self.assertRaises(IOError, inspect.getsource, unicodedata)
- self.assertRaises(IOError, inspect.findsource, unicodedata)
+ self.assertRaises(OSError, inspect.getsource, unicodedata)
+ self.assertRaises(OSError, inspect.findsource, unicodedata)
def test_findsource_code_in_linecache(self):
lines = ["x=1"]
co = compile(lines[0], "_dynamically_created_file", "exec")
- self.assertRaises(IOError, inspect.findsource, co)
- self.assertRaises(IOError, inspect.getsource, co)
+ self.assertRaises(OSError, inspect.findsource, co)
+ self.assertRaises(OSError, inspect.getsource, co)
linecache.cache[co.co_filename] = (1, None, lines, co.co_filename)
try:
self.assertEqual(inspect.findsource(co), (lines,0))
diff --git a/Lib/test/test_int.py b/Lib/test/test_int.py
index c35a42f..afc9169 100644
--- a/Lib/test/test_int.py
+++ b/Lib/test/test_int.py
@@ -236,6 +236,47 @@ class IntTestCases(unittest.TestCase):
self.assertRaises(TypeError, int, base=10)
self.assertRaises(TypeError, int, base=0)
+ def test_int_base_limits(self):
+ """Testing the supported limits of the int() base parameter."""
+ self.assertEqual(int('0', 5), 0)
+ with self.assertRaises(ValueError):
+ int('0', 1)
+ with self.assertRaises(ValueError):
+ int('0', 37)
+ with self.assertRaises(ValueError):
+ int('0', -909) # An old magic value base from Python 2.
+ with self.assertRaises(ValueError):
+ int('0', base=0-(2**234))
+ with self.assertRaises(ValueError):
+ int('0', base=2**234)
+ # Bases 2 through 36 are supported.
+ for base in range(2,37):
+ self.assertEqual(int('0', base=base), 0)
+
+ def test_int_base_bad_types(self):
+ """Not integer types are not valid bases; issue16772."""
+ with self.assertRaises(TypeError):
+ int('0', 5.5)
+ with self.assertRaises(TypeError):
+ int('0', 5.0)
+
+ def test_int_base_indexable(self):
+ class MyIndexable(object):
+ def __init__(self, value):
+ self.value = value
+ def __index__(self):
+ return self.value
+
+ # Check out of range bases.
+ for base in 2**100, -2**100, 1, 37:
+ with self.assertRaises(ValueError):
+ int('43', base)
+
+ # Check in-range bases.
+ self.assertEqual(int('101', base=MyIndexable(2)), 5)
+ self.assertEqual(int('101', base=MyIndexable(10)), 101)
+ self.assertEqual(int('101', base=MyIndexable(36)), 1 + 36**2)
+
def test_non_numeric_input_types(self):
# Test possible non-numeric types for the argument x, including
# subclasses of the explicitly documented accepted types.
diff --git a/Lib/test/test_io.py b/Lib/test/test_io.py
index 95b4d80..e5474f0 100644
--- a/Lib/test/test_io.py
+++ b/Lib/test/test_io.py
@@ -165,7 +165,7 @@ class CloseFailureIO(MockRawIO):
def close(self):
if not self.closed:
self.closed = 1
- raise IOError
+ raise OSError
class CCloseFailureIO(CloseFailureIO, io.RawIOBase):
pass
@@ -601,9 +601,9 @@ class IOTest(unittest.TestCase):
def test_flush_error_on_close(self):
f = self.open(support.TESTFN, "wb", buffering=0)
def bad_flush():
- raise IOError()
+ raise OSError()
f.flush = bad_flush
- self.assertRaises(IOError, f.close) # exception not swallowed
+ self.assertRaises(OSError, f.close) # exception not swallowed
self.assertTrue(f.closed)
def test_multi_close(self):
@@ -762,7 +762,7 @@ class CommonBufferedTests:
if s:
# The destructor *may* have printed an unraisable error, check it
self.assertEqual(len(s.splitlines()), 1)
- self.assertTrue(s.startswith("Exception IOError: "), s)
+ self.assertTrue(s.startswith("Exception OSError: "), s)
self.assertTrue(s.endswith(" ignored"), s)
def test_repr(self):
@@ -778,22 +778,22 @@ class CommonBufferedTests:
def test_flush_error_on_close(self):
raw = self.MockRawIO()
def bad_flush():
- raise IOError()
+ raise OSError()
raw.flush = bad_flush
b = self.tp(raw)
- self.assertRaises(IOError, b.close) # exception not swallowed
+ self.assertRaises(OSError, b.close) # exception not swallowed
self.assertTrue(b.closed)
def test_close_error_on_close(self):
raw = self.MockRawIO()
def bad_flush():
- raise IOError('flush')
+ raise OSError('flush')
def bad_close():
- raise IOError('close')
+ raise OSError('close')
raw.close = bad_close
b = self.tp(raw)
b.flush = bad_flush
- with self.assertRaises(IOError) as err: # exception not swallowed
+ with self.assertRaises(OSError) as err: # exception not swallowed
b.close()
self.assertEqual(err.exception.args, ('close',))
self.assertEqual(err.exception.__context__.args, ('flush',))
@@ -833,6 +833,14 @@ class SizeofTest:
bufio = self.tp(rawio, buffer_size=bufsize2)
self.assertEqual(sys.getsizeof(bufio), size + bufsize2)
+ @support.cpython_only
+ def test_buffer_freeing(self):
+ bufsize = 4096
+ rawio = self.MockRawIO()
+ bufio = self.tp(rawio, buffer_size=bufsize)
+ size = sys.getsizeof(bufio) - bufsize
+ bufio.close()
+ self.assertEqual(sys.getsizeof(bufio), size)
class BufferedReaderTest(unittest.TestCase, CommonBufferedTests):
read_mode = "rb"
@@ -1007,8 +1015,8 @@ class BufferedReaderTest(unittest.TestCase, CommonBufferedTests):
def test_misbehaved_io(self):
rawio = self.MisbehavedRawIO((b"abc", b"d", b"efg"))
bufio = self.tp(rawio)
- self.assertRaises(IOError, bufio.seek, 0)
- self.assertRaises(IOError, bufio.tell)
+ self.assertRaises(OSError, bufio.seek, 0)
+ self.assertRaises(OSError, bufio.tell)
def test_no_extraneous_read(self):
# Issue #9550; when the raw IO object has satisfied the read request,
@@ -1059,7 +1067,7 @@ class CBufferedReaderTest(BufferedReaderTest, SizeofTest):
bufio = self.tp(rawio)
# _pyio.BufferedReader seems to implement reading different, so that
# checking this is not so easy.
- self.assertRaises(IOError, bufio.read, 10)
+ self.assertRaises(OSError, bufio.read, 10)
def test_garbage_collection(self):
# C BufferedReader objects are collected.
@@ -1312,9 +1320,9 @@ class BufferedWriterTest(unittest.TestCase, CommonBufferedTests):
def test_misbehaved_io(self):
rawio = self.MisbehavedRawIO()
bufio = self.tp(rawio, 5)
- self.assertRaises(IOError, bufio.seek, 0)
- self.assertRaises(IOError, bufio.tell)
- self.assertRaises(IOError, bufio.write, b"abcdef")
+ self.assertRaises(OSError, bufio.seek, 0)
+ self.assertRaises(OSError, bufio.tell)
+ self.assertRaises(OSError, bufio.write, b"abcdef")
def test_max_buffer_size_removal(self):
with self.assertRaises(TypeError):
@@ -1323,11 +1331,11 @@ class BufferedWriterTest(unittest.TestCase, CommonBufferedTests):
def test_write_error_on_close(self):
raw = self.MockRawIO()
def bad_write(b):
- raise IOError()
+ raise OSError()
raw.write = bad_write
b = self.tp(raw)
b.write(b'spam')
- self.assertRaises(IOError, b.close) # exception not swallowed
+ self.assertRaises(OSError, b.close) # exception not swallowed
self.assertTrue(b.closed)
@@ -1397,14 +1405,14 @@ class BufferedRWPairTest(unittest.TestCase):
def readable(self):
return False
- self.assertRaises(IOError, self.tp, NotReadable(), self.MockRawIO())
+ self.assertRaises(OSError, self.tp, NotReadable(), self.MockRawIO())
def test_constructor_with_not_writeable(self):
class NotWriteable(MockRawIO):
def writable(self):
return False
- self.assertRaises(IOError, self.tp, self.MockRawIO(), NotWriteable())
+ self.assertRaises(OSError, self.tp, self.MockRawIO(), NotWriteable())
def test_read(self):
pair = self.tp(self.BytesIO(b"abcdef"), self.MockRawIO())
@@ -2165,7 +2173,7 @@ class TextIOWrapperTest(unittest.TestCase):
if s:
# The destructor *may* have printed an unraisable error, check it
self.assertEqual(len(s.splitlines()), 1)
- self.assertTrue(s.startswith("Exception IOError: "), s)
+ self.assertTrue(s.startswith("Exception OSError: "), s)
self.assertTrue(s.endswith(" ignored"), s)
# Systematic tests of the text I/O API
@@ -2237,7 +2245,7 @@ class TextIOWrapperTest(unittest.TestCase):
f.seek(0)
for line in f:
self.assertEqual(line, "\xff\n")
- self.assertRaises(IOError, f.tell)
+ self.assertRaises(OSError, f.tell)
self.assertEqual(f.tell(), p2)
f.close()
@@ -2341,7 +2349,7 @@ class TextIOWrapperTest(unittest.TestCase):
def readable(self):
return False
txt = self.TextIOWrapper(UnReadable())
- self.assertRaises(IOError, txt.read)
+ self.assertRaises(OSError, txt.read)
def test_read_one_by_one(self):
txt = self.TextIOWrapper(self.BytesIO(b"AA\r\nBB"))
@@ -2516,9 +2524,9 @@ class TextIOWrapperTest(unittest.TestCase):
def test_flush_error_on_close(self):
txt = self.TextIOWrapper(self.BytesIO(self.testdata), encoding="ascii")
def bad_flush():
- raise IOError()
+ raise OSError()
txt.flush = bad_flush
- self.assertRaises(IOError, txt.close) # exception not swallowed
+ self.assertRaises(OSError, txt.close) # exception not swallowed
self.assertTrue(txt.closed)
def test_multi_close(self):
@@ -2896,7 +2904,7 @@ class MiscIOTest(unittest.TestCase):
for fd in fds:
try:
os.close(fd)
- except EnvironmentError as e:
+ except OSError as e:
if e.errno != errno.EBADF:
raise
self.addCleanup(cleanup_fds)
@@ -3075,7 +3083,7 @@ class SignalsTest(unittest.TestCase):
# buffer, and block again.
try:
wio.close()
- except IOError as e:
+ except OSError as e:
if e.errno != errno.EBADF:
raise
@@ -3203,7 +3211,7 @@ class SignalsTest(unittest.TestCase):
# buffer, and could block (in case of failure).
try:
wio.close()
- except IOError as e:
+ except OSError as e:
if e.errno != errno.EBADF:
raise
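
The wholesale IOError/EnvironmentError/socket.error to OSError replacements in this file (and the ones below) lean on the PEP 3151 hierarchy, where the old names are plain aliases:

    import socket

    print(IOError is OSError)            # True
    print(EnvironmentError is OSError)   # True
    print(socket.error is OSError)       # True

    try:
        open('/no/such/file')
    except OSError as exc:               # also catches what used to be IOError
        print(exc.errno, exc.strerror)
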
diff --git a/Lib/test/test_ioctl.py b/Lib/test/test_ioctl.py
index 531c9af..7eb324a 100644
--- a/Lib/test/test_ioctl.py
+++ b/Lib/test/test_ioctl.py
@@ -8,7 +8,7 @@ get_attribute(termios, 'TIOCGPGRP') #Can't run tests without this feature
try:
tty = open("/dev/tty", "rb")
-except IOError:
+except OSError:
raise unittest.SkipTest("Unable to open /dev/tty")
else:
# Skip if another process is in foreground
diff --git a/Lib/test/test_iterlen.py b/Lib/test/test_iterlen.py
index 7469a31..57f7101 100644
--- a/Lib/test/test_iterlen.py
+++ b/Lib/test/test_iterlen.py
@@ -45,31 +45,21 @@ import unittest
from test import support
from itertools import repeat
from collections import deque
-from builtins import len as _len
+from operator import length_hint
n = 10
-def len(obj):
- try:
- return _len(obj)
- except TypeError:
- try:
- # note: this is an internal undocumented API,
- # don't rely on it in your own programs
- return obj.__length_hint__()
- except AttributeError:
- raise TypeError
class TestInvariantWithoutMutations(unittest.TestCase):
def test_invariant(self):
it = self.it
for i in reversed(range(1, n+1)):
- self.assertEqual(len(it), i)
+ self.assertEqual(length_hint(it), i)
next(it)
- self.assertEqual(len(it), 0)
+ self.assertEqual(length_hint(it), 0)
self.assertRaises(StopIteration, next, it)
- self.assertEqual(len(it), 0)
+ self.assertEqual(length_hint(it), 0)
class TestTemporarilyImmutable(TestInvariantWithoutMutations):
@@ -78,12 +68,12 @@ class TestTemporarilyImmutable(TestInvariantWithoutMutations):
# length immutability during iteration
it = self.it
- self.assertEqual(len(it), n)
+ self.assertEqual(length_hint(it), n)
next(it)
- self.assertEqual(len(it), n-1)
+ self.assertEqual(length_hint(it), n-1)
self.mutate()
self.assertRaises(RuntimeError, next, it)
- self.assertEqual(len(it), 0)
+ self.assertEqual(length_hint(it), 0)
## ------- Concrete Type Tests -------
@@ -92,10 +82,6 @@ class TestRepeat(TestInvariantWithoutMutations):
def setUp(self):
self.it = repeat(None, n)
- def test_no_len_for_infinite_repeat(self):
- # The repeat() object can also be infinite
- self.assertRaises(TypeError, len, repeat(None))
-
class TestXrange(TestInvariantWithoutMutations):
def setUp(self):
@@ -167,14 +153,15 @@ class TestList(TestInvariantWithoutMutations):
it = iter(d)
next(it)
next(it)
- self.assertEqual(len(it), n-2)
+ self.assertEqual(length_hint(it), n - 2)
d.append(n)
- self.assertEqual(len(it), n-1) # grow with append
+ self.assertEqual(length_hint(it), n - 1) # grow with append
d[1:] = []
- self.assertEqual(len(it), 0)
+ self.assertEqual(length_hint(it), 0)
self.assertEqual(list(it), [])
d.extend(range(20))
- self.assertEqual(len(it), 0)
+ self.assertEqual(length_hint(it), 0)
+
class TestListReversed(TestInvariantWithoutMutations):
@@ -186,32 +173,41 @@ class TestListReversed(TestInvariantWithoutMutations):
it = reversed(d)
next(it)
next(it)
- self.assertEqual(len(it), n-2)
+ self.assertEqual(length_hint(it), n - 2)
d.append(n)
- self.assertEqual(len(it), n-2) # ignore append
+ self.assertEqual(length_hint(it), n - 2) # ignore append
d[1:] = []
- self.assertEqual(len(it), 0)
+ self.assertEqual(length_hint(it), 0)
self.assertEqual(list(it), []) # confirm invariant
d.extend(range(20))
- self.assertEqual(len(it), 0)
+ self.assertEqual(length_hint(it), 0)
## -- Check to make sure exceptions are not suppressed by __length_hint__()
class BadLen(object):
- def __iter__(self): return iter(range(10))
+ def __iter__(self):
+ return iter(range(10))
+
def __len__(self):
raise RuntimeError('hello')
+
class BadLengthHint(object):
- def __iter__(self): return iter(range(10))
+ def __iter__(self):
+ return iter(range(10))
+
def __length_hint__(self):
raise RuntimeError('hello')
+
class NoneLengthHint(object):
- def __iter__(self): return iter(range(10))
+ def __iter__(self):
+ return iter(range(10))
+
def __length_hint__(self):
- return None
+ return NotImplemented
+
class TestLengthHintExceptions(unittest.TestCase):
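
The rewritten test_iterlen leans on operator.length_hint() instead of a local len() wrapper around the undocumented __length_hint__ protocol. A short sketch of the semantics the updated assertions rely on (fall-through to real len(), the default value, and NotImplemented):

from itertools import repeat
from operator import length_hint

print(length_hint([1, 2, 3]))           # 3  -- len() is used when it exists
print(length_hint(iter(range(10))))     # 10 -- falls back to __length_hint__()
print(length_hint(repeat(None), 12))    # 12 -- infinite repeat: the default wins

class Hinted:
    def __iter__(self):
        return iter(())
    def __length_hint__(self):
        return NotImplemented           # also routes to the default

print(length_hint(Hinted(), 7))         # 7
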
diff --git a/Lib/test/test_itertools.py b/Lib/test/test_itertools.py
index 53926a9..fdf7984 100644
--- a/Lib/test/test_itertools.py
+++ b/Lib/test/test_itertools.py
@@ -1729,9 +1729,8 @@ class TestVariousIteratorArgs(unittest.TestCase):
class LengthTransparency(unittest.TestCase):
def test_repeat(self):
- from test.test_iterlen import len
- self.assertEqual(len(repeat(None, 50)), 50)
- self.assertRaises(TypeError, len, repeat(None))
+ self.assertEqual(operator.length_hint(repeat(None, 50)), 50)
+ self.assertEqual(operator.length_hint(repeat(None), 12), 12)
class RegressionTests(unittest.TestCase):
diff --git a/Lib/test/test_keywordonlyarg.py b/Lib/test/test_keywordonlyarg.py
index 0503a7f..e4a44c1 100644
--- a/Lib/test/test_keywordonlyarg.py
+++ b/Lib/test/test_keywordonlyarg.py
@@ -176,6 +176,18 @@ class KeywordOnlyArgTestCase(unittest.TestCase):
return __a
self.assertEqual(X().f(), 42)
+ def test_default_evaluation_order(self):
+ # See issue 16967
+ a = 42
+ with self.assertRaises(NameError) as err:
+ def f(v=a, x=b, *, y=c, z=d):
+ pass
+ self.assertEqual(str(err.exception), "name 'b' is not defined")
+ with self.assertRaises(NameError) as err:
+ f = lambda v=a, x=b, *, y=c, z=d: None
+ self.assertEqual(str(err.exception), "name 'b' is not defined")
+
+
def test_main():
run_unittest(KeywordOnlyArgTestCase)
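
The new test_default_evaluation_order case (issue 16967) pins down that default expressions are evaluated left to right when the def (or lambda) executes, positional defaults before keyword-only ones, so the first undefined name is the one reported. For instance:

a = 42

try:
    # b, c and d are all undefined; evaluation stops at the first of them,
    # so the NameError names 'b' rather than 'c' or 'd'.
    def f(v=a, x=b, *, y=c, z=d):
        pass
except NameError as exc:
    print(exc)    # name 'b' is not defined
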
diff --git a/Lib/test/test_kqueue.py b/Lib/test/test_kqueue.py
index 4e93013..6312ddd 100644
--- a/Lib/test/test_kqueue.py
+++ b/Lib/test/test_kqueue.py
@@ -94,7 +94,7 @@ class TestKQueue(unittest.TestCase):
client.setblocking(False)
try:
client.connect(('127.0.0.1', serverSocket.getsockname()[1]))
- except socket.error as e:
+ except OSError as e:
self.assertEqual(e.args[0], errno.EINPROGRESS)
else:
#raise AssertionError("Connect should have raised EINPROGRESS")
diff --git a/Lib/test/test_largefile.py b/Lib/test/test_largefile.py
index 1c6297a..7502dc3 100644
--- a/Lib/test/test_largefile.py
+++ b/Lib/test/test_largefile.py
@@ -14,7 +14,7 @@ try:
import signal
# The default handler for SIGXFSZ is to abort the process.
# By ignoring it, system calls exceeding the file size resource
- # limit will raise IOError instead of crashing the interpreter.
+ # limit will raise OSError instead of crashing the interpreter.
oldhandler = signal.signal(signal.SIGXFSZ, signal.SIG_IGN)
except (ImportError, AttributeError):
pass
@@ -162,7 +162,7 @@ def test_main():
# flush, too!
f.write(b'x')
f.flush()
- except (IOError, OverflowError):
+ except (OSError, OverflowError):
f.close()
unlink(TESTFN)
raise unittest.SkipTest("filesystem does not have largefile support")
diff --git a/Lib/test/test_logging.py b/Lib/test/test_logging.py
index 2ff8c91..11e812d 100644
--- a/Lib/test/test_logging.py
+++ b/Lib/test/test_logging.py
@@ -26,6 +26,7 @@ import logging.handlers
import logging.config
import codecs
+import configparser
import datetime
import pickle
import io
@@ -81,7 +82,7 @@ class BaseTest(unittest.TestCase):
"""Base class for logging tests."""
log_format = "%(name)s -> %(levelname)s: %(message)s"
- expected_log_pat = r"^([\w.]+) -> ([\w]+): ([\d]+)$"
+ expected_log_pat = r"^([\w.]+) -> (\w+): (\d+)$"
message_num = 0
def setUp(self):
@@ -150,14 +151,17 @@ class BaseTest(unittest.TestCase):
finally:
logging._releaseLock()
- def assert_log_lines(self, expected_values, stream=None):
+ def assert_log_lines(self, expected_values, stream=None, pat=None):
"""Match the collected log lines against the regular expression
self.expected_log_pat, and compare the extracted group values to
the expected_values list of tuples."""
stream = stream or self.stream
- pat = re.compile(self.expected_log_pat)
+ pat = re.compile(pat or self.expected_log_pat)
try:
- stream.reset()
+ if hasattr(stream, 'reset'):
+ stream.reset()
+ elif hasattr(stream, 'seek'):
+ stream.seek(0)
actual_lines = stream.readlines()
except AttributeError:
# StringIO.StringIO lacks a reset() method.
@@ -435,7 +439,7 @@ class CustomLevelsAndFiltersTest(BaseTest):
"""Test various filtering possibilities with custom logging levels."""
# Skip the logger name group.
- expected_log_pat = r"^[\w.]+ -> ([\w]+): ([\d]+)$"
+ expected_log_pat = r"^[\w.]+ -> (\w+): (\d+)$"
def setUp(self):
BaseTest.setUp(self)
@@ -565,7 +569,7 @@ class HandlerTest(BaseTest):
self.assertEqual(h.facility, h.LOG_USER)
self.assertTrue(h.unixsocket)
h.close()
- except socket.error: # syslogd might not be available
+ except OSError: # syslogd might not be available
pass
for method in ('GET', 'POST', 'PUT'):
if method == 'PUT':
@@ -675,7 +679,7 @@ if threading:
self.num_bytes = 0
try:
self.peer = conn.getpeername()
- except socket.error as err:
+ except OSError as err:
# a race condition may occur if the other end is closing
# before we can get the peername
self.close()
@@ -769,7 +773,7 @@ if threading:
"""
try:
asyncore.loop(poll_interval, map=self.sockmap)
- except select.error:
+ except OSError:
# On FreeBSD 8, closing the server repeatably
# raises this error. We swallow it if the
# server has been closed.
@@ -876,7 +880,7 @@ if threading:
sock, addr = self.socket.accept()
if self.sslctx:
sock = self.sslctx.wrap_socket(sock, server_side=True)
- except socket.error as e:
+ except OSError as e:
# socket errors are silenced by the caller, print them here
sys.stderr.write("Got an error:\n%s\n" % e)
raise
@@ -942,7 +946,7 @@ if threading:
if data:
try:
super(DelegatingUDPRequestHandler, self).finish()
- except socket.error:
+ except OSError:
if not self.server._closed:
raise
@@ -996,7 +1000,7 @@ class MemoryHandlerTest(BaseTest):
"""Tests for the MemoryHandler."""
# Do not bother with a logger name group.
- expected_log_pat = r"^[\w.]+ -> ([\w]+): ([\d]+)$"
+ expected_log_pat = r"^[\w.]+ -> (\w+): (\d+)$"
def setUp(self):
BaseTest.setUp(self)
@@ -1049,7 +1053,7 @@ class ConfigFileTest(BaseTest):
"""Reading logging config from a .ini-style config file."""
- expected_log_pat = r"^([\w]+) \+\+ ([\w]+)$"
+ expected_log_pat = r"^(\w+) \+\+ (\w+)$"
# config0 is a standard configuration.
config0 = """
@@ -1297,6 +1301,24 @@ class ConfigFileTest(BaseTest):
# Original logger output is empty.
self.assert_log_lines([])
+ def test_config0_using_cp_ok(self):
+ # A simple config file which overrides the default settings.
+ with captured_stdout() as output:
+ file = io.StringIO(textwrap.dedent(self.config0))
+ cp = configparser.ConfigParser()
+ cp.read_file(file)
+ logging.config.fileConfig(cp)
+ logger = logging.getLogger()
+ # Won't output anything
+ logger.info(self.next_message())
+ # Outputs a message
+ logger.error(self.next_message())
+ self.assert_log_lines([
+ ('ERROR', '2'),
+ ], stream=output)
+ # Original logger output is empty.
+ self.assert_log_lines([])
+
def test_config1_ok(self, config=config1):
# A config file defining a sub-parser as well.
with captured_stdout() as output:
@@ -1449,16 +1471,19 @@ class SocketHandlerTest(BaseTest):
self.assertEqual(self.log_output, "spam\neggs\n")
def test_noserver(self):
+ # Avoid timing-related failures due to SocketHandler's own hard-wired
+ # one-second timeout on socket.create_connection() (issue #16264).
+ self.sock_hdlr.retryStart = 2.5
# Kill the server
self.server.stop(2.0)
- #The logging call should try to connect, which should fail
+ # The logging call should try to connect, which should fail
try:
raise RuntimeError('Deliberate mistake')
except RuntimeError:
self.root_logger.exception('Never sent')
self.root_logger.error('Never sent, either')
now = time.time()
- self.assertTrue(self.sock_hdlr.retryTime > now)
+ self.assertGreater(self.sock_hdlr.retryTime, now)
time.sleep(self.sock_hdlr.retryTime - now + 0.001)
self.root_logger.error('Nor this')
@@ -1784,7 +1809,7 @@ class WarningsTest(BaseTest):
logger.removeHandler(h)
s = stream.getvalue()
h.close()
- self.assertTrue(s.find("UserWarning: I'm warning you...\n") > 0)
+ self.assertGreater(s.find("UserWarning: I'm warning you...\n"), 0)
#See if an explicit file uses the original implementation
a_file = io.StringIO()
@@ -1822,7 +1847,7 @@ class ConfigDictTest(BaseTest):
"""Reading logging config from a dictionary."""
- expected_log_pat = r"^([\w]+) \+\+ ([\w]+)$"
+ expected_log_pat = r"^(\w+) \+\+ (\w+)$"
# config0 is a standard configuration.
config0 = {
@@ -2394,6 +2419,32 @@ class ConfigDictTest(BaseTest):
},
}
+ # As config0, but with properties
+ config14 = {
+ 'version': 1,
+ 'formatters': {
+ 'form1' : {
+ 'format' : '%(levelname)s ++ %(message)s',
+ },
+ },
+ 'handlers' : {
+ 'hand1' : {
+ 'class' : 'logging.StreamHandler',
+ 'formatter' : 'form1',
+ 'level' : 'NOTSET',
+ 'stream' : 'ext://sys.stdout',
+ '.': {
+ 'foo': 'bar',
+ 'terminator': '!\n',
+ }
+ },
+ },
+ 'root' : {
+ 'level' : 'WARNING',
+ 'handlers' : ['hand1'],
+ },
+ }
+
out_of_order = {
"version": 1,
"formatters": {
@@ -2660,11 +2711,20 @@ class ConfigDictTest(BaseTest):
def test_config13_failure(self):
self.assertRaises(Exception, self.apply_config, self.config13)
+ def test_config14_ok(self):
+ with captured_stdout() as output:
+ self.apply_config(self.config14)
+ h = logging._handlers['hand1']
+ self.assertEqual(h.foo, 'bar')
+ self.assertEqual(h.terminator, '!\n')
+ logging.warning('Exclamation')
+ self.assertTrue(output.getvalue().endswith('Exclamation!\n'))
+
@unittest.skipUnless(threading, 'listen() needs threading to work')
- def setup_via_listener(self, text):
+ def setup_via_listener(self, text, verify=None):
text = text.encode("utf-8")
# Ask for a randomly assigned port (by using port 0)
- t = logging.config.listen(0)
+ t = logging.config.listen(0, verify)
t.start()
t.ready.wait()
# Now get the port allocated
@@ -2724,6 +2784,69 @@ class ConfigDictTest(BaseTest):
# Original logger output is empty.
self.assert_log_lines([])
+ @unittest.skipUnless(threading, 'Threading required for this test.')
+ def test_listen_verify(self):
+
+ def verify_fail(stuff):
+ return None
+
+ def verify_reverse(stuff):
+ return stuff[::-1]
+
+ logger = logging.getLogger("compiler.parser")
+ to_send = textwrap.dedent(ConfigFileTest.config1)
+ # First, specify a verification function that will fail.
+ # We expect to see no output, since our configuration
+ # never took effect.
+ with captured_stdout() as output:
+ self.setup_via_listener(to_send, verify_fail)
+ # Both will output a message
+ logger.info(self.next_message())
+ logger.error(self.next_message())
+ self.assert_log_lines([], stream=output)
+ # Original logger output has the stuff we logged.
+ self.assert_log_lines([
+ ('INFO', '1'),
+ ('ERROR', '2'),
+ ], pat=r"^[\w.]+ -> (\w+): (\d+)$")
+
+ # Now, perform no verification. Our configuration
+ # should take effect.
+
+ with captured_stdout() as output:
+ self.setup_via_listener(to_send) # no verify callable specified
+ logger = logging.getLogger("compiler.parser")
+ # Both will output a message
+ logger.info(self.next_message())
+ logger.error(self.next_message())
+ self.assert_log_lines([
+ ('INFO', '3'),
+ ('ERROR', '4'),
+ ], stream=output)
+ # Original logger output still has the stuff we logged before.
+ self.assert_log_lines([
+ ('INFO', '1'),
+ ('ERROR', '2'),
+ ], pat=r"^[\w.]+ -> (\w+): (\d+)$")
+
+ # Now, perform verification which transforms the bytes.
+
+ with captured_stdout() as output:
+ self.setup_via_listener(to_send[::-1], verify_reverse)
+ logger = logging.getLogger("compiler.parser")
+ # Both will output a message
+ logger.info(self.next_message())
+ logger.error(self.next_message())
+ self.assert_log_lines([
+ ('INFO', '5'),
+ ('ERROR', '6'),
+ ], stream=output)
+ # Original logger output still has the stuff we logged before.
+ self.assert_log_lines([
+ ('INFO', '1'),
+ ('ERROR', '2'),
+ ], pat=r"^[\w.]+ -> (\w+): (\d+)$")
+
def test_out_of_order(self):
self.apply_config(self.out_of_order)
handler = logging.getLogger('mymodule').handlers[0]
@@ -2781,14 +2904,14 @@ class ChildLoggerTest(BaseTest):
l2 = logging.getLogger('def.ghi')
c1 = r.getChild('xyz')
c2 = r.getChild('uvw.xyz')
- self.assertTrue(c1 is logging.getLogger('xyz'))
- self.assertTrue(c2 is logging.getLogger('uvw.xyz'))
+ self.assertIs(c1, logging.getLogger('xyz'))
+ self.assertIs(c2, logging.getLogger('uvw.xyz'))
c1 = l1.getChild('def')
c2 = c1.getChild('ghi')
c3 = l1.getChild('def.ghi')
- self.assertTrue(c1 is logging.getLogger('abc.def'))
- self.assertTrue(c2 is logging.getLogger('abc.def.ghi'))
- self.assertTrue(c2 is c3)
+ self.assertIs(c1, logging.getLogger('abc.def'))
+ self.assertIs(c2, logging.getLogger('abc.def.ghi'))
+ self.assertIs(c2, c3)
class DerivedLogRecord(logging.LogRecord):
@@ -2831,7 +2954,7 @@ class LogRecordFactoryTest(BaseTest):
class QueueHandlerTest(BaseTest):
# Do not bother with a logger name group.
- expected_log_pat = r"^[\w.]+ -> ([\w]+): ([\d]+)$"
+ expected_log_pat = r"^[\w.]+ -> (\w+): (\d+)$"
def setUp(self):
BaseTest.setUp(self)
@@ -3125,13 +3248,13 @@ class ShutdownTest(BaseTest):
self.assertEqual('0 - release', self.called[-1])
def test_with_ioerror_in_acquire(self):
- self._test_with_failure_in_method('acquire', IOError)
+ self._test_with_failure_in_method('acquire', OSError)
def test_with_ioerror_in_flush(self):
- self._test_with_failure_in_method('flush', IOError)
+ self._test_with_failure_in_method('flush', OSError)
def test_with_ioerror_in_close(self):
- self._test_with_failure_in_method('close', IOError)
+ self._test_with_failure_in_method('close', OSError)
def test_with_valueerror_in_acquire(self):
self._test_with_failure_in_method('acquire', ValueError)
@@ -3338,6 +3461,12 @@ class BasicConfigTest(unittest.TestCase):
self.assertEqual(logging.root.level, self.original_logging_level)
def test_filename(self):
+
+ def cleanup(h1, h2, fn):
+ h1.close()
+ h2.close()
+ os.remove(fn)
+
logging.basicConfig(filename='test.log')
self.assertEqual(len(logging.root.handlers), 1)
@@ -3345,17 +3474,23 @@ class BasicConfigTest(unittest.TestCase):
self.assertIsInstance(handler, logging.FileHandler)
expected = logging.FileHandler('test.log', 'a')
- self.addCleanup(expected.close)
self.assertEqual(handler.stream.mode, expected.stream.mode)
self.assertEqual(handler.stream.name, expected.stream.name)
+ self.addCleanup(cleanup, handler, expected, 'test.log')
def test_filemode(self):
+
+ def cleanup(h1, h2, fn):
+ h1.close()
+ h2.close()
+ os.remove(fn)
+
logging.basicConfig(filename='test.log', filemode='wb')
handler = logging.root.handlers[0]
expected = logging.FileHandler('test.log', 'wb')
- self.addCleanup(expected.close)
self.assertEqual(handler.stream.mode, expected.stream.mode)
+ self.addCleanup(cleanup, handler, expected, 'test.log')
def test_stream(self):
stream = io.StringIO()
@@ -3869,7 +4004,7 @@ class NTEventLogHandlerTest(BaseTest):
h.handle(r)
h.close()
# Now see if the event is recorded
- self.assertTrue(num_recs < win32evtlog.GetNumberOfEventLogRecords(elh))
+ self.assertLess(num_recs, win32evtlog.GetNumberOfEventLogRecords(elh))
flags = win32evtlog.EVENTLOG_BACKWARDS_READ | \
win32evtlog.EVENTLOG_SEQUENTIAL_READ
found = False
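
test_listen_verify exercises the verify parameter added to logging.config.listen(): the callable receives the raw bytes read from the socket and returns either the bytes to apply (possibly transformed) or None to reject the configuration. A sketch of a verify callable that only accepts HMAC-signed payloads -- the signing scheme is an illustration, not part of the stdlib API:

import hashlib
import hmac
import logging.config

SECRET = b"shared-secret"      # illustrative key, not from the stdlib

def verify(data):
    """Return the config bytes to apply, or None to reject the request."""
    digest, _, payload = data.partition(b"\n")
    expected = hmac.new(SECRET, payload, hashlib.sha256).hexdigest().encode()
    return payload if hmac.compare_digest(digest, expected) else None

listener = logging.config.listen(port=9030, verify=verify)   # returns a Thread
listener.start()
# ... later: logging.config.stopListening(); listener.join()
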
diff --git a/Lib/test/test_mailbox.py b/Lib/test/test_mailbox.py
index 39e8643..6c679ce 100644
--- a/Lib/test/test_mailbox.py
+++ b/Lib/test/test_mailbox.py
@@ -596,7 +596,7 @@ class TestMaildir(TestMailbox, unittest.TestCase):
def setUp(self):
TestMailbox.setUp(self)
- if os.name in ('nt', 'os2') or sys.platform == 'cygwin':
+ if (os.name == 'nt') or (sys.platform == 'cygwin'):
self._box.colon = '!'
def assertMailboxEmpty(self):
diff --git a/Lib/test/test_marshal.py b/Lib/test/test_marshal.py
index 025b53c..c817caf 100644
--- a/Lib/test/test_marshal.py
+++ b/Lib/test/test_marshal.py
@@ -200,10 +200,14 @@ class BugsTestCase(unittest.TestCase):
except Exception:
pass
- def test_loads_recursion(self):
+ def test_loads_2x_code(self):
s = b'c' + (b'X' * 4*4) + b'{' * 2**20
self.assertRaises(ValueError, marshal.loads, s)
+ def test_loads_recursion(self):
+ s = b'c' + (b'X' * 4*5) + b'{' * 2**20
+ self.assertRaises(ValueError, marshal.loads, s)
+
def test_recursion_limit(self):
# Create a deeply nested structure.
head = last = []
@@ -279,6 +283,11 @@ class BugsTestCase(unittest.TestCase):
unicode_string = 'T'
self.assertRaises(TypeError, marshal.loads, unicode_string)
+ def _test_eof(self):
+ data = marshal.dumps(("hello", "dolly", None))
+ for i in range(len(data)):
+ self.assertRaises(EOFError, marshal.loads, data[0: i])
+
LARGE_SIZE = 2**31
pointer_size = 8 if sys.maxsize > 0xFFFFFFFF else 4
@@ -323,6 +332,122 @@ class LargeValuesTestCase(unittest.TestCase):
def test_bytearray(self, size):
self.check_unmarshallable(bytearray(size))
+def CollectObjectIDs(ids, obj):
+ """Collect object ids seen in a structure"""
+ if id(obj) in ids:
+ return
+ ids.add(id(obj))
+ if isinstance(obj, (list, tuple, set, frozenset)):
+ for e in obj:
+ CollectObjectIDs(ids, e)
+ elif isinstance(obj, dict):
+ for k, v in obj.items():
+ CollectObjectIDs(ids, k)
+ CollectObjectIDs(ids, v)
+ return len(ids)
+
+class InstancingTestCase(unittest.TestCase, HelperMixin):
+ intobj = 123321
+ floatobj = 1.2345
+ strobj = "abcde"*3
+ dictobj = {"hello":floatobj, "goodbye":floatobj, floatobj:"hello"}
+
+ def helper3(self, rsample, recursive=False, simple=False):
+ #we have two instances
+ sample = (rsample, rsample)
+
+ n0 = CollectObjectIDs(set(), sample)
+
+ s3 = marshal.dumps(sample, 3)
+ n3 = CollectObjectIDs(set(), marshal.loads(s3))
+
+ #same number of instances generated
+ self.assertEqual(n3, n0)
+
+ if not recursive:
+ #can compare with version 2
+ s2 = marshal.dumps(sample, 2)
+ n2 = CollectObjectIDs(set(), marshal.loads(s2))
+ #old format generated more instances
+ self.assertGreater(n2, n0)
+
+ #if complex objects are in there, old format is larger
+ if not simple:
+ self.assertGreater(len(s2), len(s3))
+ else:
+ self.assertGreaterEqual(len(s2), len(s3))
+
+ def testInt(self):
+ self.helper(self.intobj)
+ self.helper3(self.intobj, simple=True)
+
+ def testFloat(self):
+ self.helper(self.floatobj)
+ self.helper3(self.floatobj)
+
+ def testStr(self):
+ self.helper(self.strobj)
+ self.helper3(self.strobj)
+
+ def testDict(self):
+ self.helper(self.dictobj)
+ self.helper3(self.dictobj)
+
+ def testModule(self):
+ with open(__file__, "rb") as f:
+ code = f.read()
+ if __file__.endswith(".py"):
+ code = compile(code, __file__, "exec")
+ self.helper(code)
+ self.helper3(code)
+
+ def testRecursion(self):
+ d = dict(self.dictobj)
+ d["self"] = d
+ self.helper3(d, recursive=True)
+ l = [self.dictobj]
+ l.append(l)
+ self.helper3(l, recursive=True)
+
+class CompatibilityTestCase(unittest.TestCase):
+ def _test(self, version):
+ with open(__file__, "rb") as f:
+ code = f.read()
+ if __file__.endswith(".py"):
+ code = compile(code, __file__, "exec")
+ data = marshal.dumps(code, version)
+ marshal.loads(data)
+
+ def test0To3(self):
+ self._test(0)
+
+ def test1To3(self):
+ self._test(1)
+
+ def test2To3(self):
+ self._test(2)
+
+ def test3To3(self):
+ self._test(3)
+
+class InterningTestCase(unittest.TestCase, HelperMixin):
+ strobj = "this is an interned string"
+ strobj = sys.intern(strobj)
+
+ def testIntern(self):
+ s = marshal.loads(marshal.dumps(self.strobj))
+ self.assertEqual(s, self.strobj)
+ self.assertEqual(id(s), id(self.strobj))
+ s2 = sys.intern(s)
+ self.assertEqual(id(s2), id(s))
+
+ def testNoIntern(self):
+ s = marshal.loads(marshal.dumps(self.strobj, 2))
+ self.assertEqual(s, self.strobj)
+ self.assertNotEqual(id(s), id(self.strobj))
+ s2 = sys.intern(s)
+ self.assertNotEqual(id(s2), id(s))
+
def test_main():
support.run_unittest(IntTestCase,
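
The new InstancingTestCase checks that marshal format version 3 writes repeated references to an object only once, so a round trip preserves the number of distinct instances and usually produces smaller output than version 2. A small sketch of the difference the helper3() assertions are built on:

import marshal

s = "abcde" * 3
sample = (s, s)                    # two references to one string

v2 = marshal.dumps(sample, 2)
v3 = marshal.dumps(sample, 3)

a2, b2 = marshal.loads(v2)
a3, b3 = marshal.loads(v3)

print(a2 is b2)              # False -- version 2 stores the string twice
print(a3 is b3)              # True  -- version 3 shares a single instance
print(len(v2) > len(v3))     # True for non-trivial payloads like this one
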
diff --git a/Lib/test/test_memoryio.py b/Lib/test/test_memoryio.py
index e5db945..9e1b7c7 100644
--- a/Lib/test/test_memoryio.py
+++ b/Lib/test/test_memoryio.py
@@ -520,12 +520,12 @@ class TextIOTestMixin:
def test_relative_seek(self):
memio = self.ioclass()
- self.assertRaises(IOError, memio.seek, -1, 1)
- self.assertRaises(IOError, memio.seek, 3, 1)
- self.assertRaises(IOError, memio.seek, -3, 1)
- self.assertRaises(IOError, memio.seek, -1, 2)
- self.assertRaises(IOError, memio.seek, 1, 1)
- self.assertRaises(IOError, memio.seek, 1, 2)
+ self.assertRaises(OSError, memio.seek, -1, 1)
+ self.assertRaises(OSError, memio.seek, 3, 1)
+ self.assertRaises(OSError, memio.seek, -3, 1)
+ self.assertRaises(OSError, memio.seek, -1, 2)
+ self.assertRaises(OSError, memio.seek, 1, 1)
+ self.assertRaises(OSError, memio.seek, 1, 2)
def test_textio_properties(self):
memio = self.ioclass()
diff --git a/Lib/test/test_mimetypes.py b/Lib/test/test_mimetypes.py
index 91da289..593fdb0 100644
--- a/Lib/test/test_mimetypes.py
+++ b/Lib/test/test_mimetypes.py
@@ -22,6 +22,8 @@ class MimeTypesTestCase(unittest.TestCase):
eq(self.db.guess_type("foo.tgz"), ("application/x-tar", "gzip"))
eq(self.db.guess_type("foo.tar.gz"), ("application/x-tar", "gzip"))
eq(self.db.guess_type("foo.tar.Z"), ("application/x-tar", "compress"))
+ eq(self.db.guess_type("foo.tar.bz2"), ("application/x-tar", "bzip2"))
+ eq(self.db.guess_type("foo.tar.xz"), ("application/x-tar", "xz"))
def test_data_urls(self):
eq = self.assertEqual
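
The added assertions extend guess_type()'s handling of compressed tarballs: the compression suffix is reported as the encoding, while the inner .tar fixes the MIME type. For example, on an interpreter whose mimetypes tables know bzip2 and xz:

import mimetypes

for name in ("foo.tar.gz", "foo.tar.bz2", "foo.tar.xz"):
    # guess_type() returns a (type, encoding) pair; the compression suffix
    # lands in the encoding slot, not in the MIME type itself.
    print(name, mimetypes.guess_type(name))

# foo.tar.gz  ('application/x-tar', 'gzip')
# foo.tar.bz2 ('application/x-tar', 'bzip2')
# foo.tar.xz  ('application/x-tar', 'xz')
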
diff --git a/Lib/test/test_mmap.py b/Lib/test/test_mmap.py
index d066368..505ffba 100644
--- a/Lib/test/test_mmap.py
+++ b/Lib/test/test_mmap.py
@@ -245,7 +245,7 @@ class MmapTests(unittest.TestCase):
def test_bad_file_desc(self):
# Try opening a bad file descriptor...
- self.assertRaises(mmap.error, mmap.mmap, -2, 4096)
+ self.assertRaises(OSError, mmap.mmap, -2, 4096)
def test_tougher_find(self):
# Do a tougher .find() test. SF bug 515943 pointed out that, in 2.2,
@@ -658,7 +658,7 @@ class MmapTests(unittest.TestCase):
m = mmap.mmap(f.fileno(), 0)
f.close()
try:
- m.resize(0) # will raise WindowsError
+ m.resize(0) # will raise OSError
except:
pass
try:
@@ -673,7 +673,7 @@ class MmapTests(unittest.TestCase):
# parameters to _get_osfhandle.
s = socket.socket()
try:
- with self.assertRaises(mmap.error):
+ with self.assertRaises(OSError):
m = mmap.mmap(s.fileno(), 10)
finally:
s.close()
@@ -684,11 +684,11 @@ class MmapTests(unittest.TestCase):
self.assertTrue(m.closed)
def test_context_manager_exception(self):
- # Test that the IOError gets passed through
+ # Test that the OSError gets passed through
with self.assertRaises(Exception) as exc:
with mmap.mmap(-1, 10) as m:
- raise IOError
- self.assertIsInstance(exc.exception, IOError,
+ raise OSError
+ self.assertIsInstance(exc.exception, OSError,
"wrong exception raised in context manager")
self.assertTrue(m.closed, "context manager failed")
@@ -709,7 +709,7 @@ class LargeMmapTests(unittest.TestCase):
f.seek(num_zeroes)
f.write(tail)
f.flush()
- except (IOError, OverflowError):
+ except (OSError, OverflowError):
f.close()
raise unittest.SkipTest("filesystem does not have largefile support")
return f
diff --git a/Lib/test/test_multiprocessing.py b/Lib/test/test_multiprocessing.py
index 3fb07f6..6cda4fa 100644
--- a/Lib/test/test_multiprocessing.py
+++ b/Lib/test/test_multiprocessing.py
@@ -19,6 +19,7 @@ import socket
import random
import logging
import struct
+import operator
import test.support
import test.script_helper
@@ -1617,6 +1618,18 @@ def mul(x, y):
class _TestPool(BaseTestCase):
+ @classmethod
+ def setUpClass(cls):
+ super().setUpClass()
+ cls.pool = cls.Pool(4)
+
+ @classmethod
+ def tearDownClass(cls):
+ cls.pool.terminate()
+ cls.pool.join()
+ cls.pool = None
+ super().tearDownClass()
+
def test_apply(self):
papply = self.pool.apply
self.assertEqual(papply(sqr, (5,)), sqr(5))
@@ -1708,15 +1721,6 @@ class _TestPool(BaseTestCase):
p.join()
def test_terminate(self):
- if self.TYPE == 'manager':
- # On Unix a forked process increfs each shared object to
- # which its parent process held a reference. If the
- # forked process gets terminated then there is likely to
- # be a reference leak. So to prevent
- # _TestZZZNumberOfObjects from failing we skip this test
- # when using a manager.
- return
-
result = self.pool.map_async(
time.sleep, [0.1 for i in range(10000)], chunksize=1
)
@@ -1744,7 +1748,6 @@ class _TestPool(BaseTestCase):
with multiprocessing.Pool(2) as p:
r = p.map_async(sqr, L)
self.assertEqual(r.get(), expected)
- print(p._state)
self.assertRaises(ValueError, p.map_async, sqr, L)
def raising():
@@ -1838,35 +1841,6 @@ class _TestPoolWorkerLifetime(BaseTestCase):
for (j, res) in enumerate(results):
self.assertEqual(res.get(), sqr(j))
-
-#
-# Test that manager has expected number of shared objects left
-#
-
-class _TestZZZNumberOfObjects(BaseTestCase):
- # Because test cases are sorted alphabetically, this one will get
- # run after all the other tests for the manager. It tests that
- # there have been no "reference leaks" for the manager's shared
- # objects. Note the comment in _TestPool.test_terminate().
-
- # If some other test using ManagerMixin.manager fails, then the
- # raised exception may keep alive a frame which holds a reference
- # to a managed object. This will cause test_number_of_objects to
- # also fail.
- ALLOWED_TYPES = ('manager',)
-
- def test_number_of_objects(self):
- EXPECTED_NUMBER = 1 # the pool object is still alive
- multiprocessing.active_children() # discard dead process objs
- gc.collect() # do garbage collection
- refs = self.manager._number_of_objects()
- debug_info = self.manager._debug_info()
- if refs != EXPECTED_NUMBER:
- print(self.manager._debug_info())
- print(debug_info)
-
- self.assertEqual(refs, EXPECTED_NUMBER)
-
#
# Test of creating a customized manager class
#
@@ -2044,7 +2018,7 @@ class _TestManagerRestart(BaseTestCase):
address=addr, authkey=authkey, serializer=SERIALIZER)
try:
manager.start()
- except IOError as e:
+ except OSError as e:
if e.errno != errno.EADDRINUSE:
raise
# Retry after some time, in case the old socket was lingering
@@ -2158,9 +2132,9 @@ class _TestConnection(BaseTestCase):
self.assertEqual(reader.writable, False)
self.assertEqual(writer.readable, False)
self.assertEqual(writer.writable, True)
- self.assertRaises(IOError, reader.send, 2)
- self.assertRaises(IOError, writer.recv)
- self.assertRaises(IOError, writer.poll)
+ self.assertRaises(OSError, reader.send, 2)
+ self.assertRaises(OSError, writer.recv)
+ self.assertRaises(OSError, writer.poll)
def test_spawn_close(self):
# We test that a pipe connection can be closed by parent
@@ -2322,8 +2296,8 @@ class _TestConnection(BaseTestCase):
if self.TYPE == 'processes':
self.assertTrue(a.closed)
self.assertTrue(b.closed)
- self.assertRaises(IOError, a.recv)
- self.assertRaises(IOError, b.recv)
+ self.assertRaises(OSError, a.recv)
+ self.assertRaises(OSError, b.recv)
class _TestListener(BaseTestCase):
@@ -2344,7 +2318,7 @@ class _TestListener(BaseTestCase):
self.assertEqual(d.recv(), 1729)
if self.TYPE == 'processes':
- self.assertRaises(IOError, l.accept)
+ self.assertRaises(OSError, l.accept)
class _TestListenerClient(BaseTestCase):
@@ -2935,27 +2909,18 @@ class TestInvalidHandle(unittest.TestCase):
def test_invalid_handles(self):
conn = multiprocessing.connection.Connection(44977608)
try:
- self.assertRaises((ValueError, IOError), conn.poll)
+ self.assertRaises((ValueError, OSError), conn.poll)
finally:
# Hack private attribute _handle to avoid printing an error
# in conn.__del__
conn._handle = None
- self.assertRaises((ValueError, IOError),
+ self.assertRaises((ValueError, OSError),
multiprocessing.connection.Connection, -1)
#
# Functions used to create test cases from the base ones in this module
#
-def get_attributes(Source, names):
- d = {}
- for name in names:
- obj = getattr(Source, name)
- if type(obj) == type(get_attributes):
- obj = staticmethod(obj)
- d[name] = obj
- return d
-
def create_test_cases(Mixin, type):
result = {}
glob = globals()
@@ -2968,10 +2933,10 @@ def create_test_cases(Mixin, type):
assert set(base.ALLOWED_TYPES) <= ALL_TYPES, set(base.ALLOWED_TYPES)
if type in base.ALLOWED_TYPES:
newname = 'With' + Type + name[1:]
- class Temp(base, unittest.TestCase, Mixin):
+ class Temp(base, Mixin, unittest.TestCase):
pass
result[newname] = Temp
- Temp.__name__ = newname
+ Temp.__name__ = Temp.__qualname__ = newname
Temp.__module__ = Mixin.__module__
return result
@@ -2982,12 +2947,24 @@ def create_test_cases(Mixin, type):
class ProcessesMixin(object):
TYPE = 'processes'
Process = multiprocessing.Process
- locals().update(get_attributes(multiprocessing, (
- 'Queue', 'Lock', 'RLock', 'Semaphore', 'BoundedSemaphore',
- 'Condition', 'Event', 'Barrier', 'Value', 'Array', 'RawValue',
- 'RawArray', 'current_process', 'active_children', 'Pipe',
- 'connection', 'JoinableQueue', 'Pool'
- )))
+ connection = multiprocessing.connection
+ current_process = staticmethod(multiprocessing.current_process)
+ active_children = staticmethod(multiprocessing.active_children)
+ Pool = staticmethod(multiprocessing.Pool)
+ Pipe = staticmethod(multiprocessing.Pipe)
+ Queue = staticmethod(multiprocessing.Queue)
+ JoinableQueue = staticmethod(multiprocessing.JoinableQueue)
+ Lock = staticmethod(multiprocessing.Lock)
+ RLock = staticmethod(multiprocessing.RLock)
+ Semaphore = staticmethod(multiprocessing.Semaphore)
+ BoundedSemaphore = staticmethod(multiprocessing.BoundedSemaphore)
+ Condition = staticmethod(multiprocessing.Condition)
+ Event = staticmethod(multiprocessing.Event)
+ Barrier = staticmethod(multiprocessing.Barrier)
+ Value = staticmethod(multiprocessing.Value)
+ Array = staticmethod(multiprocessing.Array)
+ RawValue = staticmethod(multiprocessing.RawValue)
+ RawArray = staticmethod(multiprocessing.RawArray)
testcases_processes = create_test_cases(ProcessesMixin, type='processes')
globals().update(testcases_processes)
@@ -2996,12 +2973,48 @@ globals().update(testcases_processes)
class ManagerMixin(object):
TYPE = 'manager'
Process = multiprocessing.Process
- manager = object.__new__(multiprocessing.managers.SyncManager)
- locals().update(get_attributes(manager, (
- 'Queue', 'Lock', 'RLock', 'Semaphore', 'BoundedSemaphore',
- 'Condition', 'Event', 'Barrier', 'Value', 'Array', 'list', 'dict',
- 'Namespace', 'JoinableQueue', 'Pool'
- )))
+ Queue = property(operator.attrgetter('manager.Queue'))
+ JoinableQueue = property(operator.attrgetter('manager.JoinableQueue'))
+ Lock = property(operator.attrgetter('manager.Lock'))
+ RLock = property(operator.attrgetter('manager.RLock'))
+ Semaphore = property(operator.attrgetter('manager.Semaphore'))
+ BoundedSemaphore = property(operator.attrgetter('manager.BoundedSemaphore'))
+ Condition = property(operator.attrgetter('manager.Condition'))
+ Event = property(operator.attrgetter('manager.Event'))
+ Barrier = property(operator.attrgetter('manager.Barrier'))
+ Value = property(operator.attrgetter('manager.Value'))
+ Array = property(operator.attrgetter('manager.Array'))
+ list = property(operator.attrgetter('manager.list'))
+ dict = property(operator.attrgetter('manager.dict'))
+ Namespace = property(operator.attrgetter('manager.Namespace'))
+
+ @classmethod
+ def Pool(cls, *args, **kwds):
+ return cls.manager.Pool(*args, **kwds)
+
+ @classmethod
+ def setUpClass(cls):
+ cls.manager = multiprocessing.Manager()
+
+ @classmethod
+ def tearDownClass(cls):
+ # only the manager process should be returned by active_children()
+ # but this can take a bit on slow machines, so wait a few seconds
+ # if there are other children too (see #17395)
+ t = 0.01
+ while len(multiprocessing.active_children()) > 1 and t < 5:
+ time.sleep(t)
+ t *= 2
+ gc.collect() # do garbage collection
+ if cls.manager._number_of_objects() != 0:
+ # This is not really an error since some tests do not
+ # ensure that all processes which hold a reference to a
+ # managed object have been joined.
+ print('Shared objects which still exist at manager shutdown:')
+ print(cls.manager._debug_info())
+ cls.manager.shutdown()
+ cls.manager.join()
+ cls.manager = None
testcases_manager = create_test_cases(ManagerMixin, type='manager')
globals().update(testcases_manager)
@@ -3010,16 +3023,27 @@ globals().update(testcases_manager)
class ThreadsMixin(object):
TYPE = 'threads'
Process = multiprocessing.dummy.Process
- locals().update(get_attributes(multiprocessing.dummy, (
- 'Queue', 'Lock', 'RLock', 'Semaphore', 'BoundedSemaphore',
- 'Condition', 'Event', 'Barrier', 'Value', 'Array', 'current_process',
- 'active_children', 'Pipe', 'connection', 'dict', 'list',
- 'Namespace', 'JoinableQueue', 'Pool'
- )))
+ connection = multiprocessing.dummy.connection
+ current_process = staticmethod(multiprocessing.dummy.current_process)
+ active_children = staticmethod(multiprocessing.dummy.active_children)
+ Pool = staticmethod(multiprocessing.Pool)
+ Pipe = staticmethod(multiprocessing.dummy.Pipe)
+ Queue = staticmethod(multiprocessing.dummy.Queue)
+ JoinableQueue = staticmethod(multiprocessing.dummy.JoinableQueue)
+ Lock = staticmethod(multiprocessing.dummy.Lock)
+ RLock = staticmethod(multiprocessing.dummy.RLock)
+ Semaphore = staticmethod(multiprocessing.dummy.Semaphore)
+ BoundedSemaphore = staticmethod(multiprocessing.dummy.BoundedSemaphore)
+ Condition = staticmethod(multiprocessing.dummy.Condition)
+ Event = staticmethod(multiprocessing.dummy.Event)
+ Barrier = staticmethod(multiprocessing.dummy.Barrier)
+ Value = staticmethod(multiprocessing.dummy.Value)
+ Array = staticmethod(multiprocessing.dummy.Array)
testcases_threads = create_test_cases(ThreadsMixin, type='threads')
globals().update(testcases_threads)
+
class OtherTest(unittest.TestCase):
# TODO: add more tests for deliver/answer challenge.
def test_deliver_challenge_auth_failure(self):
@@ -3452,12 +3476,6 @@ def test_main(run=None):
multiprocessing.get_logger().setLevel(LOG_LEVEL)
- ProcessesMixin.pool = multiprocessing.Pool(4)
- ThreadsMixin.pool = multiprocessing.dummy.Pool(4)
- ManagerMixin.manager.__init__()
- ManagerMixin.manager.start()
- ManagerMixin.pool = ManagerMixin.manager.Pool(4)
-
testcases = (
sorted(testcases_processes.values(), key=lambda tc:tc.__name__) +
sorted(testcases_threads.values(), key=lambda tc:tc.__name__) +
@@ -3467,18 +3485,7 @@ def test_main(run=None):
loadTestsFromTestCase = unittest.defaultTestLoader.loadTestsFromTestCase
suite = unittest.TestSuite(loadTestsFromTestCase(tc) for tc in testcases)
- try:
- run(suite)
- finally:
- ThreadsMixin.pool.terminate()
- ProcessesMixin.pool.terminate()
- ManagerMixin.pool.terminate()
- ManagerMixin.pool.join()
- ManagerMixin.manager.shutdown()
- ManagerMixin.manager.join()
- ThreadsMixin.pool.join()
- ProcessesMixin.pool.join()
- del ProcessesMixin.pool, ThreadsMixin.pool, ManagerMixin.pool
+ run(suite)
def main():
test_main(unittest.TextTestRunner(verbosity=2).run)
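
The rewritten mixins replace the get_attributes() helper with explicit staticmethods, and the manager flavour with properties built from operator.attrgetter('manager.Queue') and friends, so each attribute is resolved lazily through the cls.manager created in setUpClass(). A standalone sketch of that property-via-attrgetter pattern (the Manager/Tests names below are made up for illustration):

import operator

class Manager:
    def Queue(self):
        return "a queue obtained from the manager"

class ManagerMixin:
    # attrgetter accepts dotted paths, so this property re-resolves
    # self.manager.Queue on every access -- the manager itself can be
    # created later, e.g. in a setUpClass() hook.
    Queue = property(operator.attrgetter("manager.Queue"))

class Tests(ManagerMixin):
    manager = Manager()            # stands in for multiprocessing.Manager()

t = Tests()
print(t.Queue())                   # bound method found via t.manager
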
diff --git a/Lib/test/test_nntplib.py b/Lib/test/test_nntplib.py
index 19fb10a..7cf497a 100644
--- a/Lib/test/test_nntplib.py
+++ b/Lib/test/test_nntplib.py
@@ -264,7 +264,7 @@ class NetworkedNNTPTestsMixin:
return False
try:
server.help()
- except (socket.error, EOFError):
+ except (OSError, EOFError):
return False
return True
diff --git a/Lib/test/test_normalization.py b/Lib/test/test_normalization.py
index e3e2560..db28e1a 100644
--- a/Lib/test/test_normalization.py
+++ b/Lib/test/test_normalization.py
@@ -43,7 +43,7 @@ class NormalizationTest(unittest.TestCase):
try:
testdata = open_urlresource(TESTDATAURL, encoding="utf-8",
check=check_version)
- except (IOError, HTTPException):
+ except (OSError, HTTPException):
self.skipTest("Could not retrieve " + TESTDATAURL)
self.addCleanup(testdata.close)
for line in testdata:
diff --git a/Lib/test/test_openpty.py b/Lib/test/test_openpty.py
index e8175ff..8713d34 100644
--- a/Lib/test/test_openpty.py
+++ b/Lib/test/test_openpty.py
@@ -4,7 +4,7 @@ import os, unittest
from test.support import run_unittest
if not hasattr(os, "openpty"):
- raise unittest.SkipTest("No openpty() available.")
+ raise unittest.SkipTest("os.openpty() not available.")
class OpenptyTest(unittest.TestCase):
diff --git a/Lib/test/test_operator.py b/Lib/test/test_operator.py
index fa608b9..b445ee0 100644
--- a/Lib/test/test_operator.py
+++ b/Lib/test/test_operator.py
@@ -410,6 +410,31 @@ class OperatorTestCase(unittest.TestCase):
self.assertEqual(operator.__ixor__ (c, 5), "ixor")
self.assertEqual(operator.__iconcat__ (c, c), "iadd")
+ def test_length_hint(self):
+ class X(object):
+ def __init__(self, value):
+ self.value = value
+
+ def __length_hint__(self):
+ if type(self.value) is type:
+ raise self.value
+ else:
+ return self.value
+
+ self.assertEqual(operator.length_hint([], 2), 0)
+ self.assertEqual(operator.length_hint(iter([1, 2, 3])), 3)
+
+ self.assertEqual(operator.length_hint(X(2)), 2)
+ self.assertEqual(operator.length_hint(X(NotImplemented), 4), 4)
+ self.assertEqual(operator.length_hint(X(TypeError), 12), 12)
+ with self.assertRaises(TypeError):
+ operator.length_hint(X("abc"))
+ with self.assertRaises(ValueError):
+ operator.length_hint(X(-2))
+ with self.assertRaises(LookupError):
+ operator.length_hint(X(LookupError))
+
+
def test_main(verbose=None):
import sys
test_classes = (
diff --git a/Lib/test/test_os.py b/Lib/test/test_os.py
index 184c9ae..32a67e5 100644
--- a/Lib/test/test_os.py
+++ b/Lib/test/test_os.py
@@ -471,9 +471,9 @@ class StatAttributeTests(unittest.TestCase):
# Verify that an open file can be stat'ed
try:
os.stat(r"c:\pagefile.sys")
- except WindowsError as e:
- if e.errno == 2: # file does not exist; cannot run test
- return
+ except FileNotFoundError:
+ pass # file does not exist; cannot run test
+ except OSError as e:
self.fail("Could not stat pagefile.sys")
@unittest.skipUnless(hasattr(os, "pipe"), "requires os.pipe()")
@@ -1090,27 +1090,27 @@ class ExecTests(unittest.TestCase):
class Win32ErrorTests(unittest.TestCase):
def test_rename(self):
- self.assertRaises(WindowsError, os.rename, support.TESTFN, support.TESTFN+".bak")
+ self.assertRaises(OSError, os.rename, support.TESTFN, support.TESTFN+".bak")
def test_remove(self):
- self.assertRaises(WindowsError, os.remove, support.TESTFN)
+ self.assertRaises(OSError, os.remove, support.TESTFN)
def test_chdir(self):
- self.assertRaises(WindowsError, os.chdir, support.TESTFN)
+ self.assertRaises(OSError, os.chdir, support.TESTFN)
def test_mkdir(self):
f = open(support.TESTFN, "w")
try:
- self.assertRaises(WindowsError, os.mkdir, support.TESTFN)
+ self.assertRaises(OSError, os.mkdir, support.TESTFN)
finally:
f.close()
os.unlink(support.TESTFN)
def test_utime(self):
- self.assertRaises(WindowsError, os.utime, support.TESTFN, None)
+ self.assertRaises(OSError, os.utime, support.TESTFN, None)
def test_chmod(self):
- self.assertRaises(WindowsError, os.chmod, support.TESTFN, 0)
+ self.assertRaises(OSError, os.chmod, support.TESTFN, 0)
class TestInvalidFD(unittest.TestCase):
singles = ["fchdir", "dup", "fdopen", "fdatasync", "fstat",
@@ -1238,31 +1238,31 @@ if sys.platform != 'win32':
if hasattr(os, 'setuid'):
def test_setuid(self):
if os.getuid() != 0:
- self.assertRaises(os.error, os.setuid, 0)
+ self.assertRaises(OSError, os.setuid, 0)
self.assertRaises(OverflowError, os.setuid, 1<<32)
if hasattr(os, 'setgid'):
def test_setgid(self):
if os.getuid() != 0 and not HAVE_WHEEL_GROUP:
- self.assertRaises(os.error, os.setgid, 0)
+ self.assertRaises(OSError, os.setgid, 0)
self.assertRaises(OverflowError, os.setgid, 1<<32)
if hasattr(os, 'seteuid'):
def test_seteuid(self):
if os.getuid() != 0:
- self.assertRaises(os.error, os.seteuid, 0)
+ self.assertRaises(OSError, os.seteuid, 0)
self.assertRaises(OverflowError, os.seteuid, 1<<32)
if hasattr(os, 'setegid'):
def test_setegid(self):
if os.getuid() != 0 and not HAVE_WHEEL_GROUP:
- self.assertRaises(os.error, os.setegid, 0)
+ self.assertRaises(OSError, os.setegid, 0)
self.assertRaises(OverflowError, os.setegid, 1<<32)
if hasattr(os, 'setreuid'):
def test_setreuid(self):
if os.getuid() != 0:
- self.assertRaises(os.error, os.setreuid, 0, 0)
+ self.assertRaises(OSError, os.setreuid, 0, 0)
self.assertRaises(OverflowError, os.setreuid, 1<<32, 0)
self.assertRaises(OverflowError, os.setreuid, 0, 1<<32)
@@ -1276,7 +1276,7 @@ if sys.platform != 'win32':
if hasattr(os, 'setregid'):
def test_setregid(self):
if os.getuid() != 0 and not HAVE_WHEEL_GROUP:
- self.assertRaises(os.error, os.setregid, 0, 0)
+ self.assertRaises(OSError, os.setregid, 0, 0)
self.assertRaises(OverflowError, os.setregid, 1<<32, 0)
self.assertRaises(OverflowError, os.setregid, 0, 1<<32)
@@ -2107,6 +2107,103 @@ class TermsizeTests(unittest.TestCase):
self.assertEqual(expected, actual)
+class OSErrorTests(unittest.TestCase):
+ def setUp(self):
+ class Str(str):
+ pass
+
+ self.bytes_filenames = []
+ self.unicode_filenames = []
+ if support.TESTFN_UNENCODABLE is not None:
+ decoded = support.TESTFN_UNENCODABLE
+ else:
+ decoded = support.TESTFN
+ self.unicode_filenames.append(decoded)
+ self.unicode_filenames.append(Str(decoded))
+ if support.TESTFN_UNDECODABLE is not None:
+ encoded = support.TESTFN_UNDECODABLE
+ else:
+ encoded = os.fsencode(support.TESTFN)
+ self.bytes_filenames.append(encoded)
+ self.bytes_filenames.append(memoryview(encoded))
+
+ self.filenames = self.bytes_filenames + self.unicode_filenames
+
+ def test_oserror_filename(self):
+ funcs = [
+ (self.filenames, os.chdir,),
+ (self.filenames, os.chmod, 0o777),
+ (self.filenames, os.lstat,),
+ (self.filenames, os.open, os.O_RDONLY),
+ (self.filenames, os.rmdir,),
+ (self.filenames, os.stat,),
+ (self.filenames, os.unlink,),
+ ]
+ if sys.platform == "win32":
+ funcs.extend((
+ (self.bytes_filenames, os.rename, b"dst"),
+ (self.bytes_filenames, os.replace, b"dst"),
+ (self.unicode_filenames, os.rename, "dst"),
+ (self.unicode_filenames, os.replace, "dst"),
+ # Issue #16414: Don't test undecodable names with listdir()
+ # because of a Windows bug.
+ #
+            # With the ANSI code page 932, os.listdir(b'\xe7') returns an
+ # empty list (instead of failing), whereas os.listdir(b'\xff')
+ # raises a FileNotFoundError. It looks like a Windows bug:
+ # b'\xe7' directory does not exist, FindFirstFileA(b'\xe7')
+ # fails with ERROR_FILE_NOT_FOUND (2), instead of
+ # ERROR_PATH_NOT_FOUND (3).
+ (self.unicode_filenames, os.listdir,),
+ ))
+ else:
+ funcs.extend((
+ (self.filenames, os.listdir,),
+ (self.filenames, os.rename, "dst"),
+ (self.filenames, os.replace, "dst"),
+ ))
+ if hasattr(os, "chown"):
+ funcs.append((self.filenames, os.chown, 0, 0))
+ if hasattr(os, "lchown"):
+ funcs.append((self.filenames, os.lchown, 0, 0))
+ if hasattr(os, "truncate"):
+ funcs.append((self.filenames, os.truncate, 0))
+ if hasattr(os, "chflags"):
+ funcs.append((self.filenames, os.chflags, 0))
+ if hasattr(os, "lchflags"):
+ funcs.append((self.filenames, os.lchflags, 0))
+ if hasattr(os, "chroot"):
+ funcs.append((self.filenames, os.chroot,))
+ if hasattr(os, "link"):
+ if sys.platform == "win32":
+ funcs.append((self.bytes_filenames, os.link, b"dst"))
+ funcs.append((self.unicode_filenames, os.link, "dst"))
+ else:
+ funcs.append((self.filenames, os.link, "dst"))
+ if hasattr(os, "listxattr"):
+ funcs.extend((
+ (self.filenames, os.listxattr,),
+ (self.filenames, os.getxattr, "user.test"),
+ (self.filenames, os.setxattr, "user.test", b'user'),
+ (self.filenames, os.removexattr, "user.test"),
+ ))
+ if hasattr(os, "lchmod"):
+ funcs.append((self.filenames, os.lchmod, 0o777))
+ if hasattr(os, "readlink"):
+ if sys.platform == "win32":
+ funcs.append((self.unicode_filenames, os.readlink,))
+ else:
+ funcs.append((self.filenames, os.readlink,))
+
+ for filenames, func, *func_args in funcs:
+ for name in filenames:
+ try:
+ func(name, *func_args)
+ except OSError as err:
+ self.assertIs(err.filename, name)
+ else:
+ self.fail("No exception thrown by {}".format(func))
+
@support.reap_threads
def test_main():
support.run_unittest(
@@ -2135,6 +2232,7 @@ def test_main():
ExtendedAttributeTests,
Win32DeprecatedBytesAPI,
TermsizeTests,
+ OSErrorTests,
RemoveDirsTests,
)
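
The new OSErrorTests assert that a failing os-level call reports the very filename object it was given (bytes, str, or a str subclass) on the exception's filename attribute. A minimal illustration, assuming the name below does not exist:

import errno
import os

name = "no-such-file-for-demo"     # illustrative, assumed missing
try:
    os.stat(name)
except OSError as exc:
    print(exc.errno == errno.ENOENT)   # True
    print(exc.filename is name)        # True -- the original object is reused
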
diff --git a/Lib/test/test_ossaudiodev.py b/Lib/test/test_ossaudiodev.py
index 3908a05..c9e2a24 100644
--- a/Lib/test/test_ossaudiodev.py
+++ b/Lib/test/test_ossaudiodev.py
@@ -44,7 +44,7 @@ class OSSAudioDevTests(unittest.TestCase):
def play_sound_file(self, data, rate, ssize, nchannels):
try:
dsp = ossaudiodev.open('w')
- except IOError as msg:
+ except OSError as msg:
if msg.args[0] in (errno.EACCES, errno.ENOENT,
errno.ENODEV, errno.EBUSY):
raise unittest.SkipTest(msg)
@@ -190,7 +190,7 @@ class OSSAudioDevTests(unittest.TestCase):
def test_main():
try:
dsp = ossaudiodev.open('w')
- except (ossaudiodev.error, IOError) as msg:
+ except (ossaudiodev.error, OSError) as msg:
if msg.args[0] in (errno.EACCES, errno.ENOENT,
errno.ENODEV, errno.EBUSY):
raise unittest.SkipTest(msg)
diff --git a/Lib/test/test_pep277.py b/Lib/test/test_pep277.py
index 4b16cbb..9bae6dc 100644
--- a/Lib/test/test_pep277.py
+++ b/Lib/test/test_pep277.py
@@ -99,10 +99,6 @@ class UnicodeFileTests(unittest.TestCase):
with self.assertRaises(expected_exception) as c:
fn(filename)
exc_filename = c.exception.filename
- # listdir may append a wildcard to the filename
- if fn is os.listdir and sys.platform == 'win32':
- exc_filename, _, wildcard = exc_filename.rpartition(os.sep)
- self.assertEqual(wildcard, '*.*')
if check_filename:
self.assertEqual(exc_filename, filename, "Function '%s(%a) failed "
"with bad filename in the exception: %a" %
diff --git a/Lib/test/test_poll.py b/Lib/test/test_poll.py
index f2d1795..a2fec3d 100644
--- a/Lib/test/test_poll.py
+++ b/Lib/test/test_poll.py
@@ -1,13 +1,13 @@
# Test case for the os.poll() function
-import os, select, random, unittest
+import os, select, random, unittest, subprocess
import _testcapi
from test.support import TESTFN, run_unittest
try:
select.poll
except AttributeError:
- raise unittest.SkipTest("select.poll not defined -- skipping test_poll")
+ raise unittest.SkipTest("select.poll not defined")
def find_ready_matching(ready, flag):
@@ -68,13 +68,11 @@ class PollTests(unittest.TestCase):
self.assertEqual(bufs, [MSG] * NUM_PIPES)
- def poll_unit_tests(self):
+ def test_poll_unit_tests(self):
# returns NVAL for invalid file descriptor
- FD = 42
- try:
- os.close(FD)
- except OSError:
- pass
+ FD, w = os.pipe()
+ os.close(FD)
+ os.close(w)
p = select.poll()
p.register(FD)
r = p.poll()
@@ -117,7 +115,9 @@ class PollTests(unittest.TestCase):
def test_poll2(self):
cmd = 'for i in 0 1 2 3 4 5 6 7 8 9; do echo testing...; sleep 1; done'
- p = os.popen(cmd, 'r')
+ proc = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE,
+ bufsize=0)
+ p = proc.stdout
pollster = select.poll()
pollster.register( p, select.POLLIN )
for tout in (0, 1000, 2000, 4000, 8000, 16000) + (-1,)*10:
@@ -127,7 +127,7 @@ class PollTests(unittest.TestCase):
fd, flags = fdlist[0]
if flags & select.POLLHUP:
line = p.readline()
- if line != "":
+ if line != b"":
self.fail('error: pipe seems to be closed, but still returns data')
continue
@@ -135,6 +135,7 @@ class PollTests(unittest.TestCase):
line = p.readline()
if not line:
break
+ self.assertEqual(line, b'testing...\n')
continue
else:
self.fail('Unexpected return value from select.poll: %s' % fdlist)
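
test_poll2 now starts the child with subprocess.Popen(bufsize=0) instead of os.popen(), registers the binary stdout pipe with select.poll(), and consequently compares against bytes. A condensed sketch of the same pattern (POSIX shell and select.poll assumed):

import select
import subprocess

proc = subprocess.Popen("for i in 1 2 3; do echo testing...; sleep 1; done",
                        shell=True, stdout=subprocess.PIPE, bufsize=0)

pollster = select.poll()
pollster.register(proc.stdout, select.POLLIN)

done = False
while not done:
    for fd, flags in pollster.poll(2000):   # timeout in milliseconds
        line = proc.stdout.readline()       # bytes, because the pipe is binary
        if line:
            print(line)                     # b'testing...\n'
        else:
            done = True                     # EOF: the writer has gone away
proc.wait()
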
diff --git a/Lib/test/test_poplib.py b/Lib/test/test_poplib.py
index c0929a0..935848b 100644
--- a/Lib/test/test_poplib.py
+++ b/Lib/test/test_poplib.py
@@ -18,6 +18,13 @@ threading = test_support.import_module('threading')
HOST = test_support.HOST
PORT = 0
+SUPPORTS_SSL = False
+if hasattr(poplib, 'POP3_SSL'):
+ import ssl
+
+ SUPPORTS_SSL = True
+ CERTFILE = os.path.join(os.path.dirname(__file__) or os.curdir, "keycert.pem")
+
# the dummy data returned by server when LIST and RETR commands are issued
LIST_RESP = b'1 1\r\n2 2\r\n3 3\r\n4 4\r\n5 5\r\n.\r\n'
RETR_RESP = b"""From: postmaster@python.org\
@@ -33,11 +40,15 @@ line3\r\n\
class DummyPOP3Handler(asynchat.async_chat):
+ CAPAS = {'UIDL': [], 'IMPLEMENTATION': ['python-testlib-pop-server']}
+
def __init__(self, conn):
asynchat.async_chat.__init__(self, conn)
self.set_terminator(b"\r\n")
self.in_buffer = []
self.push('+OK dummy pop3 server ready. <timestamp>')
+ self.tls_active = False
+ self.tls_starting = False
def collect_incoming_data(self, data):
self.in_buffer.append(data)
@@ -112,6 +123,65 @@ class DummyPOP3Handler(asynchat.async_chat):
self.push('+OK closing.')
self.close_when_done()
+ def _get_capas(self):
+ _capas = dict(self.CAPAS)
+ if not self.tls_active and SUPPORTS_SSL:
+ _capas['STLS'] = []
+ return _capas
+
+ def cmd_capa(self, arg):
+ self.push('+OK Capability list follows')
+ if self._get_capas():
+ for cap, params in self._get_capas().items():
+ _ln = [cap]
+ if params:
+ _ln.extend(params)
+ self.push(' '.join(_ln))
+ self.push('.')
+
+ if SUPPORTS_SSL:
+
+ def cmd_stls(self, arg):
+ if self.tls_active is False:
+ self.push('+OK Begin TLS negotiation')
+ tls_sock = ssl.wrap_socket(self.socket, certfile=CERTFILE,
+ server_side=True,
+ do_handshake_on_connect=False,
+ suppress_ragged_eofs=False)
+ self.del_channel()
+ self.set_socket(tls_sock)
+ self.tls_active = True
+ self.tls_starting = True
+ self.in_buffer = []
+ self._do_tls_handshake()
+ else:
+ self.push('-ERR Command not permitted when TLS active')
+
+ def _do_tls_handshake(self):
+ try:
+ self.socket.do_handshake()
+ except ssl.SSLError as err:
+ if err.args[0] in (ssl.SSL_ERROR_WANT_READ,
+ ssl.SSL_ERROR_WANT_WRITE):
+ return
+ elif err.args[0] == ssl.SSL_ERROR_EOF:
+ return self.handle_close()
+ raise
+ except OSError as err:
+ if err.args[0] == errno.ECONNABORTED:
+ return self.handle_close()
+ else:
+ self.tls_active = True
+ self.tls_starting = False
+
+ def handle_read(self):
+ if self.tls_starting:
+ self._do_tls_handshake()
+ else:
+ try:
+ asynchat.async_chat.handle_read(self)
+ except ssl.SSLEOFError:
+ self.handle_close()
class DummyPOP3Server(asyncore.dispatcher, threading.Thread):
@@ -232,19 +302,35 @@ class TestPOP3Class(TestCase):
self.client.uidl()
self.client.uidl('foo')
+ def test_capa(self):
+ capa = self.client.capa()
+ self.assertTrue('IMPLEMENTATION' in capa.keys())
+
def test_quit(self):
resp = self.client.quit()
self.assertTrue(resp)
self.assertIsNone(self.client.sock)
self.assertIsNone(self.client.file)
+ if SUPPORTS_SSL:
-SUPPORTS_SSL = False
-if hasattr(poplib, 'POP3_SSL'):
- import ssl
+ def test_stls_capa(self):
+ capa = self.client.capa()
+ self.assertTrue('STLS' in capa.keys())
- SUPPORTS_SSL = True
- CERTFILE = os.path.join(os.path.dirname(__file__) or os.curdir, "keycert.pem")
+ def test_stls(self):
+ expected = b'+OK Begin TLS negotiation'
+ resp = self.client.stls()
+ self.assertEqual(resp, expected)
+
+ def test_stls_context(self):
+ expected = b'+OK Begin TLS negotiation'
+ ctx = ssl.SSLContext(ssl.PROTOCOL_TLSv1)
+ resp = self.client.stls(context=ctx)
+ self.assertEqual(resp, expected)
+
+
+if SUPPORTS_SSL:
class DummyPOP3_SSLHandler(DummyPOP3Handler):
@@ -256,34 +342,13 @@ if hasattr(poplib, 'POP3_SSL'):
self.del_channel()
self.set_socket(ssl_socket)
# Must try handshake before calling push()
- self._ssl_accepting = True
- self._do_ssl_handshake()
+ self.tls_active = True
+ self.tls_starting = True
+ self._do_tls_handshake()
self.set_terminator(b"\r\n")
self.in_buffer = []
self.push('+OK dummy pop3 server ready. <timestamp>')
- def _do_ssl_handshake(self):
- try:
- self.socket.do_handshake()
- except ssl.SSLError as err:
- if err.args[0] in (ssl.SSL_ERROR_WANT_READ,
- ssl.SSL_ERROR_WANT_WRITE):
- return
- elif err.args[0] == ssl.SSL_ERROR_EOF:
- return self.handle_close()
- raise
- except socket.error as err:
- if err.args[0] == errno.ECONNABORTED:
- return self.handle_close()
- else:
- self._ssl_accepting = False
-
- def handle_read(self):
- if self._ssl_accepting:
- self._do_ssl_handshake()
- else:
- DummyPOP3Handler.handle_read(self)
-
class TestPOP3_SSLClass(TestPOP3Class):
# repeat previous tests by using poplib.POP3_SSL
@@ -314,6 +379,39 @@ if hasattr(poplib, 'POP3_SSL'):
self.assertIs(self.client.sock.context, ctx)
self.assertTrue(self.client.noop().startswith(b'+OK'))
+ def test_stls(self):
+ self.assertRaises(poplib.error_proto, self.client.stls)
+
+ test_stls_context = test_stls
+
+ def test_stls_capa(self):
+ capa = self.client.capa()
+ self.assertFalse('STLS' in capa.keys())
+
+
+ class TestPOP3_TLSClass(TestPOP3Class):
+ # repeat previous tests by using poplib.POP3.stls()
+
+ def setUp(self):
+ self.server = DummyPOP3Server((HOST, PORT))
+ self.server.start()
+ self.client = poplib.POP3(self.server.host, self.server.port, timeout=3)
+ self.client.stls()
+
+ def tearDown(self):
+ if self.client.file is not None and self.client.sock is not None:
+ self.client.quit()
+ self.server.stop()
+
+ def test_stls(self):
+ self.assertRaises(poplib.error_proto, self.client.stls)
+
+ test_stls_context = test_stls
+
+ def test_stls_capa(self):
+ capa = self.client.capa()
+ self.assertFalse(b'STLS' in capa.keys())
+
class TestTimeouts(TestCase):
@@ -373,6 +471,7 @@ def test_main():
tests = [TestPOP3Class, TestTimeouts]
if SUPPORTS_SSL:
tests.append(TestPOP3_SSLClass)
+ tests.append(TestPOP3_TLSClass)
thread_info = test_support.threading_setup()
try:
test_support.run_unittest(*tests)
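
The poplib features under test add CAPA and STLS support: capa() returns the server's capability dictionary and stls() upgrades an established plaintext POP3 session to TLS, after which STLS drops out of the capability list. A hedged usage sketch against a placeholder server:

import poplib
import ssl

client = poplib.POP3("mail.example.org", timeout=10)   # placeholder host
caps = client.capa()                  # dict mapping capability -> arguments
if "STLS" in caps:
    context = ssl.create_default_context()
    client.stls(context=context)      # the existing socket is wrapped in TLS
client.user("bob")                    # placeholder credentials
client.pass_("secret")
client.quit()
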
diff --git a/Lib/test/test_posix.py b/Lib/test/test_posix.py
index 4d9e1f5..4856083 100644
--- a/Lib/test/test_posix.py
+++ b/Lib/test/test_posix.py
@@ -607,7 +607,7 @@ class PosixTester(unittest.TestCase):
self.assertEqual(st.st_flags | stat.UF_IMMUTABLE, new_st.st_flags)
try:
fd = open(target_file, 'w+')
- except IOError as e:
+ except OSError as e:
self.assertEqual(e.errno, errno.EPERM)
finally:
posix.chflags(target_file, st.st_flags)
diff --git a/Lib/test/test_posixpath.py b/Lib/test/test_posixpath.py
index 724c530..1f05050 100644
--- a/Lib/test/test_posixpath.py
+++ b/Lib/test/test_posixpath.py
@@ -186,63 +186,6 @@ class PosixPathTest(unittest.TestCase):
if not f.close():
f.close()
- @staticmethod
- def _create_file(filename):
- with open(filename, 'wb') as f:
- f.write(b'foo')
-
- def test_samefile(self):
- test_fn = support.TESTFN + "1"
- self._create_file(test_fn)
- self.assertTrue(posixpath.samefile(test_fn, test_fn))
- self.assertRaises(TypeError, posixpath.samefile)
-
- @unittest.skipIf(
- sys.platform.startswith('win'),
- "posixpath.samefile does not work on links in Windows")
- @unittest.skipUnless(hasattr(os, "symlink"),
- "Missing symlink implementation")
- def test_samefile_on_links(self):
- test_fn1 = support.TESTFN + "1"
- test_fn2 = support.TESTFN + "2"
- self._create_file(test_fn1)
-
- os.symlink(test_fn1, test_fn2)
- self.assertTrue(posixpath.samefile(test_fn1, test_fn2))
- os.remove(test_fn2)
-
- self._create_file(test_fn2)
- self.assertFalse(posixpath.samefile(test_fn1, test_fn2))
-
-
- def test_samestat(self):
- test_fn = support.TESTFN + "1"
- self._create_file(test_fn)
- test_fns = [test_fn]*2
- stats = map(os.stat, test_fns)
- self.assertTrue(posixpath.samestat(*stats))
-
- @unittest.skipIf(
- sys.platform.startswith('win'),
- "posixpath.samestat does not work on links in Windows")
- @unittest.skipUnless(hasattr(os, "symlink"),
- "Missing symlink implementation")
- def test_samestat_on_links(self):
- test_fn1 = support.TESTFN + "1"
- test_fn2 = support.TESTFN + "2"
- self._create_file(test_fn1)
- test_fns = (test_fn1, test_fn2)
- os.symlink(*test_fns)
- stats = map(os.stat, test_fns)
- self.assertTrue(posixpath.samestat(*stats))
- os.remove(test_fn2)
-
- self._create_file(test_fn2)
- stats = map(os.stat, test_fns)
- self.assertFalse(posixpath.samestat(*stats))
-
- self.assertRaises(TypeError, posixpath.samestat)
-
def test_ismount(self):
self.assertIs(posixpath.ismount("/"), True)
with warnings.catch_warnings():
@@ -594,11 +537,6 @@ class PosixPathTest(unittest.TestCase):
finally:
os.getcwdb = real_getcwdb
- def test_sameopenfile(self):
- fname = support.TESTFN + "1"
- with open(fname, "wb") as a, open(fname, "wb") as b:
- self.assertTrue(posixpath.sameopenfile(a.fileno(), b.fileno()))
-
class PosixCommonTest(test_genericpath.CommonTest, unittest.TestCase):
pathmodule = posixpath
diff --git a/Lib/test/test_pprint.py b/Lib/test/test_pprint.py
index d492d75..bf136de 100644
--- a/Lib/test/test_pprint.py
+++ b/Lib/test/test_pprint.py
@@ -1,3 +1,5 @@
+# -*- coding: utf-8 -*-
+
import pprint
import test.support
import unittest
@@ -475,6 +477,42 @@ class QueryTestCase(unittest.TestCase):
self.assertEqual(pprint.pformat(dict.fromkeys(keys, 0)),
'{%r: 0, %r: 0}' % tuple(sorted(keys, key=id)))
+ def test_str_wrap(self):
+ # pprint tries to wrap strings intelligently
+ fox = 'the quick brown fox jumped over a lazy dog'
+ self.assertEqual(pprint.pformat(fox, width=20), """\
+'the quick brown '
+'fox jumped over '
+'a lazy dog'""")
+ self.assertEqual(pprint.pformat({'a': 1, 'b': fox, 'c': 2},
+ width=26), """\
+{'a': 1,
+ 'b': 'the quick brown '
+ 'fox jumped over '
+ 'a lazy dog',
+ 'c': 2}""")
+ # With some special characters
+ # - \n always triggers a new line in the pprint
+ # - \t and \n are escaped
+ # - non-ASCII is allowed
+ # - an apostrophe doesn't disrupt the pprint
+ special = "Portons dix bons \"whiskys\"\nà l'avocat goujat\t qui fumait au zoo"
+ self.assertEqual(pprint.pformat(special, width=20), """\
+'Portons dix bons '
+'"whiskys"\\n'
+"à l'avocat "
+'goujat\\t qui '
+'fumait au zoo'""")
+ # An unwrappable string is formatted as its repr
+ unwrappable = "x" * 100
+ self.assertEqual(pprint.pformat(unwrappable, width=80), repr(unwrappable))
+ self.assertEqual(pprint.pformat(''), "''")
+ # Check that the pprint is a usable repr
+ special *= 10
+ for width in range(3, 40):
+ formatted = pprint.pformat(special, width=width)
+ self.assertEqual(eval("(" + formatted + ")"), special)
+
class DottedPrettyPrinter(pprint.PrettyPrinter):
diff --git a/Lib/test/test_profile.py b/Lib/test/test_profile.py
index cd7ec58..1fc3c42 100644
--- a/Lib/test/test_profile.py
+++ b/Lib/test/test_profile.py
@@ -3,9 +3,11 @@
import sys
import pstats
import unittest
+import os
from difflib import unified_diff
from io import StringIO
-from test.support import run_unittest
+from test.support import TESTFN, run_unittest, unlink
+from contextlib import contextmanager
import profile
from test.profilee import testfunc, timer
@@ -14,9 +16,13 @@ from test.profilee import testfunc, timer
class ProfileTest(unittest.TestCase):
profilerclass = profile.Profile
+ profilermodule = profile
methodnames = ['print_stats', 'print_callers', 'print_callees']
expected_max_output = ':0(max)'
+ def tearDown(self):
+ unlink(TESTFN)
+
def get_expected_output(self):
return _ProfileOutput
@@ -74,6 +80,19 @@ class ProfileTest(unittest.TestCase):
self.assertIn(self.expected_max_output, res,
"Profiling {0!r} didn't report max:\n{1}".format(stmt, res))
+ def test_run(self):
+ with silent():
+ self.profilermodule.run("int('1')")
+ self.profilermodule.run("int('1')", filename=TESTFN)
+ self.assertTrue(os.path.exists(TESTFN))
+
+ def test_runctx(self):
+ with silent():
+ self.profilermodule.runctx("testfunc()", globals(), locals())
+ self.profilermodule.runctx("testfunc()", globals(), locals(),
+ filename=TESTFN)
+ self.assertTrue(os.path.exists(TESTFN))
+
def regenerate_expected_output(filename, cls):
filename = filename.rstrip('co')
@@ -95,6 +114,14 @@ def regenerate_expected_output(filename, cls):
method, results[i+1]))
f.write('\nif __name__ == "__main__":\n main()\n')
+@contextmanager
+def silent():
+ stdout = sys.stdout
+ try:
+ sys.stdout = StringIO()
+ yield
+ finally:
+ sys.stdout = stdout
def test_main():
run_unittest(ProfileTest)
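Aside: a minimal sketch of the filename= behaviour the new test_run/test_runctx cases check -- run() writes a stats file that pstats can load back (the output path is a placeholder, not from the patch):

    import profile
    import pstats

    profile.run("sum(range(1000))", filename="profile.out")   # write binary stats
    stats = pstats.Stats("profile.out")                        # load them back
    stats.sort_stats("cumulative").print_stats(5)              # show the top 5 entries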
diff --git a/Lib/test/test_pty.py b/Lib/test/test_pty.py
index 29297f8..8916861 100644
--- a/Lib/test/test_pty.py
+++ b/Lib/test/test_pty.py
@@ -187,7 +187,7 @@ class PtyTest(unittest.TestCase):
##debug("Reading from master_fd now that the child has exited")
##try:
## s1 = os.read(master_fd, 1024)
- ##except os.error:
+ ##except OSError:
## pass
##else:
## raise TestFailed("Read from master_fd did not raise exception")
diff --git a/Lib/test/test_pydoc.py b/Lib/test/test_pydoc.py
index 542a29b..8adcdd8 100644
--- a/Lib/test/test_pydoc.py
+++ b/Lib/test/test_pydoc.py
@@ -395,6 +395,31 @@ class PydocDocTest(unittest.TestCase):
synopsis = pydoc.synopsis(TESTFN, {})
self.assertEqual(synopsis, 'line 1: h\xe9')
+ def test_splitdoc_with_description(self):
+ example_string = "I Am A Doc\n\n\nHere is my description"
+ self.assertEqual(pydoc.splitdoc(example_string),
+ ('I Am A Doc', '\nHere is my description'))
+
+ def test_is_object_or_method(self):
+ doc = pydoc.Doc()
+ # Bound Method
+ self.assertTrue(pydoc._is_some_method(doc.fail))
+ # Method Descriptor
+ self.assertTrue(pydoc._is_some_method(int.__add__))
+ # String
+ self.assertFalse(pydoc._is_some_method("I am not a method"))
+
+ def test_is_package_when_not_package(self):
+ with test.support.temp_cwd() as test_dir:
+ self.assertFalse(pydoc.ispackage(test_dir))
+
+ def test_is_package_when_is_package(self):
+ with test.support.temp_cwd() as test_dir:
+ init_path = os.path.join(test_dir, '__init__.py')
+ open(init_path, 'w').close()
+ self.assertTrue(pydoc.ispackage(test_dir))
+ os.remove(init_path)
+
def test_allmethods(self):
# issue 17476: allmethods was no longer returning unbound methods.
# This test is a bit fragile in the face of changes to object and type,
diff --git a/Lib/test/test_random.py b/Lib/test/test_random.py
index c3ab7d2..34c948d 100644
--- a/Lib/test/test_random.py
+++ b/Lib/test/test_random.py
@@ -46,6 +46,39 @@ class TestBasicOps(unittest.TestCase):
self.assertRaises(TypeError, self.gen.seed, 1, 2, 3, 4)
self.assertRaises(TypeError, type(self.gen), [])
+ def test_shuffle(self):
+ shuffle = self.gen.shuffle
+ lst = []
+ shuffle(lst)
+ self.assertEqual(lst, [])
+ lst = [37]
+ shuffle(lst)
+ self.assertEqual(lst, [37])
+ seqs = [list(range(n)) for n in range(10)]
+ shuffled_seqs = [list(range(n)) for n in range(10)]
+ for shuffled_seq in shuffled_seqs:
+ shuffle(shuffled_seq)
+ for (seq, shuffled_seq) in zip(seqs, shuffled_seqs):
+ self.assertEqual(len(seq), len(shuffled_seq))
+ self.assertEqual(set(seq), set(shuffled_seq))
+
+ # The above tests would all pass even if the shuffle were a
+ # no-op. The following non-deterministic test guards against that. It
+ # asserts that the shuffled sequence of 1000 distinct elements
+ # must be different from the original one. Although there is
+ # mathematically a non-zero probability that this could
+ # actually happen in a genuinely random shuffle, it is
+ # completely negligible, given that the number of possible
+ # permutations of 1000 objects is 1000! (factorial of 1000),
+ # which is considerably larger than the number of atoms in the
+ # universe...
+ lst = list(range(1000))
+ shuffled_lst = list(range(1000))
+ shuffle(shuffled_lst)
+ self.assertTrue(lst != shuffled_lst)
+ shuffle(lst)
+ self.assertTrue(lst != shuffled_lst)
+
def test_choice(self):
choice = self.gen.choice
with self.assertRaises(IndexError):
diff --git a/Lib/test/test_range.py b/Lib/test/test_range.py
index 2a13bfe..f088387 100644
--- a/Lib/test/test_range.py
+++ b/Lib/test/test_range.py
@@ -313,7 +313,7 @@ class RangeTest(unittest.TestCase):
self.assertRaises(TypeError, range, IN())
# Test use of user-defined classes in slice indices.
- self.assertEqual(list(range(10)[:I(5)]), list(range(5)))
+ self.assertEqual(range(10)[:I(5)], range(5))
with self.assertRaises(RuntimeError):
range(0, 10)[:IX()]
diff --git a/Lib/test/test_regrtest.py b/Lib/test/test_regrtest.py
new file mode 100644
index 0000000..5b972ca
--- /dev/null
+++ b/Lib/test/test_regrtest.py
@@ -0,0 +1,100 @@
+"""
+Tests of regrtest.py.
+"""
+
+import argparse
+import getopt
+import unittest
+from test import regrtest, support
+
+def old_parse_args(args):
+ """Parse arguments as regrtest did strictly prior to 3.4.
+
+ Raises getopt.GetoptError on bad arguments.
+ """
+ return getopt.getopt(args, 'hvqxsoS:rf:lu:t:TD:NLR:FdwWM:nj:Gm:',
+ ['help', 'verbose', 'verbose2', 'verbose3', 'quiet',
+ 'exclude', 'single', 'slow', 'randomize', 'fromfile=', 'findleaks',
+ 'use=', 'threshold=', 'coverdir=', 'nocoverdir',
+ 'runleaks', 'huntrleaks=', 'memlimit=', 'randseed=',
+ 'multiprocess=', 'coverage', 'slaveargs=', 'forever', 'debug',
+ 'start=', 'nowindows', 'header', 'testdir=', 'timeout=', 'wait',
+ 'failfast', 'match='])
+
+class ParseArgsTestCase(unittest.TestCase):
+
+ """Test that regrtest's parsing code matches the prior getopt behavior."""
+
+ def _parse_args(self, args):
+ # This is the same logic as that used in regrtest.main()
+ parser = regrtest._create_parser()
+ ns = parser.parse_args(args=args)
+ opts = regrtest._convert_namespace_to_getopt(ns)
+ return opts, ns.args
+
+ def _check_args(self, args, expected=None):
+ """
+ The expected parameter is for cases when the behavior of the new
+ parse_args differs from the old (but deliberately so).
+ """
+ if expected is None:
+ try:
+ expected = old_parse_args(args)
+ except getopt.GetoptError:
+ # Suppress the usage string output when an argparse.ArgumentError
+ # is raised.
+ with support.captured_stderr():
+ self.assertRaises(SystemExit, self._parse_args, args)
+ return
+ # The new parse_args() sorts by long option string.
+ expected[0].sort()
+ actual = self._parse_args(args)
+ self.assertEqual(actual, expected)
+
+ def test_unrecognized_argument(self):
+ self._check_args(['--xxx'])
+
+ def test_value_not_provided(self):
+ self._check_args(['--start'])
+
+ def test_short_option(self):
+ # getopt returns the short option whereas argparse returns the long.
+ expected = ([('--quiet', '')], [])
+ self._check_args(['-q'], expected=expected)
+
+ def test_long_option(self):
+ self._check_args(['--quiet'])
+
+ def test_long_option__partial(self):
+ self._check_args(['--qui'])
+
+ def test_two_options(self):
+ self._check_args(['--quiet', '--exclude'])
+
+ def test_option_with_value(self):
+ self._check_args(['--start', 'foo'])
+
+ def test_option_with_empty_string_value(self):
+ self._check_args(['--start', ''])
+
+ def test_arg(self):
+ self._check_args(['foo'])
+
+ def test_option_and_arg(self):
+ self._check_args(['--quiet', 'foo'])
+
+ def test_fromfile(self):
+ self._check_args(['--fromfile', 'file'])
+
+ def test_match(self):
+ self._check_args(['--match', 'pattern'])
+
+ def test_randomize(self):
+ self._check_args(['--randomize'])
+
+
+def test_main():
+ support.run_unittest(__name__)
+
+if __name__ == '__main__':
+ test_main()
diff --git a/Lib/test/test_resource.py b/Lib/test/test_resource.py
index f3416b7..bb3ff25 100644
--- a/Lib/test/test_resource.py
+++ b/Lib/test/test_resource.py
@@ -61,7 +61,7 @@ class ResourceTest(unittest.TestCase):
for i in range(5):
time.sleep(.1)
f.flush()
- except IOError:
+ except OSError:
if not limit_set:
raise
if limit_set:
diff --git a/Lib/test/test_select.py b/Lib/test/test_select.py
index ddb9a0f..8f9a1c9 100644
--- a/Lib/test/test_select.py
+++ b/Lib/test/test_select.py
@@ -5,7 +5,7 @@ import sys
import unittest
from test import support
-@unittest.skipIf(sys.platform[:3] in ('win', 'os2', 'riscos'),
+@unittest.skipIf(sys.platform[:3] == 'win',
"can't easily test on this system")
class SelectTestCase(unittest.TestCase):
@@ -32,7 +32,7 @@ class SelectTestCase(unittest.TestCase):
fp.close()
try:
select.select([fd], [], [], 0)
- except select.error as err:
+ except OSError as err:
self.assertEqual(err.errno, errno.EBADF)
else:
self.fail("exception not raised")
diff --git a/Lib/test/test_set.py b/Lib/test/test_set.py
index 8e9e587..da62723 100644
--- a/Lib/test/test_set.py
+++ b/Lib/test/test_set.py
@@ -848,8 +848,6 @@ class TestBasicOps(unittest.TestCase):
for v in self.set:
self.assertIn(v, self.values)
setiter = iter(self.set)
- # note: __length_hint__ is an internal undocumented API,
- # don't rely on it in your own programs
self.assertEqual(setiter.__length_hint__(), len(self.set))
def test_pickling(self):
diff --git a/Lib/test/test_shelve.py b/Lib/test/test_shelve.py
index 13c1265..bd51d86 100644
--- a/Lib/test/test_shelve.py
+++ b/Lib/test/test_shelve.py
@@ -148,6 +148,19 @@ class TestCase(unittest.TestCase):
p2 = d[encodedkey]
self.assertNotEqual(p1, p2) # Write creates new object in store
+ def test_with(self):
+ d1 = {}
+ with shelve.Shelf(d1, protocol=2, writeback=False) as s:
+ s['key1'] = [1,2,3,4]
+ self.assertEqual(s['key1'], [1,2,3,4])
+ self.assertEqual(len(s), 1)
+ self.assertRaises(ValueError, len, s)
+ try:
+ s['key1']
+ except ValueError:
+ pass
+ else:
+ self.fail('Closed shelf should not find a key')
from test import mapping_tests
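Usage sketch for the context-manager behaviour covered by the new test_with (the database path is a placeholder, not from the patch):

    import shelve

    with shelve.open("scratch_db") as db:    # the shelf is closed on exit
        db["key1"] = [1, 2, 3, 4]
    # After the block the shelf is closed; further access raises ValueError.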
diff --git a/Lib/test/test_shutil.py b/Lib/test/test_shutil.py
index 3b1b694..5a78440 100644
--- a/Lib/test/test_shutil.py
+++ b/Lib/test/test_shutil.py
@@ -18,7 +18,8 @@ from shutil import (_make_tarball, _make_zipfile, make_archive,
register_archive_format, unregister_archive_format,
get_archive_formats, Error, unpack_archive,
register_unpack_format, RegistryError,
- unregister_unpack_format, get_unpack_formats)
+ unregister_unpack_format, get_unpack_formats,
+ SameFileError)
import tarfile
import warnings
@@ -739,7 +740,7 @@ class TestShutil(unittest.TestCase):
with open(src, 'w') as f:
f.write('cheddar')
os.link(src, dst)
- self.assertRaises(shutil.Error, shutil.copyfile, src, dst)
+ self.assertRaises(shutil.SameFileError, shutil.copyfile, src, dst)
with open(src, 'r') as f:
self.assertEqual(f.read(), 'cheddar')
os.remove(dst)
@@ -759,7 +760,7 @@ class TestShutil(unittest.TestCase):
# to TESTFN/TESTFN/cheese, while it should point at
# TESTFN/cheese.
os.symlink('cheese', dst)
- self.assertRaises(shutil.Error, shutil.copyfile, src, dst)
+ self.assertRaises(shutil.SameFileError, shutil.copyfile, src, dst)
with open(src, 'r') as f:
self.assertEqual(f.read(), 'cheddar')
os.remove(dst)
@@ -1266,6 +1267,16 @@ class TestShutil(unittest.TestCase):
self.assertTrue(os.path.exists(rv))
self.assertEqual(read_file(src_file), read_file(dst_file))
+ def test_copyfile_same_file(self):
+ # copyfile() should raise SameFileError if the source and destination
+ # are the same.
+ src_dir = self.mkdtemp()
+ src_file = os.path.join(src_dir, 'foo')
+ write_file(src_file, 'foo')
+ self.assertRaises(SameFileError, shutil.copyfile, src_file, src_file)
+ # But catching Error should keep working too, for backward compatibility.
+ self.assertRaises(Error, shutil.copyfile, src_file, src_file)
+
def test_copytree_return_value(self):
# copytree returns its destination path.
src_dir = self.mkdtemp()
@@ -1537,7 +1548,7 @@ class TestCopyFile(unittest.TestCase):
self._exited_with = exc_type, exc_val, exc_tb
if self._raise_in_exit:
self._raised = True
- raise IOError("Cannot close")
+ raise OSError("Cannot close")
return self._suppress_at_exit
def tearDown(self):
@@ -1551,12 +1562,12 @@ class TestCopyFile(unittest.TestCase):
def test_w_source_open_fails(self):
def _open(filename, mode='r'):
if filename == 'srcfile':
- raise IOError('Cannot open "srcfile"')
+ raise OSError('Cannot open "srcfile"')
assert 0 # shouldn't reach here.
self._set_shutil_open(_open)
- self.assertRaises(IOError, shutil.copyfile, 'srcfile', 'destfile')
+ self.assertRaises(OSError, shutil.copyfile, 'srcfile', 'destfile')
def test_w_dest_open_fails(self):
@@ -1566,14 +1577,14 @@ class TestCopyFile(unittest.TestCase):
if filename == 'srcfile':
return srcfile
if filename == 'destfile':
- raise IOError('Cannot open "destfile"')
+ raise OSError('Cannot open "destfile"')
assert 0 # shouldn't reach here.
self._set_shutil_open(_open)
shutil.copyfile('srcfile', 'destfile')
self.assertTrue(srcfile._entered)
- self.assertTrue(srcfile._exited_with[0] is IOError)
+ self.assertTrue(srcfile._exited_with[0] is OSError)
self.assertEqual(srcfile._exited_with[1].args,
('Cannot open "destfile"',))
@@ -1595,7 +1606,7 @@ class TestCopyFile(unittest.TestCase):
self.assertTrue(srcfile._entered)
self.assertTrue(destfile._entered)
self.assertTrue(destfile._raised)
- self.assertTrue(srcfile._exited_with[0] is IOError)
+ self.assertTrue(srcfile._exited_with[0] is OSError)
self.assertEqual(srcfile._exited_with[1].args,
('Cannot close',))
@@ -1613,7 +1624,7 @@ class TestCopyFile(unittest.TestCase):
self._set_shutil_open(_open)
- self.assertRaises(IOError,
+ self.assertRaises(OSError,
shutil.copyfile, 'srcfile', 'destfile')
self.assertTrue(srcfile._entered)
self.assertTrue(destfile._entered)
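Illustration of the new exception: SameFileError subclasses shutil.Error, so existing handlers keep working while new code can catch the narrower type (the scratch file name is a placeholder, not from the patch):

    import shutil

    with open("scratch.txt", "w") as f:      # create a placeholder scratch file
        f.write("payload")
    try:
        shutil.copyfile("scratch.txt", "scratch.txt")
    except shutil.SameFileError as exc:      # would also match "except shutil.Error"
        print("refused:", exc)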
diff --git a/Lib/test/test_signal.py b/Lib/test/test_signal.py
index 99243df..3d250e1 100644
--- a/Lib/test/test_signal.py
+++ b/Lib/test/test_signal.py
@@ -15,9 +15,6 @@ try:
except ImportError:
threading = None
-if sys.platform in ('os2', 'riscos'):
- raise unittest.SkipTest("Can't test signal on %s" % sys.platform)
-
class HandlerBCalled(Exception):
pass
@@ -36,7 +33,7 @@ def exit_subprocess():
def ignoring_eintr(__func, *args, **kwargs):
try:
return __func(*args, **kwargs)
- except EnvironmentError as e:
+ except OSError as e:
if e.errno != errno.EINTR:
raise
return None
@@ -309,10 +306,10 @@ class WakeupSignalTests(unittest.TestCase):
# We attempt to get a signal during the select call
try:
select.select([read], [], [], TIMEOUT_FULL)
- except select.error:
+ except OSError:
pass
else:
- raise Exception("select.error not raised")
+ raise Exception("OSError not raised")
after_time = time.time()
dt = after_time - before_time
if dt >= TIMEOUT_HALF:
diff --git a/Lib/test/test_site.py b/Lib/test/test_site.py
index 29286c7..9c7840f 100644
--- a/Lib/test/test_site.py
+++ b/Lib/test/test_site.py
@@ -222,11 +222,7 @@ class HelperFunctionsTests(unittest.TestCase):
site.PREFIXES = ['xoxo']
dirs = site.getsitepackages()
- if sys.platform in ('os2emx', 'riscos'):
- self.assertEqual(len(dirs), 1)
- wanted = os.path.join('xoxo', 'Lib', 'site-packages')
- self.assertEqual(dirs[0], wanted)
- elif (sys.platform == "darwin" and
+ if (sys.platform == "darwin" and
sysconfig.get_config_var("PYTHONFRAMEWORK")):
# OS X framework builds
site.PREFIXES = ['Python.framework']
diff --git a/Lib/test/test_slice.py b/Lib/test/test_slice.py
index 2df9271..9203d5e 100644
--- a/Lib/test/test_slice.py
+++ b/Lib/test/test_slice.py
@@ -4,8 +4,70 @@ import unittest
from test import support
from pickle import loads, dumps
+import itertools
+import operator
import sys
+
+def evaluate_slice_index(arg):
+ """
+ Helper function to convert a slice argument to an integer, and raise
+ TypeError with a suitable message on failure.
+
+ """
+ if hasattr(arg, '__index__'):
+ return operator.index(arg)
+ else:
+ raise TypeError(
+ "slice indices must be integers or "
+ "None or have an __index__ method")
+
+def slice_indices(slice, length):
+ """
+ Reference implementation for the slice.indices method.
+
+ """
+ # Compute step and length as integers.
+ length = operator.index(length)
+ step = 1 if slice.step is None else evaluate_slice_index(slice.step)
+
+ # Raise ValueError for negative length or zero step.
+ if length < 0:
+ raise ValueError("length should not be negative")
+ if step == 0:
+ raise ValueError("slice step cannot be zero")
+
+ # Find lower and upper bounds for start and stop.
+ lower = -1 if step < 0 else 0
+ upper = length - 1 if step < 0 else length
+
+ # Compute start.
+ if slice.start is None:
+ start = upper if step < 0 else lower
+ else:
+ start = evaluate_slice_index(slice.start)
+ start = max(start + length, lower) if start < 0 else min(start, upper)
+
+ # Compute stop.
+ if slice.stop is None:
+ stop = lower if step < 0 else upper
+ else:
+ stop = evaluate_slice_index(slice.stop)
+ stop = max(stop + length, lower) if stop < 0 else min(stop, upper)
+
+ return start, stop, step
+
+
+# Class providing an __index__ method. Used for testing slice.indices.
+
+class MyIndexable(object):
+ def __init__(self, value):
+ self.value = value
+
+ def __index__(self):
+ return self.value
+
+
class SliceTest(unittest.TestCase):
def test_constructor(self):
@@ -75,6 +137,22 @@ class SliceTest(unittest.TestCase):
s = slice(obj)
self.assertTrue(s.stop is obj)
+ def check_indices(self, slice, length):
+ try:
+ actual = slice.indices(length)
+ except ValueError:
+ actual = "valueerror"
+ try:
+ expected = slice_indices(slice, length)
+ except ValueError:
+ expected = "valueerror"
+ self.assertEqual(actual, expected)
+
+ if length >= 0 and slice.step != 0:
+ actual = range(*slice.indices(length))
+ expected = range(length)[slice]
+ self.assertEqual(actual, expected)
+
def test_indices(self):
self.assertEqual(slice(None ).indices(10), (0, 10, 1))
self.assertEqual(slice(None, None, 2).indices(10), (0, 10, 2))
@@ -108,7 +186,41 @@ class SliceTest(unittest.TestCase):
self.assertEqual(list(range(10))[::sys.maxsize - 1], [0])
- self.assertRaises(OverflowError, slice(None).indices, 1<<100)
+ # Check a variety of start, stop, step and length values, including
+ # values exceeding sys.maxsize (see issue #14794).
+ vals = [None, -2**100, -2**30, -53, -7, -1, 0, 1, 7, 53, 2**30, 2**100]
+ lengths = [0, 1, 7, 53, 2**30, 2**100]
+ for slice_args in itertools.product(vals, repeat=3):
+ s = slice(*slice_args)
+ for length in lengths:
+ self.check_indices(s, length)
+ self.check_indices(slice(0, 10, 1), -3)
+
+ # Negative length should raise ValueError
+ with self.assertRaises(ValueError):
+ slice(None).indices(-1)
+
+ # Zero step should raise ValueError
+ with self.assertRaises(ValueError):
+ slice(0, 10, 0).indices(5)
+
+ # Using a start, stop, step, or length that can't be interpreted as an
+ # integer should give a TypeError ...
+ with self.assertRaises(TypeError):
+ slice(0.0, 10, 1).indices(5)
+ with self.assertRaises(TypeError):
+ slice(0, 10.0, 1).indices(5)
+ with self.assertRaises(TypeError):
+ slice(0, 10, 1.0).indices(5)
+ with self.assertRaises(TypeError):
+ slice(0, 10, 1).indices(5.0)
+
+ # ... but it should be fine to use a custom class that provides __index__.
+ self.assertEqual(slice(0, 10, 1).indices(5), (0, 5, 1))
+ self.assertEqual(slice(MyIndexable(0), 10, 1).indices(5), (0, 5, 1))
+ self.assertEqual(slice(0, MyIndexable(10), 1).indices(5), (0, 5, 1))
+ self.assertEqual(slice(0, 10, MyIndexable(1)).indices(5), (0, 5, 1))
+ self.assertEqual(slice(0, 10, 1).indices(MyIndexable(5)), (0, 5, 1))
def test_setslice_without_getslice(self):
tmp = []
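A quick worked example of the indices() semantics that the slice_indices() reference implementation above mirrors (illustrative, not from the patch):

    s = slice(None, None, -2)
    print(s.indices(10))                  # (9, -1, -2)
    print(list(range(*s.indices(10))))    # [9, 7, 5, 3, 1]
    print(list(range(10))[s])             # [9, 7, 5, 3, 1] -- same elements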
diff --git a/Lib/test/test_smtplib.py b/Lib/test/test_smtplib.py
index f93d355..8f64ec1 100644
--- a/Lib/test/test_smtplib.py
+++ b/Lib/test/test_smtplib.py
@@ -222,7 +222,7 @@ class DebuggingServerTests(unittest.TestCase):
self.assertEqual(smtp.source_address, ('127.0.0.1', port))
self.assertEqual(smtp.local_hostname, 'localhost')
smtp.quit()
- except IOError as e:
+ except OSError as e:
if e.errno == errno.EADDRINUSE:
self.skipTest("couldn't bind to port %d" % port)
raise
@@ -524,12 +524,6 @@ class DebuggingServerTests(unittest.TestCase):
class NonConnectingTests(unittest.TestCase):
- def setUp(self):
- smtplib.socket = mock_socket
-
- def tearDown(self):
- smtplib.socket = socket
-
def testNotConnected(self):
# Test various operations on an unconnected SMTP object that
# should raise exceptions (at present the attempt in SMTP.send
@@ -541,10 +535,10 @@ class NonConnectingTests(unittest.TestCase):
smtp.send, 'test msg')
def testNonnumericPort(self):
- # check that non-numeric port raises socket.error
- self.assertRaises(mock_socket.error, smtplib.SMTP,
+ # check that non-numeric port raises OSError
+ self.assertRaises(OSError, smtplib.SMTP,
"localhost", "bogus")
- self.assertRaises(mock_socket.error, smtplib.SMTP,
+ self.assertRaises(OSError, smtplib.SMTP,
"localhost:bogus")
diff --git a/Lib/test/test_sndhdr.py b/Lib/test/test_sndhdr.py
index 1004688..5e0abe0 100644
--- a/Lib/test/test_sndhdr.py
+++ b/Lib/test/test_sndhdr.py
@@ -12,7 +12,7 @@ class TestFormats(unittest.TestCase):
('sndhdr.hcom', ('hcom', 22050.0, 1, -1, 8)),
('sndhdr.sndt', ('sndt', 44100, 1, 5, 8)),
('sndhdr.voc', ('voc', 0, 1, -1, 8)),
- ('sndhdr.wav', ('wav', 44100, 2, -1, 16)),
+ ('sndhdr.wav', ('wav', 44100, 2, 5, 16)),
):
filename = findfile(filename, subdir="sndhdrdata")
what = sndhdr.what(filename)
diff --git a/Lib/test/test_socket.py b/Lib/test/test_socket.py
index d20e37b..ec76671 100644
--- a/Lib/test/test_socket.py
+++ b/Lib/test/test_socket.py
@@ -2,7 +2,6 @@
import unittest
from test import support
-from unittest.case import _ExpectedFailure
import errno
import io
@@ -46,7 +45,7 @@ def _have_socket_can():
"""Check whether CAN sockets are supported on this host."""
try:
s = socket.socket(socket.PF_CAN, socket.SOCK_RAW, socket.CAN_RAW)
- except (AttributeError, socket.error, OSError):
+ except (AttributeError, OSError):
return False
else:
s.close()
@@ -121,12 +120,42 @@ class SocketCANTest(unittest.TestCase):
interface = 'vcan0'
bufsize = 128
+ """The CAN frame structure is defined in <linux/can.h>:
+
+ struct can_frame {
+ canid_t can_id; /* 32 bit CAN_ID + EFF/RTR/ERR flags */
+ __u8 can_dlc; /* data length code: 0 .. 8 */
+ __u8 data[8] __attribute__((aligned(8)));
+ };
+ """
+ can_frame_fmt = "=IB3x8s"
+ can_frame_size = struct.calcsize(can_frame_fmt)
+
+ """The Broadcast Management Command frame structure is defined
+ in <linux/can/bcm.h>:
+
+ struct bcm_msg_head {
+ __u32 opcode;
+ __u32 flags;
+ __u32 count;
+ struct timeval ival1, ival2;
+ canid_t can_id;
+ __u32 nframes;
+ struct can_frame frames[0];
+ }
+
+ `bcm_msg_head` must be 8-byte aligned because of the `frames` member (see the
+ `struct can_frame` definition). Native, not standard, types must be used for packing.
+ """
+ bcm_cmd_msg_fmt = "@3I4l2I"
+ bcm_cmd_msg_fmt += "x" * (struct.calcsize(bcm_cmd_msg_fmt) % 8)
+
def setUp(self):
self.s = socket.socket(socket.PF_CAN, socket.SOCK_RAW, socket.CAN_RAW)
self.addCleanup(self.s.close)
try:
self.s.bind((self.interface,))
- except socket.error:
+ except OSError:
self.skipTest('network interface `%s` does not exist' %
self.interface)
@@ -242,9 +271,6 @@ class ThreadableTest:
raise TypeError("test_func must be a callable function")
try:
test_func()
- except _ExpectedFailure:
- # We deliberately ignore expected failures
- pass
except BaseException as e:
self.queue.put(e)
finally:
@@ -295,7 +321,7 @@ class ThreadedCANSocketTest(SocketCANTest, ThreadableTest):
self.cli = socket.socket(socket.PF_CAN, socket.SOCK_RAW, socket.CAN_RAW)
try:
self.cli.bind((self.interface,))
- except socket.error:
+ except OSError:
# skipTest should not be called here, and will be called in the
# server instead
pass
@@ -608,7 +634,7 @@ def requireSocket(*args):
for obj in args]
try:
s = socket.socket(*callargs)
- except socket.error as e:
+ except OSError as e:
# XXX: check errno?
err = str(e)
else:
@@ -645,11 +671,11 @@ class GeneralModuleTests(unittest.TestCase):
def testSocketError(self):
# Testing socket module exceptions
msg = "Error raising socket exception (%s)."
- with self.assertRaises(socket.error, msg=msg % 'socket.error'):
- raise socket.error
- with self.assertRaises(socket.error, msg=msg % 'socket.herror'):
+ with self.assertRaises(OSError, msg=msg % 'OSError'):
+ raise OSError
+ with self.assertRaises(OSError, msg=msg % 'socket.herror'):
raise socket.herror
- with self.assertRaises(socket.error, msg=msg % 'socket.gaierror'):
+ with self.assertRaises(OSError, msg=msg % 'socket.gaierror'):
raise socket.gaierror
def testSendtoErrors(self):
@@ -712,13 +738,13 @@ class GeneralModuleTests(unittest.TestCase):
hostname = socket.gethostname()
try:
ip = socket.gethostbyname(hostname)
- except socket.error:
+ except OSError:
# Probably name lookup wasn't set up right; skip this test
return
self.assertTrue(ip.find('.') >= 0, "Error resolving host to ip.")
try:
hname, aliases, ipaddrs = socket.gethostbyaddr(ip)
- except socket.error:
+ except OSError:
# Probably a similar problem as above; skip this test
return
all_host_names = [hostname, hname] + aliases
@@ -732,7 +758,7 @@ class GeneralModuleTests(unittest.TestCase):
oldhn = socket.gethostname()
try:
socket.sethostname('new')
- except socket.error as e:
+ except OSError as e:
if e.errno == errno.EPERM:
self.skipTest("test should be run as root")
else:
@@ -766,8 +792,8 @@ class GeneralModuleTests(unittest.TestCase):
'socket.if_nameindex() not available.')
def testInvalidInterfaceNameIndex(self):
# test nonexistent interface index/name
- self.assertRaises(socket.error, socket.if_indextoname, 0)
- self.assertRaises(socket.error, socket.if_nametoindex, '_DEADBEEF')
+ self.assertRaises(OSError, socket.if_indextoname, 0)
+ self.assertRaises(OSError, socket.if_nametoindex, '_DEADBEEF')
# test with invalid values
self.assertRaises(TypeError, socket.if_nametoindex, 0)
self.assertRaises(TypeError, socket.if_indextoname, '_DEADBEEF')
@@ -788,7 +814,7 @@ class GeneralModuleTests(unittest.TestCase):
try:
# On some versions, this crashes the interpreter.
socket.getnameinfo(('x', 0, 0, 0), 0)
- except socket.error:
+ except OSError:
pass
def testNtoH(self):
@@ -835,17 +861,17 @@ class GeneralModuleTests(unittest.TestCase):
try:
port = socket.getservbyname(service, 'tcp')
break
- except socket.error:
+ except OSError:
pass
else:
- raise socket.error
+ raise OSError
# Try same call with optional protocol omitted
port2 = socket.getservbyname(service)
eq(port, port2)
# Try udp, but don't barf if it doesn't exist
try:
udpport = socket.getservbyname(service, 'udp')
- except socket.error:
+ except OSError:
udpport = None
else:
eq(udpport, port)
@@ -901,7 +927,7 @@ class GeneralModuleTests(unittest.TestCase):
g = lambda a: inet_pton(AF_INET, a)
assertInvalid = lambda func,a: self.assertRaises(
- (socket.error, ValueError), func, a
+ (OSError, ValueError), func, a
)
self.assertEqual(b'\x00\x00\x00\x00', f('0.0.0.0'))
@@ -936,7 +962,7 @@ class GeneralModuleTests(unittest.TestCase):
return
f = lambda a: inet_pton(AF_INET6, a)
assertInvalid = lambda a: self.assertRaises(
- (socket.error, ValueError), f, a
+ (OSError, ValueError), f, a
)
self.assertEqual(b'\x00' * 16, f('::'))
@@ -985,7 +1011,7 @@ class GeneralModuleTests(unittest.TestCase):
from socket import inet_ntoa as f, inet_ntop, AF_INET
g = lambda a: inet_ntop(AF_INET, a)
assertInvalid = lambda func,a: self.assertRaises(
- (socket.error, ValueError), func, a
+ (OSError, ValueError), func, a
)
self.assertEqual('1.0.1.0', f(b'\x01\x00\x01\x00'))
@@ -1014,7 +1040,7 @@ class GeneralModuleTests(unittest.TestCase):
return
f = lambda a: inet_ntop(AF_INET6, a)
assertInvalid = lambda a: self.assertRaises(
- (socket.error, ValueError), f, a
+ (OSError, ValueError), f, a
)
self.assertEqual('::', f(b'\x00' * 16))
@@ -1042,7 +1068,7 @@ class GeneralModuleTests(unittest.TestCase):
# At least for eCos. This is required for the S/390 to pass.
try:
my_ip_addr = socket.gethostbyname(socket.gethostname())
- except socket.error:
+ except OSError:
# Probably name lookup wasn't set up right; skip this test
return
self.assertIn(name[0], ("0.0.0.0", my_ip_addr), '%s invalid' % name[0])
@@ -1069,7 +1095,7 @@ class GeneralModuleTests(unittest.TestCase):
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
sock.settimeout(1)
sock.close()
- self.assertRaises(socket.error, sock.send, b"spam")
+ self.assertRaises(OSError, sock.send, b"spam")
def testNewAttributes(self):
# testing .family, .type and .protocol
@@ -1168,7 +1194,7 @@ class GeneralModuleTests(unittest.TestCase):
def test_getnameinfo(self):
# only IP addresses are allowed
- self.assertRaises(socket.error, socket.getnameinfo, ('mail.python.org',0), 0)
+ self.assertRaises(OSError, socket.getnameinfo, ('mail.python.org',0), 0)
@unittest.skipUnless(support.is_resource_enabled('network'),
'network is not enabled')
@@ -1291,10 +1317,35 @@ class BasicCANTest(unittest.TestCase):
socket.PF_CAN
socket.CAN_RAW
+ @unittest.skipUnless(hasattr(socket, "CAN_BCM"),
+ 'socket.CAN_BCM required for this test.')
+ def testBCMConstants(self):
+ socket.CAN_BCM
+
+ # opcodes
+ socket.CAN_BCM_TX_SETUP # create (cyclic) transmission task
+ socket.CAN_BCM_TX_DELETE # remove (cyclic) transmission task
+ socket.CAN_BCM_TX_READ # read properties of (cyclic) transmission task
+ socket.CAN_BCM_TX_SEND # send one CAN frame
+ socket.CAN_BCM_RX_SETUP # create RX content filter subscription
+ socket.CAN_BCM_RX_DELETE # remove RX content filter subscription
+ socket.CAN_BCM_RX_READ # read properties of RX content filter subscription
+ socket.CAN_BCM_TX_STATUS # reply to TX_READ request
+ socket.CAN_BCM_TX_EXPIRED # notification on performed transmissions (count=0)
+ socket.CAN_BCM_RX_STATUS # reply to RX_READ request
+ socket.CAN_BCM_RX_TIMEOUT # cyclic message is absent
+ socket.CAN_BCM_RX_CHANGED # updated CAN frame (detected content change)
+
def testCreateSocket(self):
with socket.socket(socket.PF_CAN, socket.SOCK_RAW, socket.CAN_RAW) as s:
pass
+ @unittest.skipUnless(hasattr(socket, "CAN_BCM"),
+ 'socket.CAN_BCM required for this test.')
+ def testCreateBCMSocket(self):
+ with socket.socket(socket.PF_CAN, socket.SOCK_DGRAM, socket.CAN_BCM) as s:
+ pass
+
def testBindAny(self):
with socket.socket(socket.PF_CAN, socket.SOCK_RAW, socket.CAN_RAW) as s:
s.bind(('', ))
@@ -1302,7 +1353,7 @@ class BasicCANTest(unittest.TestCase):
def testTooLongInterfaceName(self):
# most systems limit IFNAMSIZ to 16, take 1024 to be sure
with socket.socket(socket.PF_CAN, socket.SOCK_RAW, socket.CAN_RAW) as s:
- self.assertRaisesRegex(socket.error, 'interface name too long',
+ self.assertRaisesRegex(OSError, 'interface name too long',
s.bind, ('x' * 1024,))
@unittest.skipUnless(hasattr(socket, "CAN_RAW_LOOPBACK"),
@@ -1327,19 +1378,8 @@ class BasicCANTest(unittest.TestCase):
@unittest.skipUnless(HAVE_SOCKET_CAN, 'SocketCan required for this test.')
-@unittest.skipUnless(thread, 'Threading required for this test.')
class CANTest(ThreadedCANSocketTest):
- """The CAN frame structure is defined in <linux/can.h>:
-
- struct can_frame {
- canid_t can_id; /* 32 bit CAN_ID + EFF/RTR/ERR flags */
- __u8 can_dlc; /* data length code: 0 .. 8 */
- __u8 data[8] __attribute__((aligned(8)));
- };
- """
- can_frame_fmt = "=IB3x8s"
-
def __init__(self, methodName='runTest'):
ThreadedCANSocketTest.__init__(self, methodName=methodName)
@@ -1388,6 +1428,46 @@ class CANTest(ThreadedCANSocketTest):
self.cf2 = self.build_can_frame(0x12, b'\x99\x22\x33')
self.cli.send(self.cf2)
+ @unittest.skipUnless(hasattr(socket, "CAN_BCM"),
+ 'socket.CAN_BCM required for this test.')
+ def _testBCM(self):
+ cf, addr = self.cli.recvfrom(self.bufsize)
+ self.assertEqual(self.cf, cf)
+ can_id, can_dlc, data = self.dissect_can_frame(cf)
+ self.assertEqual(self.can_id, can_id)
+ self.assertEqual(self.data, data)
+
+ @unittest.skipUnless(hasattr(socket, "CAN_BCM"),
+ 'socket.CAN_BCM required for this test.')
+ def testBCM(self):
+ bcm = socket.socket(socket.PF_CAN, socket.SOCK_DGRAM, socket.CAN_BCM)
+ self.addCleanup(bcm.close)
+ bcm.connect((self.interface,))
+ self.can_id = 0x123
+ self.data = bytes([0xc0, 0xff, 0xee])
+ self.cf = self.build_can_frame(self.can_id, self.data)
+ opcode = socket.CAN_BCM_TX_SEND
+ flags = 0
+ count = 0
+ ival1_seconds = ival1_usec = ival2_seconds = ival2_usec = 0
+ bcm_can_id = 0x0222
+ nframes = 1
+ assert len(self.cf) == 16
+ header = struct.pack(self.bcm_cmd_msg_fmt,
+ opcode,
+ flags,
+ count,
+ ival1_seconds,
+ ival1_usec,
+ ival2_seconds,
+ ival2_usec,
+ bcm_can_id,
+ nframes,
+ )
+ header_plus_frame = header + self.cf
+ bytes_sent = bcm.send(header_plus_frame)
+ self.assertEqual(bytes_sent, len(header_plus_frame))
+
@unittest.skipUnless(HAVE_SOCKET_RDS, 'RDS sockets required for this test.')
class BasicRDSTest(unittest.TestCase):
@@ -1602,7 +1682,7 @@ class BasicTCPTest(SocketConnectedTest):
self.assertEqual(f, fileno)
# cli_conn cannot be used anymore...
self.assertTrue(self.cli_conn._closed)
- self.assertRaises(socket.error, self.cli_conn.recv, 1024)
+ self.assertRaises(OSError, self.cli_conn.recv, 1024)
self.cli_conn.close()
# ...but we can create another socket using the (still open)
# file descriptor
@@ -1971,7 +2051,7 @@ class SendmsgTests(SendrecvmsgServerTimeoutBase):
def _testSendmsgExcessCmsgReject(self):
if not hasattr(socket, "CMSG_SPACE"):
# Can only send one item
- with self.assertRaises(socket.error) as cm:
+ with self.assertRaises(OSError) as cm:
self.sendmsgToServer([MSG], [(0, 0, b""), (0, 0, b"")])
self.assertIsNone(cm.exception.errno)
self.sendToServer(b"done")
@@ -1982,7 +2062,7 @@ class SendmsgTests(SendrecvmsgServerTimeoutBase):
def _testSendmsgAfterClose(self):
self.cli_sock.close()
- self.assertRaises(socket.error, self.sendmsgToServer, [MSG])
+ self.assertRaises(OSError, self.sendmsgToServer, [MSG])
class SendmsgStreamTests(SendmsgTests):
@@ -2026,7 +2106,7 @@ class SendmsgStreamTests(SendmsgTests):
@testSendmsgDontWait.client_skip
def _testSendmsgDontWait(self):
try:
- with self.assertRaises(socket.error) as cm:
+ with self.assertRaises(OSError) as cm:
while True:
self.sendmsgToServer([b"a"*512], [], socket.MSG_DONTWAIT)
self.assertIn(cm.exception.errno,
@@ -2046,9 +2126,9 @@ class SendmsgConnectionlessTests(SendmsgTests):
pass
def _testSendmsgNoDestAddr(self):
- self.assertRaises(socket.error, self.cli_sock.sendmsg,
+ self.assertRaises(OSError, self.cli_sock.sendmsg,
[MSG])
- self.assertRaises(socket.error, self.cli_sock.sendmsg,
+ self.assertRaises(OSError, self.cli_sock.sendmsg,
[MSG], [], 0, None)
@@ -2134,7 +2214,7 @@ class RecvmsgGenericTests(SendrecvmsgBase):
def testRecvmsgAfterClose(self):
# Check that recvmsg[_into]() fails on a closed socket.
self.serv_sock.close()
- self.assertRaises(socket.error, self.doRecvmsg, self.serv_sock, 1024)
+ self.assertRaises(OSError, self.doRecvmsg, self.serv_sock, 1024)
def _testRecvmsgAfterClose(self):
pass
@@ -2582,7 +2662,7 @@ class SCMRightsTest(SendrecvmsgServerTimeoutBase):
# call fails, just send msg with no ancillary data.
try:
nbytes = self.sendmsgToServer([msg], ancdata)
- except socket.error as e:
+ except OSError as e:
# Check that it was the system call that failed
self.assertIsInstance(e.errno, int)
nbytes = self.sendmsgToServer([msg])
@@ -2960,7 +3040,7 @@ class RFC3542AncillaryTest(SendrecvmsgServerTimeoutBase):
array.array("i", [self.traffic_class]).tobytes() + b"\x00"),
(socket.IPPROTO_IPV6, socket.IPV6_HOPLIMIT,
array.array("i", [self.hop_limit]))])
- except socket.error as e:
+ except OSError as e:
self.assertIsInstance(e.errno, int)
nbytes = self.sendmsgToServer(
[MSG],
@@ -3414,10 +3494,10 @@ class InterruptedRecvTimeoutTest(InterruptedTimeoutBase, UDPTestBase):
self.serv.settimeout(self.timeout)
def checkInterruptedRecv(self, func, *args, **kwargs):
- # Check that func(*args, **kwargs) raises socket.error with an
+ # Check that func(*args, **kwargs) raises OSError with an
# errno of EINTR when interrupted by a signal.
self.setAlarm(self.alarm_time)
- with self.assertRaises(socket.error) as cm:
+ with self.assertRaises(OSError) as cm:
func(*args, **kwargs)
self.assertNotIsInstance(cm.exception, socket.timeout)
self.assertEqual(cm.exception.errno, errno.EINTR)
@@ -3474,9 +3554,9 @@ class InterruptedSendTimeoutTest(InterruptedTimeoutBase,
def checkInterruptedSend(self, func, *args, **kwargs):
# Check that func(*args, **kwargs), run in a loop, raises
- # socket.error with an errno of EINTR when interrupted by a
+ # OSError with an errno of EINTR when interrupted by a
# signal.
- with self.assertRaises(socket.error) as cm:
+ with self.assertRaises(OSError) as cm:
while True:
self.setAlarm(self.alarm_time)
func(*args, **kwargs)
@@ -3573,7 +3653,7 @@ class NonBlockingTCPTests(ThreadedTCPSocketTest):
start = time.time()
try:
self.serv.accept()
- except socket.error:
+ except OSError:
pass
end = time.time()
self.assertTrue((end - start) < 1.0, "Error setting non-blocking mode.")
@@ -3598,7 +3678,7 @@ class NonBlockingTCPTests(ThreadedTCPSocketTest):
start = time.time()
try:
self.serv.accept()
- except socket.error:
+ except OSError:
pass
end = time.time()
self.assertTrue((end - start) < 1.0, "Error creating with non-blocking mode.")
@@ -3628,7 +3708,7 @@ class NonBlockingTCPTests(ThreadedTCPSocketTest):
self.serv.setblocking(0)
try:
conn, addr = self.serv.accept()
- except socket.error:
+ except OSError:
pass
else:
self.fail("Error trying to do non-blocking accept.")
@@ -3658,7 +3738,7 @@ class NonBlockingTCPTests(ThreadedTCPSocketTest):
conn.setblocking(0)
try:
msg = conn.recv(len(MSG))
- except socket.error:
+ except OSError:
pass
else:
self.fail("Error trying to do non-blocking recv.")
@@ -3741,7 +3821,7 @@ class FileObjectClassTestCase(SocketConnectedTest):
# First read raises a timeout
self.assertRaises(socket.timeout, self.read_file.read, 1)
# Second read is disallowed
- with self.assertRaises(IOError) as ctx:
+ with self.assertRaises(OSError) as ctx:
self.read_file.read(1)
self.assertIn("cannot read from timed out object", str(ctx.exception))
@@ -3833,7 +3913,7 @@ class FileObjectClassTestCase(SocketConnectedTest):
self.read_file.close()
self.assertRaises(ValueError, self.read_file.fileno)
self.cli_conn.close()
- self.assertRaises(socket.error, self.cli_conn.getsockname)
+ self.assertRaises(OSError, self.cli_conn.getsockname)
def _testRealClose(self):
pass
@@ -3870,7 +3950,7 @@ class FileObjectInterruptedTestCase(unittest.TestCase):
@staticmethod
def _raise_eintr():
- raise socket.error(errno.EINTR, "interrupted")
+ raise OSError(errno.EINTR, "interrupted")
def _textiowrap_mock_socket(self, mock, buffering=-1):
raw = socket.SocketIO(mock, "r")
@@ -3982,7 +4062,7 @@ class UnbufferedFileObjectClassTestCase(FileObjectClassTestCase):
self.assertEqual(msg, self.read_msg)
# ...until the file is itself closed
self.read_file.close()
- self.assertRaises(socket.error, self.cli_conn.recv, 1024)
+ self.assertRaises(OSError, self.cli_conn.recv, 1024)
def _testMakefileClose(self):
self.write_file.write(self.write_msg)
@@ -4131,7 +4211,7 @@ class NetworkConnectionNoServer(unittest.TestCase):
port = support.find_unused_port()
cli = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
self.addCleanup(cli.close)
- with self.assertRaises(socket.error) as cm:
+ with self.assertRaises(OSError) as cm:
cli.connect((HOST, port))
self.assertEqual(cm.exception.errno, errno.ECONNREFUSED)
@@ -4139,7 +4219,7 @@ class NetworkConnectionNoServer(unittest.TestCase):
# Issue #9792: errors raised by create_connection() should have
# a proper errno attribute.
port = support.find_unused_port()
- with self.assertRaises(socket.error) as cm:
+ with self.assertRaises(OSError) as cm:
socket.create_connection((HOST, port))
# Issue #16257: create_connection() calls getaddrinfo() against
@@ -4287,7 +4367,7 @@ class TCPTimeoutTest(SocketTCPTest):
foo = self.serv.accept()
except socket.timeout:
self.fail("caught timeout instead of error (TCP)")
- except socket.error:
+ except OSError:
ok = True
except:
self.fail("caught unexpected exception (TCP)")
@@ -4344,7 +4424,7 @@ class UDPTimeoutTest(SocketUDPTest):
foo = self.serv.recv(1024)
except socket.timeout:
self.fail("caught timeout instead of error (UDP)")
- except socket.error:
+ except OSError:
ok = True
except:
self.fail("caught unexpected exception (UDP)")
@@ -4354,10 +4434,10 @@ class UDPTimeoutTest(SocketUDPTest):
class TestExceptions(unittest.TestCase):
def testExceptionTree(self):
- self.assertTrue(issubclass(socket.error, Exception))
- self.assertTrue(issubclass(socket.herror, socket.error))
- self.assertTrue(issubclass(socket.gaierror, socket.error))
- self.assertTrue(issubclass(socket.timeout, socket.error))
+ self.assertTrue(issubclass(OSError, Exception))
+ self.assertTrue(issubclass(socket.herror, OSError))
+ self.assertTrue(issubclass(socket.gaierror, OSError))
+ self.assertTrue(issubclass(socket.timeout, OSError))
class TestLinuxAbstractNamespace(unittest.TestCase):
@@ -4383,7 +4463,7 @@ class TestLinuxAbstractNamespace(unittest.TestCase):
def testNameOverflow(self):
address = "\x00" + "h" * self.UNIX_PATH_MAX
with socket.socket(socket.AF_UNIX, socket.SOCK_STREAM) as s:
- self.assertRaises(socket.error, s.bind, address)
+ self.assertRaises(OSError, s.bind, address)
def testStrName(self):
# Check that an abstract name can be passed as a string.
@@ -4622,7 +4702,7 @@ class ContextManagersTest(ThreadedTCPSocketTest):
self.assertTrue(sock._closed)
# exception inside with block
with socket.socket() as sock:
- self.assertRaises(socket.error, sock.sendall, b'foo')
+ self.assertRaises(OSError, sock.sendall, b'foo')
self.assertTrue(sock._closed)
def testCreateConnectionBase(self):
@@ -4650,7 +4730,7 @@ class ContextManagersTest(ThreadedTCPSocketTest):
with socket.create_connection(address) as sock:
sock.close()
self.assertTrue(sock._closed)
- self.assertRaises(socket.error, sock.sendall, b'foo')
+ self.assertRaises(OSError, sock.sendall, b'foo')
@unittest.skipUnless(hasattr(socket, "SOCK_CLOEXEC"),
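For reference, a small sketch of how the "=IB3x8s" layout described in the SocketCANTest docstring packs a frame: 32-bit can_id, 1-byte data length code, 3 pad bytes, 8 data bytes (illustrative, not from the patch):

    import struct

    can_frame_fmt = "=IB3x8s"
    data = b"\xc0\xff\xee"
    frame = struct.pack(can_frame_fmt, 0x123, len(data), data)   # '8s' pads with NULs
    assert struct.calcsize(can_frame_fmt) == len(frame) == 16
    can_id, can_dlc, payload = struct.unpack(can_frame_fmt, frame)
    assert (can_id, payload[:can_dlc]) == (0x123, data)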
diff --git a/Lib/test/test_socketserver.py b/Lib/test/test_socketserver.py
index 464057e..84a5e7b 100644
--- a/Lib/test/test_socketserver.py
+++ b/Lib/test/test_socketserver.py
@@ -27,7 +27,7 @@ TEST_STR = b"hello world\n"
HOST = test.support.HOST
HAVE_UNIX_SOCKETS = hasattr(socket, "AF_UNIX")
-HAVE_FORKING = hasattr(os, "fork") and os.name != "os2"
+HAVE_FORKING = hasattr(os, "fork")
def signal_alarm(n):
"""Call signal.alarm when it exists (i.e. not on Windows)."""
@@ -82,7 +82,7 @@ class SocketServerTest(unittest.TestCase):
for fn in self.test_files:
try:
os.remove(fn)
- except os.error:
+ except OSError:
pass
self.test_files[:] = []
@@ -93,21 +93,7 @@ class SocketServerTest(unittest.TestCase):
# XXX: We need a way to tell AF_UNIX to pick its own name
# like AF_INET provides port==0.
dir = None
- if os.name == 'os2':
- dir = '\socket'
fn = tempfile.mktemp(prefix='unix_socket.', dir=dir)
- if os.name == 'os2':
- # AF_UNIX socket names on OS/2 require a specific prefix
- # which can't include a drive letter and must also use
- # backslashes as directory separators
- if fn[1] == ':':
- fn = fn[2:]
- if fn[0] in (os.sep, os.altsep):
- fn = fn[1:]
- if os.sep == '/':
- fn = fn.replace(os.sep, os.altsep)
- else:
- fn = fn.replace(os.altsep, os.sep)
self.test_files.append(fn)
return fn
diff --git a/Lib/test/test_ssl.py b/Lib/test/test_ssl.py
index 815475e..8735832 100644
--- a/Lib/test/test_ssl.py
+++ b/Lib/test/test_ssl.py
@@ -48,6 +48,11 @@ KEY_PASSWORD = "somepass"
CAPATH = data_file("capath")
BYTES_CAPATH = os.fsencode(CAPATH)
+# Two keys and certs signed by the same CA (for SNI tests)
+SIGNED_CERTFILE = data_file("keycert3.pem")
+SIGNED_CERTFILE2 = data_file("keycert4.pem")
+SIGNING_CA = data_file("pycacert.pem")
+
SVN_PYTHON_ORG_ROOT_CERT = data_file("https_svn_python_org_root.pem")
EMPTYCERT = data_file("nullcert.pem")
@@ -59,6 +64,7 @@ NOKIACERT = data_file("nokia.pem")
DHFILE = data_file("dh512.pem")
BYTES_DHFILE = os.fsencode(DHFILE)
+
def handle_error(prefix):
exc_format = ' '.join(traceback.format_exception(*sys.exc_info()))
if support.verbose:
@@ -89,6 +95,8 @@ def skip_if_broken_ubuntu_ssl(func):
else:
return func
+needs_sni = unittest.skipUnless(ssl.HAS_SNI, "SNI support needed for this test")
+
class BasicSocketTests(unittest.TestCase):
@@ -142,6 +150,7 @@ class BasicSocketTests(unittest.TestCase):
(('organizationName', 'Python Software Foundation'),),
(('commonName', 'localhost'),))
)
+ # Note that the next three asserts will fail if the keys are regenerated.
self.assertEqual(p['notAfter'], 'Oct 5 23:01:56 2020 GMT')
self.assertEqual(p['notBefore'], 'Oct 8 23:01:56 2010 GMT')
self.assertEqual(p['serialNumber'], 'D7C7381919AFC24E')
@@ -214,15 +223,15 @@ class BasicSocketTests(unittest.TestCase):
def test_wrapped_unconnected(self):
# Methods on an unconnected SSLSocket propagate the original
- # socket.error raise by the underlying socket object.
+ # OSError raise by the underlying socket object.
s = socket.socket(socket.AF_INET)
with ssl.wrap_socket(s) as ss:
- self.assertRaises(socket.error, ss.recv, 1)
- self.assertRaises(socket.error, ss.recv_into, bytearray(b'x'))
- self.assertRaises(socket.error, ss.recvfrom, 1)
- self.assertRaises(socket.error, ss.recvfrom_into, bytearray(b'x'), 1)
- self.assertRaises(socket.error, ss.send, b'x')
- self.assertRaises(socket.error, ss.sendto, b'x', ('0.0.0.0', 0))
+ self.assertRaises(OSError, ss.recv, 1)
+ self.assertRaises(OSError, ss.recv_into, bytearray(b'x'))
+ self.assertRaises(OSError, ss.recvfrom, 1)
+ self.assertRaises(OSError, ss.recvfrom_into, bytearray(b'x'), 1)
+ self.assertRaises(OSError, ss.send, b'x')
+ self.assertRaises(OSError, ss.sendto, b'x', ('0.0.0.0', 0))
def test_timeout(self):
# Issue #8524: when creating an SSL socket, the timeout of the
@@ -247,15 +256,15 @@ class BasicSocketTests(unittest.TestCase):
with ssl.wrap_socket(sock, server_side=True, certfile=CERTFILE) as s:
self.assertRaisesRegex(ValueError, "can't connect in server-side mode",
s.connect, (HOST, 8080))
- with self.assertRaises(IOError) as cm:
+ with self.assertRaises(OSError) as cm:
with socket.socket() as sock:
ssl.wrap_socket(sock, certfile=WRONGCERT)
self.assertEqual(cm.exception.errno, errno.ENOENT)
- with self.assertRaises(IOError) as cm:
+ with self.assertRaises(OSError) as cm:
with socket.socket() as sock:
ssl.wrap_socket(sock, certfile=CERTFILE, keyfile=WRONGCERT)
self.assertEqual(cm.exception.errno, errno.ENOENT)
- with self.assertRaises(IOError) as cm:
+ with self.assertRaises(OSError) as cm:
with socket.socket() as sock:
ssl.wrap_socket(sock, certfile=WRONGCERT, keyfile=WRONGCERT)
self.assertEqual(cm.exception.errno, errno.ENOENT)
@@ -451,7 +460,7 @@ class ContextTests(unittest.TestCase):
ctx.load_cert_chain(CERTFILE)
ctx.load_cert_chain(CERTFILE, keyfile=CERTFILE)
self.assertRaises(TypeError, ctx.load_cert_chain, keyfile=CERTFILE)
- with self.assertRaises(IOError) as cm:
+ with self.assertRaises(OSError) as cm:
ctx.load_cert_chain(WRONGCERT)
self.assertEqual(cm.exception.errno, errno.ENOENT)
with self.assertRaisesRegex(ssl.SSLError, "PEM lib"):
@@ -536,7 +545,7 @@ class ContextTests(unittest.TestCase):
ctx.load_verify_locations(cafile=BYTES_CERTFILE, capath=None)
self.assertRaises(TypeError, ctx.load_verify_locations)
self.assertRaises(TypeError, ctx.load_verify_locations, None, None)
- with self.assertRaises(IOError) as cm:
+ with self.assertRaises(OSError) as cm:
ctx.load_verify_locations(WRONGCERT)
self.assertEqual(cm.exception.errno, errno.ENOENT)
with self.assertRaisesRegex(ssl.SSLError, "PEM lib"):
@@ -594,6 +603,34 @@ class ContextTests(unittest.TestCase):
self.assertRaises(ValueError, ctx.set_ecdh_curve, "foo")
self.assertRaises(ValueError, ctx.set_ecdh_curve, b"foo")
+ @needs_sni
+ def test_sni_callback(self):
+ ctx = ssl.SSLContext(ssl.PROTOCOL_TLSv1)
+
+ # set_servername_callback expects a callable, or None
+ self.assertRaises(TypeError, ctx.set_servername_callback)
+ self.assertRaises(TypeError, ctx.set_servername_callback, 4)
+ self.assertRaises(TypeError, ctx.set_servername_callback, "")
+ self.assertRaises(TypeError, ctx.set_servername_callback, ctx)
+
+ def dummycallback(sock, servername, ctx):
+ pass
+ ctx.set_servername_callback(None)
+ ctx.set_servername_callback(dummycallback)
+
+ @needs_sni
+ def test_sni_callback_refcycle(self):
+ # Reference cycles through the servername callback are detected
+ # and cleared.
+ ctx = ssl.SSLContext(ssl.PROTOCOL_TLSv1)
+ def dummycallback(sock, servername, ctx, cycle=ctx):
+ pass
+ ctx.set_servername_callback(dummycallback)
+ wr = weakref.ref(ctx)
+ del ctx, dummycallback
+ gc.collect()
+ self.assertIs(wr(), None)
+
class SSLErrorTests(unittest.TestCase):
@@ -1036,7 +1073,7 @@ else:
sys.stdout.write(" server: read %r (%s), sending back %r (%s)...\n"
% (msg, ctype, msg.lower(), ctype))
self.write(msg.lower())
- except socket.error:
+ except OSError:
if self.server.chatty:
handle_error("Test server failure:\n")
self.close()
@@ -1146,7 +1183,7 @@ else:
return self.handle_close()
except ssl.SSLError:
raise
- except socket.error as err:
+ except OSError as err:
if err.args[0] == errno.ECONNABORTED:
return self.handle_close()
else:
@@ -1250,19 +1287,19 @@ else:
except ssl.SSLError as x:
if support.verbose:
sys.stdout.write("\nSSLError is %s\n" % x.args[1])
- except socket.error as x:
+ except OSError as x:
if support.verbose:
- sys.stdout.write("\nsocket.error is %s\n" % x.args[1])
- except IOError as x:
+ sys.stdout.write("\nOSError is %s\n" % x.args[1])
+ except OSError as x:
if x.errno != errno.ENOENT:
raise
if support.verbose:
- sys.stdout.write("\IOError is %s\n" % str(x))
+ sys.stdout.write("\OSError is %s\n" % str(x))
else:
raise AssertionError("Use of invalid cert should have failed!")
def server_params_test(client_context, server_context, indata=b"FOO\n",
- chatty=True, connectionchatty=False):
+ chatty=True, connectionchatty=False, sni_name=None):
"""
Launch a server, connect a client to it and try various reads
and writes.
@@ -1272,7 +1309,8 @@ else:
chatty=chatty,
connectionchatty=False)
with server:
- with client_context.wrap_socket(socket.socket()) as s:
+ with client_context.wrap_socket(socket.socket(),
+ server_hostname=sni_name) as s:
s.connect((HOST, server.port))
for arg in [indata, bytearray(indata), memoryview(indata)]:
if connectionchatty:
@@ -1296,6 +1334,7 @@ else:
stats.update({
'compression': s.compression(),
'cipher': s.cipher(),
+ 'peercert': s.getpeercert(),
'client_npn_protocol': s.selected_npn_protocol()
})
s.close()
@@ -1337,7 +1376,7 @@ else:
except ssl.SSLError:
if expect_success:
raise
- except socket.error as e:
+ except OSError as e:
if expect_success or e.errno != errno.ECONNRESET:
raise
else:
@@ -1411,7 +1450,7 @@ else:
"badkey.pem"))
def test_rude_shutdown(self):
- """A brutal shutdown of an SSL server should raise an IOError
+ """A brutal shutdown of an SSL server should raise an OSError
in the client when attempting handshake.
"""
listener_ready = threading.Event()
@@ -1439,7 +1478,7 @@ else:
listener_gone.wait()
try:
ssl_sock = ssl.wrap_socket(c)
- except IOError:
+ except OSError:
pass
else:
self.fail('connecting to closed SSL socket should have failed')
@@ -1482,7 +1521,7 @@ else:
if hasattr(ssl, 'PROTOCOL_SSLv2'):
try:
try_protocol_combo(ssl.PROTOCOL_SSLv23, ssl.PROTOCOL_SSLv2, True)
- except (ssl.SSLError, socket.error) as x:
+ except OSError as x:
# this fails on some older versions of OpenSSL (0.9.7l, for instance)
if support.verbose:
sys.stdout.write(
@@ -1602,7 +1641,7 @@ else:
def test_socketserver(self):
"""Using a SocketServer to create and manage SSL connections."""
- server = make_https_server(self, CERTFILE)
+ server = make_https_server(self, certfile=CERTFILE)
# try to connect
if support.verbose:
sys.stdout.write('\n')
@@ -1869,7 +1908,7 @@ else:
ssl_version=ssl.PROTOCOL_SSLv23,
chatty=False) as server:
with context.wrap_socket(socket.socket()) as s:
- with self.assertRaises((OSError, ssl.SSLError)):
+ with self.assertRaises(OSError):
s.connect((HOST, server.port))
self.assertIn("no shared cipher", str(server.conn_errors[0]))
@@ -2002,6 +2041,100 @@ else:
if len(stats['server_npn_protocols']) else 'nothing'
self.assertEqual(server_result, expected, msg % (server_result, "server"))
+ def sni_contexts(self):
+ server_context = ssl.SSLContext(ssl.PROTOCOL_TLSv1)
+ server_context.load_cert_chain(SIGNED_CERTFILE)
+ other_context = ssl.SSLContext(ssl.PROTOCOL_TLSv1)
+ other_context.load_cert_chain(SIGNED_CERTFILE2)
+ client_context = ssl.SSLContext(ssl.PROTOCOL_TLSv1)
+ client_context.verify_mode = ssl.CERT_REQUIRED
+ client_context.load_verify_locations(SIGNING_CA)
+ return server_context, other_context, client_context
+
+ def check_common_name(self, stats, name):
+ cert = stats['peercert']
+ self.assertIn((('commonName', name),), cert['subject'])
+
+ @needs_sni
+ def test_sni_callback(self):
+ calls = []
+ server_context, other_context, client_context = self.sni_contexts()
+
+ def servername_cb(ssl_sock, server_name, initial_context):
+ calls.append((server_name, initial_context))
+ ssl_sock.context = other_context
+ server_context.set_servername_callback(servername_cb)
+
+ stats = server_params_test(client_context, server_context,
+ chatty=True,
+ sni_name='supermessage')
+ # The hostname was fetched properly, and the certificate was
+ # changed for the connection.
+ self.assertEqual(calls, [("supermessage", server_context)])
+ # CERTFILE4 was selected
+ self.check_common_name(stats, 'fakehostname')
+
+ # Check disabling the callback
+ calls = []
+ server_context.set_servername_callback(None)
+
+ stats = server_params_test(client_context, server_context,
+ chatty=True,
+ sni_name='notfunny')
+ # Certificate didn't change
+ self.check_common_name(stats, 'localhost')
+ self.assertEqual(calls, [])
+
+ @needs_sni
+ def test_sni_callback_alert(self):
+ # Returning a TLS alert is reflected to the connecting client
+ server_context, other_context, client_context = self.sni_contexts()
+
+ def cb_returning_alert(ssl_sock, server_name, initial_context):
+ return ssl.ALERT_DESCRIPTION_ACCESS_DENIED
+ server_context.set_servername_callback(cb_returning_alert)
+
+ with self.assertRaises(ssl.SSLError) as cm:
+ stats = server_params_test(client_context, server_context,
+ chatty=False,
+ sni_name='supermessage')
+ self.assertEqual(cm.exception.reason, 'TLSV1_ALERT_ACCESS_DENIED')
+
+ @needs_sni
+ def test_sni_callback_raising(self):
+ # Raising fails the connection with a TLS handshake failure alert.
+ server_context, other_context, client_context = self.sni_contexts()
+
+ def cb_raising(ssl_sock, server_name, initial_context):
+ 1/0
+ server_context.set_servername_callback(cb_raising)
+
+ with self.assertRaises(ssl.SSLError) as cm, \
+ support.captured_stderr() as stderr:
+ stats = server_params_test(client_context, server_context,
+ chatty=False,
+ sni_name='supermessage')
+ self.assertEqual(cm.exception.reason, 'SSLV3_ALERT_HANDSHAKE_FAILURE')
+ self.assertIn("ZeroDivisionError", stderr.getvalue())
+
+ @needs_sni
+ def test_sni_callback_wrong_return_type(self):
+ # Returning the wrong type terminates the TLS connection with an
+ # internal error alert.
+ server_context, other_context, client_context = self.sni_contexts()
+
+ def cb_wrong_return_type(ssl_sock, server_name, initial_context):
+ return "foo"
+ server_context.set_servername_callback(cb_wrong_return_type)
+
+ with self.assertRaises(ssl.SSLError) as cm, \
+ support.captured_stderr() as stderr:
+ stats = server_params_test(client_context, server_context,
+ chatty=False,
+ sni_name='supermessage')
+ self.assertEqual(cm.exception.reason, 'TLSV1_ALERT_INTERNAL_ERROR')
+ self.assertIn("TypeError", stderr.getvalue())
+
def test_main(verbose=False):
if support.verbose:
@@ -2025,6 +2158,7 @@ def test_main(verbose=False):
for filename in [
CERTFILE, SVN_PYTHON_ORG_ROOT_CERT, BYTES_CERTFILE,
ONLYCERT, ONLYKEY, BYTES_ONLYCERT, BYTES_ONLYKEY,
+ SIGNED_CERTFILE, SIGNED_CERTFILE2, SIGNING_CA,
BADCERT, BADKEY, EMPTYCERT]:
if not os.path.exists(filename):
raise support.TestFailed("Can't read certificate file %r" % filename)
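
For context on the servername-callback tests added above, here is a minimal sketch (not part of the patch) of how a server might use SSLContext.set_servername_callback; the certificate file names and host name below are illustrative:

import ssl

# Contexts for the default host and an alternate virtual host.
# The certificate paths are placeholders.
default_ctx = ssl.SSLContext(ssl.PROTOCOL_TLSv1)
default_ctx.load_cert_chain("default-cert.pem")
alt_ctx = ssl.SSLContext(ssl.PROTOCOL_TLSv1)
alt_ctx.load_cert_chain("alt-cert.pem")

def servername_cb(ssl_sock, server_name, initial_context):
    # Swap in the certificate for the name the client asked for.
    if server_name == "alt.example.com":
        ssl_sock.context = alt_ctx
    # Returning None continues the handshake; returning an
    # ssl.ALERT_DESCRIPTION_* constant aborts it with that alert.
    return None

default_ctx.set_servername_callback(servername_cb)
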
diff --git a/Lib/test/test_struct.py b/Lib/test/test_struct.py
index 22374d2..eb97a2c 100644
--- a/Lib/test/test_struct.py
+++ b/Lib/test/test_struct.py
@@ -489,7 +489,7 @@ class StructTest(unittest.TestCase):
def test_bool(self):
class ExplodingBool(object):
def __bool__(self):
- raise IOError
+ raise OSError
for prefix in tuple("<>!=")+('',):
false = (), [], [], '', 0
true = [1], 'test', 5, -1, 0xffffffff+1, 0xffffffff/2
@@ -520,10 +520,10 @@ class StructTest(unittest.TestCase):
try:
struct.pack(prefix + '?', ExplodingBool())
- except IOError:
+ except OSError:
pass
else:
- self.fail("Expected IOError: struct.pack(%r, "
+ self.fail("Expected OSError: struct.pack(%r, "
"ExplodingBool())" % (prefix + '?'))
for c in [b'\x01', b'\x7f', b'\xff', b'\x0f', b'\xf0']:
diff --git a/Lib/test/test_subprocess.py b/Lib/test/test_subprocess.py
index dd720fe..28f6935 100644
--- a/Lib/test/test_subprocess.py
+++ b/Lib/test/test_subprocess.py
@@ -977,8 +977,7 @@ class ProcessTestCase(BaseTestCase):
# value for that limit, but Windows has 2048, so we loop
# 1024 times (each call leaked two fds).
for i in range(1024):
- # Windows raises IOError. Others raise OSError.
- with self.assertRaises(EnvironmentError) as c:
+ with self.assertRaises(OSError) as c:
subprocess.Popen(['nonexisting_i_hope'],
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
@@ -1231,7 +1230,7 @@ class POSIXProcessTestCase(BaseTestCase):
try:
p = subprocess.Popen([sys.executable, "-c", ""],
preexec_fn=raise_it)
- except RuntimeError as e:
+ except subprocess.SubprocessError as e:
self.assertTrue(
subprocess._posixsubprocess,
"Expected a ValueError from the preexec_fn")
@@ -1270,9 +1269,10 @@ class POSIXProcessTestCase(BaseTestCase):
"""Issue16140: Don't double close pipes on preexec error."""
def raise_it():
- raise RuntimeError("force the _execute_child() errpipe_data path.")
+ raise subprocess.SubprocessError(
+ "force the _execute_child() errpipe_data path.")
- with self.assertRaises(RuntimeError):
+ with self.assertRaises(subprocess.SubprocessError):
self._TestExecuteChildPopen(
self, [sys.executable, "-c", "pass"],
stdin=subprocess.PIPE, stdout=subprocess.PIPE,
@@ -1635,12 +1635,12 @@ class POSIXProcessTestCase(BaseTestCase):
# Pure Python implementations keeps the message
self.assertIsNone(subprocess._posixsubprocess)
self.assertEqual(str(err), "surrogate:\uDCff")
- except RuntimeError as err:
+ except subprocess.SubprocessError as err:
# _posixsubprocess uses a default message
self.assertIsNotNone(subprocess._posixsubprocess)
self.assertEqual(str(err), "Exception occurred in preexec_fn.")
else:
- self.fail("Expected ValueError or RuntimeError")
+ self.fail("Expected ValueError or subprocess.SubprocessError")
def test_undecodable_env(self):
for key, value in (('test', 'abc\uDCFF'), ('test\uDCFF', '42')):
@@ -1927,7 +1927,7 @@ class POSIXProcessTestCase(BaseTestCase):
# let some time for the process to exit, and create a new Popen: this
# should trigger the wait() of p
time.sleep(0.2)
- with self.assertRaises(EnvironmentError) as c:
+ with self.assertRaises(OSError) as c:
with subprocess.Popen(['nonexisting_i_hope'],
stdout=subprocess.PIPE,
stderr=subprocess.PIPE) as proc:
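
As a small illustration of the exception change tested above (POSIX only; the helper name is made up), an error raised in preexec_fn now surfaces as subprocess.SubprocessError rather than RuntimeError:

import subprocess
import sys

def failing_preexec():
    # Mirrors the updated test: the hook fails before exec().
    raise subprocess.SubprocessError("simulated preexec failure")

try:
    subprocess.Popen([sys.executable, "-c", "pass"],
                     preexec_fn=failing_preexec)
except subprocess.SubprocessError as exc:
    print("child setup failed:", exc)
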
diff --git a/Lib/test/test_sundry.py b/Lib/test/test_sundry.py
index fcccdf7..a364194 100644
--- a/Lib/test/test_sundry.py
+++ b/Lib/test/test_sundry.py
@@ -1,19 +1,26 @@
"""Do a minimal test of all the modules that aren't otherwise tested."""
-
-from test import support
+import importlib
import sys
+from test import support
import unittest
class TestUntestedModules(unittest.TestCase):
- def test_at_least_import_untested_modules(self):
+ def test_untested_modules_can_be_imported(self):
+ untested = ('bdb', 'encodings', 'formatter', 'getpass', 'imghdr',
+ 'keyword', 'macurl2path', 'nturl2path', 'tabnanny')
with support.check_warnings(quiet=True):
- import bdb
- import cgitb
+ for name in untested:
+ try:
+ support.import_module('test.test_{}'.format(name))
+ except unittest.SkipTest:
+ importlib.import_module(name)
+ else:
+ self.fail('{} has tests even though test_sundry claims '
+ 'otherwise'.format(name))
import distutils.bcppcompiler
import distutils.ccompiler
import distutils.cygwinccompiler
- import distutils.emxccompiler
import distutils.filelist
if sys.platform.startswith('win'):
import distutils.msvccompiler
@@ -39,22 +46,9 @@ class TestUntestedModules(unittest.TestCase):
import distutils.command.sdist
import distutils.command.upload
- import encodings
- import formatter
- import getpass
import html.entities
- import imghdr
- import keyword
- import macurl2path
- import mailcap
- import nturl2path
- import os2emxpath
- import pstats
- import py_compile
- import sndhdr
- import tabnanny
try:
- import tty # not available on Windows
+ import tty # Not available on Windows
except ImportError:
if support.verbose:
print("skipping tty")
diff --git a/Lib/test/test_syntax.py b/Lib/test/test_syntax.py
index 5926b69..a9d3628 100644
--- a/Lib/test/test_syntax.py
+++ b/Lib/test/test_syntax.py
@@ -33,7 +33,7 @@ SyntaxError: invalid syntax
>>> None = 1
Traceback (most recent call last):
-SyntaxError: assignment to keyword
+SyntaxError: can't assign to keyword
It's a syntax error to assign to the empty tuple. Why isn't it an
error to assign to the empty list? It will always raise some error at
@@ -233,7 +233,7 @@ Traceback (most recent call last):
SyntaxError: can't assign to generator expression
>>> None += 1
Traceback (most recent call last):
-SyntaxError: assignment to keyword
+SyntaxError: can't assign to keyword
>>> f() += 1
Traceback (most recent call last):
SyntaxError: can't assign to function call
diff --git a/Lib/test/test_sys.py b/Lib/test/test_sys.py
index 77d333d..4749f24 100644
--- a/Lib/test/test_sys.py
+++ b/Lib/test/test_sys.py
@@ -6,6 +6,8 @@ import textwrap
import warnings
import operator
import codecs
+import gc
+import sysconfig
# count the number of test runs, used to create unique
# strings to intern in test_intern()
@@ -480,7 +482,7 @@ class SysModuleTest(unittest.TestCase):
def test_thread_info(self):
info = sys.thread_info
self.assertEqual(len(info), 3)
- self.assertIn(info.name, ('nt', 'os2', 'pthread', 'solaris', None))
+ self.assertIn(info.name, ('nt', 'pthread', 'solaris', None))
self.assertIn(info.lock, ('semaphore', 'mutex+cond', None))
def test_43581(self):
@@ -609,6 +611,36 @@ class SysModuleTest(unittest.TestCase):
ret, out, err = assert_python_ok(*args)
self.assertIn(b"free PyDictObjects", err)
+ @unittest.skipUnless(hasattr(sys, "getallocatedblocks"),
+ "sys.getallocatedblocks unavailable on this build")
+ def test_getallocatedblocks(self):
+ # Some sanity checks
+ with_pymalloc = sysconfig.get_config_var('WITH_PYMALLOC')
+ a = sys.getallocatedblocks()
+ self.assertIs(type(a), int)
+ if with_pymalloc:
+ self.assertGreater(a, 0)
+ else:
+ # When WITH_PYMALLOC isn't available, we don't know anything
+ # about the underlying implementation: the function might
+ # return 0 or something greater.
+ self.assertGreaterEqual(a, 0)
+ try:
+ # While one could imagine a Python session where the number of
+ # allocated blocks exceeds the total reference count, it is
+ # unlikely to happen in a normal test run.
+ self.assertLess(a, sys.gettotalrefcount())
+ except AttributeError:
+ # gettotalrefcount() not available
+ pass
+ gc.collect()
+ b = sys.getallocatedblocks()
+ self.assertLessEqual(b, a)
+ gc.collect()
+ c = sys.getallocatedblocks()
+ self.assertIn(c, range(b - 50, b + 50))
+
+
class SizeofTest(unittest.TestCase):
def setUp(self):
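
A quick sketch of the sys.getallocatedblocks() API exercised by the new test; the function only exists on builds that provide it (CPython 3.4+), hence the guard:

import gc
import sys

if hasattr(sys, "getallocatedblocks"):
    before = sys.getallocatedblocks()
    data = [object() for _ in range(1000)]          # allocate some blocks
    print("extra blocks while alive:", sys.getallocatedblocks() - before)
    del data
    gc.collect()
    print("extra blocks after collect:", sys.getallocatedblocks() - before)
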
diff --git a/Lib/test/test_sysconfig.py b/Lib/test/test_sysconfig.py
index 9219360..5293649 100644
--- a/Lib/test/test_sysconfig.py
+++ b/Lib/test/test_sysconfig.py
@@ -234,7 +234,7 @@ class TestSysConfig(unittest.TestCase):
self.assertTrue(os.path.isfile(config_h), config_h)
def test_get_scheme_names(self):
- wanted = ('nt', 'nt_user', 'os2', 'os2_home', 'osx_framework_user',
+ wanted = ('nt', 'nt_user', 'osx_framework_user',
'posix_home', 'posix_prefix', 'posix_user')
self.assertEqual(get_scheme_names(), wanted)
@@ -305,14 +305,13 @@ class TestSysConfig(unittest.TestCase):
if 'MACOSX_DEPLOYMENT_TARGET' in env:
del env['MACOSX_DEPLOYMENT_TARGET']
- with open('/dev/null', 'w') as devnull_fp:
- p = subprocess.Popen([
- sys.executable, '-c',
- 'import sysconfig; print(sysconfig.get_platform())',
- ],
- stdout=subprocess.PIPE,
- stderr=devnull_fp,
- env=env)
+ p = subprocess.Popen([
+ sys.executable, '-c',
+ 'import sysconfig; print(sysconfig.get_platform())',
+ ],
+ stdout=subprocess.PIPE,
+ stderr=subprocess.DEVNULL,
+ env=env)
test_platform = p.communicate()[0].strip()
test_platform = test_platform.decode('utf-8')
status = p.wait()
@@ -325,20 +324,19 @@ class TestSysConfig(unittest.TestCase):
env = os.environ.copy()
env['MACOSX_DEPLOYMENT_TARGET'] = '10.1'
- with open('/dev/null') as dev_null:
- p = subprocess.Popen([
- sys.executable, '-c',
- 'import sysconfig; print(sysconfig.get_platform())',
- ],
- stdout=subprocess.PIPE,
- stderr=dev_null,
- env=env)
- test_platform = p.communicate()[0].strip()
- test_platform = test_platform.decode('utf-8')
- status = p.wait()
-
- self.assertEqual(status, 0)
- self.assertEqual(my_platform, test_platform)
+ p = subprocess.Popen([
+ sys.executable, '-c',
+ 'import sysconfig; print(sysconfig.get_platform())',
+ ],
+ stdout=subprocess.PIPE,
+ stderr=subprocess.DEVNULL,
+ env=env)
+ test_platform = p.communicate()[0].strip()
+ test_platform = test_platform.decode('utf-8')
+ status = p.wait()
+
+ self.assertEqual(status, 0)
+ self.assertEqual(my_platform, test_platform)
def test_srcdir(self):
# See Issues #15322, #15364.
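
The sysconfig test simplification above relies on subprocess.DEVNULL (available since 3.3), which avoids opening os.devnull by hand and works on Windows as well as POSIX. A minimal usage sketch:

import subprocess
import sys

p = subprocess.Popen(
    [sys.executable, "-c",
     "import sysconfig; print(sysconfig.get_platform())"],
    stdout=subprocess.PIPE,
    stderr=subprocess.DEVNULL)          # discard stderr portably
print(p.communicate()[0].decode().strip())
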
diff --git a/Lib/test/test_tarfile.py b/Lib/test/test_tarfile.py
index b224bf0..5bbd48e 100644
--- a/Lib/test/test_tarfile.py
+++ b/Lib/test/test_tarfile.py
@@ -1652,20 +1652,20 @@ class ContextManagerTest(unittest.TestCase):
self.assertTrue(tar.closed, "context manager failed")
def test_closed(self):
- # The __enter__() method is supposed to raise IOError
+ # The __enter__() method is supposed to raise OSError
# if the TarFile object is already closed.
tar = tarfile.open(tarname)
tar.close()
- with self.assertRaises(IOError):
+ with self.assertRaises(OSError):
with tar:
pass
def test_exception(self):
- # Test if the IOError exception is passed through properly.
+ # Test if the OSError exception is passed through properly.
with self.assertRaises(Exception) as exc:
with tarfile.open(tarname) as tar:
- raise IOError
- self.assertIsInstance(exc.exception, IOError,
+ raise OSError
+ self.assertIsInstance(exc.exception, OSError,
"wrong exception raised in context manager")
self.assertTrue(tar.closed, "context manager failed")
@@ -1743,7 +1743,7 @@ class GzipMiscReadTest(MiscReadTest):
def test_non_existent_targz_file(self):
# Test for issue11513: prevent non-existent gzipped tarfiles raising
# multiple exceptions.
- with self.assertRaisesRegex(IOError, "xxx") as ex:
+ with self.assertRaisesRegex(OSError, "xxx") as ex:
tarfile.open("xxx", self.mode)
self.assertEqual(ex.exception.errno, errno.ENOENT)
diff --git a/Lib/test/test_tempfile.py b/Lib/test/test_tempfile.py
index 2962939..437b02b 100644
--- a/Lib/test/test_tempfile.py
+++ b/Lib/test/test_tempfile.py
@@ -151,7 +151,7 @@ class TestRandomNameSequence(BaseTestCase):
# via any bugs above
try:
os.kill(pid, signal.SIGKILL)
- except EnvironmentError:
+ except OSError:
pass
os.close(read_fd)
os.close(write_fd)
@@ -190,7 +190,7 @@ class TestCandidateTempdirList(BaseTestCase):
try:
dirname = os.getcwd()
- except (AttributeError, os.error):
+ except (AttributeError, OSError):
dirname = os.curdir
self.assertIn(dirname, cand)
@@ -315,7 +315,7 @@ class TestMkstempInner(BaseTestCase):
file = self.do_create()
mode = stat.S_IMODE(os.stat(file.name).st_mode)
expected = 0o600
- if sys.platform in ('win32', 'os2emx'):
+ if sys.platform == 'win32':
# There's no distinction among 'user', 'group' and 'world';
# replicate the 'user' bits.
user = expected >> 6
@@ -349,7 +349,7 @@ class TestMkstempInner(BaseTestCase):
# On Windows a spawn* /path/ with embedded spaces shouldn't be quoted,
# but an arg with embedded spaces should be decorated with double
# quotes on each end
- if sys.platform in ('win32',):
+ if sys.platform == 'win32':
decorated = '"%s"' % sys.executable
tester = '"%s"' % tester
else:
@@ -518,7 +518,7 @@ class TestMkdtemp(BaseTestCase):
mode = stat.S_IMODE(os.stat(dir).st_mode)
mode &= 0o777 # Mask off sticky bits inherited from /tmp
expected = 0o700
- if sys.platform in ('win32', 'os2emx'):
+ if sys.platform == 'win32':
# There's no distinction among 'user', 'group' and 'world';
# replicate the 'user' bits.
user = expected >> 6
diff --git a/Lib/test/test_thread.py b/Lib/test/test_thread.py
index a191e15..f9a721b 100644
--- a/Lib/test/test_thread.py
+++ b/Lib/test/test_thread.py
@@ -68,7 +68,7 @@ class ThreadRunningTests(BasicThreadTest):
thread.stack_size(0)
self.assertEqual(thread.stack_size(), 0, "stack_size not reset to default")
- if os.name not in ("nt", "os2", "posix"):
+ if os.name not in ("nt", "posix"):
return
tss_supported = True
diff --git a/Lib/test/test_threading.py b/Lib/test/test_threading.py
index 11e63d3..fff00f4 100644
--- a/Lib/test/test_threading.py
+++ b/Lib/test/test_threading.py
@@ -452,7 +452,7 @@ class ThreadJoinOnShutdown(BaseTestCase):
# problems with some operating systems (issue #3863): skip problematic tests
# on platforms known to behave badly.
platforms_to_skip = ('freebsd4', 'freebsd5', 'freebsd6', 'netbsd5',
- 'os2emx', 'hp-ux11')
+ 'hp-ux11')
def _run_and_join(self, script):
script = """if 1:
@@ -754,7 +754,8 @@ class ThreadingExceptionTests(BaseTestCase):
lock = threading.Lock()
self.assertRaises(RuntimeError, lock.release)
- @unittest.skipUnless(sys.platform == 'darwin', 'test macosx problem')
+ @unittest.skipUnless(sys.platform == 'darwin' and test.support.python_is_optimized(),
+ 'test macosx problem')
def test_recursion_limit(self):
# Issue 9670
# test that excessive recursion within a non-main thread causes
diff --git a/Lib/test/test_threadsignals.py b/Lib/test/test_threadsignals.py
index f975a75..b1004e6 100644
--- a/Lib/test/test_threadsignals.py
+++ b/Lib/test/test_threadsignals.py
@@ -8,7 +8,7 @@ from test.support import run_unittest, import_module
thread = import_module('_thread')
import time
-if sys.platform[:3] in ('win', 'os2') or sys.platform=='riscos':
+if sys.platform[:3] == 'win':
raise unittest.SkipTest("Can't test signal on %s" % sys.platform)
process_pid = os.getpid()
diff --git a/Lib/test/test_timeout.py b/Lib/test/test_timeout.py
index c3c4acf..d28d62b 100644
--- a/Lib/test/test_timeout.py
+++ b/Lib/test/test_timeout.py
@@ -194,7 +194,7 @@ class TCPTimeoutTestCase(TimeoutTestCase):
sock.connect((whitehole))
except socket.timeout:
pass
- except IOError as err:
+ except OSError as err:
if err.errno == errno.ECONNREFUSED:
skip = False
finally:
diff --git a/Lib/test/test_types.py b/Lib/test/test_types.py
index 3ee4c6b..ec10752 100644
--- a/Lib/test/test_types.py
+++ b/Lib/test/test_types.py
@@ -2,6 +2,7 @@
from test.support import run_unittest, run_with_locale
import collections
+import pickle
import locale
import sys
import types
@@ -1077,9 +1078,19 @@ class SimpleNamespaceTests(unittest.TestCase):
ns2 = types.SimpleNamespace()
ns2.x = "spam"
ns2._y = 5
+ name = "namespace"
- self.assertEqual(repr(ns1), "namespace(w=3, x=1, y=2)")
- self.assertEqual(repr(ns2), "namespace(_y=5, x='spam')")
+ self.assertEqual(repr(ns1), "{name}(w=3, x=1, y=2)".format(name=name))
+ self.assertEqual(repr(ns2), "{name}(_y=5, x='spam')".format(name=name))
+
+ def test_equal(self):
+ ns1 = types.SimpleNamespace(x=1)
+ ns2 = types.SimpleNamespace()
+ ns2.x = 1
+
+ self.assertEqual(types.SimpleNamespace(), types.SimpleNamespace())
+ self.assertEqual(ns1, ns2)
+ self.assertNotEqual(ns2, types.SimpleNamespace())
def test_nested(self):
ns1 = types.SimpleNamespace(a=1, b=2)
@@ -1117,11 +1128,12 @@ class SimpleNamespaceTests(unittest.TestCase):
ns1.spam = ns1
ns2.spam = ns3
ns3.spam = ns2
+ name = "namespace"
+ repr1 = "{name}(c='cookie', spam={name}(...))".format(name=name)
+ repr2 = "{name}(spam={name}(spam={name}(...), x=1))".format(name=name)
- self.assertEqual(repr(ns1),
- "namespace(c='cookie', spam=namespace(...))")
- self.assertEqual(repr(ns2),
- "namespace(spam=namespace(spam=namespace(...), x=1))")
+ self.assertEqual(repr(ns1), repr1)
+ self.assertEqual(repr(ns2), repr2)
def test_as_dict(self):
ns = types.SimpleNamespace(spam='spamspamspam')
@@ -1144,6 +1156,19 @@ class SimpleNamespaceTests(unittest.TestCase):
self.assertIs(type(spam), Spam)
self.assertEqual(vars(spam), {'ham': 8, 'eggs': 9})
+ def test_pickle(self):
+ ns = types.SimpleNamespace(breakfast="spam", lunch="spam")
+
+ for protocol in range(pickle.HIGHEST_PROTOCOL + 1):
+ pname = "protocol {}".format(protocol)
+ try:
+ ns_pickled = pickle.dumps(ns, protocol)
+ except TypeError as e:
+ raise TypeError(pname) from e
+ ns_roundtrip = pickle.loads(ns_pickled)
+
+ self.assertEqual(ns, ns_roundtrip, pname)
+
def test_main():
run_unittest(TypesTests, MappingProxyTests, ClassCreationTests,
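
A short sketch of the types.SimpleNamespace behaviour covered by the new tests (attribute-based equality and pickling), assuming a Python that includes these additions (3.4+):

import pickle
import types

ns1 = types.SimpleNamespace(x=1, y="spam")
ns2 = types.SimpleNamespace(y="spam")
ns2.x = 1

assert ns1 == ns2                                  # equality is attribute-based
assert pickle.loads(pickle.dumps(ns1)) == ns1      # round-trips through pickle
print(ns1)                                         # namespace(x=1, y='spam')
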
diff --git a/Lib/test/test_ucn.py b/Lib/test/test_ucn.py
index 2e63745..59bde74 100644
--- a/Lib/test/test_ucn.py
+++ b/Lib/test/test_ucn.py
@@ -173,7 +173,7 @@ class UnicodeNamesTest(unittest.TestCase):
try:
testdata = support.open_urlresource(url, encoding="utf-8",
check=check_version)
- except (IOError, HTTPException):
+ except (OSError, HTTPException):
self.skipTest("Could not retrieve " + url)
self.addCleanup(testdata.close)
for line in testdata:
diff --git a/Lib/test/test_unicode.py b/Lib/test/test_unicode.py
index d4e2222..d1f5142 100644
--- a/Lib/test/test_unicode.py
+++ b/Lib/test/test_unicode.py
@@ -843,11 +843,9 @@ class UnicodeTest(string_tests.CommonTest,
self.assertEqual('{0:d}'.format(G('data')), 'G(data)')
self.assertEqual('{0!s}'.format(G('data')), 'string is data')
- msg = 'object.__format__ with a non-empty format string is deprecated'
- with support.check_warnings((msg, DeprecationWarning)):
- self.assertEqual('{0:^10}'.format(E('data')), ' E(data) ')
- self.assertEqual('{0:^10s}'.format(E('data')), ' E(data) ')
- self.assertEqual('{0:>15s}'.format(G('data')), ' string is data')
+ self.assertRaises(TypeError, '{0:^10}'.format, E('data'))
+ self.assertRaises(TypeError, '{0:^10s}'.format, E('data'))
+ self.assertRaises(TypeError, '{0:>15s}'.format, G('data'))
self.assertEqual("{0:date: %Y-%m-%d}".format(I(year=2007,
month=8,
@@ -1984,9 +1982,10 @@ class UnicodeTest(string_tests.CommonTest,
# Test PyUnicode_FromFormat()
def test_from_format(self):
support.import_module('ctypes')
- from ctypes import (pythonapi, py_object,
+ from ctypes import (
+ pythonapi, py_object, sizeof,
c_int, c_long, c_longlong, c_ssize_t,
- c_uint, c_ulong, c_ulonglong, c_size_t)
+ c_uint, c_ulong, c_ulonglong, c_size_t, c_void_p)
name = "PyUnicode_FromFormat"
_PyUnicode_FromFormat = getattr(pythonapi, name)
_PyUnicode_FromFormat.restype = py_object
@@ -2037,6 +2036,31 @@ class UnicodeTest(string_tests.CommonTest,
self.assertEqual(PyUnicode_FromFormat(b'%llu', c_ulonglong(123)), '123')
self.assertEqual(PyUnicode_FromFormat(b'%zu', c_size_t(123)), '123')
+ # test long output
+ min_longlong = -(2 ** (8 * sizeof(c_longlong) - 1))
+ max_longlong = -min_longlong - 1
+ self.assertEqual(PyUnicode_FromFormat(b'%lld', c_longlong(min_longlong)), str(min_longlong))
+ self.assertEqual(PyUnicode_FromFormat(b'%lld', c_longlong(max_longlong)), str(max_longlong))
+ max_ulonglong = 2 ** (8 * sizeof(c_ulonglong)) - 1
+ self.assertEqual(PyUnicode_FromFormat(b'%llu', c_ulonglong(max_ulonglong)), str(max_ulonglong))
+ PyUnicode_FromFormat(b'%p', c_void_p(-1))
+
+ # test padding (width and/or precision)
+ self.assertEqual(PyUnicode_FromFormat(b'%010i', c_int(123)), '123'.rjust(10, '0'))
+ self.assertEqual(PyUnicode_FromFormat(b'%100i', c_int(123)), '123'.rjust(100))
+ self.assertEqual(PyUnicode_FromFormat(b'%.100i', c_int(123)), '123'.rjust(100, '0'))
+ self.assertEqual(PyUnicode_FromFormat(b'%100.80i', c_int(123)), '123'.rjust(80, '0').rjust(100))
+
+ self.assertEqual(PyUnicode_FromFormat(b'%010u', c_uint(123)), '123'.rjust(10, '0'))
+ self.assertEqual(PyUnicode_FromFormat(b'%100u', c_uint(123)), '123'.rjust(100))
+ self.assertEqual(PyUnicode_FromFormat(b'%.100u', c_uint(123)), '123'.rjust(100, '0'))
+ self.assertEqual(PyUnicode_FromFormat(b'%100.80u', c_uint(123)), '123'.rjust(80, '0').rjust(100))
+
+ self.assertEqual(PyUnicode_FromFormat(b'%010x', c_int(0x123)), '123'.rjust(10, '0'))
+ self.assertEqual(PyUnicode_FromFormat(b'%100x', c_int(0x123)), '123'.rjust(100))
+ self.assertEqual(PyUnicode_FromFormat(b'%.100x', c_int(0x123)), '123'.rjust(100, '0'))
+ self.assertEqual(PyUnicode_FromFormat(b'%100.80x', c_int(0x123)), '123'.rjust(80, '0').rjust(100))
+
# test %A
text = PyUnicode_FromFormat(b'%%A:%A', 'abc\xe9\uabcd\U0010ffff')
self.assertEqual(text, r"%A:'abc\xe9\uabcd\U0010ffff'")
diff --git a/Lib/test/test_unicodedata.py b/Lib/test/test_unicodedata.py
index 99aa003..677508b 100644
--- a/Lib/test/test_unicodedata.py
+++ b/Lib/test/test_unicodedata.py
@@ -80,7 +80,7 @@ class UnicodeDatabaseTest(unittest.TestCase):
class UnicodeFunctionsTest(UnicodeDatabaseTest):
# update this, if the database changes
- expectedchecksum = '17fe2f12b788e4fff5479b469c4404bb6ecf841f'
+ expectedchecksum = 'ebd64e81553c9cb37f424f5616254499fcd8849e'
def test_function_checksum(self):
data = []
h = hashlib.sha1()
diff --git a/Lib/test/test_urllib.py b/Lib/test/test_urllib.py
index 4f71b24..cbe4327 100644
--- a/Lib/test/test_urllib.py
+++ b/Lib/test/test_urllib.py
@@ -235,7 +235,7 @@ class urlopen_HttpTests(unittest.TestCase, FakeHTTPMixin):
self.check_read(b"1.1")
def test_read_bogus(self):
- # urlopen() should raise IOError for many error codes.
+ # urlopen() should raise OSError for many error codes.
self.fakehttp(b'''HTTP/1.1 401 Authentication Required
Date: Wed, 02 Jan 2008 03:03:54 GMT
Server: Apache/1.3.33 (Debian GNU/Linux) mod_ssl/2.8.22 OpenSSL/0.9.7e
@@ -243,12 +243,12 @@ Connection: close
Content-Type: text/html; charset=iso-8859-1
''')
try:
- self.assertRaises(IOError, urlopen, "http://python.org/")
+ self.assertRaises(OSError, urlopen, "http://python.org/")
finally:
self.unfakehttp()
def test_invalid_redirect(self):
- # urlopen() should raise IOError for many error codes.
+ # urlopen() should raise OSError for many error codes.
self.fakehttp(b'''HTTP/1.1 302 Found
Date: Wed, 02 Jan 2008 03:03:54 GMT
Server: Apache/1.3.33 (Debian GNU/Linux) mod_ssl/2.8.22 OpenSSL/0.9.7e
@@ -263,19 +263,20 @@ Content-Type: text/html; charset=iso-8859-1
self.unfakehttp()
def test_empty_socket(self):
- # urlopen() raises IOError if the underlying socket does not send any
+ # urlopen() raises OSError if the underlying socket does not send any
# data. (#1680230)
self.fakehttp(b'')
try:
- self.assertRaises(IOError, urlopen, "http://something")
+ self.assertRaises(OSError, urlopen, "http://something")
finally:
self.unfakehttp()
def test_missing_localfile(self):
# Test for #10836
- # 3.3 - URLError is not captured, explicit IOError is raised.
- with self.assertRaises(IOError):
+ with self.assertRaises(urllib.error.URLError) as e:
urlopen('file://localhost/a/file/which/doesnot/exists.py')
+ self.assertTrue(e.exception.filename)
+ self.assertTrue(e.exception.reason)
def test_file_notexists(self):
fd, tmp_file = tempfile.mkstemp()
@@ -288,20 +289,21 @@ Content-Type: text/html; charset=iso-8859-1
os.close(fd)
os.unlink(tmp_file)
self.assertFalse(os.path.exists(tmp_file))
- # 3.3 - IOError instead of URLError
- with self.assertRaises(IOError):
+ with self.assertRaises(urllib.error.URLError):
urlopen(tmp_fileurl)
def test_ftp_nohost(self):
test_ftp_url = 'ftp:///path'
- # 3.3 - IOError instead of URLError
- with self.assertRaises(IOError):
+ with self.assertRaises(urllib.error.URLError) as e:
urlopen(test_ftp_url)
+ self.assertFalse(e.exception.filename)
+ self.assertTrue(e.exception.reason)
def test_ftp_nonexisting(self):
- # 3.3 - IOError instead of URLError
- with self.assertRaises(IOError):
+ with self.assertRaises(urllib.error.URLError) as e:
urlopen('ftp://localhost/a/file/which/doesnot/exists.py')
+ self.assertFalse(e.exception.filename)
+ self.assertTrue(e.exception.reason)
def test_userpass_inurl(self):
@@ -338,6 +340,79 @@ Content-Type: text/html; charset=iso-8859-1
with support.check_warnings(('',DeprecationWarning)):
urllib.request.URLopener()
+class urlopen_DataTests(unittest.TestCase):
+ """Test urlopen() opening a data URL."""
+
+ def setUp(self):
+ # text containing URL special- and unicode-characters
+ self.text = "test data URLs :;,%=& \u00f6 \u00c4 "
+ # 2x1 pixel RGB PNG image with one black and one white pixel
+ self.image = (
+ b'\x89PNG\r\n\x1a\n\x00\x00\x00\rIHDR\x00\x00\x00\x02\x00\x00\x00'
+ b'\x01\x08\x02\x00\x00\x00{@\xe8\xdd\x00\x00\x00\x01sRGB\x00\xae'
+ b'\xce\x1c\xe9\x00\x00\x00\x0fIDAT\x08\xd7c```\xf8\xff\xff?\x00'
+ b'\x06\x01\x02\xfe\no/\x1e\x00\x00\x00\x00IEND\xaeB`\x82')
+
+ self.text_url = (
+ "data:text/plain;charset=UTF-8,test%20data%20URLs%20%3A%3B%2C%25%3"
+ "D%26%20%C3%B6%20%C3%84%20")
+ self.text_url_base64 = (
+ "data:text/plain;charset=ISO-8859-1;base64,dGVzdCBkYXRhIFVSTHMgOjs"
+ "sJT0mIPYgxCA%3D")
+ # base64 encoded data URL that contains ignorable spaces,
+ # such as "\n", " ", "%0A", and "%20".
+ self.image_url = (
+ "data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAAIAAAABCAIAAAB7\n"
+ "QOjdAAAAAXNSR0IArs4c6QAAAA9JREFUCNdj%0AYGBg%2BP//PwAGAQL%2BCm8 "
+ "vHgAAAABJRU5ErkJggg%3D%3D%0A%20")
+
+ self.text_url_resp = urllib.request.urlopen(self.text_url)
+ self.text_url_base64_resp = urllib.request.urlopen(
+ self.text_url_base64)
+ self.image_url_resp = urllib.request.urlopen(self.image_url)
+
+ def test_interface(self):
+ # Make sure object returned by urlopen() has the specified methods
+ for attr in ("read", "readline", "readlines",
+ "close", "info", "geturl", "getcode", "__iter__"):
+ self.assertTrue(hasattr(self.text_url_resp, attr),
+ "object returned by urlopen() lacks %s attribute" %
+ attr)
+
+ def test_info(self):
+ self.assertIsInstance(self.text_url_resp.info(), email.message.Message)
+ self.assertEqual(self.text_url_base64_resp.info().get_params(),
+ [('text/plain', ''), ('charset', 'ISO-8859-1')])
+ self.assertEqual(self.image_url_resp.info()['content-length'],
+ str(len(self.image)))
+ self.assertEqual(urllib.request.urlopen("data:,").info().get_params(),
+ [('text/plain', ''), ('charset', 'US-ASCII')])
+
+ def test_geturl(self):
+ self.assertEqual(self.text_url_resp.geturl(), self.text_url)
+ self.assertEqual(self.text_url_base64_resp.geturl(),
+ self.text_url_base64)
+ self.assertEqual(self.image_url_resp.geturl(), self.image_url)
+
+ def test_read_text(self):
+ self.assertEqual(self.text_url_resp.read().decode(
+ dict(self.text_url_resp.info().get_params())['charset']), self.text)
+
+ def test_read_text_base64(self):
+ self.assertEqual(self.text_url_base64_resp.read().decode(
+ dict(self.text_url_base64_resp.info().get_params())['charset']),
+ self.text)
+
+ def test_read_image(self):
+ self.assertEqual(self.image_url_resp.read(), self.image)
+
+ def test_missing_comma(self):
+ self.assertRaises(ValueError,urllib.request.urlopen,'data:text/plain')
+
+ def test_invalid_base64_data(self):
+ # missing padding character
+ self.assertRaises(ValueError,urllib.request.urlopen,'data:;base64,Cg=')
+
class urlretrieve_FileTests(unittest.TestCase):
"""Test urllib.urlretrieve() on local files"""
@@ -1316,6 +1391,7 @@ def test_main():
support.run_unittest(
urlopen_FileTests,
urlopen_HttpTests,
+ urlopen_DataTests,
urlretrieve_FileTests,
urlretrieve_HttpTests,
ProxyTests,
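
The new urlopen_DataTests cover the data: URL handler. A minimal sketch of that behaviour, with an illustrative payload:

import urllib.request

# A data: URL carries its payload inline, so no network access is needed.
url = "data:text/plain;charset=UTF-8,Hello%2C%20world%21"
resp = urllib.request.urlopen(url)
charset = dict(resp.info().get_params())["charset"]
print(resp.read().decode(charset))                 # Hello, world!
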
diff --git a/Lib/test/test_urllib2.py b/Lib/test/test_urllib2.py
index 01f4dc4..0e05723 100644
--- a/Lib/test/test_urllib2.py
+++ b/Lib/test/test_urllib2.py
@@ -127,6 +127,19 @@ def test_request_headers_methods():
>>> r.get_header("Not-there", "default")
'default'
+ Method r.remove_header should remove items from both the r.headers and
+ r.unredirected_hdrs dictionaries.
+
+ >>> r.remove_header("Spam-eggs")
+ >>> r.has_header("Spam-eggs")
+ False
+ >>> r.add_unredirected_header("Unredirected-spam", "Eggs")
+ >>> r.has_header("Unredirected-spam")
+ True
+ >>> r.remove_header("Unredirected-spam")
+ >>> r.has_header("Unredirected-spam")
+ False
+
"""
@@ -305,6 +318,7 @@ class MockHTTPClass:
self.req_headers = []
self.data = None
self.raise_on_endheaders = False
+ self.sock = None
self._tunnel_headers = {}
def __call__(self, host, timeout=socket._GLOBAL_DEFAULT_TIMEOUT):
@@ -333,7 +347,7 @@ class MockHTTPClass:
self.data = body
if self.raise_on_endheaders:
import socket
- raise socket.error()
+ raise OSError()
def getresponse(self):
return MockHTTPResponse(MockFile(), {}, 200, "OK")
@@ -619,27 +633,6 @@ class OpenerDirectorTests(unittest.TestCase):
self.assertTrue(args[1] is None or
isinstance(args[1], MockResponse))
- def test_method_deprecations(self):
- req = Request("http://www.example.com")
-
- with self.assertWarns(DeprecationWarning):
- req.add_data("data")
- with self.assertWarns(DeprecationWarning):
- req.get_data()
- with self.assertWarns(DeprecationWarning):
- req.has_data()
- with self.assertWarns(DeprecationWarning):
- req.get_host()
- with self.assertWarns(DeprecationWarning):
- req.get_selector()
- with self.assertWarns(DeprecationWarning):
- req.is_unverifiable()
- with self.assertWarns(DeprecationWarning):
- req.get_origin_req_host()
- with self.assertWarns(DeprecationWarning):
- req.get_type()
-
-
def sanepathname2url(path):
try:
path.encode("utf-8")
@@ -834,7 +827,7 @@ class HandlerTests(unittest.TestCase):
("Foo", "bar"), ("Spam", "eggs")])
self.assertEqual(http.data, data)
- # check socket.error converted to URLError
+ # check OSError converted to URLError
http.raise_on_endheaders = True
self.assertRaises(urllib.error.URLError, h.do_open, http, req)
@@ -1397,6 +1390,10 @@ class HandlerTests(unittest.TestCase):
class MiscTests(unittest.TestCase):
+ def opener_has_handler(self, opener, handler_class):
+ self.assertTrue(any(h.__class__ == handler_class
+ for h in opener.handlers))
+
def test_build_opener(self):
class MyHTTPHandler(urllib.request.HTTPHandler): pass
class FooHandler(urllib.request.BaseHandler):
@@ -1434,9 +1431,39 @@ class MiscTests(unittest.TestCase):
self.opener_has_handler(o, MyHTTPHandler)
self.opener_has_handler(o, MyOtherHTTPHandler)
- def opener_has_handler(self, opener, handler_class):
- self.assertTrue(any(h.__class__ == handler_class
- for h in opener.handlers))
+ @unittest.skipUnless(support.is_resource_enabled('network'),
+ 'test requires network access')
+ def test_issue16464(self):
+ opener = urllib.request.build_opener()
+ request = urllib.request.Request("http://www.python.org/~jeremy/")
+ self.assertEqual(None, request.data)
+
+ opener.open(request, "1".encode("us-ascii"))
+ self.assertEqual(b"1", request.data)
+ self.assertEqual("1", request.get_header("Content-length"))
+
+ opener.open(request, "1234567890".encode("us-ascii"))
+ self.assertEqual(b"1234567890", request.data)
+ self.assertEqual("10", request.get_header("Content-length"))
+
+ def test_HTTPError_interface(self):
+ """
+ Issue 13211 reveals that HTTPError didn't implement the URLError
+ interface even though HTTPError is a subclass of URLError.
+
+ >>> msg = 'something bad happened'
+ >>> url = code = fp = None
+ >>> hdrs = 'Content-Length: 42'
+ >>> err = urllib.error.HTTPError(url, code, msg, hdrs, fp)
+ >>> assert hasattr(err, 'reason')
+ >>> err.reason
+ 'something bad happened'
+ >>> assert hasattr(err, 'headers')
+ >>> err.headers
+ 'Content-Length: 42'
+ >>> expected_errmsg = 'HTTP Error %s: %s' % (err.code, err.msg)
+ >>> assert str(err) == expected_errmsg
+ """
class RequestTests(unittest.TestCase):
@@ -1457,6 +1484,25 @@ class RequestTests(unittest.TestCase):
self.assertTrue(self.get.data)
self.assertEqual("POST", self.get.get_method())
+ # issue 16464
+ # if we change data we need to remove the Content-Length header
+ # (because it was most likely calculated for the previous value)
+ def test_setting_data_should_remove_content_length(self):
+ self.assertNotIn("Content-length", self.get.unredirected_hdrs)
+ self.get.add_unredirected_header("Content-length", 42)
+ self.assertEqual(42, self.get.unredirected_hdrs["Content-length"])
+ self.get.data = "spam"
+ self.assertNotIn("Content-length", self.get.unredirected_hdrs)
+
+ # issue 17485: the same applies when deleting data.
+ def test_deleting_data_should_remove_content_length(self):
+ self.assertNotIn("Content-length", self.get.unredirected_hdrs)
+ self.get.data = 'foo'
+ self.get.add_unredirected_header("Content-length", 3)
+ self.assertEqual(3, self.get.unredirected_hdrs["Content-length"])
+ del self.get.data
+ self.assertNotIn("Content-length", self.get.unredirected_hdrs)
+
def test_get_full_url(self):
self.assertEqual("http://www.python.org/~jeremy/",
self.get.get_full_url())
@@ -1498,41 +1544,6 @@ class RequestTests(unittest.TestCase):
req = Request(url)
self.assertEqual(req.get_full_url(), url)
- def test_HTTPError_interface(self):
- """
- Issue 13211 reveals that HTTPError didn't implement the URLError
- interface even though HTTPError is a subclass of URLError.
-
- >>> msg = 'something bad happened'
- >>> url = code = fp = None
- >>> hdrs = 'Content-Length: 42'
- >>> err = urllib.error.HTTPError(url, code, msg, hdrs, fp)
- >>> assert hasattr(err, 'reason')
- >>> err.reason
- 'something bad happened'
- >>> assert hasattr(err, 'hdrs')
- >>> err.hdrs
- 'Content-Length: 42'
- >>> expected_errmsg = 'HTTP Error %s: %s' % (err.code, err.msg)
- >>> assert str(err) == expected_errmsg
- """
-
- def test_HTTPError_interface_call(self):
- """
- Issue 15701 - HTTPError interface has info method available from URLError
- """
- err = urllib.request.HTTPError(msg="something bad happened", url=None,
- code=None, hdrs='Content-Length:42', fp=None)
- self.assertTrue(hasattr(err, 'reason'))
- assert hasattr(err, 'reason')
- assert hasattr(err, 'info')
- assert callable(err.info)
- try:
- err.info()
- except AttributeError:
- self.fail('err.info call failed.')
- self.assertEqual(err.info(), "Content-Length:42")
-
def test_main(verbose=None):
from test import test_urllib2
support.run_doctest(test_urllib2, verbose)
diff --git a/Lib/test/test_urllib2_localnet.py b/Lib/test/test_urllib2_localnet.py
index 1eba14b..5880b3a 100644
--- a/Lib/test/test_urllib2_localnet.py
+++ b/Lib/test/test_urllib2_localnet.py
@@ -9,7 +9,10 @@ import unittest
import hashlib
from test import support
threading = support.import_module('threading')
-
+try:
+ import ssl
+except ImportError:
+ ssl = None
here = os.path.dirname(__file__)
# Self-signed cert file for 'localhost'
@@ -17,6 +20,7 @@ CERT_localhost = os.path.join(here, 'keycert.pem')
# Self-signed cert file for 'fakehostname'
CERT_fakehostname = os.path.join(here, 'keycert2.pem')
+
# Loopback http server infrastructure
class LoopbackHttpServer(http.server.HTTPServer):
@@ -353,12 +357,15 @@ class TestUrlopen(unittest.TestCase):
def setUp(self):
super(TestUrlopen, self).setUp()
# Ignore proxies for localhost tests.
+ self.old_environ = os.environ.copy()
os.environ['NO_PROXY'] = '*'
self.server = None
def tearDown(self):
if self.server is not None:
self.server.stop()
+ os.environ.clear()
+ os.environ.update(self.old_environ)
super(TestUrlopen, self).tearDown()
def urlopen(self, url, data=None, **kwargs):
@@ -386,14 +393,14 @@ class TestUrlopen(unittest.TestCase):
handler.port = port
return handler
- def start_https_server(self, responses=None, certfile=CERT_localhost):
+ def start_https_server(self, responses=None, **kwargs):
if not hasattr(urllib.request, 'HTTPSHandler'):
self.skipTest('ssl support required')
from test.ssl_servers import make_https_server
if responses is None:
responses = [(200, [], b"we care a bit")]
handler = GetRequestHandler(responses)
- server = make_https_server(self, certfile=certfile, handler_class=handler)
+ server = make_https_server(self, handler_class=handler, **kwargs)
handler.port = server.port
return handler
@@ -483,6 +490,21 @@ class TestUrlopen(unittest.TestCase):
self.urlopen("https://localhost:%s/bizarre" % handler.port,
cadefault=True)
+ def test_https_sni(self):
+ if ssl is None:
+ self.skipTest("ssl module required")
+ if not ssl.HAS_SNI:
+ self.skipTest("SNI support required in OpenSSL")
+ sni_name = None
+ def cb_sni(ssl_sock, server_name, initial_context):
+ nonlocal sni_name
+ sni_name = server_name
+ context = ssl.SSLContext(ssl.PROTOCOL_TLSv1)
+ context.set_servername_callback(cb_sni)
+ handler = self.start_https_server(context=context, certfile=CERT_localhost)
+ self.urlopen("https://localhost:%s" % handler.port)
+ self.assertEqual(sni_name, "localhost")
+
def test_sending_headers(self):
handler = self.start_server()
req = urllib.request.Request("http://localhost:%s/" % handler.port,
@@ -524,7 +546,7 @@ class TestUrlopen(unittest.TestCase):
def test_bad_address(self):
# Make sure proper exception is raised when connecting to a bogus
# address.
- self.assertRaises(IOError,
+ self.assertRaises(OSError,
# Given that both VeriSign and various ISPs have in
# the past or are presently hijacking various invalid
# domain name requests in an attempt to boost traffic
diff --git a/Lib/test/test_urllib2net.py b/Lib/test/test_urllib2net.py
index 7f3c93a..e276d2e 100644
--- a/Lib/test/test_urllib2net.py
+++ b/Lib/test/test_urllib2net.py
@@ -216,7 +216,7 @@ class OtherNetworkTests(unittest.TestCase):
debug(url)
try:
f = urlopen(url, req, TIMEOUT)
- except EnvironmentError as err:
+ except OSError as err:
debug(err)
if expected_err:
msg = ("Didn't get expected error(s) %s for %s %s, got %s: %s" %
@@ -330,31 +330,9 @@ class TimeoutTest(unittest.TestCase):
self.assertEqual(u.fp.fp.raw._sock.gettimeout(), 60)
-@unittest.skipUnless(ssl, "requires SSL support")
-class HTTPSTests(unittest.TestCase):
-
- def test_sni(self):
- self.skipTest("test disabled - test server needed")
- # Checks that Server Name Indication works, if supported by the
- # OpenSSL linked to.
- # The ssl module itself doesn't have server-side support for SNI,
- # so we rely on a third-party test site.
- expect_sni = ssl.HAS_SNI
- with support.transient_internet("XXX"):
- u = urllib.request.urlopen("XXX")
- contents = u.readall()
- if expect_sni:
- self.assertIn(b"Great", contents)
- self.assertNotIn(b"Unfortunately", contents)
- else:
- self.assertNotIn(b"Great", contents)
- self.assertIn(b"Unfortunately", contents)
-
-
def test_main():
support.requires("network")
support.run_unittest(AuthTests,
- HTTPSTests,
OtherNetworkTests,
CloseSocketTest,
TimeoutTest,
diff --git a/Lib/test/test_urllibnet.py b/Lib/test/test_urllibnet.py
index d3fe69d..896d7c9 100644
--- a/Lib/test/test_urllibnet.py
+++ b/Lib/test/test_urllibnet.py
@@ -121,16 +121,15 @@ class urlopenNetworkTests(unittest.TestCase):
else:
# This happens with some overzealous DNS providers such as OpenDNS
self.skipTest("%r should not resolve for test to work" % bogus_domain)
- self.assertRaises(IOError,
- # SF patch 809915: In Sep 2003, VeriSign started
- # highjacking invalid .com and .net addresses to
- # boost traffic to their own site. This test
- # started failing then. One hopes the .invalid
- # domain will be spared to serve its defined
- # purpose.
- # urllib.urlopen, "http://www.sadflkjsasadf.com/")
- urllib.request.urlopen,
- "http://sadflkjsasf.i.nvali.d/")
+ failure_explanation = ('opening an invalid URL did not raise OSError; '
+ 'can be caused by a broken DNS server '
+ '(e.g. returns 404 or hijacks page)')
+ with self.assertRaises(OSError, msg=failure_explanation):
+ # SF patch 809915: In Sep 2003, VeriSign started highjacking
+ # invalid .com and .net addresses to boost traffic to their own
+ # site. This test started failing then. One hopes the .invalid
+ # domain will be spared to serve its defined purpose.
+ urllib.request.urlopen("http://sadflkjsasf.i.nvali.d/")
class urlretrieveNetworkTests(unittest.TestCase):
diff --git a/Lib/test/test_urlparse.py b/Lib/test/test_urlparse.py
index 378a427..c938f09 100755
--- a/Lib/test/test_urlparse.py
+++ b/Lib/test/test_urlparse.py
@@ -847,6 +847,14 @@ class UrlParseTestCase(unittest.TestCase):
self.assertEqual(p1.path, '863-1234')
self.assertEqual(p1.params, 'phone-context=+1-914-555')
+ def test_unwrap(self):
+ url = urllib.parse.unwrap('<URL:type://host/path>')
+ self.assertEqual(url, 'type://host/path')
+
+ def test_Quoter_repr(self):
+ quoter = urllib.parse.Quoter(urllib.parse._ALWAYS_SAFE)
+ self.assertIn('Quoter', repr(quoter))
+
def test_main():
support.run_unittest(UrlParseTestCase)
diff --git a/Lib/test/test_venv.py b/Lib/test/test_venv.py
index 9696844..2a528c9 100644
--- a/Lib/test/test_venv.py
+++ b/Lib/test/test_venv.py
@@ -113,13 +113,68 @@ class BasicTest(BaseTest):
out, err = p.communicate()
self.assertEqual(out.strip(), expected.encode())
+ if sys.platform == 'win32':
+ ENV_SUBDIRS = (
+ ('Scripts',),
+ ('Include',),
+ ('Lib',),
+ ('Lib', 'site-packages'),
+ )
+ else:
+ ENV_SUBDIRS = (
+ ('bin',),
+ ('include',),
+ ('lib',),
+ ('lib', 'python%d.%d' % sys.version_info[:2]),
+ ('lib', 'python%d.%d' % sys.version_info[:2], 'site-packages'),
+ )
+
+ def create_contents(self, paths, filename):
+ """
+ Create some files in the environment which are unrelated
+ to the virtual environment.
+ """
+ for subdirs in paths:
+ d = os.path.join(self.env_dir, *subdirs)
+ os.mkdir(d)
+ fn = os.path.join(d, filename)
+ with open(fn, 'wb') as f:
+ f.write(b'Still here?')
+
def test_overwrite_existing(self):
"""
- Test control of overwriting an existing environment directory.
+ Test creating environment in an existing directory.
"""
- self.assertRaises(ValueError, venv.create, self.env_dir)
+ self.create_contents(self.ENV_SUBDIRS, 'foo')
+ venv.create(self.env_dir)
+ for subdirs in self.ENV_SUBDIRS:
+ fn = os.path.join(self.env_dir, *(subdirs + ('foo',)))
+ self.assertTrue(os.path.exists(fn))
+ with open(fn, 'rb') as f:
+ self.assertEqual(f.read(), b'Still here?')
+
builder = venv.EnvBuilder(clear=True)
builder.create(self.env_dir)
+ for subdirs in self.ENV_SUBDIRS:
+ fn = os.path.join(self.env_dir, *(subdirs + ('foo',)))
+ self.assertFalse(os.path.exists(fn))
+
+ def clear_directory(self, path):
+ for fn in os.listdir(path):
+ fn = os.path.join(path, fn)
+ if os.path.islink(fn) or os.path.isfile(fn):
+ os.remove(fn)
+ elif os.path.isdir(fn):
+ shutil.rmtree(fn)
+
+ def test_unoverwritable_fails(self):
+ # Create a file clashing with directories in the env dir
+ for paths in self.ENV_SUBDIRS[:3]:
+ fn = os.path.join(self.env_dir, *paths)
+ with open(fn, 'wb') as f:
+ f.write(b'')
+ self.assertRaises((ValueError, OSError), venv.create, self.env_dir)
+ self.clear_directory(self.env_dir)
def test_upgrade(self):
"""
diff --git a/Lib/test/test_wait3.py b/Lib/test/test_wait3.py
index bd06c8d..f6a065d 100644
--- a/Lib/test/test_wait3.py
+++ b/Lib/test/test_wait3.py
@@ -7,15 +7,11 @@ import unittest
from test.fork_wait import ForkWait
from test.support import run_unittest, reap_children
-try:
- os.fork
-except AttributeError:
- raise unittest.SkipTest("os.fork not defined -- skipping test_wait3")
+if not hasattr(os, 'fork'):
+ raise unittest.SkipTest("os.fork not defined")
-try:
- os.wait3
-except AttributeError:
- raise unittest.SkipTest("os.wait3 not defined -- skipping test_wait3")
+if not hasattr(os, 'wait3'):
+ raise unittest.SkipTest("os.wait3 not defined")
class Wait3Test(ForkWait):
def wait_impl(self, cpid):
diff --git a/Lib/test/test_weakref.py b/Lib/test/test_weakref.py
index 571e33f..cdd26c7 100644
--- a/Lib/test/test_weakref.py
+++ b/Lib/test/test_weakref.py
@@ -47,6 +47,11 @@ class Object:
return NotImplemented
def __hash__(self):
return hash(self.arg)
+ def some_method(self):
+ return 4
+ def other_method(self):
+ return 5
+
class RefCycle:
def __init__(self):
@@ -901,6 +906,140 @@ class SubclassableWeakrefTestCase(TestBase):
self.assertEqual(self.cbcalled, 0)
+class WeakMethodTestCase(unittest.TestCase):
+
+ def _subclass(self):
+ """Return a Object subclass overriding `some_method`."""
+ class C(Object):
+ def some_method(self):
+ return 6
+ return C
+
+ def test_alive(self):
+ o = Object(1)
+ r = weakref.WeakMethod(o.some_method)
+ self.assertIsInstance(r, weakref.ReferenceType)
+ self.assertIsInstance(r(), type(o.some_method))
+ self.assertIs(r().__self__, o)
+ self.assertIs(r().__func__, o.some_method.__func__)
+ self.assertEqual(r()(), 4)
+
+ def test_object_dead(self):
+ o = Object(1)
+ r = weakref.WeakMethod(o.some_method)
+ del o
+ gc.collect()
+ self.assertIs(r(), None)
+
+ def test_method_dead(self):
+ C = self._subclass()
+ o = C(1)
+ r = weakref.WeakMethod(o.some_method)
+ del C.some_method
+ gc.collect()
+ self.assertIs(r(), None)
+
+ def test_callback_when_object_dead(self):
+ # Test callback behaviour when object dies first.
+ C = self._subclass()
+ calls = []
+ def cb(arg):
+ calls.append(arg)
+ o = C(1)
+ r = weakref.WeakMethod(o.some_method, cb)
+ del o
+ gc.collect()
+ self.assertEqual(calls, [r])
+ # Callback is only called once.
+ C.some_method = Object.some_method
+ gc.collect()
+ self.assertEqual(calls, [r])
+
+ def test_callback_when_method_dead(self):
+ # Test callback behaviour when method dies first.
+ C = self._subclass()
+ calls = []
+ def cb(arg):
+ calls.append(arg)
+ o = C(1)
+ r = weakref.WeakMethod(o.some_method, cb)
+ del C.some_method
+ gc.collect()
+ self.assertEqual(calls, [r])
+ # Callback is only called once.
+ del o
+ gc.collect()
+ self.assertEqual(calls, [r])
+
+ @support.cpython_only
+ def test_no_cycles(self):
+ # A WeakMethod doesn't create any reference cycle to itself.
+ o = Object(1)
+ def cb(_):
+ pass
+ r = weakref.WeakMethod(o.some_method, cb)
+ wr = weakref.ref(r)
+ del r
+ self.assertIs(wr(), None)
+
+ def test_equality(self):
+ def _eq(a, b):
+ self.assertTrue(a == b)
+ self.assertFalse(a != b)
+ def _ne(a, b):
+ self.assertTrue(a != b)
+ self.assertFalse(a == b)
+ x = Object(1)
+ y = Object(1)
+ a = weakref.WeakMethod(x.some_method)
+ b = weakref.WeakMethod(y.some_method)
+ c = weakref.WeakMethod(x.other_method)
+ d = weakref.WeakMethod(y.other_method)
+ # Objects equal, same method
+ _eq(a, b)
+ _eq(c, d)
+ # Objects equal, different method
+ _ne(a, c)
+ _ne(a, d)
+ _ne(b, c)
+ _ne(b, d)
+ # Objects unequal, same or different method
+ z = Object(2)
+ e = weakref.WeakMethod(z.some_method)
+ f = weakref.WeakMethod(z.other_method)
+ _ne(a, e)
+ _ne(a, f)
+ _ne(b, e)
+ _ne(b, f)
+ del x, y, z
+ gc.collect()
+ # Dead WeakMethods compare by identity
+ refs = a, b, c, d, e, f
+ for q in refs:
+ for r in refs:
+ self.assertEqual(q == r, q is r)
+ self.assertEqual(q != r, q is not r)
+
+ def test_hashing(self):
+ # Alive WeakMethods are hashable if the underlying object is
+ # hashable.
+ x = Object(1)
+ y = Object(1)
+ a = weakref.WeakMethod(x.some_method)
+ b = weakref.WeakMethod(y.some_method)
+ c = weakref.WeakMethod(y.other_method)
+ # Since WeakMethod objects are equal, the hashes should be equal.
+ self.assertEqual(hash(a), hash(b))
+ ha = hash(a)
+ # Dead WeakMethods retain their old hash value
+ del x, y
+ gc.collect()
+ self.assertEqual(hash(a), ha)
+ self.assertEqual(hash(b), ha)
+ # If it wasn't hashed when alive, a dead WeakMethod cannot be hashed.
+ self.assertRaises(TypeError, hash, c)
+
+
class MappingTestCase(TestBase):
COUNT = 10
@@ -1476,6 +1615,7 @@ __test__ = {'libreftest' : libreftest}
def test_main():
support.run_unittest(
ReferencesTestCase,
+ WeakMethodTestCase,
MappingTestCase,
WeakValueDictionaryTestCase,
WeakKeyDictionaryTestCase,
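
A minimal sketch of the weakref.WeakMethod type covered by the new WeakMethodTestCase (CPython behaviour shown; the class here is illustrative):

import gc
import weakref

class Greeter:
    def hello(self):
        return "hello"

g = Greeter()
r = weakref.WeakMethod(g.hello)
print(r()())        # 'hello': r() recreates a bound method while g is alive
del g
gc.collect()
print(r())          # None: the referent has been collected
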
diff --git a/Lib/test/test_winreg.py b/Lib/test/test_winreg.py
index a164d2f..a8454a0 100644
--- a/Lib/test/test_winreg.py
+++ b/Lib/test/test_winreg.py
@@ -8,7 +8,7 @@ threading = support.import_module("threading")
from platform import machine
# Do this first so test will be skipped if module doesn't exist
-support.import_module('winreg')
+support.import_module('winreg', required_on=['win'])
# Now import everything
from winreg import *
@@ -54,13 +54,13 @@ class BaseWinregTests(unittest.TestCase):
def delete_tree(self, root, subkey):
try:
hkey = OpenKey(root, subkey, KEY_ALL_ACCESS)
- except WindowsError:
+ except OSError:
# subkey does not exist
return
while True:
try:
subsubkey = EnumKey(hkey, 0)
- except WindowsError:
+ except OSError:
# no more subkeys
break
self.delete_tree(hkey, subsubkey)
@@ -97,7 +97,7 @@ class BaseWinregTests(unittest.TestCase):
QueryInfoKey(int_sub_key)
self.fail("It appears the CloseKey() function does "
"not close the actual key!")
- except EnvironmentError:
+ except OSError:
pass
# ... and close that key that way :-)
int_key = int(key)
@@ -106,7 +106,7 @@ class BaseWinregTests(unittest.TestCase):
QueryInfoKey(int_key)
self.fail("It appears the key.Close() function "
"does not close the actual key!")
- except EnvironmentError:
+ except OSError:
pass
def _read_test_data(self, root_key, subkeystr="sub_key", OpenKey=OpenKey):
@@ -123,7 +123,7 @@ class BaseWinregTests(unittest.TestCase):
while 1:
try:
data = EnumValue(sub_key, index)
- except EnvironmentError:
+ except OSError:
break
self.assertEqual(data in test_data, True,
"Didn't read back the correct test data")
@@ -144,7 +144,7 @@ class BaseWinregTests(unittest.TestCase):
try:
EnumKey(key, 1)
self.fail("Was able to get a second key when I only have one!")
- except EnvironmentError:
+ except OSError:
pass
key.Close()
@@ -168,7 +168,7 @@ class BaseWinregTests(unittest.TestCase):
# Shouldnt be able to delete it twice!
DeleteKey(key, subkeystr)
self.fail("Deleting the key twice succeeded")
- except EnvironmentError:
+ except OSError:
pass
key.Close()
DeleteKey(root_key, test_key_name)
@@ -176,7 +176,7 @@ class BaseWinregTests(unittest.TestCase):
try:
key = OpenKey(root_key, test_key_name)
self.fail("Could open the non-existent key")
- except WindowsError: # Use this error name this time
+ except OSError: # Use this error name this time
pass
def _test_all(self, root_key, subkeystr="sub_key"):
@@ -227,7 +227,7 @@ class LocalWinregTests(BaseWinregTests):
def test_inexistant_remote_registry(self):
connect = lambda: ConnectRegistry("abcdefghijkl", HKEY_CURRENT_USER)
- self.assertRaises(WindowsError, connect)
+ self.assertRaises(OSError, connect)
def testExpandEnvironmentStrings(self):
r = ExpandEnvironmentStrings("%windir%\\test")
@@ -239,8 +239,8 @@ class LocalWinregTests(BaseWinregTests):
try:
with ConnectRegistry(None, HKEY_LOCAL_MACHINE) as h:
self.assertNotEqual(h.handle, 0)
- raise WindowsError
- except WindowsError:
+ raise OSError
+ except OSError:
self.assertEqual(h.handle, 0)
def test_changing_value(self):
@@ -404,7 +404,7 @@ class Win64WinregTests(BaseWinregTests):
open_fail = lambda: OpenKey(HKEY_CURRENT_USER,
test_reflect_key_name, 0,
KEY_READ | KEY_WOW64_64KEY)
- self.assertRaises(WindowsError, open_fail)
+ self.assertRaises(OSError, open_fail)
# Now explicitly open the 64-bit version of the key
with OpenKey(HKEY_CURRENT_USER, test_reflect_key_name, 0,
@@ -444,7 +444,7 @@ class Win64WinregTests(BaseWinregTests):
open_fail = lambda: OpenKeyEx(HKEY_CURRENT_USER,
test_reflect_key_name, 0,
KEY_READ | KEY_WOW64_64KEY)
- self.assertRaises(WindowsError, open_fail)
+ self.assertRaises(OSError, open_fail)
# Make sure the 32-bit key is actually there
with OpenKeyEx(HKEY_CURRENT_USER, test_reflect_key_name, 0,
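
The winreg changes above lean on WindowsError being an alias of OSError since Python 3.3, so the portable spelling works everywhere the module exists. A small sketch (the key name is made up):

try:
    import winreg
    winreg.OpenKey(winreg.HKEY_CURRENT_USER, r"Software\NoSuchKeyHopefully")
except ImportError:
    pass                        # not running on Windows
except OSError:
    print("key not found")
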
diff --git a/Lib/test/test_winsound.py b/Lib/test/test_winsound.py
index eb7f75f..61d864a 100644
--- a/Lib/test/test_winsound.py
+++ b/Lib/test/test_winsound.py
@@ -22,7 +22,7 @@ def has_sound(sound):
key = winreg.OpenKeyEx(winreg.HKEY_CURRENT_USER,
"AppEvents\Schemes\Apps\.Default\{0}\.Default".format(sound))
return winreg.EnumValue(key, 0)[1] != ""
- except WindowsError:
+ except OSError:
return False
class BeepTest(unittest.TestCase):
diff --git a/Lib/test/test_xml_etree.py b/Lib/test/test_xml_etree.py
index f3683f2..001c13d 100644
--- a/Lib/test/test_xml_etree.py
+++ b/Lib/test/test_xml_etree.py
@@ -1569,6 +1569,11 @@ class ElementFindTest(unittest.TestCase):
self.assertEqual(e.find('./tag[last()-1]').attrib['class'], 'c')
self.assertEqual(e.find('./tag[last()-2]').attrib['class'], 'b')
+ self.assertRaisesRegex(SyntaxError, 'XPath', e.find, './tag[0]')
+ self.assertRaisesRegex(SyntaxError, 'XPath', e.find, './tag[-1]')
+ self.assertRaisesRegex(SyntaxError, 'XPath', e.find, './tag[last()-0]')
+ self.assertRaisesRegex(SyntaxError, 'XPath', e.find, './tag[last()+1]')
+
def test_findall(self):
e = ET.XML(SAMPLE_XML)
e[2] = ET.XML(SAMPLE_SECTION)
@@ -2156,6 +2161,18 @@ class IOTest(unittest.TestCase):
ET.tostring(root, 'utf-16'),
b''.join(ET.tostringlist(root, 'utf-16')))
+ def test_short_empty_elements(self):
+ root = ET.fromstring('<tag>a<x />b<y></y>c</tag>')
+ self.assertEqual(
+ ET.tostring(root, 'unicode'),
+ '<tag>a<x />b<y />c</tag>')
+ self.assertEqual(
+ ET.tostring(root, 'unicode', short_empty_elements=True),
+ '<tag>a<x />b<y />c</tag>')
+ self.assertEqual(
+ ET.tostring(root, 'unicode', short_empty_elements=False),
+ '<tag>a<x></x>b<y></y>c</tag>')
+
class ParseErrorTest(unittest.TestCase):
def test_subclass(self):
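
A quick sketch of the short_empty_elements flag exercised by the new IOTest case; the expected output strings follow the assertions in the test:

import xml.etree.ElementTree as ET

root = ET.fromstring('<tag>a<x />b<y></y>c</tag>')
print(ET.tostring(root, 'unicode'))
# <tag>a<x />b<y />c</tag>
print(ET.tostring(root, 'unicode', short_empty_elements=False))
# <tag>a<x></x>b<y></y>c</tag>
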
diff --git a/Lib/test/test_xmlrpc.py b/Lib/test/test_xmlrpc.py
index 16f85c5..817cbd8 100644
--- a/Lib/test/test_xmlrpc.py
+++ b/Lib/test/test_xmlrpc.py
@@ -215,7 +215,7 @@ class XMLRPCTestCase(unittest.TestCase):
xmlrpc.client.ServerProxy('https://localhost:9999').bad_function()
except NotImplementedError:
self.assertFalse(has_ssl, "xmlrpc client's error with SSL support")
- except socket.error:
+ except OSError:
self.assertTrue(has_ssl)
class HelperTestCase(unittest.TestCase):
@@ -492,7 +492,7 @@ def is_unavailable_exception(e):
return True
exc_mess = e.headers.get('X-exception')
except AttributeError:
- # Ignore socket.errors here.
+ # Ignore OSErrors here.
exc_mess = str(e)
if exc_mess and 'temporarily unavailable' in exc_mess.lower():
@@ -507,7 +507,7 @@ def make_request_and_skipIf(condition, reason):
def make_request_and_skip(self):
try:
xmlrpclib.ServerProxy(URL).my_function()
- except (xmlrpclib.ProtocolError, socket.error) as e:
+ except (xmlrpclib.ProtocolError, OSError) as e:
if not is_unavailable_exception(e):
raise
raise unittest.SkipTest(reason)
@@ -545,7 +545,7 @@ class SimpleServerTestCase(BaseServerTestCase):
try:
p = xmlrpclib.ServerProxy(URL)
self.assertEqual(p.pow(6,8), 6**8)
- except (xmlrpclib.ProtocolError, socket.error) as e:
+ except (xmlrpclib.ProtocolError, OSError) as e:
# ignore failures due to non-blocking socket 'unavailable' errors
if not is_unavailable_exception(e):
# protocol error; provide additional information in test output
@@ -558,7 +558,7 @@ class SimpleServerTestCase(BaseServerTestCase):
p = xmlrpclib.ServerProxy(URL)
self.assertEqual(p.add(start_string, end_string),
start_string + end_string)
- except (xmlrpclib.ProtocolError, socket.error) as e:
+ except (xmlrpclib.ProtocolError, OSError) as e:
# ignore failures due to non-blocking socket 'unavailable' errors
if not is_unavailable_exception(e):
# protocol error; provide additional information in test output
@@ -584,7 +584,7 @@ class SimpleServerTestCase(BaseServerTestCase):
p = xmlrpclib.ServerProxy(URL)
meth = p.system.listMethods()
self.assertEqual(set(meth), expected_methods)
- except (xmlrpclib.ProtocolError, socket.error) as e:
+ except (xmlrpclib.ProtocolError, OSError) as e:
# ignore failures due to non-blocking socket 'unavailable' errors
if not is_unavailable_exception(e):
# protocol error; provide additional information in test output
@@ -597,7 +597,7 @@ class SimpleServerTestCase(BaseServerTestCase):
p = xmlrpclib.ServerProxy(URL)
divhelp = p.system.methodHelp('div')
self.assertEqual(divhelp, 'This is the div function')
- except (xmlrpclib.ProtocolError, socket.error) as e:
+ except (xmlrpclib.ProtocolError, OSError) as e:
# ignore failures due to non-blocking socket 'unavailable' errors
if not is_unavailable_exception(e):
# protocol error; provide additional information in test output
@@ -611,7 +611,7 @@ class SimpleServerTestCase(BaseServerTestCase):
p = xmlrpclib.ServerProxy(URL)
myfunction = p.system.methodHelp('my_function')
self.assertEqual(myfunction, 'This is my function')
- except (xmlrpclib.ProtocolError, socket.error) as e:
+ except (xmlrpclib.ProtocolError, OSError) as e:
# ignore failures due to non-blocking socket 'unavailable' errors
if not is_unavailable_exception(e):
# protocol error; provide additional information in test output
@@ -624,7 +624,7 @@ class SimpleServerTestCase(BaseServerTestCase):
p = xmlrpclib.ServerProxy(URL)
divsig = p.system.methodSignature('div')
self.assertEqual(divsig, 'signatures not supported')
- except (xmlrpclib.ProtocolError, socket.error) as e:
+ except (xmlrpclib.ProtocolError, OSError) as e:
# ignore failures due to non-blocking socket 'unavailable' errors
if not is_unavailable_exception(e):
# protocol error; provide additional information in test output
@@ -641,7 +641,7 @@ class SimpleServerTestCase(BaseServerTestCase):
self.assertEqual(add_result, 2+3)
self.assertEqual(pow_result, 6**8)
self.assertEqual(div_result, 127//42)
- except (xmlrpclib.ProtocolError, socket.error) as e:
+ except (xmlrpclib.ProtocolError, OSError) as e:
# ignore failures due to non-blocking socket 'unavailable' errors
if not is_unavailable_exception(e):
# protocol error; provide additional information in test output
@@ -662,7 +662,7 @@ class SimpleServerTestCase(BaseServerTestCase):
self.assertEqual(result.results[0]['faultString'],
'<class \'Exception\'>:method "this_is_not_exists" '
'is not supported')
- except (xmlrpclib.ProtocolError, socket.error) as e:
+ except (xmlrpclib.ProtocolError, OSError) as e:
# ignore failures due to non-blocking socket 'unavailable' errors
if not is_unavailable_exception(e):
# protocol error; provide additional information in test output
@@ -915,7 +915,7 @@ class FailingServerTestCase(unittest.TestCase):
try:
p = xmlrpclib.ServerProxy(URL)
self.assertEqual(p.pow(6,8), 6**8)
- except (xmlrpclib.ProtocolError, socket.error) as e:
+ except (xmlrpclib.ProtocolError, OSError) as e:
# ignore failures due to non-blocking socket 'unavailable' errors
if not is_unavailable_exception(e):
# protocol error; provide additional information in test output
@@ -928,7 +928,7 @@ class FailingServerTestCase(unittest.TestCase):
try:
p = xmlrpclib.ServerProxy(URL)
p.pow(6,8)
- except (xmlrpclib.ProtocolError, socket.error) as e:
+ except (xmlrpclib.ProtocolError, OSError) as e:
# ignore failures due to non-blocking socket 'unavailable' errors
if not is_unavailable_exception(e) and hasattr(e, "headers"):
# The two server-side error headers shouldn't be sent back in this case
@@ -948,7 +948,7 @@ class FailingServerTestCase(unittest.TestCase):
try:
p = xmlrpclib.ServerProxy(URL)
p.pow(6,8)
- except (xmlrpclib.ProtocolError, socket.error) as e:
+ except (xmlrpclib.ProtocolError, OSError) as e:
# ignore failures due to non-blocking socket 'unavailable' errors
if not is_unavailable_exception(e) and hasattr(e, "headers"):
# We should get error info in the response
diff --git a/Lib/test/test_xmlrpc_net.py b/Lib/test/test_xmlrpc_net.py
index dfb5f9a..457e3fb 100644
--- a/Lib/test/test_xmlrpc_net.py
+++ b/Lib/test/test_xmlrpc_net.py
@@ -18,7 +18,7 @@ class CurrentTimeTest(unittest.TestCase):
server = xmlrpclib.ServerProxy("http://time.xmlrpc.com/RPC2")
try:
t0 = server.currentTime.getCurrentTime()
- except socket.error as e:
+ except OSError as e:
self.skipTest("network error: %s" % e)
return
@@ -42,7 +42,7 @@ class CurrentTimeTest(unittest.TestCase):
server = xmlrpclib.ServerProxy("http://buildbot.python.org/all/xmlrpc/")
try:
builders = server.getAllBuilders()
- except socket.error as e:
+ except OSError as e:
self.skipTest("network error: %s" % e)
return
self.addCleanup(lambda: server('close')())
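For context (not part of the patch): these except-clause changes rely on the PEP 3151 exception hierarchy, in which socket.error, IOError and os.error are plain aliases of OSError from Python 3.3 on, so catching OSError subsumes the old names. A small sketch; the host name is a deliberately unresolvable placeholder:

    import socket

    assert socket.error is OSError       # legacy names are now mere aliases
    assert IOError is OSError

    try:
        socket.create_connection(('nonexistent.invalid', 80), timeout=1)
    except OSError as exc:               # previously caught as socket.error
        print('network error:', exc)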
diff --git a/Lib/test/test_zipfile.py b/Lib/test/test_zipfile.py
index 5e837cd..529f7b8 100644
--- a/Lib/test/test_zipfile.py
+++ b/Lib/test/test_zipfile.py
@@ -1050,7 +1050,7 @@ class OtherTests(unittest.TestCase):
try:
with zipfile.ZipFile(TESTFN, 'a') as zf:
zf.writestr(filename, content)
- except IOError:
+ except OSError:
self.fail('Could not append data to a non-existent zip file.')
self.assertTrue(os.path.exists(TESTFN))
@@ -1130,7 +1130,7 @@ class OtherTests(unittest.TestCase):
chk = zipfile.is_zipfile(fp)
self.assertTrue(chk)
- def test_non_existent_file_raises_IOError(self):
+ def test_non_existent_file_raises_OSError(self):
# make sure we don't raise an AttributeError when a partially-constructed
# ZipFile instance is finalized; this tests for regression on SF tracker
# bug #403871.
@@ -1142,7 +1142,7 @@ class OtherTests(unittest.TestCase):
# it is ignored, but the user should be sufficiently annoyed by
# the message on the output that regression will be noticed
# quickly.
- self.assertRaises(IOError, zipfile.ZipFile, TESTFN)
+ self.assertRaises(OSError, zipfile.ZipFile, TESTFN)
def test_empty_file_raises_BadZipFile(self):
f = open(TESTFN, 'w')
@@ -1416,7 +1416,7 @@ class OtherTests(unittest.TestCase):
def test_open_empty_file(self):
# Issue 1710703: Check that opening a file with less than 22 bytes
# raises a BadZipFile exception (rather than the previously unhelpful
- # IOError)
+ # OSError)
f = open(TESTFN, 'w')
f.close()
self.assertRaises(zipfile.BadZipFile, zipfile.ZipFile, TESTFN, 'r')
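For context (not part of the patch): the zipfile tests above pin down the split between OS-level failures and format errors. A minimal sketch, assuming Python 3.4's zipfile; the file names are scratch placeholders:

    import zipfile

    try:
        zipfile.ZipFile('does-not-exist.zip')
    except OSError as exc:               # formerly caught as IOError
        print('cannot open archive:', exc)

    # A file shorter than the 22-byte end-of-central-directory record is
    # rejected as BadZipFile rather than with a bare OSError.
    with open('empty.zip', 'wb'):
        pass
    try:
        zipfile.ZipFile('empty.zip')
    except zipfile.BadZipFile as exc:
        print('not a zip archive:', exc)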
diff --git a/Lib/test/test_zipimport.py b/Lib/test/test_zipimport.py
index f7cb8b9..a60d3e0 100644
--- a/Lib/test/test_zipimport.py
+++ b/Lib/test/test_zipimport.py
@@ -196,6 +196,7 @@ class UncompressedZipImportTestCase(ImportHooksBaseTestCase):
for name, (mtime, data) in files.items():
zinfo = ZipInfo(name, time.localtime(mtime))
zinfo.compress_type = self.compression
+ zinfo.comment = b"spam"
z.writestr(zinfo, data)
z.close()
@@ -245,6 +246,7 @@ class UncompressedZipImportTestCase(ImportHooksBaseTestCase):
for name, (mtime, data) in files.items():
zinfo = ZipInfo(name, time.localtime(mtime))
zinfo.compress_type = self.compression
+ zinfo.comment = b"eggs"
z.writestr(zinfo, data)
z.close()
@@ -459,7 +461,7 @@ class BadFileZipImportTestCase(unittest.TestCase):
self.assertRaises(error, z.load_module, 'abc')
self.assertRaises(error, z.get_code, 'abc')
- self.assertRaises(IOError, z.get_data, 'abc')
+ self.assertRaises(OSError, z.get_data, 'abc')
self.assertRaises(error, z.get_source, 'abc')
self.assertRaises(error, z.is_package, 'abc')
finally:
diff --git a/Lib/test/tf_inherit_check.py b/Lib/test/tf_inherit_check.py
index 92ebd95..afe50d2 100644
--- a/Lib/test/tf_inherit_check.py
+++ b/Lib/test/tf_inherit_check.py
@@ -11,7 +11,7 @@ try:
try:
os.write(fd, b"blat")
- except os.error:
+ except OSError:
# Success -- could not write to fd.
sys.exit(0)
else:
diff --git a/Lib/threading.py b/Lib/threading.py
index 6c34d49..80dd51f 100644
--- a/Lib/threading.py
+++ b/Lib/threading.py
@@ -10,6 +10,11 @@ except ImportError:
from time import time as _time
from traceback import format_exc as _format_exc
from _weakrefset import WeakSet
+from itertools import islice as _islice
+try:
+ from _collections import deque as _deque
+except ImportError:
+ from collections import deque as _deque
# Note regarding PEP 8 compliant names
# This threading model was originally inspired by Java, and inherited
@@ -79,7 +84,7 @@ class _RLock:
def acquire(self, blocking=True, timeout=-1):
me = get_ident()
if self._owner == me:
- self._count = self._count + 1
+ self._count += 1
return 1
rc = self._block.acquire(blocking, timeout)
if rc:
@@ -146,7 +151,7 @@ class Condition:
self._is_owned = lock._is_owned
except AttributeError:
pass
- self._waiters = []
+ self._waiters = _deque()
def __enter__(self):
return self._lock.__enter__()
@@ -216,14 +221,14 @@ class Condition:
def notify(self, n=1):
if not self._is_owned():
raise RuntimeError("cannot notify on un-acquired lock")
- __waiters = self._waiters
- waiters = __waiters[:n]
- if not waiters:
+ all_waiters = self._waiters
+ waiters_to_notify = _deque(_islice(all_waiters, n))
+ if not waiters_to_notify:
return
- for waiter in waiters:
+ for waiter in waiters_to_notify:
waiter.release()
try:
- __waiters.remove(waiter)
+ all_waiters.remove(waiter)
except ValueError:
pass
@@ -261,7 +266,7 @@ class Semaphore:
break
self._cond.wait(timeout)
else:
- self._value = self._value - 1
+ self._value -= 1
rc = True
self._cond.release()
return rc
@@ -270,7 +275,7 @@ class Semaphore:
def release(self):
self._cond.acquire()
- self._value = self._value + 1
+ self._value += 1
self._cond.notify()
self._cond.release()
@@ -506,7 +511,7 @@ class BrokenBarrierError(RuntimeError): pass
_counter = 0
def _newname(template="Thread-%d"):
global _counter
- _counter = _counter + 1
+ _counter += 1
return template % _counter
# Active thread administration
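For context (not part of the patch): Condition._waiters becomes a deque and notify() now takes the first n waiters with itertools.islice instead of copying a list slice. A rough sketch of that pattern, with placeholder strings standing in for the per-waiter locks:

    from collections import deque
    from itertools import islice

    all_waiters = deque(['w1', 'w2', 'w3', 'w4'])
    waiters_to_notify = deque(islice(all_waiters, 2))   # first n, no full copy
    for waiter in waiters_to_notify:
        print('releasing', waiter)                      # real code calls waiter.release()
        try:
            all_waiters.remove(waiter)
        except ValueError:                              # already removed by wait()
            pass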
diff --git a/Lib/timeit.py b/Lib/timeit.py
index 4f7d28f..ead2030 100644
--- a/Lib/timeit.py
+++ b/Lib/timeit.py
@@ -31,38 +31,29 @@ treated similarly.
If -n is not given, a suitable number of loops is calculated by trying
successive powers of 10 until the total time is at least 0.2 seconds.
-The difference in default timer function is because on Windows,
-clock() has microsecond granularity but time()'s granularity is 1/60th
-of a second; on Unix, clock() has 1/100th of a second granularity and
-time() is much more precise. On either platform, the default timer
-functions measure wall clock time, not the CPU time. This means that
-other processes running on the same computer may interfere with the
-timing. The best thing to do when accurate timing is necessary is to
-repeat the timing a few times and use the best time. The -r option is
-good for this; the default of 3 repetitions is probably enough in most
-cases. On Unix, you can use clock() to measure CPU time.
-
Note: there is a certain baseline overhead associated with executing a
-pass statement. The code here doesn't try to hide it, but you should
-be aware of it. The baseline overhead can be measured by invoking the
-program without arguments.
-
-The baseline overhead differs between Python versions! Also, to
-fairly compare older Python versions to Python 2.3, you may want to
-use python -O for the older versions to avoid timing SET_LINENO
-instructions.
+pass statement. It differs between versions. The code here doesn't try
+to hide it, but you should be aware of it. The baseline overhead can be
+measured by invoking the program without arguments.
+
+Classes:
+
+ Timer
+
+Functions:
+
+ timeit(string, string) -> float
+ repeat(string, string) -> list
+ default_timer() -> float
+
"""
import gc
import sys
import time
-try:
- import itertools
-except ImportError:
- # Must be an older Python version (see timeit() below)
- itertools = None
+import itertools
-__all__ = ["Timer"]
+__all__ = ["Timer", "timeit", "repeat", "default_timer"]
dummy_src_name = "<timeit-src>"
default_number = 1000000
@@ -180,10 +171,7 @@ class Timer:
to one million. The main statement, the setup statement and
the timer function to be used are passed to the constructor.
"""
- if itertools:
- it = itertools.repeat(None, number)
- else:
- it = [None] * number
+ it = itertools.repeat(None, number)
gcold = gc.isenabled()
gc.disable()
try:
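For context (not part of the patch): with timeit(), repeat() and default_timer() now listed in __all__, the module-level helpers are first-class entry points alongside Timer. A minimal usage sketch:

    import timeit

    best = min(timeit.repeat('sorted(data)',
                             setup='data = list(range(1000))',
                             repeat=3, number=1000))
    print('best of 3 runs:', best)

    # Equivalent Timer-based form.
    t = timeit.Timer('sorted(data)', setup='data = list(range(1000))')
    print(t.timeit(number=1000))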
diff --git a/Lib/tkinter/__init__.py b/Lib/tkinter/__init__.py
index f619786..f78d24d 100644
--- a/Lib/tkinter/__init__.py
+++ b/Lib/tkinter/__init__.py
@@ -2181,45 +2181,6 @@ class Button(Widget):
"""
return self.tk.call(self._w, 'invoke')
-
-# Indices:
-# XXX I don't like these -- take them away
-def AtEnd():
- warnings.warn("tkinter.AtEnd will be removed in 3.4",
- DeprecationWarning, stacklevel=2)
- return 'end'
-
-
-def AtInsert(*args):
- warnings.warn("tkinter.AtInsert will be removed in 3.4",
- DeprecationWarning, stacklevel=2)
- s = 'insert'
- for a in args:
- if a: s = s + (' ' + a)
- return s
-
-
-def AtSelFirst():
- warnings.warn("tkinter.AtSelFirst will be removed in 3.4",
- DeprecationWarning, stacklevel=2)
- return 'sel.first'
-
-
-def AtSelLast():
- warnings.warn("tkinter.AtSelLast will be removed in 3.4",
- DeprecationWarning, stacklevel=2)
- return 'sel.last'
-
-
-def At(x, y=None):
- warnings.warn("tkinter.At will be removed in 3.4",
- DeprecationWarning, stacklevel=2)
- if y is None:
- return '@%r' % (x,)
- else:
- return '@%r,%r' % (x, y)
-
-
class Canvas(Widget, XView, YView):
"""Canvas widget to display graphical elements like lines or text."""
def __init__(self, master=None, cnf={}, **kw):
diff --git a/Lib/tkinter/filedialog.py b/Lib/tkinter/filedialog.py
index 3ffb252..a71afb2 100644
--- a/Lib/tkinter/filedialog.py
+++ b/Lib/tkinter/filedialog.py
@@ -166,7 +166,7 @@ class FileDialog:
dir, pat = self.get_filter()
try:
names = os.listdir(dir)
- except os.error:
+ except OSError:
self.master.bell()
return
self.directory = dir
@@ -209,7 +209,7 @@ class FileDialog:
if not os.path.isabs(dir):
try:
pwd = os.getcwd()
- except os.error:
+ except OSError:
pwd = None
if pwd:
dir = os.path.join(pwd, dir)
diff --git a/Lib/token.py b/Lib/token.py
index 31fae0a..7470c8c 100755
--- a/Lib/token.py
+++ b/Lib/token.py
@@ -93,7 +93,7 @@ def _main():
outFileName = args[1]
try:
fp = open(inFileName)
- except IOError as err:
+ except OSError as err:
sys.stdout.write("I/O error: %s\n" % str(err))
sys.exit(1)
lines = fp.read().split("\n")
@@ -112,7 +112,7 @@ def _main():
# load the output skeleton from the target:
try:
fp = open(outFileName)
- except IOError as err:
+ except OSError as err:
sys.stderr.write("I/O error: %s\n" % str(err))
sys.exit(2)
format = fp.read().split("\n")
@@ -129,7 +129,7 @@ def _main():
format[start:end] = lines
try:
fp = open(outFileName, 'w')
- except IOError as err:
+ except OSError as err:
sys.stderr.write("I/O error: %s\n" % str(err))
sys.exit(4)
fp.write("\n".join(format))
diff --git a/Lib/tokenize.py b/Lib/tokenize.py
index cbf91ef..2fbde0f 100644
--- a/Lib/tokenize.py
+++ b/Lib/tokenize.py
@@ -670,7 +670,7 @@ def main():
error(err.args[0], filename, (line, column))
except SyntaxError as err:
error(err, filename)
- except IOError as err:
+ except OSError as err:
error(err)
except KeyboardInterrupt:
print("interrupted\n")
diff --git a/Lib/trace.py b/Lib/trace.py
index c09b365..09fe9ee 100644
--- a/Lib/trace.py
+++ b/Lib/trace.py
@@ -238,7 +238,7 @@ class CoverageResults:
counts, calledfuncs, callers = \
pickle.load(open(self.infile, 'rb'))
self.update(self.__class__(counts, calledfuncs, callers))
- except (IOError, EOFError, ValueError) as err:
+ except (OSError, EOFError, ValueError) as err:
print(("Skipping counts file %r: %s"
% (self.infile, err)), file=sys.stderr)
@@ -348,7 +348,7 @@ class CoverageResults:
try:
pickle.dump((self.counts, self.calledfuncs, self.callers),
open(self.outfile, 'wb'), 1)
- except IOError as err:
+ except OSError as err:
print("Can't save counts files because %s" % err, file=sys.stderr)
def write_results_file(self, path, lines, lnotab, lines_hit, encoding=None):
@@ -356,7 +356,7 @@ class CoverageResults:
try:
outfile = open(path, "w", encoding=encoding)
- except IOError as err:
+ except OSError as err:
print(("trace: Could not open %r for writing: %s"
"- skipping" % (path, err)), file=sys.stderr)
return 0, 0
@@ -437,7 +437,7 @@ def _find_executable_linenos(filename):
with tokenize.open(filename) as f:
prog = f.read()
encoding = f.encoding
- except IOError as err:
+ except OSError as err:
print(("Not printing coverage data for %r: %s"
% (filename, err)), file=sys.stderr)
return {}
@@ -802,7 +802,7 @@ def main(argv=None):
'__cached__': None,
}
t.runctx(code, globs, globs)
- except IOError as err:
+ except OSError as err:
_err_exit("Cannot run file %r because: %s" % (sys.argv[0], err))
except SystemExit:
pass
diff --git a/Lib/traceback.py b/Lib/traceback.py
index eeb9e73..33b86c7 100644
--- a/Lib/traceback.py
+++ b/Lib/traceback.py
@@ -132,8 +132,7 @@ def _iter_chain(exc, custom_tb=None, seen=None):
its.append([(exc, custom_tb or exc.__traceback__)])
# itertools.chain is in an extension module and may be unavailable
for it in its:
- for x in it:
- yield x
+ yield from it
def print_exception(etype, value, tb, limit=None, file=None, chain=True):
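For context (not part of the patch): this hunk, like the one in unittest/loader.py further down, replaces the inner "for x in it: yield x" loop with PEP 380's yield from. A trivial sketch of the equivalence:

    def chain(iterables):
        for it in iterables:
            yield from it        # yields the same elements as "for x in it: yield x"

    print(list(chain([[1, 2], (3,), 'ab'])))   # [1, 2, 3, 'a', 'b']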
diff --git a/Lib/turtle.py b/Lib/turtle.py
index 49cd86d..7edbb05 100644
--- a/Lib/turtle.py
+++ b/Lib/turtle.py
@@ -3843,18 +3843,18 @@ def write_docstringdict(filename="turtle_docstringdict"):
key = "Turtle."+methodname
docsdict[key] = eval(key).__doc__
- f = open("%s.py" % filename,"w")
- keys = sorted([x for x in docsdict.keys()
- if x.split('.')[1] not in _alias_list])
- f.write('docsdict = {\n\n')
- for key in keys[:-1]:
+ with open("%s.py" % filename,"w") as f:
+ keys = sorted([x for x in docsdict.keys()
+ if x.split('.')[1] not in _alias_list])
+ f.write('docsdict = {\n\n')
+ for key in keys[:-1]:
+ f.write('%s :\n' % repr(key))
+ f.write(' """%s\n""",\n\n' % docsdict[key])
+ key = keys[-1]
f.write('%s :\n' % repr(key))
- f.write(' """%s\n""",\n\n' % docsdict[key])
- key = keys[-1]
- f.write('%s :\n' % repr(key))
- f.write(' """%s\n"""\n\n' % docsdict[key])
- f.write("}\n")
- f.close()
+ f.write(' """%s\n"""\n\n' % docsdict[key])
+ f.write("}\n")
+ f.close()
def read_docstrings(lang):
"""Read in docstrings from lang-specific docstring dictionary.
diff --git a/Lib/unittest/case.py b/Lib/unittest/case.py
index ad1fa84..cf7301b 100644
--- a/Lib/unittest/case.py
+++ b/Lib/unittest/case.py
@@ -7,6 +7,7 @@ import pprint
import re
import warnings
import collections
+import contextlib
from . import result
from .util import (strclass, safe_repr, _count_diff_all_purpose,
@@ -26,17 +27,11 @@ class SkipTest(Exception):
instead of raising this directly.
"""
-class _ExpectedFailure(Exception):
+class _ShouldStop(Exception):
"""
- Raise this when a test is expected to fail.
-
- This is an implementation detail.
+ The test should stop.
"""
- def __init__(self, exc_info):
- super(_ExpectedFailure, self).__init__()
- self.exc_info = exc_info
-
class _UnexpectedSuccess(Exception):
"""
The test was supposed to fail, but it didn't!
@@ -44,13 +39,40 @@ class _UnexpectedSuccess(Exception):
class _Outcome(object):
- def __init__(self):
+ def __init__(self, result=None):
+ self.expecting_failure = False
+ self.result = result
+ self.result_supports_subtests = hasattr(result, "addSubTest")
self.success = True
- self.skipped = None
- self.unexpectedSuccess = None
+ self.skipped = []
self.expectedFailure = None
self.errors = []
- self.failures = []
+
+ @contextlib.contextmanager
+ def testPartExecutor(self, test_case, isTest=False):
+ old_success = self.success
+ self.success = True
+ try:
+ yield
+ except KeyboardInterrupt:
+ raise
+ except SkipTest as e:
+ self.success = False
+ self.skipped.append((test_case, str(e)))
+ except _ShouldStop:
+ pass
+ except:
+ exc_info = sys.exc_info()
+ if self.expecting_failure:
+ self.expectedFailure = exc_info
+ else:
+ self.success = False
+ self.errors.append((test_case, exc_info))
+ else:
+ if self.result_supports_subtests and self.success:
+ self.errors.append((test_case, None))
+ finally:
+ self.success = self.success and old_success
def _id(obj):
@@ -88,16 +110,9 @@ def skipUnless(condition, reason):
return skip(reason)
return _id
-
-def expectedFailure(func):
- @functools.wraps(func)
- def wrapper(*args, **kwargs):
- try:
- func(*args, **kwargs)
- except Exception:
- raise _ExpectedFailure(sys.exc_info())
- raise _UnexpectedSuccess
- return wrapper
+def expectedFailure(test_item):
+ test_item.__unittest_expecting_failure__ = True
+ return test_item
class _AssertRaisesBaseContext(object):
@@ -271,7 +286,7 @@ class TestCase(object):
not have a method with the specified name.
"""
self._testMethodName = methodName
- self._outcomeForDoCleanups = None
+ self._outcome = None
self._testMethodDoc = 'No test'
try:
testMethod = getattr(self, methodName)
@@ -284,6 +299,7 @@ class TestCase(object):
else:
self._testMethodDoc = testMethod.__doc__
self._cleanups = []
+ self._subtest = None
# Map types to custom assertEqual functions that will compare
# instances of said type in more detail to generate a more useful
@@ -371,44 +387,80 @@ class TestCase(object):
return "<%s testMethod=%s>" % \
(strclass(self.__class__), self._testMethodName)
- def _addSkip(self, result, reason):
+ def _addSkip(self, result, test_case, reason):
addSkip = getattr(result, 'addSkip', None)
if addSkip is not None:
- addSkip(self, reason)
+ addSkip(test_case, reason)
else:
warnings.warn("TestResult has no addSkip method, skips not reported",
RuntimeWarning, 2)
+ result.addSuccess(test_case)
+
+ @contextlib.contextmanager
+ def subTest(self, msg=None, **params):
+ """Return a context manager that will return the enclosed block
+ of code in a subtest identified by the optional message and
+ keyword parameters. A failure in the subtest marks the test
+ case as failed but resumes execution at the end of the enclosed
+ block, allowing further test code to be executed.
+ """
+ if not self._outcome.result_supports_subtests:
+ yield
+ return
+ parent = self._subtest
+ if parent is None:
+ params_map = collections.ChainMap(params)
+ else:
+ params_map = parent.params.new_child(params)
+ self._subtest = _SubTest(self, msg, params_map)
+ try:
+ with self._outcome.testPartExecutor(self._subtest, isTest=True):
+ yield
+ if not self._outcome.success:
+ result = self._outcome.result
+ if result is not None and result.failfast:
+ raise _ShouldStop
+ elif self._outcome.expectedFailure:
+ # If the test is expecting a failure, we really want to
+ # stop now and register the expected failure.
+ raise _ShouldStop
+ finally:
+ self._subtest = parent
+
+ def _feedErrorsToResult(self, result, errors):
+ for test, exc_info in errors:
+ if isinstance(test, _SubTest):
+ result.addSubTest(test.test_case, test, exc_info)
+ elif exc_info is not None:
+ if issubclass(exc_info[0], self.failureException):
+ result.addFailure(test, exc_info)
+ else:
+ result.addError(test, exc_info)
+
+ def _addExpectedFailure(self, result, exc_info):
+ try:
+ addExpectedFailure = result.addExpectedFailure
+ except AttributeError:
+ warnings.warn("TestResult has no addExpectedFailure method, reporting as passes",
+ RuntimeWarning)
result.addSuccess(self)
+ else:
+ addExpectedFailure(self, exc_info)
- def _executeTestPart(self, function, outcome, isTest=False):
+ def _addUnexpectedSuccess(self, result):
try:
- function()
- except KeyboardInterrupt:
- raise
- except SkipTest as e:
- outcome.success = False
- outcome.skipped = str(e)
- except _UnexpectedSuccess:
- exc_info = sys.exc_info()
- outcome.success = False
- if isTest:
- outcome.unexpectedSuccess = exc_info
- else:
- outcome.errors.append(exc_info)
- except _ExpectedFailure:
- outcome.success = False
- exc_info = sys.exc_info()
- if isTest:
- outcome.expectedFailure = exc_info
- else:
- outcome.errors.append(exc_info)
- except self.failureException:
- outcome.success = False
- outcome.failures.append(sys.exc_info())
- exc_info = sys.exc_info()
- except:
- outcome.success = False
- outcome.errors.append(sys.exc_info())
+ addUnexpectedSuccess = result.addUnexpectedSuccess
+ except AttributeError:
+ warnings.warn("TestResult has no addUnexpectedSuccess method, reporting as failure",
+ RuntimeWarning)
+ # We need to pass an actual exception and traceback to addFailure,
+ # otherwise the legacy result can choke.
+ try:
+ raise _UnexpectedSuccess from None
+ except _UnexpectedSuccess:
+ result.addFailure(self, sys.exc_info())
+ else:
+ addUnexpectedSuccess(self)
def run(self, result=None):
orig_result = result
@@ -427,46 +479,38 @@ class TestCase(object):
try:
skip_why = (getattr(self.__class__, '__unittest_skip_why__', '')
or getattr(testMethod, '__unittest_skip_why__', ''))
- self._addSkip(result, skip_why)
+ self._addSkip(result, self, skip_why)
finally:
result.stopTest(self)
return
+ expecting_failure = getattr(testMethod,
+ "__unittest_expecting_failure__", False)
try:
- outcome = _Outcome()
- self._outcomeForDoCleanups = outcome
+ outcome = _Outcome(result)
+ self._outcome = outcome
- self._executeTestPart(self.setUp, outcome)
+ with outcome.testPartExecutor(self):
+ self.setUp()
if outcome.success:
- self._executeTestPart(testMethod, outcome, isTest=True)
- self._executeTestPart(self.tearDown, outcome)
+ outcome.expecting_failure = expecting_failure
+ with outcome.testPartExecutor(self, isTest=True):
+ testMethod()
+ outcome.expecting_failure = False
+ with outcome.testPartExecutor(self):
+ self.tearDown()
self.doCleanups()
+ for test, reason in outcome.skipped:
+ self._addSkip(result, test, reason)
+ self._feedErrorsToResult(result, outcome.errors)
if outcome.success:
- result.addSuccess(self)
- else:
- if outcome.skipped is not None:
- self._addSkip(result, outcome.skipped)
- for exc_info in outcome.errors:
- result.addError(self, exc_info)
- for exc_info in outcome.failures:
- result.addFailure(self, exc_info)
- if outcome.unexpectedSuccess is not None:
- addUnexpectedSuccess = getattr(result, 'addUnexpectedSuccess', None)
- if addUnexpectedSuccess is not None:
- addUnexpectedSuccess(self)
+ if expecting_failure:
+ if outcome.expectedFailure:
+ self._addExpectedFailure(result, outcome.expectedFailure)
else:
- warnings.warn("TestResult has no addUnexpectedSuccess method, reporting as failures",
- RuntimeWarning)
- result.addFailure(self, outcome.unexpectedSuccess)
-
- if outcome.expectedFailure is not None:
- addExpectedFailure = getattr(result, 'addExpectedFailure', None)
- if addExpectedFailure is not None:
- addExpectedFailure(self, outcome.expectedFailure)
- else:
- warnings.warn("TestResult has no addExpectedFailure method, reporting as passes",
- RuntimeWarning)
- result.addSuccess(self)
+ self._addUnexpectedSuccess(result)
+ else:
+ result.addSuccess(self)
return result
finally:
result.stopTest(self)
@@ -478,11 +522,11 @@ class TestCase(object):
def doCleanups(self):
"""Execute all cleanup functions. Normally called for you after
tearDown."""
- outcome = self._outcomeForDoCleanups or _Outcome()
+ outcome = self._outcome or _Outcome()
while self._cleanups:
function, args, kwargs = self._cleanups.pop()
- part = lambda: function(*args, **kwargs)
- self._executeTestPart(part, outcome)
+ with outcome.testPartExecutor(self):
+ function(*args, **kwargs)
# return this for backwards compatibility
# even though we no longer use it internally
@@ -1213,3 +1257,39 @@ class FunctionTestCase(TestCase):
return self._description
doc = self._testFunc.__doc__
return doc and doc.split("\n")[0].strip() or None
+
+
+class _SubTest(TestCase):
+
+ def __init__(self, test_case, message, params):
+ super().__init__()
+ self._message = message
+ self.test_case = test_case
+ self.params = params
+ self.failureException = test_case.failureException
+
+ def runTest(self):
+ raise NotImplementedError("subtests cannot be run directly")
+
+ def _subDescription(self):
+ parts = []
+ if self._message:
+ parts.append("[{}]".format(self._message))
+ if self.params:
+ params_desc = ', '.join(
+ "{}={!r}".format(k, v)
+ for (k, v) in sorted(self.params.items()))
+ parts.append("({})".format(params_desc))
+ return " ".join(parts) or '(<subtest>)'
+
+ def id(self):
+ return "{} {}".format(self.test_case.id(), self._subDescription())
+
+ def shortDescription(self):
+ """Returns a one-line description of the subtest, or None if no
+ description has been provided.
+ """
+ return self.test_case.shortDescription()
+
+ def __str__(self):
+ return "{} {}".format(self.test_case, self._subDescription())
diff --git a/Lib/unittest/loader.py b/Lib/unittest/loader.py
index 541884e..25a9122 100644
--- a/Lib/unittest/loader.py
+++ b/Lib/unittest/loader.py
@@ -34,6 +34,14 @@ def _make_failed_test(classname, methodname, exception, suiteClass):
TestClass = type(classname, (case.TestCase,), attrs)
return suiteClass((TestClass(methodname),))
+def _make_skipped_test(methodname, exception, suiteClass):
+ @case.skip(str(exception))
+ def testSkipped(self):
+ pass
+ attrs = {methodname: testSkipped}
+ TestClass = type("ModuleSkipped", (case.TestCase,), attrs)
+ return suiteClass((TestClass(methodname),))
+
def _jython_aware_splitext(path):
if path.lower().endswith('$py.class'):
return path[:-9]
@@ -169,6 +177,9 @@ class TestLoader(object):
The pattern is deliberately not stored as a loader attribute so that
packages can continue discovery themselves. top_level_dir is stored so
load_tests does not need to pass this argument in to loader.discover().
+
+ Paths are sorted before being imported to ensure reproducible execution
+ order even on filesystems with non-alphabetical ordering like ext3/4.
"""
set_implicit_top = False
if top_level_dir is None and self._top_level_dir is not None:
@@ -245,7 +256,7 @@ class TestLoader(object):
def _find_tests(self, start_dir, pattern):
"""Used by discovery. Yields test suites it loads."""
- paths = os.listdir(start_dir)
+ paths = sorted(os.listdir(start_dir))
for path in paths:
full_path = os.path.join(start_dir, path)
@@ -259,6 +270,8 @@ class TestLoader(object):
name = self._get_name_from_path(full_path)
try:
module = self._get_module_from_name(name)
+ except case.SkipTest as e:
+ yield _make_skipped_test(name, e, self.suiteClass)
except:
yield _make_failed_import_test(name, self.suiteClass)
else:
@@ -291,8 +304,7 @@ class TestLoader(object):
# tests loaded from package file
yield tests
# recurse into the package
- for test in self._find_tests(full_path, pattern):
- yield test
+ yield from self._find_tests(full_path, pattern)
else:
try:
yield load_tests(self, tests, pattern)
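For context (not part of the patch): after this change discovery sorts directory listings for a stable suite order and turns a SkipTest raised at module import time into a single skipped test instead of an import failure. A minimal sketch; the 'tests' directory name is a placeholder:

    import unittest

    loader = unittest.TestLoader()
    suite = loader.discover(start_dir='tests', pattern='test*.py')
    unittest.TextTestRunner(verbosity=2).run(suite)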
diff --git a/Lib/unittest/main.py b/Lib/unittest/main.py
index ead6493..edb141a 100644
--- a/Lib/unittest/main.py
+++ b/Lib/unittest/main.py
@@ -164,7 +164,10 @@ class TestProgram(object):
# to support python -m unittest ...
self.module = None
else:
- self.testNames = (self.defaultTest,)
+ if isinstance(self.defaultTest, str):
+ self.testNames = (self.defaultTest,)
+ else:
+ self.testNames = list(self.defaultTest)
self.createTests()
def createTests(self):
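For context (not part of the patch): defaultTest may now be either a single dotted name or an iterable of names, as the two new TestProgram tests further down verify. A minimal sketch with hypothetical module names:

    import unittest

    if __name__ == '__main__':
        unittest.main(defaultTest=['pkg.test_module_a', 'pkg.test_module_b'],
                      exit=False)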
diff --git a/Lib/unittest/mock.py b/Lib/unittest/mock.py
index 57bf957..5a1db8a 100644
--- a/Lib/unittest/mock.py
+++ b/Lib/unittest/mock.py
@@ -27,7 +27,7 @@ __version__ = '1.0'
import inspect
import pprint
import sys
-from functools import wraps
+from functools import wraps, partial
BaseExceptions = (BaseException,)
@@ -66,55 +66,45 @@ DescriptorTypes = (
)
-def _getsignature(func, skipfirst, instance=False):
- if isinstance(func, type) and not instance:
+def _get_signature_object(func, as_instance, eat_self):
+ """
+ Given an arbitrary, possibly callable object, try to create a suitable
+ signature object.
+ Return a (reduced func, signature) tuple, or None.
+ """
+ if isinstance(func, type) and not as_instance:
+ # If it's a type and should be modelled as a type, use __init__.
try:
func = func.__init__
except AttributeError:
- return
- skipfirst = True
+ return None
+ # Skip the `self` argument in __init__
+ eat_self = True
elif not isinstance(func, FunctionTypes):
- # for classes where instance is True we end up here too
+ # If we really want to model an instance of the passed type,
+ # __call__ should be looked up, not __init__.
try:
func = func.__call__
except AttributeError:
- return
-
+ return None
+ if eat_self:
+ sig_func = partial(func, None)
+ else:
+ sig_func = func
try:
- argspec = inspect.getfullargspec(func)
- except TypeError:
- # C function / method, possibly inherited object().__init__
- return
-
- regargs, varargs, varkw, defaults, kwonly, kwonlydef, ann = argspec
-
-
- # instance methods and classmethods need to lose the self argument
- if getattr(func, '__self__', None) is not None:
- regargs = regargs[1:]
- if skipfirst:
- # this condition and the above one are never both True - why?
- regargs = regargs[1:]
-
- signature = inspect.formatargspec(
- regargs, varargs, varkw, defaults,
- kwonly, kwonlydef, ann, formatvalue=lambda value: "")
- return signature[1:-1], func
+ return func, inspect.signature(sig_func)
+ except ValueError:
+ # Certain callable types are not supported by inspect.signature()
+ return None
def _check_signature(func, mock, skipfirst, instance=False):
- if not _callable(func):
+ sig = _get_signature_object(func, instance, skipfirst)
+ if sig is None:
return
-
- result = _getsignature(func, skipfirst, instance)
- if result is None:
- return
- signature, func = result
-
- # can't use self because "self" is common as an argument name
- # unfortunately even not in the first place
- src = "lambda _mock_self, %s: None" % signature
- checksig = eval(src, {})
+ func, sig = sig
+ def checksig(_mock_self, *args, **kwargs):
+ sig.bind(*args, **kwargs)
_copy_func_details(func, checksig)
type(mock)._mock_check_sig = checksig
@@ -166,15 +156,12 @@ def _set_signature(mock, original, instance=False):
return
skipfirst = isinstance(original, type)
- result = _getsignature(original, skipfirst, instance)
+ result = _get_signature_object(original, instance, skipfirst)
if result is None:
- # was a C function (e.g. object().__init__ ) that can't be mocked
return
-
- signature, func = result
-
- src = "lambda %s: None" % signature
- checksig = eval(src, {})
+ func, sig = result
+ def checksig(*args, **kwargs):
+ sig.bind(*args, **kwargs)
_copy_func_details(func, checksig)
name = original.__name__
@@ -368,7 +355,7 @@ class NonCallableMock(Base):
def __init__(
self, spec=None, wraps=None, name=None, spec_set=None,
parent=None, _spec_state=None, _new_name='', _new_parent=None,
- **kwargs
+ _spec_as_instance=False, _eat_self=None, **kwargs
):
if _new_parent is None:
_new_parent = parent
@@ -382,8 +369,10 @@ class NonCallableMock(Base):
if spec_set is not None:
spec = spec_set
spec_set = True
+ if _eat_self is None:
+ _eat_self = parent is not None
- self._mock_add_spec(spec, spec_set)
+ self._mock_add_spec(spec, spec_set, _spec_as_instance, _eat_self)
__dict__['_mock_children'] = {}
__dict__['_mock_wraps'] = wraps
@@ -428,20 +417,26 @@ class NonCallableMock(Base):
self._mock_add_spec(spec, spec_set)
- def _mock_add_spec(self, spec, spec_set):
+ def _mock_add_spec(self, spec, spec_set, _spec_as_instance=False,
+ _eat_self=False):
_spec_class = None
+ _spec_signature = None
if spec is not None and not _is_list(spec):
if isinstance(spec, type):
_spec_class = spec
else:
_spec_class = _get_class(spec)
+ res = _get_signature_object(spec,
+ _spec_as_instance, _eat_self)
+ _spec_signature = res and res[1]
spec = dir(spec)
__dict__ = self.__dict__
__dict__['_spec_class'] = _spec_class
__dict__['_spec_set'] = spec_set
+ __dict__['_spec_signature'] = _spec_signature
__dict__['_mock_methods'] = spec
@@ -695,7 +690,6 @@ class NonCallableMock(Base):
self._mock_children[name] = _deleted
-
def _format_mock_call_signature(self, args, kwargs):
name = self._mock_name or 'mock'
return _format_call_signature(name, args, kwargs)
@@ -711,6 +705,28 @@ class NonCallableMock(Base):
return message % (expected_string, actual_string)
+ def _call_matcher(self, _call):
+ """
+ Given a call (or simply an (args, kwargs) tuple), return a
+ comparison key suitable for matching with other calls.
+ This is a best effort method which relies on the spec's signature,
+ if available, or falls back on the arguments themselves.
+ """
+ sig = self._spec_signature
+ if sig is not None:
+ if len(_call) == 2:
+ name = ''
+ args, kwargs = _call
+ else:
+ name, args, kwargs = _call
+ try:
+ return name, sig.bind(*args, **kwargs)
+ except TypeError as e:
+ return e.with_traceback(None)
+ else:
+ return _call
+
+
def assert_called_with(_mock_self, *args, **kwargs):
"""assert that the mock was called with the specified arguments.
@@ -721,9 +737,14 @@ class NonCallableMock(Base):
expected = self._format_mock_call_signature(args, kwargs)
raise AssertionError('Expected call: %s\nNot called' % (expected,))
- if self.call_args != (args, kwargs):
+ def _error_message():
msg = self._format_mock_failure_message(args, kwargs)
- raise AssertionError(msg)
+ return msg
+ expected = self._call_matcher((args, kwargs))
+ actual = self._call_matcher(self.call_args)
+ if expected != actual:
+ cause = expected if isinstance(expected, Exception) else None
+ raise AssertionError(_error_message()) from cause
def assert_called_once_with(_mock_self, *args, **kwargs):
@@ -747,18 +768,21 @@ class NonCallableMock(Base):
If `any_order` is True then the calls can be in any order, but
they must all appear in `mock_calls`."""
+ expected = [self._call_matcher(c) for c in calls]
+ cause = expected if isinstance(expected, Exception) else None
+ all_calls = _CallList(self._call_matcher(c) for c in self.mock_calls)
if not any_order:
- if calls not in self.mock_calls:
+ if expected not in all_calls:
raise AssertionError(
'Calls not found.\nExpected: %r\n'
'Actual: %r' % (calls, self.mock_calls)
- )
+ ) from cause
return
- all_calls = list(self.mock_calls)
+ all_calls = list(all_calls)
not_found = []
- for kall in calls:
+ for kall in expected:
try:
all_calls.remove(kall)
except ValueError:
@@ -766,7 +790,7 @@ class NonCallableMock(Base):
if not_found:
raise AssertionError(
'%r not all found in call list' % (tuple(not_found),)
- )
+ ) from cause
def assert_any_call(self, *args, **kwargs):
@@ -775,12 +799,14 @@ class NonCallableMock(Base):
The assert passes if the mock has *ever* been called, unlike
`assert_called_with` and `assert_called_once_with` that only pass if
the call is the most recent one."""
- kall = call(*args, **kwargs)
- if kall not in self.call_args_list:
+ expected = self._call_matcher((args, kwargs))
+ actual = [self._call_matcher(c) for c in self.call_args_list]
+ if expected not in actual:
+ cause = expected if isinstance(expected, Exception) else None
expected_string = self._format_mock_call_signature(args, kwargs)
raise AssertionError(
'%s call not found' % expected_string
- )
+ ) from cause
def _get_child_mock(self, **kw):
@@ -850,11 +876,12 @@ class CallableMixin(Base):
self = _mock_self
self.called = True
self.call_count += 1
- self.call_args = _Call((args, kwargs), two=True)
- self.call_args_list.append(_Call((args, kwargs), two=True))
-
_new_name = self._mock_new_name
_new_parent = self._mock_new_parent
+
+ _call = _Call((args, kwargs), two=True)
+ self.call_args = _call
+ self.call_args_list.append(_call)
self.mock_calls.append(_Call(('', args, kwargs)))
seen = set()
@@ -907,8 +934,6 @@ class CallableMixin(Base):
return result
ret_val = effect(*args, **kwargs)
- if ret_val is DEFAULT:
- ret_val = self.return_value
if (self._mock_wraps is not None and
self._mock_return_value is DEFAULT):
@@ -2028,6 +2053,8 @@ def create_autospec(spec, spec_set=False, instance=False, _parent=None,
elif spec is None:
# None we mock with a normal mock without a spec
_kwargs = {}
+ if _kwargs and instance:
+ _kwargs['_spec_as_instance'] = True
_kwargs.update(kwargs)
@@ -2094,10 +2121,12 @@ def create_autospec(spec, spec_set=False, instance=False, _parent=None,
if isinstance(spec, FunctionTypes):
parent = mock.mock
+ skipfirst = _must_skip(spec, entry, is_type)
+ kwargs['_eat_self'] = skipfirst
new = MagicMock(parent=parent, name=entry, _new_name=entry,
- _new_parent=parent, **kwargs)
+ _new_parent=parent,
+ **kwargs)
mock._mock_children[entry] = new
- skipfirst = _must_skip(spec, entry, is_type)
_check_signature(original, new, skipfirst=skipfirst)
# so functions created with _set_signature become instance attributes,
@@ -2111,6 +2140,10 @@ def create_autospec(spec, spec_set=False, instance=False, _parent=None,
def _must_skip(spec, entry, is_type):
+ """
+ Return whether we should skip the first argument on spec's `entry`
+ attribute.
+ """
if not isinstance(spec, type):
if entry in getattr(spec, '__dict__', {}):
# instance attribute - shouldn't skip
@@ -2123,7 +2156,12 @@ def _must_skip(spec, entry, is_type):
continue
if isinstance(result, (staticmethod, classmethod)):
return False
- return is_type
+ elif isinstance(getattr(result, '__get__', None), MethodWrapperTypes):
+ # Normal method => skip if looked up on type
+ # (if looked up on instance, self is already skipped)
+ return is_type
+ else:
+ return False
# shouldn't get here unless function is a dynamically provided attribute
# XXXX untested behaviour
@@ -2157,9 +2195,31 @@ FunctionTypes = (
type(ANY.__eq__),
)
+MethodWrapperTypes = (
+ type(ANY.__eq__.__get__),
+)
+
file_spec = None
+def _iterate_read_data(read_data):
+ # Helper for mock_open:
+ # Retrieve lines from read_data via a generator so that separate calls to
+ # readline, read, and readlines are properly interleaved
+ data_as_list = ['{}\n'.format(l) for l in read_data.split('\n')]
+
+ if data_as_list[-1] == '\n':
+ # If the last line ended in a newline, the list comprehension will have an
+ # extra entry that's just a newline. Remove this.
+ data_as_list = data_as_list[:-1]
+ else:
+ # If there wasn't an extra newline by itself, then the file being
+ # emulated doesn't have a newline to end the last line, so remove
+ # the newline that our naive format() added.
+ data_as_list[-1] = data_as_list[-1][:-1]
+
+ for line in data_as_list:
+ yield line
def mock_open(mock=None, read_data=''):
"""
@@ -2170,9 +2230,27 @@ def mock_open(mock=None, read_data=''):
default) then a `MagicMock` will be created for you, with the API limited
to methods or attributes available on standard file handles.
- `read_data` is a string for the `read` method of the file handle to return.
- This is an empty string by default.
+ `read_data` is a string for the `read`, `readline`, and `readlines` methods of the
+ file handle to return. This is an empty string by default.
"""
+ def _readlines_side_effect(*args, **kwargs):
+ if handle.readlines.return_value is not None:
+ return handle.readlines.return_value
+ return list(_data)
+
+ def _read_side_effect(*args, **kwargs):
+ if handle.read.return_value is not None:
+ return handle.read.return_value
+ return ''.join(_data)
+
+ def _readline_side_effect():
+ if handle.readline.return_value is not None:
+ while True:
+ yield handle.readline.return_value
+ for line in _data:
+ yield line
+
+
global file_spec
if file_spec is None:
import _io
@@ -2182,9 +2260,18 @@ def mock_open(mock=None, read_data=''):
mock = MagicMock(name='open', spec=open)
handle = MagicMock(spec=file_spec)
- handle.write.return_value = None
handle.__enter__.return_value = handle
- handle.read.return_value = read_data
+
+ _data = _iterate_read_data(read_data)
+
+ handle.write.return_value = None
+ handle.read.return_value = None
+ handle.readline.return_value = None
+ handle.readlines.return_value = None
+
+ handle.read.side_effect = _read_side_effect
+ handle.readline.side_effect = _readline_side_effect()
+ handle.readlines.side_effect = _readlines_side_effect
mock.return_value = handle
return mock
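For context (not part of the patch): mock_open() handles now serve read(), readline() and readlines() from shared read_data, with the three methods properly interleaved, and autospeccing switches to inspect.signature for argument checking. A minimal sketch of the mock_open side, assuming Python 3.4's unittest.mock:

    from unittest import mock

    m = mock.mock_open(read_data='line1\nline2\nline3\n')
    with mock.patch('builtins.open', m):
        with open('any-path.txt') as f:    # the path is irrelevant to the mock
            print(f.readline())            # 'line1\n'
            print(f.readlines())           # ['line2\n', 'line3\n']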
diff --git a/Lib/unittest/result.py b/Lib/unittest/result.py
index 97e5426..eec4f21 100644
--- a/Lib/unittest/result.py
+++ b/Lib/unittest/result.py
@@ -121,6 +121,22 @@ class TestResult(object):
self.failures.append((test, self._exc_info_to_string(err, test)))
self._mirrorOutput = True
+ @failfast
+ def addSubTest(self, test, subtest, err):
+ """Called at the end of a subtest.
+ 'err' is None if the subtest ended successfully, otherwise it's a
+ tuple of values as returned by sys.exc_info().
+ """
+ # By default, we don't do anything with successful subtests, but
+ # more sophisticated test results might want to record them.
+ if err is not None:
+ if issubclass(err[0], test.failureException):
+ errors = self.failures
+ else:
+ errors = self.errors
+ errors.append((test, self._exc_info_to_string(err, test)))
+ self._mirrorOutput = True
+
def addSuccess(self, test):
"Called when a test has completed successfully"
pass
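For context (not part of the patch): the default addSubTest() files failing subtests under failures or errors and silently ignores successful ones; subclasses can override it to record everything. A small sketch of such a subclass:

    import unittest

    class RecordingResult(unittest.TestResult):
        def __init__(self):
            super().__init__()
            self.subtest_events = []

        def addSubTest(self, test, subtest, err):
            super().addSubTest(test, subtest, err)
            # Keep every subtest outcome, successful or not.
            self.subtest_events.append((str(subtest), err is None))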
diff --git a/Lib/unittest/test/support.py b/Lib/unittest/test/support.py
index dbe4ddc..02e8f3a 100644
--- a/Lib/unittest/test/support.py
+++ b/Lib/unittest/test/support.py
@@ -41,7 +41,7 @@ class TestHashing(object):
self.fail("Problem hashing %s and %s: %s" % (obj_1, obj_2, e))
-class LoggingResult(unittest.TestResult):
+class _BaseLoggingResult(unittest.TestResult):
def __init__(self, log):
self._events = log
super().__init__()
@@ -52,7 +52,7 @@ class LoggingResult(unittest.TestResult):
def startTestRun(self):
self._events.append('startTestRun')
- super(LoggingResult, self).startTestRun()
+ super().startTestRun()
def stopTest(self, test):
self._events.append('stopTest')
@@ -60,7 +60,7 @@ class LoggingResult(unittest.TestResult):
def stopTestRun(self):
self._events.append('stopTestRun')
- super(LoggingResult, self).stopTestRun()
+ super().stopTestRun()
def addFailure(self, *args):
self._events.append('addFailure')
@@ -68,7 +68,7 @@ class LoggingResult(unittest.TestResult):
def addSuccess(self, *args):
self._events.append('addSuccess')
- super(LoggingResult, self).addSuccess(*args)
+ super().addSuccess(*args)
def addError(self, *args):
self._events.append('addError')
@@ -76,15 +76,39 @@ class LoggingResult(unittest.TestResult):
def addSkip(self, *args):
self._events.append('addSkip')
- super(LoggingResult, self).addSkip(*args)
+ super().addSkip(*args)
def addExpectedFailure(self, *args):
self._events.append('addExpectedFailure')
- super(LoggingResult, self).addExpectedFailure(*args)
+ super().addExpectedFailure(*args)
def addUnexpectedSuccess(self, *args):
self._events.append('addUnexpectedSuccess')
- super(LoggingResult, self).addUnexpectedSuccess(*args)
+ super().addUnexpectedSuccess(*args)
+
+
+class LegacyLoggingResult(_BaseLoggingResult):
+ """
+ A legacy TestResult implementation, without an addSubTest method,
+ which records its method calls.
+ """
+
+ @property
+ def addSubTest(self):
+ raise AttributeError
+
+
+class LoggingResult(_BaseLoggingResult):
+ """
+ A TestResult implementation which records its method calls.
+ """
+
+ def addSubTest(self, test, subtest, err):
+ if err is None:
+ self._events.append('addSubTestSuccess')
+ else:
+ self._events.append('addSubTestFailure')
+ super().addSubTest(test, subtest, err)
class ResultWithNoStartTestRunStopTestRun(object):
diff --git a/Lib/unittest/test/test_case.py b/Lib/unittest/test/test_case.py
index fdb2e78..0d923f0 100644
--- a/Lib/unittest/test/test_case.py
+++ b/Lib/unittest/test/test_case.py
@@ -13,7 +13,7 @@ from test import support
import unittest
from .support import (
- TestEquality, TestHashing, LoggingResult,
+ TestEquality, TestHashing, LoggingResult, LegacyLoggingResult,
ResultWithNoStartTestRunStopTestRun
)
@@ -297,6 +297,98 @@ class Test_TestCase(unittest.TestCase, TestEquality, TestHashing):
Foo('test').run()
+ def _check_call_order__subtests(self, result, events, expected_events):
+ class Foo(Test.LoggingTestCase):
+ def test(self):
+ super(Foo, self).test()
+ for i in [1, 2, 3]:
+ with self.subTest(i=i):
+ if i == 1:
+ self.fail('failure')
+ for j in [2, 3]:
+ with self.subTest(j=j):
+ if i * j == 6:
+ raise RuntimeError('raised by Foo.test')
+ 1 / 0
+
+ # Order is the following:
+ # i=1 => subtest failure
+ # i=2, j=2 => subtest success
+ # i=2, j=3 => subtest error
+ # i=3, j=2 => subtest error
+ # i=3, j=3 => subtest success
+ # toplevel => error
+ Foo(events).run(result)
+ self.assertEqual(events, expected_events)
+
+ def test_run_call_order__subtests(self):
+ events = []
+ result = LoggingResult(events)
+ expected = ['startTest', 'setUp', 'test', 'tearDown',
+ 'addSubTestFailure', 'addSubTestSuccess',
+ 'addSubTestFailure', 'addSubTestFailure',
+ 'addSubTestSuccess', 'addError', 'stopTest']
+ self._check_call_order__subtests(result, events, expected)
+
+ def test_run_call_order__subtests_legacy(self):
+ # With a legacy result object (without a addSubTest method),
+ # text execution stops after the first subtest failure.
+ events = []
+ result = LegacyLoggingResult(events)
+ expected = ['startTest', 'setUp', 'test', 'tearDown',
+ 'addFailure', 'stopTest']
+ self._check_call_order__subtests(result, events, expected)
+
+ def _check_call_order__subtests_success(self, result, events, expected_events):
+ class Foo(Test.LoggingTestCase):
+ def test(self):
+ super(Foo, self).test()
+ for i in [1, 2]:
+ with self.subTest(i=i):
+ for j in [2, 3]:
+ with self.subTest(j=j):
+ pass
+
+ Foo(events).run(result)
+ self.assertEqual(events, expected_events)
+
+ def test_run_call_order__subtests_success(self):
+ events = []
+ result = LoggingResult(events)
+ # The 6 subtest successes are individually recorded, in addition
+ # to the whole test success.
+ expected = (['startTest', 'setUp', 'test', 'tearDown']
+ + 6 * ['addSubTestSuccess']
+ + ['addSuccess', 'stopTest'])
+ self._check_call_order__subtests_success(result, events, expected)
+
+ def test_run_call_order__subtests_success_legacy(self):
+ # With a legacy result, only the whole test success is recorded.
+ events = []
+ result = LegacyLoggingResult(events)
+ expected = ['startTest', 'setUp', 'test', 'tearDown',
+ 'addSuccess', 'stopTest']
+ self._check_call_order__subtests_success(result, events, expected)
+
+ def test_run_call_order__subtests_failfast(self):
+ events = []
+ result = LoggingResult(events)
+ result.failfast = True
+
+ class Foo(Test.LoggingTestCase):
+ def test(self):
+ super(Foo, self).test()
+ with self.subTest(i=1):
+ self.fail('failure')
+ with self.subTest(i=2):
+ self.fail('failure')
+ self.fail('failure')
+
+ expected = ['startTest', 'setUp', 'test', 'tearDown',
+ 'addSubTestFailure', 'stopTest']
+ Foo(events).run(result)
+ self.assertEqual(events, expected)
+
# "This class attribute gives the exception raised by the test() method.
# If a test framework needs to use a specialized exception, possibly to
# carry additional information, it must subclass this exception in
diff --git a/Lib/unittest/test/test_discovery.py b/Lib/unittest/test/test_discovery.py
index 1fdf991..eca348e 100644
--- a/Lib/unittest/test/test_discovery.py
+++ b/Lib/unittest/test/test_discovery.py
@@ -46,9 +46,9 @@ class TestDiscovery(unittest.TestCase):
def restore_isdir():
os.path.isdir = original_isdir
- path_lists = [['test1.py', 'test2.py', 'not_a_test.py', 'test_dir',
+ path_lists = [['test2.py', 'test1.py', 'not_a_test.py', 'test_dir',
'test.foo', 'test-not-a-module.py', 'another_dir'],
- ['test3.py', 'test4.py', ]]
+ ['test4.py', 'test3.py', ]]
os.listdir = lambda path: path_lists.pop(0)
self.addCleanup(restore_listdir)
@@ -70,6 +70,8 @@ class TestDiscovery(unittest.TestCase):
loader._top_level_dir = top_level
suite = list(loader._find_tests(top_level, 'test*.py'))
+ # The test suites found should be sorted alphabetically for reliable
+ # execution order.
expected = [name + ' module tests' for name in
('test1', 'test2')]
expected.extend([('test_dir.%s' % name) + ' module tests' for name in
@@ -132,6 +134,7 @@ class TestDiscovery(unittest.TestCase):
# and directly from the test_directory2 package
self.assertEqual(suite,
['load_tests', 'test_directory2' + ' module tests'])
+ # The test module paths should be sorted for reliable execution order
self.assertEqual(Module.paths, ['test_directory', 'test_directory2'])
# load_tests should have been called once with loader, tests and pattern
@@ -184,11 +187,9 @@ class TestDiscovery(unittest.TestCase):
self.assertEqual(_find_tests_args, [(start_dir, 'pattern')])
self.assertIn(top_level_dir, sys.path)
- def test_discover_with_modules_that_fail_to_import(self):
- loader = unittest.TestLoader()
-
+ def setup_import_issue_tests(self, fakefile):
listdir = os.listdir
- os.listdir = lambda _: ['test_this_does_not_exist.py']
+ os.listdir = lambda _: [fakefile]
isfile = os.path.isfile
os.path.isfile = lambda _: True
orig_sys_path = sys.path[:]
@@ -198,6 +199,11 @@ class TestDiscovery(unittest.TestCase):
sys.path[:] = orig_sys_path
self.addCleanup(restore)
+ def test_discover_with_modules_that_fail_to_import(self):
+ loader = unittest.TestLoader()
+
+ self.setup_import_issue_tests('test_this_does_not_exist.py')
+
suite = loader.discover('.')
self.assertIn(os.getcwd(), sys.path)
self.assertEqual(suite.countTestCases(), 1)
@@ -206,6 +212,22 @@ class TestDiscovery(unittest.TestCase):
with self.assertRaises(ImportError):
test.test_this_does_not_exist()
+ def test_discover_with_module_that_raises_SkipTest_on_import(self):
+ loader = unittest.TestLoader()
+
+ def _get_module_from_name(name):
+ raise unittest.SkipTest('skipperoo')
+ loader._get_module_from_name = _get_module_from_name
+
+ self.setup_import_issue_tests('test_skip_dummy.py')
+
+ suite = loader.discover('.')
+ self.assertEqual(suite.countTestCases(), 1)
+
+ result = unittest.TestResult()
+ suite.run(result)
+ self.assertEqual(len(result.skipped), 1)
+
def test_command_line_handling_parseArgs(self):
program = TestableTestProgram()
diff --git a/Lib/unittest/test/test_program.py b/Lib/unittest/test/test_program.py
index 9794868..97d7a24 100644
--- a/Lib/unittest/test/test_program.py
+++ b/Lib/unittest/test/test_program.py
@@ -64,6 +64,41 @@ class Test_TestProgram(unittest.TestCase):
return self.suiteClass(
[self.loadTestsFromTestCase(Test_TestProgram.FooBar)])
+ def loadTestsFromNames(self, names, module):
+ return self.suiteClass(
+ [self.loadTestsFromTestCase(Test_TestProgram.FooBar)])
+
+ def test_defaultTest_with_string(self):
+ class FakeRunner(object):
+ def run(self, test):
+ self.test = test
+ return True
+
+ old_argv = sys.argv
+ sys.argv = ['faketest']
+ runner = FakeRunner()
+ program = unittest.TestProgram(testRunner=runner, exit=False,
+ defaultTest='unittest.test',
+ testLoader=self.FooBarLoader())
+ sys.argv = old_argv
+ self.assertEqual(('unittest.test',), program.testNames)
+
+ def test_defaultTest_with_iterable(self):
+ class FakeRunner(object):
+ def run(self, test):
+ self.test = test
+ return True
+
+ old_argv = sys.argv
+ sys.argv = ['faketest']
+ runner = FakeRunner()
+ program = unittest.TestProgram(
+ testRunner=runner, exit=False,
+ defaultTest=['unittest.test', 'unittest.test2'],
+ testLoader=self.FooBarLoader())
+ sys.argv = old_argv
+ self.assertEqual(['unittest.test', 'unittest.test2'],
+ program.testNames)
def test_NonExit(self):
program = unittest.main(exit=False,
diff --git a/Lib/unittest/test/test_result.py b/Lib/unittest/test/test_result.py
index 1c58e61..fed1762 100644
--- a/Lib/unittest/test/test_result.py
+++ b/Lib/unittest/test/test_result.py
@@ -234,6 +234,37 @@ class Test_TestResult(unittest.TestCase):
'testGetDescriptionWithoutDocstring (' + __name__ +
'.Test_TestResult)')
+ def testGetSubTestDescriptionWithoutDocstring(self):
+ with self.subTest(foo=1, bar=2):
+ result = unittest.TextTestResult(None, True, 1)
+ self.assertEqual(
+ result.getDescription(self._subtest),
+ 'testGetSubTestDescriptionWithoutDocstring (' + __name__ +
+ '.Test_TestResult) (bar=2, foo=1)')
+ with self.subTest('some message'):
+ result = unittest.TextTestResult(None, True, 1)
+ self.assertEqual(
+ result.getDescription(self._subtest),
+ 'testGetSubTestDescriptionWithoutDocstring (' + __name__ +
+ '.Test_TestResult) [some message]')
+
+ def testGetSubTestDescriptionWithoutDocstringAndParams(self):
+ with self.subTest():
+ result = unittest.TextTestResult(None, True, 1)
+ self.assertEqual(
+ result.getDescription(self._subtest),
+ 'testGetSubTestDescriptionWithoutDocstringAndParams '
+ '(' + __name__ + '.Test_TestResult) (<subtest>)')
+
+ def testGetNestedSubTestDescriptionWithoutDocstring(self):
+ with self.subTest(foo=1):
+ with self.subTest(bar=2):
+ result = unittest.TextTestResult(None, True, 1)
+ self.assertEqual(
+ result.getDescription(self._subtest),
+ 'testGetNestedSubTestDescriptionWithoutDocstring '
+ '(' + __name__ + '.Test_TestResult) (bar=2, foo=1)')
+
@unittest.skipIf(sys.flags.optimize >= 2,
"Docstrings are omitted with -O2 and above")
def testGetDescriptionWithOneLineDocstring(self):
@@ -247,6 +278,18 @@ class Test_TestResult(unittest.TestCase):
@unittest.skipIf(sys.flags.optimize >= 2,
"Docstrings are omitted with -O2 and above")
+ def testGetSubTestDescriptionWithOneLineDocstring(self):
+ """Tests getDescription() for a method with a docstring."""
+ result = unittest.TextTestResult(None, True, 1)
+ with self.subTest(foo=1, bar=2):
+ self.assertEqual(
+ result.getDescription(self._subtest),
+ ('testGetSubTestDescriptionWithOneLineDocstring '
+ '(' + __name__ + '.Test_TestResult) (bar=2, foo=1)\n'
+ 'Tests getDescription() for a method with a docstring.'))
+
+ @unittest.skipIf(sys.flags.optimize >= 2,
+ "Docstrings are omitted with -O2 and above")
def testGetDescriptionWithMultiLineDocstring(self):
"""Tests getDescription() for a method with a longer docstring.
The second line of the docstring.
@@ -259,6 +302,21 @@ class Test_TestResult(unittest.TestCase):
'Tests getDescription() for a method with a longer '
'docstring.'))
+ @unittest.skipIf(sys.flags.optimize >= 2,
+ "Docstrings are omitted with -O2 and above")
+ def testGetSubTestDescriptionWithMultiLineDocstring(self):
+ """Tests getDescription() for a method with a longer docstring.
+ The second line of the docstring.
+ """
+ result = unittest.TextTestResult(None, True, 1)
+ with self.subTest(foo=1, bar=2):
+ self.assertEqual(
+ result.getDescription(self._subtest),
+ ('testGetSubTestDescriptionWithMultiLineDocstring '
+ '(' + __name__ + '.Test_TestResult) (bar=2, foo=1)\n'
+ 'Tests getDescription() for a method with a longer '
+ 'docstring.'))
+
def testStackFrameTrimming(self):
class Frame(object):
class tb_frame(object):
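The new getDescription() tests above reflect how TextTestResult labels subtests with their parameters or message. A small runnable illustration (class and test names are made up):

    import unittest

    class Squares(unittest.TestCase):
        def test_even_squares(self):
            for n in (2, 4, 5):            # 5 deliberately fails
                with self.subTest(n=n):
                    self.assertEqual((n * n) % 2, 0)

    if __name__ == '__main__':
        # With verbosity=2 the failing subtest is reported with its
        # parameters appended, e.g.
        #   test_even_squares (__main__.Squares) (n=5)
        unittest.main(verbosity=2, exit=False)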
diff --git a/Lib/unittest/test/test_runner.py b/Lib/unittest/test/test_runner.py
index aed1e76..8f5cd2e 100644
--- a/Lib/unittest/test/test_runner.py
+++ b/Lib/unittest/test/test_runner.py
@@ -5,6 +5,7 @@ import pickle
import subprocess
import unittest
+from unittest.case import _Outcome
from .support import LoggingResult, ResultWithNoStartTestRunStopTestRun
@@ -42,12 +43,8 @@ class TestCleanUp(unittest.TestCase):
def testNothing(self):
pass
- class MockOutcome(object):
- success = True
- errors = []
-
test = TestableTest('testNothing')
- test._outcomeForDoCleanups = MockOutcome
+ outcome = test._outcome = _Outcome()
exc1 = Exception('foo')
exc2 = Exception('bar')
@@ -61,9 +58,10 @@ class TestCleanUp(unittest.TestCase):
test.addCleanup(cleanup2)
self.assertFalse(test.doCleanups())
- self.assertFalse(MockOutcome.success)
+ self.assertFalse(outcome.success)
- (Type1, instance1, _), (Type2, instance2, _) = reversed(MockOutcome.errors)
+ ((_, (Type1, instance1, _)),
+ (_, (Type2, instance2, _))) = reversed(outcome.errors)
self.assertEqual((Type1, instance1), (Exception, exc1))
self.assertEqual((Type2, instance2), (Exception, exc2))
diff --git a/Lib/unittest/test/test_skipping.py b/Lib/unittest/test/test_skipping.py
index 952240e..3556932 100644
--- a/Lib/unittest/test/test_skipping.py
+++ b/Lib/unittest/test/test_skipping.py
@@ -29,6 +29,31 @@ class Test_TestSkipping(unittest.TestCase):
self.assertEqual(result.skipped, [(test, "testing")])
self.assertEqual(result.testsRun, 1)
+ def test_skipping_subtests(self):
+ class Foo(unittest.TestCase):
+ def test_skip_me(self):
+ with self.subTest(a=1):
+ with self.subTest(b=2):
+ self.skipTest("skip 1")
+ self.skipTest("skip 2")
+ self.skipTest("skip 3")
+ events = []
+ result = LoggingResult(events)
+ test = Foo("test_skip_me")
+ test.run(result)
+ self.assertEqual(events, ['startTest', 'addSkip', 'addSkip',
+ 'addSkip', 'stopTest'])
+ self.assertEqual(len(result.skipped), 3)
+ subtest, msg = result.skipped[0]
+ self.assertEqual(msg, "skip 1")
+ self.assertIsInstance(subtest, unittest.TestCase)
+ self.assertIsNot(subtest, test)
+ subtest, msg = result.skipped[1]
+ self.assertEqual(msg, "skip 2")
+ self.assertIsInstance(subtest, unittest.TestCase)
+ self.assertIsNot(subtest, test)
+ self.assertEqual(result.skipped[2], (test, "skip 3"))
+
def test_skipping_decorators(self):
op_table = ((unittest.skipUnless, False, True),
(unittest.skipIf, True, False))
@@ -95,6 +120,31 @@ class Test_TestSkipping(unittest.TestCase):
self.assertEqual(result.expectedFailures[0][0], test)
self.assertTrue(result.wasSuccessful())
+ def test_expected_failure_subtests(self):
+ # A failure in any subtest counts as the expected failure of the
+ # whole test.
+ class Foo(unittest.TestCase):
+ @unittest.expectedFailure
+ def test_die(self):
+ with self.subTest():
+ # This one succeeds
+ pass
+ with self.subTest():
+ self.fail("help me!")
+ with self.subTest():
+ # This one doesn't get executed
+ self.fail("shouldn't come here")
+ events = []
+ result = LoggingResult(events)
+ test = Foo("test_die")
+ test.run(result)
+ self.assertEqual(events,
+ ['startTest', 'addSubTestSuccess',
+ 'addExpectedFailure', 'stopTest'])
+ self.assertEqual(len(result.expectedFailures), 1)
+ self.assertIs(result.expectedFailures[0][0], test)
+ self.assertTrue(result.wasSuccessful())
+
def test_unexpected_success(self):
class Foo(unittest.TestCase):
@unittest.expectedFailure
@@ -110,6 +160,30 @@ class Test_TestSkipping(unittest.TestCase):
self.assertEqual(result.unexpectedSuccesses, [test])
self.assertTrue(result.wasSuccessful())
+ def test_unexpected_success_subtests(self):
+ # Success in all subtests counts as the unexpected success of
+ # the whole test.
+ class Foo(unittest.TestCase):
+ @unittest.expectedFailure
+ def test_die(self):
+ with self.subTest():
+ # This one succeeds
+ pass
+ with self.subTest():
+ # So does this one
+ pass
+ events = []
+ result = LoggingResult(events)
+ test = Foo("test_die")
+ test.run(result)
+ self.assertEqual(events,
+ ['startTest',
+ 'addSubTestSuccess', 'addSubTestSuccess',
+ 'addUnexpectedSuccess', 'stopTest'])
+ self.assertFalse(result.failures)
+ self.assertEqual(result.unexpectedSuccesses, [test])
+ self.assertTrue(result.wasSuccessful())
+
def test_skip_doesnt_run_setup(self):
class Foo(unittest.TestCase):
wasSetUp = False
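The subtest-related skipping and expectedFailure tests above describe user-visible behaviour: each skipped subtest is recorded separately, and a failure in any subtest satisfies @expectedFailure for the whole test. A sketch of both cases (names are illustrative):

    import unittest

    class SubTestOutcomes(unittest.TestCase):
        def test_partial_skip(self):
            # Skipping inside a subtest records that subtest as skipped;
            # the remaining iterations still run.
            for n in range(3):
                with self.subTest(n=n):
                    if n == 1:
                        self.skipTest('n == 1 is not supported')
                    self.assertLess(n, 3)

        @unittest.expectedFailure
        def test_known_bad(self):
            # A failure in any subtest counts as the expected failure of
            # the whole test.
            with self.subTest('known bad case'):
                self.fail('boom')

    if __name__ == '__main__':
        unittest.main(verbosity=2, exit=False)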
diff --git a/Lib/unittest/test/testmock/testhelpers.py b/Lib/unittest/test/testmock/testhelpers.py
index 8bfb293..8b2e96d 100644
--- a/Lib/unittest/test/testmock/testhelpers.py
+++ b/Lib/unittest/test/testmock/testhelpers.py
@@ -337,9 +337,10 @@ class SpecSignatureTest(unittest.TestCase):
def test_basic(self):
- for spec in (SomeClass, SomeClass()):
- mock = create_autospec(spec)
- self._check_someclass_mock(mock)
+ mock = create_autospec(SomeClass)
+ self._check_someclass_mock(mock)
+ mock = create_autospec(SomeClass())
+ self._check_someclass_mock(mock)
def test_create_autospec_return_value(self):
@@ -576,10 +577,10 @@ class SpecSignatureTest(unittest.TestCase):
def test_spec_inheritance_for_classes(self):
class Foo(object):
- def a(self):
+ def a(self, x):
pass
class Bar(object):
- def f(self):
+ def f(self, y):
pass
class_mock = create_autospec(Foo)
@@ -587,26 +588,30 @@ class SpecSignatureTest(unittest.TestCase):
self.assertIsNot(class_mock, class_mock())
for this_mock in class_mock, class_mock():
- this_mock.a()
- this_mock.a.assert_called_with()
- self.assertRaises(TypeError, this_mock.a, 'foo')
+ this_mock.a(x=5)
+ this_mock.a.assert_called_with(x=5)
+ this_mock.a.assert_called_with(5)
+ self.assertRaises(TypeError, this_mock.a, 'foo', 'bar')
self.assertRaises(AttributeError, getattr, this_mock, 'b')
instance_mock = create_autospec(Foo())
- instance_mock.a()
- instance_mock.a.assert_called_with()
- self.assertRaises(TypeError, instance_mock.a, 'foo')
+ instance_mock.a(5)
+ instance_mock.a.assert_called_with(5)
+ instance_mock.a.assert_called_with(x=5)
+ self.assertRaises(TypeError, instance_mock.a, 'foo', 'bar')
self.assertRaises(AttributeError, getattr, instance_mock, 'b')
# The return value isn't callable
self.assertRaises(TypeError, instance_mock)
- instance_mock.Bar.f()
- instance_mock.Bar.f.assert_called_with()
+ instance_mock.Bar.f(6)
+ instance_mock.Bar.f.assert_called_with(6)
+ instance_mock.Bar.f.assert_called_with(y=6)
self.assertRaises(AttributeError, getattr, instance_mock.Bar, 'g')
- instance_mock.Bar().f()
- instance_mock.Bar().f.assert_called_with()
+ instance_mock.Bar().f(6)
+ instance_mock.Bar().f.assert_called_with(6)
+ instance_mock.Bar().f.assert_called_with(y=6)
self.assertRaises(AttributeError, getattr, instance_mock.Bar(), 'g')
@@ -663,12 +668,15 @@ class SpecSignatureTest(unittest.TestCase):
self.assertRaises(TypeError, mock)
mock(1, 2)
mock.assert_called_with(1, 2)
+ mock.assert_called_with(1, b=2)
+ mock.assert_called_with(a=1, b=2)
f.f = f
mock = create_autospec(f)
self.assertRaises(TypeError, mock.f)
mock.f(3, 4)
mock.f.assert_called_with(3, 4)
+ mock.f.assert_called_with(a=3, b=4)
def test_skip_attributeerrors(self):
@@ -704,9 +712,13 @@ class SpecSignatureTest(unittest.TestCase):
self.assertRaises(TypeError, mock)
mock(1)
mock.assert_called_once_with(1)
+ mock.assert_called_once_with(a=1)
+ self.assertRaises(AssertionError, mock.assert_called_once_with, 2)
mock(4, 5)
mock.assert_called_with(4, 5)
+ mock.assert_called_with(a=4, b=5)
+ self.assertRaises(AssertionError, mock.assert_called_with, a=5, b=4)
def test_class_with_no_init(self):
@@ -719,24 +731,27 @@ class SpecSignatureTest(unittest.TestCase):
def test_signature_callable(self):
class Callable(object):
- def __init__(self):
+ def __init__(self, x, y):
pass
def __call__(self, a):
pass
mock = create_autospec(Callable)
- mock()
- mock.assert_called_once_with()
+ mock(1, 2)
+ mock.assert_called_once_with(1, 2)
+ mock.assert_called_once_with(x=1, y=2)
self.assertRaises(TypeError, mock, 'a')
- instance = mock()
+ instance = mock(1, 2)
self.assertRaises(TypeError, instance)
instance(a='a')
+ instance.assert_called_once_with('a')
instance.assert_called_once_with(a='a')
instance('a')
instance.assert_called_with('a')
+ instance.assert_called_with(a='a')
- mock = create_autospec(Callable())
+ mock = create_autospec(Callable(1, 2))
mock(a='a')
mock.assert_called_once_with(a='a')
self.assertRaises(TypeError, mock)
@@ -779,7 +794,11 @@ class SpecSignatureTest(unittest.TestCase):
pass
a = create_autospec(Foo)
+ a.f(10)
+ a.f.assert_called_with(10)
+ a.f.assert_called_with(self=10)
a.f(self=10)
+ a.f.assert_called_with(10)
a.f.assert_called_with(self=10)
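These testhelpers updates rely on autospecced mocks binding recorded and expected calls to the real signature, so positional and keyword spellings compare equal and calls the signature rejects raise TypeError. A compact sketch under those assumptions (Greeter is a made-up class):

    from unittest.mock import create_autospec

    class Greeter:
        def greet(self, name, punctuation='!'):
            return 'Hello, %s%s' % (name, punctuation)

    mock = create_autospec(Greeter)
    instance = mock()          # calling the class mock gives an instance mock
    instance.greet('world')

    # Both spellings describe the same bound call, so both assertions pass.
    instance.greet.assert_called_with('world')
    instance.greet.assert_called_with(name='world')

    # Arguments the real signature would reject raise TypeError up front.
    try:
        instance.greet('a', 'b', 'c')
    except TypeError:
        pass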
diff --git a/Lib/unittest/test/testmock/testmock.py b/Lib/unittest/test/testmock/testmock.py
index 2c6f128..475a826 100644
--- a/Lib/unittest/test/testmock/testmock.py
+++ b/Lib/unittest/test/testmock/testmock.py
@@ -25,6 +25,18 @@ class Iter(object):
__next__ = next
+class Something(object):
+ def meth(self, a, b, c, d=None):
+ pass
+
+ @classmethod
+ def cmeth(cls, a, b, c, d=None):
+ pass
+
+ @staticmethod
+ def smeth(a, b, c, d=None):
+ pass
+
class MockTest(unittest.TestCase):
@@ -273,6 +285,43 @@ class MockTest(unittest.TestCase):
mock.assert_called_with(1, 2, 3, a='fish', b='nothing')
+ def test_assert_called_with_function_spec(self):
+ def f(a, b, c, d=None):
+ pass
+
+ mock = Mock(spec=f)
+
+ mock(1, b=2, c=3)
+ mock.assert_called_with(1, 2, 3)
+ mock.assert_called_with(a=1, b=2, c=3)
+ self.assertRaises(AssertionError, mock.assert_called_with,
+ 1, b=3, c=2)
+ # Expected call doesn't match the spec's signature
+ with self.assertRaises(AssertionError) as cm:
+ mock.assert_called_with(e=8)
+ self.assertIsInstance(cm.exception.__cause__, TypeError)
+
+
+ def test_assert_called_with_method_spec(self):
+ def _check(mock):
+ mock(1, b=2, c=3)
+ mock.assert_called_with(1, 2, 3)
+ mock.assert_called_with(a=1, b=2, c=3)
+ self.assertRaises(AssertionError, mock.assert_called_with,
+ 1, b=3, c=2)
+
+ mock = Mock(spec=Something().meth)
+ _check(mock)
+ mock = Mock(spec=Something.cmeth)
+ _check(mock)
+ mock = Mock(spec=Something().cmeth)
+ _check(mock)
+ mock = Mock(spec=Something.smeth)
+ _check(mock)
+ mock = Mock(spec=Something().smeth)
+ _check(mock)
+
+
def test_assert_called_once_with(self):
mock = Mock()
mock()
@@ -297,6 +346,29 @@ class MockTest(unittest.TestCase):
)
+ def test_assert_called_once_with_function_spec(self):
+ def f(a, b, c, d=None):
+ pass
+
+ mock = Mock(spec=f)
+
+ mock(1, b=2, c=3)
+ mock.assert_called_once_with(1, 2, 3)
+ mock.assert_called_once_with(a=1, b=2, c=3)
+ self.assertRaises(AssertionError, mock.assert_called_once_with,
+ 1, b=3, c=2)
+ # Expected call doesn't match the spec's signature
+ with self.assertRaises(AssertionError) as cm:
+ mock.assert_called_once_with(e=8)
+ self.assertIsInstance(cm.exception.__cause__, TypeError)
+ # Mock called more than once => always fails
+ mock(4, 5, 6)
+ self.assertRaises(AssertionError, mock.assert_called_once_with,
+ 1, 2, 3)
+ self.assertRaises(AssertionError, mock.assert_called_once_with,
+ 4, 5, 6)
+
+
def test_attribute_access_returns_mocks(self):
mock = Mock()
something = mock.something
@@ -991,6 +1063,39 @@ class MockTest(unittest.TestCase):
)
+ def test_assert_has_calls_with_function_spec(self):
+ def f(a, b, c, d=None):
+ pass
+
+ mock = Mock(spec=f)
+
+ mock(1, b=2, c=3)
+ mock(4, 5, c=6, d=7)
+ mock(10, 11, c=12)
+ calls = [
+ ('', (1, 2, 3), {}),
+ ('', (4, 5, 6), {'d': 7}),
+ ((10, 11, 12), {}),
+ ]
+ mock.assert_has_calls(calls)
+ mock.assert_has_calls(calls, any_order=True)
+ mock.assert_has_calls(calls[1:])
+ mock.assert_has_calls(calls[1:], any_order=True)
+ mock.assert_has_calls(calls[:-1])
+ mock.assert_has_calls(calls[:-1], any_order=True)
+ # Reversed order
+ calls = list(reversed(calls))
+ with self.assertRaises(AssertionError):
+ mock.assert_has_calls(calls)
+ mock.assert_has_calls(calls, any_order=True)
+ with self.assertRaises(AssertionError):
+ mock.assert_has_calls(calls[1:])
+ mock.assert_has_calls(calls[1:], any_order=True)
+ with self.assertRaises(AssertionError):
+ mock.assert_has_calls(calls[:-1])
+ mock.assert_has_calls(calls[:-1], any_order=True)
+
+
def test_assert_any_call(self):
mock = Mock()
mock(1, 2)
@@ -1017,6 +1122,26 @@ class MockTest(unittest.TestCase):
)
+ def test_assert_any_call_with_function_spec(self):
+ def f(a, b, c, d=None):
+ pass
+
+ mock = Mock(spec=f)
+
+ mock(1, b=2, c=3)
+ mock(4, 5, c=6, d=7)
+ mock.assert_any_call(1, 2, 3)
+ mock.assert_any_call(a=1, b=2, c=3)
+ mock.assert_any_call(4, 5, 6, 7)
+ mock.assert_any_call(a=4, b=5, c=6, d=7)
+ self.assertRaises(AssertionError, mock.assert_any_call,
+ 1, b=3, c=2)
+ # Expected call doesn't match the spec's signature
+ with self.assertRaises(AssertionError) as cm:
+ mock.assert_any_call(e=8)
+ self.assertIsInstance(cm.exception.__cause__, TypeError)
+
+
def test_mock_calls_create_autospec(self):
def f(a, b):
pass
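The new Mock(spec=...) tests above show the same signature-aware matching applied to plain Mocks with a function or method spec. A minimal reproduction of the behaviour the tests document:

    from unittest.mock import Mock

    def f(a, b, c, d=None):
        pass

    mock = Mock(spec=f)
    mock(1, b=2, c=3)

    # Expected and actual calls are both bound to f's signature first.
    mock.assert_called_with(1, 2, 3)
    mock.assert_called_with(a=1, b=2, c=3)

    # An expected call f itself would reject fails, with the TypeError
    # chained as the cause of the AssertionError.
    try:
        mock.assert_called_with(e=8)
    except AssertionError as exc:
        assert isinstance(exc.__cause__, TypeError)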
diff --git a/Lib/unittest/test/testmock/testwith.py b/Lib/unittest/test/testmock/testwith.py
index 0a0cfad..f54e051 100644
--- a/Lib/unittest/test/testmock/testwith.py
+++ b/Lib/unittest/test/testmock/testwith.py
@@ -172,5 +172,88 @@ class TestMockOpen(unittest.TestCase):
self.assertEqual(result, 'foo')
+ def test_readline_data(self):
+ # Check that readline will return all the lines from the fake file
+ mock = mock_open(read_data='foo\nbar\nbaz\n')
+ with patch('%s.open' % __name__, mock, create=True):
+ h = open('bar')
+ line1 = h.readline()
+ line2 = h.readline()
+ line3 = h.readline()
+ self.assertEqual(line1, 'foo\n')
+ self.assertEqual(line2, 'bar\n')
+ self.assertEqual(line3, 'baz\n')
+
+ # Check that we properly emulate a file that doesn't end in a newline
+ mock = mock_open(read_data='foo')
+ with patch('%s.open' % __name__, mock, create=True):
+ h = open('bar')
+ result = h.readline()
+ self.assertEqual(result, 'foo')
+
+
+ def test_readlines_data(self):
+ # Test that emulating a file that ends in a newline character works
+ mock = mock_open(read_data='foo\nbar\nbaz\n')
+ with patch('%s.open' % __name__, mock, create=True):
+ h = open('bar')
+ result = h.readlines()
+ self.assertEqual(result, ['foo\n', 'bar\n', 'baz\n'])
+
+ # Test that files without a final newline will also be correctly
+ # emulated
+ mock = mock_open(read_data='foo\nbar\nbaz')
+ with patch('%s.open' % __name__, mock, create=True):
+ h = open('bar')
+ result = h.readlines()
+
+ self.assertEqual(result, ['foo\n', 'bar\n', 'baz'])
+
+
+ def test_mock_open_read_with_argument(self):
+ # At one point calling read with an argument was broken
+ # for mocks returned by mock_open
+ some_data = 'foo\nbar\nbaz'
+ mock = mock_open(read_data=some_data)
+ self.assertEqual(mock().read(10), some_data)
+
+
+ def test_interleaved_reads(self):
+ # Test that calling read, readline, and readlines pulls data
+ # sequentially from the data we preload with
+ mock = mock_open(read_data='foo\nbar\nbaz\n')
+ with patch('%s.open' % __name__, mock, create=True):
+ h = open('bar')
+ line1 = h.readline()
+ rest = h.readlines()
+ self.assertEqual(line1, 'foo\n')
+ self.assertEqual(rest, ['bar\n', 'baz\n'])
+
+ mock = mock_open(read_data='foo\nbar\nbaz\n')
+ with patch('%s.open' % __name__, mock, create=True):
+ h = open('bar')
+ line1 = h.readline()
+ rest = h.read()
+ self.assertEqual(line1, 'foo\n')
+ self.assertEqual(rest, 'bar\nbaz\n')
+
+
+ def test_overriding_return_values(self):
+ mock = mock_open(read_data='foo')
+ handle = mock()
+
+ handle.read.return_value = 'bar'
+ handle.readline.return_value = 'bar'
+ handle.readlines.return_value = ['bar']
+
+ self.assertEqual(handle.read(), 'bar')
+ self.assertEqual(handle.readline(), 'bar')
+ self.assertEqual(handle.readlines(), ['bar'])
+
+ # call repeatedly to check that a StopIteration is not propagated
+ self.assertEqual(handle.readline(), 'bar')
+ self.assertEqual(handle.readline(), 'bar')
+
+
if __name__ == '__main__':
unittest.main()
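The mock_open tests above cover readline()/readlines() support and sequential consumption of read_data. A short usage sketch (the file name is never opened for real):

    from unittest.mock import mock_open, patch

    m = mock_open(read_data='foo\nbar\nbaz\n')
    with patch('builtins.open', m):
        with open('notes.txt') as fh:
            first = fh.readline()       # 'foo\n'
            rest = fh.readlines()       # ['bar\n', 'baz\n']

    assert first == 'foo\n'
    assert rest == ['bar\n', 'baz\n']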
diff --git a/Lib/urllib/error.py b/Lib/urllib/error.py
index b712ebb..45b7169 100644
--- a/Lib/urllib/error.py
+++ b/Lib/urllib/error.py
@@ -1,6 +1,6 @@
"""Exception classes raised by urllib.
-The base exception class is URLError, which inherits from IOError. It
+The base exception class is URLError, which inherits from OSError. It
doesn't define any behavior of its own, but is the base class for all
exceptions defined in this package.
@@ -17,11 +17,11 @@ __all__ = ['URLError', 'HTTPError', 'ContentTooShortError']
# do these error classes make sense?
-# make sure all of the IOError stuff is overridden. we just want to be
+# make sure all of the OSError stuff is overridden. we just want to be
# subtypes.
-class URLError(IOError):
- # URLError is a sub-type of IOError, but it doesn't share any of
+class URLError(OSError):
+ # URLError is a sub-type of OSError, but it doesn't share any of
# the implementation. need to override __init__ and __str__.
# It sets self.args for compatibility with other EnvironmentError
# subclasses, but args doesn't have the typical format with errno in
@@ -61,9 +61,13 @@ class HTTPError(URLError, urllib.response.addinfourl):
def reason(self):
return self.msg
- def info(self):
+ @property
+ def headers(self):
return self.hdrs
+ @headers.setter
+ def headers(self, headers):
+ self.hdrs = headers
# exception raised when downloaded size does not match content-length
class ContentTooShortError(URLError):
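Two user-visible effects of this urllib.error change: URLError (and its subclasses) now derive from OSError, and HTTPError gains a read/write .headers property backed by the old .hdrs attribute. A self-contained check (the URL and header are placeholders; no network access involved):

    from urllib.error import HTTPError, URLError

    err = HTTPError('http://example.invalid/x', 404, 'Not Found',
                    hdrs={'X-Debug': 'yes'}, fp=None)
    assert err.headers == {'X-Debug': 'yes'}
    err.headers = {'X-Debug': 'no'}          # setter keeps .hdrs in sync
    assert err.hdrs == {'X-Debug': 'no'}

    # Plain OSError handlers now also catch urllib errors.
    assert isinstance(URLError('boom'), OSError)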
diff --git a/Lib/urllib/request.py b/Lib/urllib/request.py
index 5ddec5f..8b3cdf9 100644
--- a/Lib/urllib/request.py
+++ b/Lib/urllib/request.py
@@ -18,7 +18,7 @@ urlopen(url, data=None) -- Basic usage is the same as original
urllib. pass the url and optionally data to post to an HTTP URL, and
get a file-like object back. One difference is that you can also pass
a Request instance instead of URL. Raises a URLError (subclass of
-IOError); for HTTP errors, raises an HTTPError, which can also be
+OSError); for HTTP errors, raises an HTTPError, which can also be
treated as a valid response.
build_opener -- Function that creates a new OpenerDirector instance.
@@ -103,7 +103,8 @@ from urllib.error import URLError, HTTPError, ContentTooShortError
from urllib.parse import (
urlparse, urlsplit, urljoin, unwrap, quote, unquote,
splittype, splithost, splitport, splituser, splitpasswd,
- splitattr, splitquery, splitvalue, splittag, to_bytes, urlunparse)
+ splitattr, splitquery, splitvalue, splittag, to_bytes,
+ unquote_to_bytes, urlunparse)
from urllib.response import addinfourl, addclosehook
# check for SSL
@@ -121,7 +122,7 @@ __all__ = [
'HTTPPasswordMgr', 'HTTPPasswordMgrWithDefaultRealm',
'AbstractBasicAuthHandler', 'HTTPBasicAuthHandler', 'ProxyBasicAuthHandler',
'AbstractDigestAuthHandler', 'HTTPDigestAuthHandler', 'ProxyDigestAuthHandler',
- 'HTTPHandler', 'FileHandler', 'FTPHandler', 'CacheFTPHandler',
+ 'HTTPHandler', 'FileHandler', 'FTPHandler', 'CacheFTPHandler', 'DataHandler',
'UnknownHandler', 'HTTPErrorProcessor',
# Functions
'urlopen', 'install_opener', 'build_opener',
@@ -231,7 +232,7 @@ def urlcleanup():
for temp_file in _url_tempfiles:
try:
os.unlink(temp_file)
- except EnvironmentError:
+ except OSError:
pass
del _url_tempfiles[:]
@@ -265,12 +266,13 @@ class Request:
# unwrap('<URL:type://host/path>') --> 'type://host/path'
self.full_url = unwrap(url)
self.full_url, self.fragment = splittag(self.full_url)
- self.data = data
self.headers = {}
+ self.unredirected_hdrs = {}
+ self._data = None
+ self.data = data
self._tunnel_host = None
for key, value in headers.items():
self.add_header(key, value)
- self.unredirected_hdrs = {}
if origin_req_host is None:
origin_req_host = request_host(self)
self.origin_req_host = origin_req_host
@@ -278,6 +280,24 @@ class Request:
self.method = method
self._parse()
+ @property
+ def data(self):
+ return self._data
+
+ @data.setter
+ def data(self, data):
+ if data != self._data:
+ self._data = data
+ # issue 16464
+ # if we change data we need to remove the content-length header
+ # (because it was most probably calculated for the previous value)
+ if self.has_header("Content-length"):
+ self.remove_header("Content-length")
+
+ @data.deleter
+ def data(self):
+ self.data = None
+
def _parse(self):
self.type, rest = splittype(self.full_url)
if self.type is None:
@@ -301,50 +321,6 @@ class Request:
else:
return self.full_url
- # Begin deprecated methods
-
- def add_data(self, data):
- msg = "Request.add_data method is deprecated."
- warnings.warn(msg, DeprecationWarning, stacklevel=1)
- self.data = data
-
- def has_data(self):
- msg = "Request.has_data method is deprecated."
- warnings.warn(msg, DeprecationWarning, stacklevel=1)
- return self.data is not None
-
- def get_data(self):
- msg = "Request.get_data method is deprecated."
- warnings.warn(msg, DeprecationWarning, stacklevel=1)
- return self.data
-
- def get_type(self):
- msg = "Request.get_type method is deprecated."
- warnings.warn(msg, DeprecationWarning, stacklevel=1)
- return self.type
-
- def get_host(self):
- msg = "Request.get_host method is deprecated."
- warnings.warn(msg, DeprecationWarning, stacklevel=1)
- return self.host
-
- def get_selector(self):
- msg = "Request.get_selector method is deprecated."
- warnings.warn(msg, DeprecationWarning, stacklevel=1)
- return self.selector
-
- def is_unverifiable(self):
- msg = "Request.is_unverifiable method is deprecated."
- warnings.warn(msg, DeprecationWarning, stacklevel=1)
- return self.unverifiable
-
- def get_origin_req_host(self):
- msg = "Request.get_origin_req_host method is deprecated."
- warnings.warn(msg, DeprecationWarning, stacklevel=1)
- return self.origin_req_host
-
- # End deprecated methods
-
def set_proxy(self, host, type):
if self.type == 'https' and not self._tunnel_host:
self._tunnel_host = self.host
@@ -373,6 +349,10 @@ class Request:
header_name,
self.unredirected_hdrs.get(header_name, default))
+ def remove_header(self, header_name):
+ self.headers.pop(header_name, None)
+ self.unredirected_hdrs.pop(header_name, None)
+
def header_items(self):
hdrs = self.unredirected_hdrs.copy()
hdrs.update(self.headers)
@@ -535,7 +515,8 @@ def build_opener(*handlers):
opener = OpenerDirector()
default_classes = [ProxyHandler, UnknownHandler, HTTPHandler,
HTTPDefaultErrorHandler, HTTPRedirectHandler,
- FTPHandler, FileHandler, HTTPErrorProcessor]
+ FTPHandler, FileHandler, HTTPErrorProcessor,
+ DataHandler]
if hasattr(http.client, "HTTPSConnection"):
default_classes.append(HTTPSHandler)
skip = set()
@@ -1250,11 +1231,17 @@ class AbstractHTTPHandler(BaseHandler):
try:
h.request(req.get_method(), req.selector, req.data, headers)
- except socket.error as err: # timeout error
+ except OSError as err: # timeout error
h.close()
raise URLError(err)
else:
r = h.getresponse()
+ # If the server does not send us a 'Connection: close' header,
+ # HTTPConnection assumes the socket should be left open. Manually
+ # mark the socket to be closed when this response object goes away.
+ if h.sock:
+ h.sock.close()
+ h.sock = None
r.url = req.get_full_url()
# This line replaces the .msg attribute of the HTTPResponse
@@ -1449,7 +1436,7 @@ class FTPHandler(BaseHandler):
try:
host = socket.gethostbyname(host)
- except socket.error as msg:
+ except OSError as msg:
raise URLError(msg)
path, attrs = splitattr(req.selector)
dirs = path.split('/')
@@ -1535,6 +1522,36 @@ class CacheFTPHandler(FTPHandler):
self.cache.clear()
self.timeout.clear()
+class DataHandler(BaseHandler):
+ def data_open(self, req):
+ # data URLs as specified in RFC 2397.
+ #
+ # ignores POSTed data
+ #
+ # syntax:
+ # dataurl := "data:" [ mediatype ] [ ";base64" ] "," data
+ # mediatype := [ type "/" subtype ] *( ";" parameter )
+ # data := *urlchar
+ # parameter := attribute "=" value
+ url = req.full_url
+
+ scheme, data = url.split(":",1)
+ mediatype, data = data.split(",",1)
+
+ # even base64 encoded data URLs might be quoted so unquote in any case:
+ data = unquote_to_bytes(data)
+ if mediatype.endswith(";base64"):
+ data = base64.decodebytes(data)
+ mediatype = mediatype[:-7]
+
+ if not mediatype:
+ mediatype = "text/plain;charset=US-ASCII"
+
+ headers = email.message_from_string("Content-type: %s\nContent-length: %d\n" %
+ (mediatype, len(data)))
+
+ return addinfourl(io.BytesIO(data), headers, url)
+
# Code move from the old urllib module
@@ -1658,20 +1675,20 @@ class URLopener:
return getattr(self, name)(url)
else:
return getattr(self, name)(url, data)
- except HTTPError:
+ except (HTTPError, URLError):
raise
- except socket.error as msg:
- raise IOError('socket error', msg).with_traceback(sys.exc_info()[2])
+ except OSError as msg:
+ raise OSError('socket error', msg).with_traceback(sys.exc_info()[2])
def open_unknown(self, fullurl, data=None):
"""Overridable interface to open unknown URL type."""
type, url = splittype(fullurl)
- raise IOError('url error', 'unknown url type', type)
+ raise OSError('url error', 'unknown url type', type)
def open_unknown_proxy(self, proxy, fullurl, data=None):
"""Overridable interface to open unknown URL type."""
type, url = splittype(fullurl)
- raise IOError('url error', 'invalid proxy for %s' % type, proxy)
+ raise OSError('url error', 'invalid proxy for %s' % type, proxy)
# External interface
def retrieve(self, url, filename=None, reporthook=None, data=None):
@@ -1687,7 +1704,7 @@ class URLopener:
hdrs = fp.info()
fp.close()
return url2pathname(splithost(url1)[1]), hdrs
- except IOError as msg:
+ except OSError as msg:
pass
fp = self.open(url, data)
try:
@@ -1780,7 +1797,7 @@ class URLopener:
if proxy_bypass(realhost):
host = realhost
- if not host: raise IOError('http error', 'no host given')
+ if not host: raise OSError('http error', 'no host given')
if proxy_passwd:
proxy_passwd = unquote(proxy_passwd)
@@ -1853,7 +1870,7 @@ class URLopener:
return self.http_error_default(url, fp, errcode, errmsg, headers)
def http_error_default(self, url, fp, errcode, errmsg, headers):
- """Default error handler: close the connection and raise IOError."""
+ """Default error handler: close the connection and raise OSError."""
fp.close()
raise HTTPError(url, errcode, errmsg, headers, None)
@@ -1980,7 +1997,7 @@ class URLopener:
try:
[type, data] = url.split(',', 1)
except ValueError:
- raise IOError('data error', 'bad data URL')
+ raise OSError('data error', 'bad data URL')
if not type:
type = 'text/plain;charset=US-ASCII'
semi = type.rfind(';')
@@ -2426,7 +2443,7 @@ def _proxy_bypass_macosx_sysconf(host, proxy_settings):
try:
hostIP = socket.gethostbyname(hostonly)
hostIP = ip2num(hostIP)
- except socket.error:
+ except OSError:
continue
base = ip2num(m.group(1))
@@ -2512,7 +2529,7 @@ elif os.name == 'nt':
proxies['https'] = 'https://%s' % proxyServer
proxies['ftp'] = 'ftp://%s' % proxyServer
internetSettings.Close()
- except (WindowsError, ValueError, TypeError):
+ except (OSError, ValueError, TypeError):
# Either registry key not found etc, or the value in an
# unexpected format.
# proxies already set up to be empty so nothing to do
@@ -2542,7 +2559,7 @@ elif os.name == 'nt':
proxyOverride = str(winreg.QueryValueEx(internetSettings,
'ProxyOverride')[0])
# ^^^^ Returned as Unicode but problems if not converted to ASCII
- except WindowsError:
+ except OSError:
return 0
if not proxyEnable or not proxyOverride:
return 0
@@ -2553,13 +2570,13 @@ elif os.name == 'nt':
addr = socket.gethostbyname(rawHost)
if addr != rawHost:
host.append(addr)
- except socket.error:
+ except OSError:
pass
try:
fqdn = socket.getfqdn(rawHost)
if fqdn != rawHost:
host.append(fqdn)
- except socket.error:
+ except OSError:
pass
# make a check value list from the registry entry: replace the
# '<local>' string by the localhost entry and the corresponding
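Among the request.py changes, DataHandler is now part of the default opener chain, so RFC 2397 data: URLs resolve without any network access. A small demonstration of the handler's decoding rules:

    import base64
    from urllib.request import urlopen

    # Percent-encoded payload, default media type text/plain;charset=US-ASCII.
    assert urlopen('data:,Hello%20World').read() == b'Hello World'

    # base64 payload with an explicit media type.
    payload = base64.b64encode(b'spam and eggs').decode('ascii')
    resp = urlopen('data:text/plain;base64,' + payload)
    assert resp.read() == b'spam and eggs'
    assert resp.info()['Content-type'] == 'text/plain'

The same file also turns Request.data into a property that discards a stale Content-length header whenever the request body changes.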
diff --git a/Lib/uuid.py b/Lib/uuid.py
index 0df0743..5d091b9 100644
--- a/Lib/uuid.py
+++ b/Lib/uuid.py
@@ -329,7 +329,7 @@ def _find_mac(command, args, hw_identifiers, get_index):
if words[i] in hw_identifiers:
return int(
words[get_index(i)].replace(':', ''), 16)
- except IOError:
+ except OSError:
continue
return None
@@ -371,7 +371,7 @@ def _ipconfig_getnode():
for dir in dirs:
try:
pipe = os.popen(os.path.join(dir, 'ipconfig') + ' /all')
- except IOError:
+ except OSError:
continue
else:
for line in pipe:
diff --git a/Lib/venv/__init__.py b/Lib/venv/__init__.py
index 3920747..ecdb68e 100644
--- a/Lib/venv/__init__.py
+++ b/Lib/venv/__init__.py
@@ -92,6 +92,14 @@ class EnvBuilder:
self.setup_scripts(context)
self.post_setup(context)
+ def clear_directory(self, path):
+ for fn in os.listdir(path):
+ fn = os.path.join(path, fn)
+ if os.path.islink(fn) or os.path.isfile(fn):
+ os.remove(fn)
+ elif os.path.isdir(fn):
+ shutil.rmtree(fn)
+
def ensure_directories(self, env_dir):
"""
Create the directories for the environment.
@@ -103,11 +111,11 @@ class EnvBuilder:
def create_if_needed(d):
if not os.path.exists(d):
os.makedirs(d)
+ elif os.path.islink(d) or os.path.isfile(d):
+ raise ValueError('Unable to create directory %r' % d)
- if os.path.exists(env_dir) and not (self.clear or self.upgrade):
- raise ValueError('Directory exists: %s' % env_dir)
if os.path.exists(env_dir) and self.clear:
- shutil.rmtree(env_dir)
+ self.clear_directory(env_dir)
context = Context()
context.env_dir = env_dir
context.env_name = os.path.split(env_dir)[1]
@@ -378,11 +386,10 @@ def main(args=None):
'when symlinks are not the default for '
'the platform.')
parser.add_argument('--clear', default=False, action='store_true',
- dest='clear', help='Delete the environment '
- 'directory if it already '
- 'exists. If not specified and '
- 'the directory exists, an error'
- ' is raised.')
+ dest='clear', help='Delete the contents of the '
+ 'environment directory if it '
+ 'already exists, before '
+ 'environment creation.')
parser.add_argument('--upgrade', default=False, action='store_true',
dest='upgrade', help='Upgrade the environment '
'directory to use this version '
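With this venv change, clear=True (or --clear) empties an existing environment directory in place instead of refusing to run, and only a path that is actually a file or symlink is rejected. A sketch of programmatic use (the target path is illustrative):

    import venv

    builder = venv.EnvBuilder(clear=True)
    # Re-running over an existing, non-empty directory now clears its
    # contents rather than raising 'Directory exists'.
    builder.create('/tmp/demo-env')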
diff --git a/Lib/venv/scripts/posix/activate.csh b/Lib/venv/scripts/posix/activate.csh
new file mode 100644
index 0000000..99d79e0
--- /dev/null
+++ b/Lib/venv/scripts/posix/activate.csh
@@ -0,0 +1,37 @@
+# This file must be used with "source bin/activate.csh" *from csh*.
+# You cannot run it directly.
+# Created by Davide Di Blasi <davidedb@gmail.com>.
+# Ported to Python 3.3 venv by Andrew Svetlov <andrew.svetlov@gmail.com>
+
+alias deactivate 'test $?_OLD_VIRTUAL_PATH != 0 && setenv PATH "$_OLD_VIRTUAL_PATH" && unset _OLD_VIRTUAL_PATH; rehash; test $?_OLD_VIRTUAL_PROMPT != 0 && set prompt="$_OLD_VIRTUAL_PROMPT" && unset _OLD_VIRTUAL_PROMPT; unsetenv VIRTUAL_ENV; test "\!:*" != "nondestructive" && unalias deactivate'
+
+# Unset irrelevant variables.
+deactivate nondestructive
+
+setenv VIRTUAL_ENV "__VENV_DIR__"
+
+set _OLD_VIRTUAL_PATH="$PATH"
+setenv PATH "$VIRTUAL_ENV/__VENV_BIN_NAME__:$PATH"
+
+
+set _OLD_VIRTUAL_PROMPT="$prompt"
+
+if (! "$?VIRTUAL_ENV_DISABLE_PROMPT") then
+ if ("__VENV_NAME__" != "") then
+ set env_name = "__VENV_NAME__"
+ else
+ if (`basename "$VIRTUAL_ENV"` == "__") then
+ # special case for Aspen magic directories
+ # see http://www.zetadev.com/software/aspen/
+ set env_name = `basename \`dirname "$VIRTUAL_ENV"\``
+ else
+ set env_name = `basename "$VIRTUAL_ENV"`
+ endif
+ endif
+ set prompt = "[$env_name] $prompt"
+ unset env_name
+endif
+
+alias pydoc python -m pydoc
+
+rehash
diff --git a/Lib/venv/scripts/posix/activate.fish b/Lib/venv/scripts/posix/activate.fish
new file mode 100644
index 0000000..5ac1638
--- /dev/null
+++ b/Lib/venv/scripts/posix/activate.fish
@@ -0,0 +1,74 @@
+# This file must be used with ". bin/activate.fish" *from fish* (http://fishshell.org)
+# you cannot run it directly
+
+function deactivate -d "Exit virtualenv and return to normal shell environment"
+ # reset old environment variables
+ if test -n "$_OLD_VIRTUAL_PATH"
+ set -gx PATH $_OLD_VIRTUAL_PATH
+ set -e _OLD_VIRTUAL_PATH
+ end
+ if test -n "$_OLD_VIRTUAL_PYTHONHOME"
+ set -gx PYTHONHOME $_OLD_VIRTUAL_PYTHONHOME
+ set -e _OLD_VIRTUAL_PYTHONHOME
+ end
+
+ if test -n "$_OLD_FISH_PROMPT_OVERRIDE"
+ functions -e fish_prompt
+ set -e _OLD_FISH_PROMPT_OVERRIDE
+ . ( begin
+ printf "function fish_prompt\n\t#"
+ functions _old_fish_prompt
+ end | psub )
+ functions -e _old_fish_prompt
+ end
+
+ set -e VIRTUAL_ENV
+ if test "$argv[1]" != "nondestructive"
+ # Self destruct!
+ functions -e deactivate
+ end
+end
+
+# unset irrelevant variables
+deactivate nondestructive
+
+set -gx VIRTUAL_ENV "__VENV_DIR__"
+
+set -gx _OLD_VIRTUAL_PATH $PATH
+set -gx PATH "$VIRTUAL_ENV/__VENV_BIN_NAME__" $PATH
+
+# unset PYTHONHOME if set
+if set -q PYTHONHOME
+ set -gx _OLD_VIRTUAL_PYTHONHOME $PYTHONHOME
+ set -e PYTHONHOME
+end
+
+if test -z "$VIRTUAL_ENV_DISABLE_PROMPT"
+ # fish uses a function instead of an env var to generate the prompt.
+
+ # save the current fish_prompt function as the function _old_fish_prompt
+ . ( begin
+ printf "function _old_fish_prompt\n\t#"
+ functions fish_prompt
+ end | psub )
+
+ # with the original prompt function renamed, we can override with our own.
+ function fish_prompt
+ # Prompt override?
+ if test -n "__VENV_NAME__"
+ printf "%s%s%s" "__VENV_NAME__" (set_color normal) (_old_fish_prompt)
+ return
+ end
+ # ...Otherwise, prepend env
+ set -l _checkbase (basename "$VIRTUAL_ENV")
+ if test $_checkbase = "__"
+ # special case for Aspen magic directories
+ # see http://www.zetadev.com/software/aspen/
+ printf "%s[%s]%s %s" (set_color -b blue white) (basename (dirname "$VIRTUAL_ENV")) (set_color normal) (_old_fish_prompt)
+ else
+ printf "%s(%s)%s%s" (set_color -b blue white) (basename "$VIRTUAL_ENV") (set_color normal) (_old_fish_prompt)
+ end
+ end
+
+ set -gx _OLD_FISH_PROMPT_OVERRIDE "$VIRTUAL_ENV"
+end
diff --git a/Lib/warnings.py b/Lib/warnings.py
index edbbb5e..b05a08e 100644
--- a/Lib/warnings.py
+++ b/Lib/warnings.py
@@ -16,7 +16,7 @@ def showwarning(message, category, filename, lineno, file=None, line=None):
file = sys.stderr
try:
file.write(formatwarning(message, category, filename, lineno, line))
- except IOError:
+ except OSError:
pass # the file (probably stderr) is invalid - this warning gets lost.
def formatwarning(message, category, filename, lineno, line=None):
diff --git a/Lib/weakref.py b/Lib/weakref.py
index fcb6b74..8f9c107 100644
--- a/Lib/weakref.py
+++ b/Lib/weakref.py
@@ -27,7 +27,61 @@ ProxyTypes = (ProxyType, CallableProxyType)
__all__ = ["ref", "proxy", "getweakrefcount", "getweakrefs",
"WeakKeyDictionary", "ReferenceType", "ProxyType",
"CallableProxyType", "ProxyTypes", "WeakValueDictionary",
- "WeakSet"]
+ "WeakSet", "WeakMethod"]
+
+
+class WeakMethod(ref):
+ """
+ A custom `weakref.ref` subclass which simulates a weak reference to
+ a bound method, working around the lifetime problem of bound methods.
+ """
+
+ __slots__ = "_func_ref", "_meth_type", "_alive", "__weakref__"
+
+ def __new__(cls, meth, callback=None):
+ try:
+ obj = meth.__self__
+ func = meth.__func__
+ except AttributeError:
+ raise TypeError("argument should be a bound method, not {}"
+ .format(type(meth))) from None
+ def _cb(arg):
+ # The self-weakref trick is needed to avoid creating a reference
+ # cycle.
+ self = self_wr()
+ if self._alive:
+ self._alive = False
+ if callback is not None:
+ callback(self)
+ self = ref.__new__(cls, obj, _cb)
+ self._func_ref = ref(func, _cb)
+ self._meth_type = type(meth)
+ self._alive = True
+ self_wr = ref(self)
+ return self
+
+ def __call__(self):
+ obj = super().__call__()
+ func = self._func_ref()
+ if obj is None or func is None:
+ return None
+ return self._meth_type(func, obj)
+
+ def __eq__(self, other):
+ if isinstance(other, WeakMethod):
+ if not self._alive or not other._alive:
+ return self is other
+ return ref.__eq__(self, other) and self._func_ref == other._func_ref
+ return False
+
+ def __ne__(self, other):
+ if isinstance(other, WeakMethod):
+ if not self._alive or not other._alive:
+ return self is not other
+ return ref.__ne__(self, other) or self._func_ref != other._func_ref
+ return True
+
+ __hash__ = ref.__hash__
class WeakValueDictionary(collections.MutableMapping):
@@ -153,8 +207,7 @@ class WeakValueDictionary(collections.MutableMapping):
"""
with _IterationGuard(self):
- for wr in self.data.values():
- yield wr
+ yield from self.data.values()
def values(self):
with _IterationGuard(self):
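WeakMethod, added above, rebuilds the bound method on demand and goes dead once either the instance or the underlying function is collected. A minimal example (relies on CPython's reference counting collecting the instance as soon as it is deleted):

    import weakref

    class Counter:
        def __init__(self):
            self.value = 0
        def bump(self):
            self.value += 1

    c = Counter()
    wm = weakref.WeakMethod(c.bump)

    wm()()                     # calling the result invokes the bound method
    assert c.value == 1

    del c                      # once the instance dies, the reference is dead
    assert wm() is None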
diff --git a/Lib/webbrowser.py b/Lib/webbrowser.py
index 94d4ad4..11ecce0 100644
--- a/Lib/webbrowser.py
+++ b/Lib/webbrowser.py
@@ -5,6 +5,7 @@
import io
import os
import shlex
+import shutil
import sys
import stat
import subprocess
@@ -83,7 +84,7 @@ def _synthesize(browser, update_tryorder=1):
"""
cmd = browser.split()[0]
- if not _iscommand(cmd):
+ if not shutil.which(cmd):
return [None, None]
name = os.path.basename(cmd)
try:
@@ -102,38 +103,6 @@ def _synthesize(browser, update_tryorder=1):
return [None, None]
-if sys.platform[:3] == "win":
- def _isexecutable(cmd):
- cmd = cmd.lower()
- if os.path.isfile(cmd) and cmd.endswith((".exe", ".bat")):
- return True
- for ext in ".exe", ".bat":
- if os.path.isfile(cmd + ext):
- return True
- return False
-else:
- def _isexecutable(cmd):
- if os.path.isfile(cmd):
- mode = os.stat(cmd)[stat.ST_MODE]
- if mode & stat.S_IXUSR or mode & stat.S_IXGRP or mode & stat.S_IXOTH:
- return True
- return False
-
-def _iscommand(cmd):
- """Return True if cmd is executable or can be found on the executable
- search path."""
- if _isexecutable(cmd):
- return True
- path = os.environ.get("PATH")
- if not path:
- return False
- for d in path.split(os.pathsep):
- exe = os.path.join(d, cmd)
- if _isexecutable(exe):
- return True
- return False
-
-
# General parent classes
class BaseBrowser(object):
@@ -418,11 +387,11 @@ class Grail(BaseBrowser):
# need to PING each one until we find one that's live
try:
s.connect(fn)
- except socket.error:
+ except OSError:
# no good; attempt to clean it out, but don't fail:
try:
os.unlink(fn)
- except IOError:
+ except OSError:
pass
else:
return s
@@ -453,58 +422,58 @@ class Grail(BaseBrowser):
def register_X_browsers():
# use xdg-open if around
- if _iscommand("xdg-open"):
+ if shutil.which("xdg-open"):
register("xdg-open", None, BackgroundBrowser("xdg-open"))
# The default GNOME3 browser
- if "GNOME_DESKTOP_SESSION_ID" in os.environ and _iscommand("gvfs-open"):
+ if "GNOME_DESKTOP_SESSION_ID" in os.environ and shutil.which("gvfs-open"):
register("gvfs-open", None, BackgroundBrowser("gvfs-open"))
# The default GNOME browser
- if "GNOME_DESKTOP_SESSION_ID" in os.environ and _iscommand("gnome-open"):
+ if "GNOME_DESKTOP_SESSION_ID" in os.environ and shutil.which("gnome-open"):
register("gnome-open", None, BackgroundBrowser("gnome-open"))
# The default KDE browser
- if "KDE_FULL_SESSION" in os.environ and _iscommand("kfmclient"):
+ if "KDE_FULL_SESSION" in os.environ and shutil.which("kfmclient"):
register("kfmclient", Konqueror, Konqueror("kfmclient"))
# The Mozilla/Netscape browsers
for browser in ("mozilla-firefox", "firefox",
"mozilla-firebird", "firebird",
"seamonkey", "mozilla", "netscape"):
- if _iscommand(browser):
+ if shutil.which(browser):
register(browser, None, Mozilla(browser))
# Konqueror/kfm, the KDE browser.
- if _iscommand("kfm"):
+ if shutil.which("kfm"):
register("kfm", Konqueror, Konqueror("kfm"))
- elif _iscommand("konqueror"):
+ elif shutil.which("konqueror"):
register("konqueror", Konqueror, Konqueror("konqueror"))
# Gnome's Galeon and Epiphany
for browser in ("galeon", "epiphany"):
- if _iscommand(browser):
+ if shutil.which(browser):
register(browser, None, Galeon(browser))
# Skipstone, another Gtk/Mozilla based browser
- if _iscommand("skipstone"):
+ if shutil.which("skipstone"):
register("skipstone", None, BackgroundBrowser("skipstone"))
# Google Chrome/Chromium browsers
for browser in ("google-chrome", "chrome", "chromium", "chromium-browser"):
- if _iscommand(browser):
+ if shutil.which(browser):
register(browser, None, Chrome(browser))
# Opera, quite popular
- if _iscommand("opera"):
+ if shutil.which("opera"):
register("opera", None, Opera("opera"))
# Next, Mosaic -- old but still in use.
- if _iscommand("mosaic"):
+ if shutil.which("mosaic"):
register("mosaic", None, BackgroundBrowser("mosaic"))
# Grail, the Python browser. Does anybody still use it?
- if _iscommand("grail"):
+ if shutil.which("grail"):
register("grail", Grail, None)
# Prefer X browsers if present
@@ -514,15 +483,15 @@ if os.environ.get("DISPLAY"):
# Also try console browsers
if os.environ.get("TERM"):
# The Links/elinks browsers <http://artax.karlin.mff.cuni.cz/~mikulas/links/>
- if _iscommand("links"):
+ if shutil.which("links"):
register("links", None, GenericBrowser("links"))
- if _iscommand("elinks"):
+ if shutil.which("elinks"):
register("elinks", None, Elinks("elinks"))
# The Lynx browser <http://lynx.isc.org/>, <http://lynx.browser.org/>
- if _iscommand("lynx"):
+ if shutil.which("lynx"):
register("lynx", None, GenericBrowser("lynx"))
# The w3m browser <http://w3m.sourceforge.net/>
- if _iscommand("w3m"):
+ if shutil.which("w3m"):
register("w3m", None, GenericBrowser("w3m"))
#
@@ -534,7 +503,7 @@ if sys.platform[:3] == "win":
def open(self, url, new=0, autoraise=True):
try:
os.startfile(url)
- except WindowsError:
+ except OSError:
# [Error 22] No application is associated with the specified
# file for this operation: '<URL>'
return False
@@ -552,7 +521,7 @@ if sys.platform[:3] == "win":
"Internet Explorer\\IEXPLORE.EXE")
for browser in ("firefox", "firebird", "seamonkey", "mozilla",
"netscape", "opera", iexplore):
- if _iscommand(browser):
+ if shutil.which(browser):
register(browser, None, BackgroundBrowser(browser))
#
@@ -638,17 +607,6 @@ if sys.platform == 'darwin':
register("MacOSX", None, MacOSXOSAScript('default'), -1)
-#
-# Platform support for OS/2
-#
-
-if sys.platform[:3] == "os2" and _iscommand("netscape"):
- _tryorder = []
- _browsers = {}
- register("os2netscape", None,
- GenericBrowser(["start", "netscape", "%s"]), -1)
-
-
# OK, now that we know what the default preference orders for each
# platform are, allow user to override them with the BROWSER variable.
if "BROWSER" in os.environ:
diff --git a/Lib/xml/dom/expatbuilder.py b/Lib/xml/dom/expatbuilder.py
index f074ab9..81e2df7 100644
--- a/Lib/xml/dom/expatbuilder.py
+++ b/Lib/xml/dom/expatbuilder.py
@@ -905,11 +905,8 @@ def parse(file, namespaces=True):
builder = ExpatBuilder()
if isinstance(file, str):
- fp = open(file, 'rb')
- try:
+ with open(file, 'rb') as fp:
result = builder.parseFile(fp)
- finally:
- fp.close()
else:
result = builder.parseFile(file)
return result
@@ -939,11 +936,8 @@ def parseFragment(file, context, namespaces=True):
builder = FragmentBuilder(context)
if isinstance(file, str):
- fp = open(file, 'rb')
- try:
+ with open(file, 'rb') as fp:
result = builder.parseFile(fp)
- finally:
- fp.close()
else:
result = builder.parseFile(file)
return result
diff --git a/Lib/xml/etree/ElementInclude.py b/Lib/xml/etree/ElementInclude.py
index 6cc1b44..73e491e 100644
--- a/Lib/xml/etree/ElementInclude.py
+++ b/Lib/xml/etree/ElementInclude.py
@@ -71,8 +71,8 @@ class FatalIncludeError(SyntaxError):
# @return The expanded resource. If the parse mode is "xml", this
# is an ElementTree instance. If the parse mode is "text", this
# is a Unicode string. If the loader fails, it can return None
-# or raise an IOError exception.
-# @throws IOError If the loader fails to load the resource.
+# or raise an OSError exception.
+# @throws OSError If the loader fails to load the resource.
def default_loader(href, parse, encoding=None):
if parse == "xml":
@@ -95,7 +95,7 @@ def default_loader(href, parse, encoding=None):
# that implements the same interface as <b>default_loader</b>.
# @throws FatalIncludeError If the function fails to include a given
# resource, or if the tree contains malformed XInclude elements.
-# @throws IOError If the function fails to load a given resource.
+# @throws OSError If the function fails to load a given resource.
def include(elem, loader=None):
if loader is None:
diff --git a/Lib/xml/etree/ElementPath.py b/Lib/xml/etree/ElementPath.py
index 52e65f0..bf984b9 100644
--- a/Lib/xml/etree/ElementPath.py
+++ b/Lib/xml/etree/ElementPath.py
@@ -105,14 +105,12 @@ def prepare_child(next, token):
def prepare_star(next, token):
def select(context, result):
for elem in result:
- for e in elem:
- yield e
+ yield from elem
return select
def prepare_self(next, token):
def select(context, result):
- for elem in result:
- yield elem
+ yield from result
return select
def prepare_descendant(next, token):
@@ -176,7 +174,7 @@ def prepare_predicate(next, token):
if elem.get(key) == value:
yield elem
return select
- if signature == "-" and not re.match("\d+$", predicate[0]):
+ if signature == "-" and not re.match("\-?\d+$", predicate[0]):
# [tag]
tag = predicate[0]
def select(context, result):
@@ -184,7 +182,7 @@ def prepare_predicate(next, token):
if elem.find(tag) is not None:
yield elem
return select
- if signature == "-='" and not re.match("\d+$", predicate[0]):
+ if signature == "-='" and not re.match("\-?\d+$", predicate[0]):
# [tag='value']
tag = predicate[0]
value = predicate[-1]
@@ -198,7 +196,10 @@ def prepare_predicate(next, token):
if signature == "-" or signature == "-()" or signature == "-()-":
# [index] or [last()] or [last()-index]
if signature == "-":
+ # [index]
index = int(predicate[0]) - 1
+ if index < 0:
+ raise SyntaxError("XPath position >= 1 expected")
else:
if predicate[0] != "last":
raise SyntaxError("unsupported function")
@@ -207,6 +208,8 @@ def prepare_predicate(next, token):
index = int(predicate[2]) - 1
except ValueError:
raise SyntaxError("unsupported expression")
+ if index > -2:
+ raise SyntaxError("XPath offset from last() must be negative")
else:
index = -1
def select(context, result):
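The ElementPath hunk tightens positional predicates: positions are 1-based, and a non-positive index or a non-negative last() offset now raises SyntaxError instead of quietly selecting the wrong element (or nothing). A quick check:

    import xml.etree.ElementTree as ET

    root = ET.fromstring('<r><item>a</item><item>b</item><item>c</item></r>')

    assert root.find('item[1]').text == 'a'
    assert root.find('item[last()]').text == 'c'
    assert root.find('item[last()-1]').text == 'b'

    for bad in ('item[0]', 'item[last()-0]'):
        try:
            root.find(bad)
        except SyntaxError:
            pass                       # both forms are rejected here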
diff --git a/Lib/xml/etree/ElementTree.py b/Lib/xml/etree/ElementTree.py
index 4c73303..67e4d1d 100644
--- a/Lib/xml/etree/ElementTree.py
+++ b/Lib/xml/etree/ElementTree.py
@@ -1,23 +1,40 @@
+"""Lightweight XML support for Python.
+
+ XML is an inherently hierarchical data format, and the most natural way to
+ represent it is with a tree. This module has two classes for this purpose:
+
+ 1. ElementTree represents the whole XML document as a tree and
+
+ 2. Element represents a single node in this tree.
+
+ Interactions with the whole document (reading and writing to/from files) are
+ usually done on the ElementTree level. Interactions with a single XML element
+ and its sub-elements are done on the Element level.
+
+ Element is a flexible container object designed to store hierarchical data
+ structures in memory. It can be described as a cross between a list and a
+ dictionary. Each Element has a number of properties associated with it:
+
+ 'tag' - a string containing the element's name.
+
+ 'attributes' - a Python dictionary storing the element's attributes.
+
+ 'text' - a string containing the element's text content.
+
+ 'tail' - an optional string containing text after the element's end tag.
+
+ And a number of child elements stored in a Python sequence.
+
+ To create an element instance, use the Element constructor,
+ or the SubElement factory function.
+
+ You can also use the ElementTree class to wrap an element structure
+ and convert it to and from XML.
+
+"""
+
#
# ElementTree
-# $Id: ElementTree.py 3440 2008-07-18 14:45:01Z fredrik $
-#
-# light-weight XML support for Python 2.3 and later.
-#
-# history (since 1.2.6):
-# 2005-11-12 fl added tostringlist/fromstringlist helpers
-# 2006-07-05 fl merged in selected changes from the 1.3 sandbox
-# 2006-07-05 fl removed support for 2.1 and earlier
-# 2007-06-21 fl added deprecation/future warnings
-# 2007-08-25 fl added doctype hook, added parser version attribute etc
-# 2007-08-26 fl added new serializer code (better namespace handling, etc)
-# 2007-08-27 fl warn for broken /tag searches on tree level
-# 2007-09-02 fl added html/text methods to serializer (experimental)
-# 2007-09-05 fl added method argument to tostring/tostringlist
-# 2007-09-06 fl improved error handling
-# 2007-09-13 fl added itertext, iterfind; assorted cleanups
-# 2007-12-15 fl added C14N hooks, copy method (experimental)
-#
# Copyright (c) 1999-2008 by Fredrik Lundh. All rights reserved.
#
# fredrik@pythonware.com
@@ -75,28 +92,6 @@ __all__ = [
VERSION = "1.3.0"
-##
-# The <b>Element</b> type is a flexible container object, designed to
-# store hierarchical data structures in memory. The type can be
-# described as a cross between a list and a dictionary.
-# <p>
-# Each element has a number of properties associated with it:
-# <ul>
-# <li>a <i>tag</i>. This is a string identifying what kind of data
-# this element represents (the element type, in other words).</li>
-# <li>a number of <i>attributes</i>, stored in a Python dictionary.</li>
-# <li>a <i>text</i> string.</li>
-# <li>an optional <i>tail</i> string.</li>
-# <li>a number of <i>child elements</i>, stored in a Python sequence</li>
-# </ul>
-#
-# To create an element instance, use the {@link #Element} constructor
-# or the {@link #SubElement} factory function.
-# <p>
-# The {@link #ElementTree} class can be used to wrap an element
-# structure, and convert it from and to XML.
-##
-
import sys
import re
import warnings
@@ -106,81 +101,68 @@ import contextlib
from . import ElementPath
-##
-# Parser error. This is a subclass of <b>SyntaxError</b>.
-# <p>
-# In addition to the exception value, an exception instance contains a
-# specific exception code in the <b>code</b> attribute, and the line and
-# column of the error in the <b>position</b> attribute.
-
class ParseError(SyntaxError):
+ """An error when parsing an XML document.
+
+ In addition to its exception value, a ParseError contains
+ two extra attributes:
+ 'code' - the specific exception code
+ 'position' - the line and column of the error
+
+ """
pass
# --------------------------------------------------------------------
-##
-# Checks if an object appears to be a valid element object.
-#
-# @param An element instance.
-# @return A true value if this is an element object.
-# @defreturn flag
def iselement(element):
- # FIXME: not sure about this;
- # isinstance(element, Element) or look for tag/attrib/text attributes
+ """Return True if *element* appears to be an Element."""
return hasattr(element, 'tag')
-##
-# Element class. This class defines the Element interface, and
-# provides a reference implementation of this interface.
-# <p>
-# The element name, attribute names, and attribute values can be
-# either ASCII strings (ordinary Python strings containing only 7-bit
-# ASCII characters) or Unicode strings.
-#
-# @param tag The element name.
-# @param attrib An optional dictionary, containing element attributes.
-# @param **extra Additional attributes, given as keyword arguments.
-# @see Element
-# @see SubElement
-# @see Comment
-# @see ProcessingInstruction
class Element:
- # <tag attrib>text<child/>...</tag>tail
+ """An XML element.
- ##
- # (Attribute) Element tag.
+ This class is the reference implementation of the Element interface.
- tag = None
+ An element's length is its number of subelements. That means if you
+ want to check if an element is truly empty, you should check BOTH
+ its length AND its text attribute.
- ##
- # (Attribute) Element attribute dictionary. Where possible, use
- # {@link #Element.get},
- # {@link #Element.set},
- # {@link #Element.keys}, and
- # {@link #Element.items} to access
- # element attributes.
+ The element tag, attribute names, and attribute values can be either
+ bytes or strings.
- attrib = None
+ *tag* is the element name. *attrib* is an optional dictionary containing
+ element attributes. *extra* are additional element attributes given as
+ keyword arguments.
+
+ Example form:
+ <tag attrib>text<child/>...</tag>tail
+
+ """
+
+ tag = None
+ """The element's name."""
- ##
- # (Attribute) Text before first subelement. This is either a
- # string or the value None. Note that if there was no text, this
- # attribute may be either None or an empty string, depending on
- # the parser.
+ attrib = None
+ """Dictionary of the element's attributes."""
text = None
+ """
+ Text before first subelement. This is either a string or the value None.
+ Note that if there is no text, this attribute may be either
+ None or the empty string, depending on the parser.
- ##
- # (Attribute) Text after this element's end tag, but before the
- # next sibling element's start tag. This is either a string or
- # the value None. Note that if there was no text, this attribute
- # may be either None or an empty string, depending on the parser.
+ """
- tail = None # text after end tag, if any
+ tail = None
+ """
+ Text after this element's end tag, but before the next sibling element's
+ start tag. This is either a string or the value None. Note that if there
+ was no text, this attribute may be either None or an empty string,
+ depending on the parser.
- # constructor
+ """
def __init__(self, tag, attrib={}, **extra):
if not isinstance(attrib, dict):
@@ -195,36 +177,30 @@ class Element:
def __repr__(self):
return "<Element %s at 0x%x>" % (repr(self.tag), id(self))
- ##
- # Creates a new element object of the same type as this element.
- #
- # @param tag Element tag.
- # @param attrib Element attributes, given as a dictionary.
- # @return A new element instance.
-
def makeelement(self, tag, attrib):
- return self.__class__(tag, attrib)
+ """Create a new element with the same type.
+
+ *tag* is a string containing the element name.
+ *attrib* is a dictionary containing the element attributes.
- ##
- # (Experimental) Copies the current element. This creates a
- # shallow copy; subelements will be shared with the original tree.
- #
- # @return A new element instance.
+ Do not call this method, use the SubElement factory function instead.
+
+ """
+ return self.__class__(tag, attrib)
def copy(self):
+ """Return copy of current element.
+
+ This creates a shallow copy. Subelements will be shared with the
+ original tree.
+
+ """
elem = self.makeelement(self.tag, self.attrib)
elem.text = self.text
elem.tail = self.tail
elem[:] = self
return elem
- ##
- # Returns the number of subelements. Note that this only counts
- # full elements; to check if there's any content in an element, you
- # have to check both the length and the <b>text</b> attribute.
- #
- # @return The number of subelements.
-
def __len__(self):
return len(self._children)
@@ -236,23 +212,9 @@ class Element:
)
return len(self._children) != 0 # emulate old behaviour, for now
- ##
- # Returns the given subelement, by index.
- #
- # @param index What subelement to return.
- # @return The given subelement.
- # @exception IndexError If the given element does not exist.
-
def __getitem__(self, index):
return self._children[index]
- ##
- # Replaces the given subelement, by index.
- #
- # @param index What subelement to replace.
- # @param element The new element value.
- # @exception IndexError If the given element does not exist.
-
def __setitem__(self, index, element):
# if isinstance(index, slice):
# for elt in element:
@@ -261,46 +223,35 @@ class Element:
# assert iselement(element)
self._children[index] = element
- ##
- # Deletes the given subelement, by index.
- #
- # @param index What subelement to delete.
- # @exception IndexError If the given element does not exist.
-
def __delitem__(self, index):
del self._children[index]
- ##
- # Adds a subelement to the end of this element. In document order,
- # the new element will appear after the last existing subelement (or
- # directly after the text, if it's the first subelement), but before
- # the end tag for this element.
- #
- # @param element The element to add.
+ def append(self, subelement):
+ """Add *subelement* to the end of this element.
- def append(self, element):
- self._assert_is_element(element)
- self._children.append(element)
+ The new element will appear in document order after the last existing
+ subelement (or directly after the text, if it's the first subelement),
+ but before the end tag for this element.
- ##
- # Appends subelements from a sequence.
- #
- # @param elements A sequence object with zero or more elements.
- # @since 1.3
+ """
+ self._assert_is_element(subelement)
+ self._children.append(subelement)
def extend(self, elements):
+ """Append subelements from a sequence.
+
+ *elements* is a sequence with zero or more elements.
+
+ """
for element in elements:
self._assert_is_element(element)
self._children.extend(elements)
- ##
- # Inserts a subelement at the given position in this element.
- #
- # @param index Where to insert the new subelement.
- def insert(self, index, element):
- self._assert_is_element(element)
- self._children.insert(index, element)
+ def insert(self, index, subelement):
+ """Insert *subelement* at position *index*."""
+ self._assert_is_element(subelement)
+ self._children.insert(index, subelement)
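
As a quick illustration of the append/extend/insert semantics documented above, a minimal sketch (tag names are invented):

    from xml.etree.ElementTree import Element

    root = Element("root")
    root.append(Element("a"))                  # appended after existing children
    root.extend([Element("b"), Element("c")])  # append a whole sequence
    root.insert(0, Element("first"))           # insert at a given position
    print([child.tag for child in root])       # ['first', 'a', 'b', 'c']
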
def _assert_is_element(self, e):
# Need to refer to the actual Python implementation, not the
@@ -308,29 +259,28 @@ class Element:
if not isinstance(e, _Element):
raise TypeError('expected an Element, not %s' % type(e).__name__)
- ##
- # Removes a matching subelement. Unlike the <b>find</b> methods,
- # this method compares elements based on identity, not on tag
- # value or contents. To remove subelements by other means, the
- # easiest way is often to use a list comprehension to select what
- # elements to keep, and use slice assignment to update the parent
- # element.
- #
- # @param element What element to remove.
- # @exception ValueError If a matching element could not be found.
-
- def remove(self, element):
- # assert iselement(element)
- self._children.remove(element)
- ##
- # (Deprecated) Returns all subelements. The elements are returned
- # in document order.
- #
- # @return A list of subelements.
- # @defreturn list of Element instances
+ def remove(self, subelement):
+ """Remove matching subelement.
+
+ Unlike the find methods, this method compares elements based on
+ identity, not on tag value or contents. To remove subelements by
+ other means, the easiest way is to use a list comprehension to
+ select what elements to keep, and then use slice assignment to update
+ the parent element.
+
+ ValueError is raised if a matching element could not be found.
+
+ """
+ # assert iselement(element)
+ self._children.remove(subelement)
def getchildren(self):
+ """(Deprecated) Return all subelements.
+
+ Elements are returned in document order.
+
+ """
warnings.warn(
"This method will be removed in future versions. "
"Use 'list(elem)' or iteration over elem instead.",
@@ -338,131 +288,138 @@ class Element:
)
return self._children
- ##
- # Finds the first matching subelement, by tag name or path.
- #
- # @param path What element to look for.
- # @keyparam namespaces Optional namespace prefix map.
- # @return The first matching element, or None if no element was found.
- # @defreturn Element or None
def find(self, path, namespaces=None):
+ """Find first matching element by tag name or path.
+
+ *path* is a string having either an element tag or an XPath,
+ *namespaces* is an optional mapping from namespace prefix to full name.
+
+ Return the first matching element, or None if no element was found.
+
+ """
return ElementPath.find(self, path, namespaces)
- ##
- # Finds text for the first matching subelement, by tag name or path.
- #
- # @param path What element to look for.
- # @param default What to return if the element was not found.
- # @keyparam namespaces Optional namespace prefix map.
- # @return The text content of the first matching element, or the
- # default value no element was found. Note that if the element
- # is found, but has no text content, this method returns an
- # empty string.
- # @defreturn string
def findtext(self, path, default=None, namespaces=None):
+ """Find text for first matching element by tag name or path.
+
+ *path* is a string having either an element tag or an XPath,
+ *default* is the value to return if the element was not found,
+ *namespaces* is an optional mapping from namespace prefix to full name.
+
+ Return text content of first matching element, or default value if
+ none was found. Note that if an element is found having no text
+ content, the empty string is returned.
+
+ """
return ElementPath.findtext(self, path, default, namespaces)
- ##
- # Finds all matching subelements, by tag name or path.
- #
- # @param path What element to look for.
- # @keyparam namespaces Optional namespace prefix map.
- # @return A list or other sequence containing all matching elements,
- # in document order.
- # @defreturn list of Element instances
def findall(self, path, namespaces=None):
+ """Find all matching subelements by tag name or path.
+
+ *path* is a string having either an element tag or an XPath,
+ *namespaces* is an optional mapping from namespace prefix to full name.
+
+ Returns list containing all matching elements in document order.
+
+ """
return ElementPath.findall(self, path, namespaces)
- ##
- # Finds all matching subelements, by tag name or path.
- #
- # @param path What element to look for.
- # @keyparam namespaces Optional namespace prefix map.
- # @return An iterator or sequence containing all matching elements,
- # in document order.
- # @defreturn a generated sequence of Element instances
def iterfind(self, path, namespaces=None):
+ """Find all matching subelements by tag name or path.
+
+ *path* is a string having either an element tag or an XPath,
+ *namespaces* is an optional mapping from namespace prefix to full name.
+
+ Return an iterable yielding all matching elements in document order.
+
+ """
return ElementPath.iterfind(self, path, namespaces)
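
A minimal sketch of the find/findtext/findall/iterfind family documented above (the sample XML is invented):

    from xml.etree.ElementTree import XML

    root = XML("<doc><item>one</item><item>two</item></doc>")
    print(root.find("item").text)                   # 'one'  -- first match
    print(root.findtext("missing", "n/a"))          # 'n/a'  -- default when not found
    print(len(root.findall("item")))                # 2      -- list, document order
    print([e.text for e in root.iterfind("item")])  # lazy iterator over matches
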
- ##
- # Resets an element. This function removes all subelements, clears
- # all attributes, and sets the <b>text</b> and <b>tail</b> attributes
- # to None.
def clear(self):
+ """Reset element.
+
+ This function removes all subelements, clears all attributes, and sets
+ the text and tail attributes to None.
+
+ """
self.attrib.clear()
self._children = []
self.text = self.tail = None
- ##
- # Gets an element attribute. Equivalent to <b>attrib.get</b>, but
- # some implementations may handle this a bit more efficiently.
- #
- # @param key What attribute to look for.
- # @param default What to return if the attribute was not found.
- # @return The attribute value, or the default value, if the
- # attribute was not found.
- # @defreturn string or None
def get(self, key, default=None):
+ """Get element attribute.
+
+ Equivalent to attrib.get, but some implementations may handle this a
+ bit more efficiently. *key* is what attribute to look for, and
+ *default* is what to return if the attribute was not found.
+
+ Returns a string containing the attribute value, or the default if
+ attribute was not found.
+
+ """
return self.attrib.get(key, default)
- ##
- # Sets an element attribute. Equivalent to <b>attrib[key] = value</b>,
- # but some implementations may handle this a bit more efficiently.
- #
- # @param key What attribute to set.
- # @param value The attribute value.
def set(self, key, value):
+ """Set element attribute.
+
+ Equivalent to attrib[key] = value, but some implementations may handle
+ this a bit more efficiently. *key* is what attribute to set, and
+ *value* is the attribute value to set it to.
+
+ """
self.attrib[key] = value
- ##
- # Gets a list of attribute names. The names are returned in an
- # arbitrary order (just like for an ordinary Python dictionary).
- # Equivalent to <b>attrib.keys()</b>.
- #
- # @return A list of element attribute names.
- # @defreturn list of strings
def keys(self):
+ """Get list of attribute names.
+
+ Names are returned in an arbitrary order, just like an ordinary
+ Python dict. Equivalent to attrib.keys().
+
+ """
return self.attrib.keys()
- ##
- # Gets element attributes, as a sequence. The attributes are
- # returned in an arbitrary order. Equivalent to <b>attrib.items()</b>.
- #
- # @return A list of (name, value) tuples for all attributes.
- # @defreturn list of (string, string) tuples
def items(self):
+ """Get element attributes as a sequence.
+
+ The attributes are returned in arbitrary order. Equivalent to
+ attrib.items().
+
+ Return a list of (name, value) tuples.
+
+ """
return self.attrib.items()
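
A small sketch of the get/set/keys/items attribute accessors documented above (names and values are invented):

    from xml.etree.ElementTree import Element

    e = Element("node", {"id": "1"})
    e.set("lang", "en")                  # same effect as e.attrib["lang"] = "en"
    print(e.get("id"), e.get("x", "?"))  # '1' '?'
    print(sorted(e.keys()))              # ['id', 'lang']
    print(sorted(e.items()))             # [('id', '1'), ('lang', 'en')]
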
- ##
- # Creates a tree iterator. The iterator loops over this element
- # and all subelements, in document order, and returns all elements
- # with a matching tag.
- # <p>
- # If the tree structure is modified during iteration, new or removed
- # elements may or may not be included. To get a stable set, use the
- # list() function on the iterator, and loop over the resulting list.
- #
- # @param tag What tags to look for (default is to return all elements).
- # @return An iterator containing all the matching elements.
- # @defreturn iterator
def iter(self, tag=None):
+ """Create tree iterator.
+
+ The iterator loops over the element and all subelements in document
+ order, returning all elements with a matching tag.
+
+ If the tree structure is modified during iteration, new or removed
+ elements may or may not be included. To get a stable set, use the
+ list() function on the iterator, and loop over the resulting list.
+
+ *tag* is what tags to look for (default is to return all elements).
+
+ Return an iterator containing all the matching elements.
+
+ """
if tag == "*":
tag = None
if tag is None or self.tag == tag:
yield self
for e in self._children:
- for e in e.iter(tag):
- yield e
+ yield from e.iter(tag)
# compatibility
def getiterator(self, tag=None):
@@ -474,78 +431,71 @@ class Element:
)
return list(self.iter(tag))
- ##
- # Creates a text iterator. The iterator loops over this element
- # and all subelements, in document order, and returns all inner
- # text.
- #
- # @return An iterator containing all inner text.
- # @defreturn iterator
def itertext(self):
+ """Create text iterator.
+
+ The iterator loops over the element and all subelements in document
+ order, returning all inner text.
+
+ """
tag = self.tag
if not isinstance(tag, str) and tag is not None:
return
if self.text:
yield self.text
for e in self:
- for s in e.itertext():
- yield s
+ yield from e.itertext()
if e.tail:
yield e.tail
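
A quick sketch of iter() and itertext() as documented above (the sample XML is invented):

    from xml.etree.ElementTree import XML

    root = XML("<a>x<b>y</b>z<b>w</b></a>")
    print([e.tag for e in root.iter()])     # ['a', 'b', 'b'] -- element and all subelements
    print([e.tag for e in root.iter("b")])  # only elements with a matching tag
    print("".join(root.itertext()))         # 'xyzw' -- all inner text, document order
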
# compatibility
_Element = _ElementInterface = Element
-##
-# Subelement factory. This function creates an element instance, and
-# appends it to an existing element.
-# <p>
-# The element name, attribute names, and attribute values can be
-# either 8-bit ASCII strings or Unicode strings.
-#
-# @param parent The parent element.
-# @param tag The subelement name.
-# @param attrib An optional dictionary, containing element attributes.
-# @param **extra Additional attributes, given as keyword arguments.
-# @return An element instance.
-# @defreturn Element
def SubElement(parent, tag, attrib={}, **extra):
+ """Subelement factory which creates an element instance, and appends it
+ to an existing parent.
+
+ The element tag, attribute names, and attribute values can be either
+ bytes or Unicode strings.
+
+ *parent* is the parent element, *tag* is the subelement name, *attrib* is
+ an optional dictionary containing element attributes, *extra* are
+ additional attributes given as keyword arguments.
+
+ """
attrib = attrib.copy()
attrib.update(extra)
element = parent.makeelement(tag, attrib)
parent.append(element)
return element
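
A minimal sketch of the SubElement factory documented above (tag and attribute names are invented; the exact serialized attribute order may vary):

    from xml.etree.ElementTree import Element, SubElement, tostring

    root = Element("config")
    SubElement(root, "option", {"name": "debug"}, value="1")  # created and appended
    print(tostring(root, encoding="unicode"))
    # e.g. '<config><option name="debug" value="1" /></config>'
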
-##
-# Comment element factory. This factory function creates a special
-# element that will be serialized as an XML comment by the standard
-# serializer.
-# <p>
-# The comment string can be either an 8-bit ASCII string or a Unicode
-# string.
-#
-# @param text A string containing the comment string.
-# @return An element instance, representing a comment.
-# @defreturn Element
def Comment(text=None):
+ """Comment element factory.
+
+ This function creates a special element which the standard serializer
+ serializes as an XML comment.
+
+ *text* is a string containing the comment string.
+
+ """
element = Element(Comment)
element.text = text
return element
-##
-# PI element factory. This factory function creates a special element
-# that will be serialized as an XML processing instruction by the standard
-# serializer.
-#
-# @param target A string containing the PI target.
-# @param text A string containing the PI contents, if any.
-# @return An element instance, representing a PI.
-# @defreturn Element
def ProcessingInstruction(target, text=None):
+ """Processing Instruction element factory.
+
+ This function creates a special element which the standard serializer
+ serializes as an XML processing instruction.
+
+ *target* is a string containing the processing instruction target, *text* is
+ a string containing the processing instruction contents, if any.
+
+ """
element = Element(ProcessingInstruction)
element.text = target
if text:
@@ -554,17 +504,21 @@ def ProcessingInstruction(target, text=None):
PI = ProcessingInstruction
-##
-# QName wrapper. This can be used to wrap a QName attribute value, in
-# order to get proper namespace handling on output.
-#
-# @param text A string containing the QName value, in the form {uri}local,
-# or, if the tag argument is given, the URI part of a QName.
-# @param tag Optional tag. If given, the first argument is interpreted as
-# an URI, and this argument is interpreted as a local name.
-# @return An opaque object, representing the QName.
class QName:
+ """Qualified name wrapper.
+
+ This class can be used to wrap a QName attribute value in order to get
+ proper namespace handling on output.
+
+ *text_or_uri* is a string containing the QName value either in the form
+ {uri}local, or if the tag argument is given, the URI part of a QName.
+
+ *tag* is an optional argument; if given, the first argument (text_or_uri)
+ is interpreted as a URI, and this argument (tag) as a local name.
+
+ """
def __init__(self, text_or_uri, tag=None):
if tag:
text_or_uri = "{%s}%s" % (text_or_uri, tag)
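
A minimal sketch of QName wrapping as documented above (the namespace URI is invented; the prefix chosen on output depends on the namespace registry):

    from xml.etree.ElementTree import Element, QName, tostring

    q = QName("http://example.org/q", "item")  # same as QName("{http://example.org/q}item")
    elem = Element(q)
    print(tostring(elem, encoding="unicode"))
    # e.g. '<ns0:item xmlns:ns0="http://example.org/q" />'
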
@@ -602,55 +556,49 @@ class QName:
# --------------------------------------------------------------------
-##
-# ElementTree wrapper class. This class represents an entire element
-# hierarchy, and adds some extra support for serialization to and from
-# standard XML.
-#
-# @param element Optional root element.
-# @keyparam file Optional file handle or file name. If given, the
-# tree is initialized with the contents of this XML file.
class ElementTree:
+ """An XML element hierarchy.
+
+ This class also provides support for serialization to and from
+ standard XML.
+ *element* is an optional root element node,
+ *file* is an optional file handle or file name of an XML file whose
+ contents will be used to initialize the tree.
+
+ """
def __init__(self, element=None, file=None):
# assert element is None or iselement(element)
self._root = element # first node
if file:
self.parse(file)
- ##
- # Gets the root element for this tree.
- #
- # @return An element instance.
- # @defreturn Element
-
def getroot(self):
+ """Return root element of this tree."""
return self._root
- ##
- # Replaces the root element for this tree. This discards the
- # current contents of the tree, and replaces it with the given
- # element. Use with care.
- #
- # @param element An element instance.
-
def _setroot(self, element):
+ """Replace root element of this tree.
+
+ This will discard the current contents of the tree and replace it
+ with the given element. Use with care!
+
+ """
# assert iselement(element)
self._root = element
- ##
- # Loads an external XML document into this element tree.
- #
- # @param source A file name or file object. If a file object is
- # given, it only has to implement a <b>read(n)</b> method.
- # @keyparam parser An optional parser instance. If not given, the
- # standard {@link XMLParser} parser is used.
- # @return The document root element.
- # @defreturn Element
- # @exception ParseError If the parser fails to parse the document.
-
def parse(self, source, parser=None):
+ """Load external XML document into element tree.
+
+ *source* is a file name or file object, *parser* is an optional parser
+ instance that defaults to XMLParser.
+
+ ParseError is raised if the parser fails to parse the document.
+
+ Returns the root element of the given source document.
+
+ """
close_source = False
if not hasattr(source, "read"):
source = open(source, "rb")
@@ -669,15 +617,15 @@ class ElementTree:
if close_source:
source.close()
- ##
- # Creates a tree iterator for the root element. The iterator loops
- # over all elements in this tree, in document order.
- #
- # @param tag What tags to look for (default is to return all elements)
- # @return An iterator.
- # @defreturn iterator
-
def iter(self, tag=None):
+ """Create and return tree iterator for the root element.
+
+ The iterator loops over all elements in this tree, in document order.
+
+ *tag* is a string with the tag name to iterate over
+ (default is to return all elements).
+
+ """
# assert self._root is not None
return self._root.iter(tag)
@@ -691,15 +639,17 @@ class ElementTree:
)
return list(self.iter(tag))
- ##
- # Same as getroot().find(path), starting at the root of the tree.
- #
- # @param path What element to look for.
- # @keyparam namespaces Optional namespace prefix map.
- # @return The first matching element, or None if no element was found.
- # @defreturn Element or None
-
def find(self, path, namespaces=None):
+ """Find first matching element by tag name or path.
+
+ Same as getroot().find(path), which is Element.find().
+
+ *path* is a string having either an element tag or an XPath,
+ *namespaces* is an optional mapping from namespace prefix to full name.
+
+ Return the first matching element, or None if no element was found.
+
+ """
# assert self._root is not None
if path[:1] == "/":
path = "." + path
@@ -711,19 +661,17 @@ class ElementTree:
)
return self._root.find(path, namespaces)
- ##
- # Same as getroot().findtext(path), starting at the root of the tree.
- #
- # @param path What element to look for.
- # @param default What to return if the element was not found.
- # @keyparam namespaces Optional namespace prefix map.
- # @return The text content of the first matching element, or the
- # default value no element was found. Note that if the element
- # is found, but has no text content, this method returns an
- # empty string.
- # @defreturn string
-
def findtext(self, path, default=None, namespaces=None):
+ """Find first matching element by tag name or path.
+
+ Same as getroot().findtext(path), which is Element.findtext()
+
+ *path* is a string having either an element tag or an XPath,
+ *namespaces* is an optional mapping from namespace prefix to full name.
+
+ Return the first matching element, or None if no element was found.
+
+ """
# assert self._root is not None
if path[:1] == "/":
path = "." + path
@@ -735,16 +683,17 @@ class ElementTree:
)
return self._root.findtext(path, default, namespaces)
- ##
- # Same as getroot().findall(path), starting at the root of the tree.
- #
- # @param path What element to look for.
- # @keyparam namespaces Optional namespace prefix map.
- # @return A list or iterator containing all matching elements,
- # in document order.
- # @defreturn list of Element instances
-
def findall(self, path, namespaces=None):
+ """Find all matching subelements by tag name or path.
+
+ Same as getroot().findall(path), which is Element.findall().
+
+ *path* is a string having either an element tag or an XPath,
+ *namespaces* is an optional mapping from namespace prefix to full name.
+
+ Return list containing all matching elements in document order.
+
+ """
# assert self._root is not None
if path[:1] == "/":
path = "." + path
@@ -756,17 +705,17 @@ class ElementTree:
)
return self._root.findall(path, namespaces)
- ##
- # Finds all matching subelements, by tag name or path.
- # Same as getroot().iterfind(path).
- #
- # @param path What element to look for.
- # @keyparam namespaces Optional namespace prefix map.
- # @return An iterator or sequence containing all matching elements,
- # in document order.
- # @defreturn a generated sequence of Element instances
-
def iterfind(self, path, namespaces=None):
+ """Find all matching subelements by tag name or path.
+
+ Same as getroot().iterfind(path), which is Element.iterfind().
+
+ *path* is a string having either an element tag or an XPath,
+ *namespaces* is an optional mapping from namespace prefix to full name.
+
+ Return an iterable yielding all matching elements in document order.
+
+ """
# assert self._root is not None
if path[:1] == "/":
path = "." + path
@@ -778,26 +727,35 @@ class ElementTree:
)
return self._root.iterfind(path, namespaces)
- ##
- # Writes the element tree to a file, as XML.
- #
- # @def write(file, **options)
- # @param file A file name, or a file object opened for writing.
- # @param **options Options, given as keyword arguments.
- # @keyparam encoding Optional output encoding (default is US-ASCII).
- # Use "unicode" to return a Unicode string.
- # @keyparam xml_declaration Controls if an XML declaration should
- # be added to the file. Use False for never, True for always,
- # None for only if not US-ASCII or UTF-8 or Unicode. None is default.
- # @keyparam default_namespace Sets the default XML namespace (for "xmlns").
- # @keyparam method Optional output method ("xml", "html", "text" or
- # "c14n"; default is "xml").
-
def write(self, file_or_filename,
encoding=None,
xml_declaration=None,
default_namespace=None,
- method=None):
+ method=None, *,
+ short_empty_elements=True):
+ """Write element tree to a file as XML.
+
+ Arguments:
+ *file_or_filename* -- file name or a file object opened for writing
+
+ *encoding* -- the output encoding (default: US-ASCII)
+
+ *xml_declaration* -- bool indicating if an XML declaration should be
+ added to the output. If None, an XML declaration
+ is added if encoding is not one of
+ US-ASCII, UTF-8, or Unicode
+
+ *default_namespace* -- sets the default XML namespace (for "xmlns")
+
+ *method* -- either "xml" (default), "html", "text", or "c14n"
+
+ *short_empty_elements* -- controls the formatting of elements
+ that contain no content. If True (default)
+ they are emitted as a single self-closed
+ tag, otherwise they are emitted as a pair
+ of start/end tags
+
+ """
if not method:
method = "xml"
elif method not in _serialize:
@@ -825,7 +783,8 @@ class ElementTree:
else:
qnames, namespaces = _namespaces(self._root, default_namespace)
serialize = _serialize[method]
- serialize(write, self._root, qnames, namespaces)
+ serialize(write, self._root, qnames, namespaces,
+ short_empty_elements=short_empty_elements)
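
A minimal sketch of the effect of the new short_empty_elements keyword documented above (buffer and tag name are invented):

    import io
    from xml.etree.ElementTree import Element, ElementTree

    tree = ElementTree(Element("empty"))
    buf = io.BytesIO()
    tree.write(buf, xml_declaration=False, short_empty_elements=False)
    print(buf.getvalue())   # b'<empty></empty>' rather than b'<empty />'
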
def write_c14n(self, file):
# lxml.etree compatibility. use output method instead
@@ -947,7 +906,8 @@ def _namespaces(elem, default_namespace=None):
add_qname(text.text)
return qnames, namespaces
-def _serialize_xml(write, elem, qnames, namespaces):
+def _serialize_xml(write, elem, qnames, namespaces,
+ short_empty_elements, **kwargs):
tag = elem.tag
text = elem.text
if tag is Comment:
@@ -960,7 +920,8 @@ def _serialize_xml(write, elem, qnames, namespaces):
if text:
write(_escape_cdata(text))
for e in elem:
- _serialize_xml(write, e, qnames, None)
+ _serialize_xml(write, e, qnames, None,
+ short_empty_elements=short_empty_elements)
else:
write("<" + tag)
items = list(elem.items())
@@ -982,12 +943,13 @@ def _serialize_xml(write, elem, qnames, namespaces):
else:
v = _escape_attrib(v)
write(" %s=\"%s\"" % (qnames[k], v))
- if text or len(elem):
+ if text or len(elem) or not short_empty_elements:
write(">")
if text:
write(_escape_cdata(text))
for e in elem:
- _serialize_xml(write, e, qnames, None)
+ _serialize_xml(write, e, qnames, None,
+ short_empty_elements=short_empty_elements)
write("</" + tag + ">")
else:
write(" />")
@@ -1002,7 +964,7 @@ try:
except NameError:
pass
-def _serialize_html(write, elem, qnames, namespaces):
+def _serialize_html(write, elem, qnames, namespaces, **kwargs):
tag = elem.tag
text = elem.text
if tag is Comment:
@@ -1066,18 +1028,19 @@ _serialize = {
# "c14n": _serialize_c14n,
}
-##
-# Registers a namespace prefix. The registry is global, and any
-# existing mapping for either the given prefix or the namespace URI
-# will be removed.
-#
-# @param prefix Namespace prefix.
-# @param uri Namespace uri. Tags and attributes in this namespace
-# will be serialized with the given prefix, if at all possible.
-# @exception ValueError If the prefix is reserved, or is otherwise
-# invalid.
def register_namespace(prefix, uri):
+ """Register a namespace prefix.
+
+ The registry is global, and any existing mapping for either the
+ given prefix or the namespace URI will be removed.
+
+ *prefix* is the namespace prefix, *uri* is a namespace uri. Tags and
+ attributes in this namespace will be serialized with prefix if possible.
+
+ ValueError is raised if prefix is reserved or is invalid.
+
+ """
if re.match("ns\d+$", prefix):
raise ValueError("Prefix format reserved for internal use")
for k, v in list(_namespace_map.items()):
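
A minimal sketch of register_namespace() as documented above (prefix and URI are invented; note the registry is process-global):

    import xml.etree.ElementTree as ET

    ET.register_namespace("ex", "http://example.com/ns")
    elem = ET.Element("{http://example.com/ns}item")
    print(ET.tostring(elem, encoding="unicode"))
    # '<ex:item xmlns:ex="http://example.com/ns" />'
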
@@ -1153,40 +1116,27 @@ def _escape_attrib_html(text):
# --------------------------------------------------------------------
-##
-# Generates a string representation of an XML element, including all
-# subelements. If encoding is "unicode", the return type is a string;
-# otherwise it is a bytes array.
-#
-# @param element An Element instance.
-# @keyparam encoding Optional output encoding (default is US-ASCII).
-# Use "unicode" to return a Unicode string.
-# @keyparam method Optional output method ("xml", "html", "text" or
-# "c14n"; default is "xml").
-# @return An (optionally) encoded string containing the XML data.
-# @defreturn string
-
-def tostring(element, encoding=None, method=None):
+def tostring(element, encoding=None, method=None, *,
+ short_empty_elements=True):
+ """Generate string representation of XML element.
+
+ All subelements are included. If encoding is "unicode", a string
+ is returned. Otherwise a bytestring is returned.
+
+ *element* is an Element instance, *encoding* is an optional output
+ encoding defaulting to US-ASCII, *method* is an optional output method
+ which can be one of "xml" (default), "html", "text" or "c14n".
+
+ Returns an (optionally) encoded string containing the XML data.
+
+ """
stream = io.StringIO() if encoding == 'unicode' else io.BytesIO()
- ElementTree(element).write(stream, encoding, method=method)
+ ElementTree(element).write(stream, encoding, method=method,
+ short_empty_elements=short_empty_elements)
return stream.getvalue()
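
A minimal sketch of tostring() with the new short_empty_elements keyword (tag name is invented):

    from xml.etree.ElementTree import Element, tostring

    empty = Element("br")
    print(tostring(empty, encoding="unicode"))                              # '<br />'
    print(tostring(empty, encoding="unicode", short_empty_elements=False))  # '<br></br>'
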
-##
-# Generates a string representation of an XML element, including all
-# subelements.
-#
-# @param element An Element instance.
-# @keyparam encoding Optional output encoding (default is US-ASCII).
-# Use "unicode" to return a Unicode string.
-# @keyparam method Optional output method ("xml", "html", "text" or
-# "c14n"; default is "xml").
-# @return A sequence object containing the XML data.
-# @defreturn sequence
-# @since 1.3
-
class _ListDataStream(io.BufferedIOBase):
- """ An auxiliary stream accumulating into a list reference
- """
+ """An auxiliary stream accumulating into a list reference."""
def __init__(self, lst):
self.lst = lst
@@ -1202,22 +1152,25 @@ class _ListDataStream(io.BufferedIOBase):
def tell(self):
return len(self.lst)
-def tostringlist(element, encoding=None, method=None):
+def tostringlist(element, encoding=None, method=None, *,
+ short_empty_elements=True):
lst = []
stream = _ListDataStream(lst)
- ElementTree(element).write(stream, encoding, method=method)
+ ElementTree(element).write(stream, encoding, method=method,
+ short_empty_elements=short_empty_elements)
return lst
-##
-# Writes an element tree or element structure to sys.stdout. This
-# function should be used for debugging only.
-# <p>
-# The exact output format is implementation dependent. In this
-# version, it's written as an ordinary XML file.
-#
-# @param elem An element tree or an individual element.
def dump(elem):
+ """Write element tree or element structure to sys.stdout.
+
+ This function should be used for debugging only.
+
+ *elem* is either an ElementTree, or a single Element. The exact output
+ format is implementation dependent. In this version, it's written as an
+ ordinary XML file.
+
+ """
# debugging
if not isinstance(elem, ElementTree):
elem = ElementTree(elem)
@@ -1229,31 +1182,36 @@ def dump(elem):
# --------------------------------------------------------------------
# parsing
-##
-# Parses an XML document into an element tree.
-#
-# @param source A filename or file object containing XML data.
-# @param parser An optional parser instance. If not given, the
-# standard {@link XMLParser} parser is used.
-# @return An ElementTree instance
def parse(source, parser=None):
+ """Parse XML document into element tree.
+
+ *source* is a filename or file object containing XML data,
+ *parser* is an optional parser instance defaulting to XMLParser.
+
+ Return an ElementTree instance.
+
+ """
tree = ElementTree()
tree.parse(source, parser)
return tree
-##
-# Parses an XML document into an element tree incrementally, and reports
-# what's going on to the user.
-#
-# @param source A filename or file object containing XML data.
-# @param events A list of events to report back. If omitted, only "end"
-# events are reported.
-# @param parser An optional parser instance. If not given, the
-# standard {@link XMLParser} parser is used.
-# @return A (event, elem) iterator.
def iterparse(source, events=None, parser=None):
+ """Incrementally parse XML document into ElementTree.
+
+ This function also reports what's going on to the user based on the
+ *events* it is called with. The supported events are the strings
+ "start", "end", "start-ns" and "end-ns" (the "ns" events are used to get
+ detailed namespace information). If *events* is omitted, only
+ "end" events are reported.
+
+ *source* is a filename or file object containing XML data, *events* is
+ a list of events to report back, *parser* is an optional parser instance.
+
+ Returns an iterator providing (event, elem) pairs.
+
+ """
close_source = False
if not hasattr(source, "read"):
source = open(source, "rb")
@@ -1340,33 +1298,34 @@ class _IterParseIterator:
def __iter__(self):
return self
-##
-# Parses an XML document from a string constant. This function can
-# be used to embed "XML literals" in Python code.
-#
-# @param source A string containing XML data.
-# @param parser An optional parser instance. If not given, the
-# standard {@link XMLParser} parser is used.
-# @return An Element instance.
-# @defreturn Element
def XML(text, parser=None):
+ """Parse XML document from string constant.
+
+ This function can be used to embed "XML Literals" in Python code.
+
+ *text* is a string containing XML data, *parser* is an
+ optional parser instance, defaulting to the standard XMLParser.
+
+ Returns an Element instance.
+
+ """
if not parser:
parser = XMLParser(target=TreeBuilder())
parser.feed(text)
return parser.close()
-##
-# Parses an XML document from a string constant, and also returns
-# a dictionary which maps from element id:s to elements.
-#
-# @param source A string containing XML data.
-# @param parser An optional parser instance. If not given, the
-# standard {@link XMLParser} parser is used.
-# @return A tuple containing an Element instance and a dictionary.
-# @defreturn (Element, dictionary)
def XMLID(text, parser=None):
+ """Parse XML document from string constant for its IDs.
+
+ *text* is a string containing XML data, *parser* is an
+ optional parser instance, defaulting to the standard XMLParser.
+
+ Returns an (Element, dict) tuple, in which the
+ dict maps element ids to elements.
+
+ """
if not parser:
parser = XMLParser(target=TreeBuilder())
parser.feed(text)
@@ -1378,27 +1337,18 @@ def XMLID(text, parser=None):
ids[id] = elem
return tree, ids
-##
-# Parses an XML document from a string constant. Same as {@link #XML}.
-#
-# @def fromstring(text)
-# @param source A string containing XML data.
-# @return An Element instance.
-# @defreturn Element
-
fromstring = XML
-
-##
-# Parses an XML document from a sequence of string fragments.
-#
-# @param sequence A list or other sequence containing XML data fragments.
-# @param parser An optional parser instance. If not given, the
-# standard {@link XMLParser} parser is used.
-# @return An Element instance.
-# @defreturn Element
-# @since 1.3
+"""Parse XML document from string constant. Alias for XML()."""
def fromstringlist(sequence, parser=None):
+ """Parse XML document from sequence of string fragments.
+
+ *sequence* is a list or other sequence, *parser* is an optional parser
+ instance, defaulting to the standard XMLParser.
+
+ Returns an Element instance.
+
+ """
if not parser:
parser = XMLParser(target=TreeBuilder())
for text in sequence:
@@ -1407,19 +1357,20 @@ def fromstringlist(sequence, parser=None):
# --------------------------------------------------------------------
-##
-# Generic element structure builder. This builder converts a sequence
-# of {@link #TreeBuilder.start}, {@link #TreeBuilder.data}, and {@link
-# #TreeBuilder.end} method calls to a well-formed element structure.
-# <p>
-# You can use this class to build an element structure using a custom XML
-# parser, or a parser for some other XML-like format.
-#
-# @param element_factory Optional element factory. This factory
-# is called to create new Element instances, as necessary.
class TreeBuilder:
+ """Generic element structure builder.
+
+ This builder converts a sequence of start, data, and end method
+ calls to a well-formed element structure.
+
+ You can use this class to build an element structure using a custom XML
+ parser, or a parser for some other XML-like format.
+ *element_factory* is an optional element factory which is called
+ to create new Element instances, as necessary.
+
+ """
def __init__(self, element_factory=None):
self._data = [] # data collector
self._elem = [] # element stack
@@ -1429,14 +1380,8 @@ class TreeBuilder:
element_factory = Element
self._factory = element_factory
- ##
- # Flushes the builder buffers, and returns the toplevel document
- # element.
- #
- # @return An Element instance.
- # @defreturn Element
-
def close(self):
+ """Flush builder buffers and return toplevel document Element."""
assert len(self._elem) == 0, "missing end tags"
assert self._last is not None, "missing toplevel element"
return self._last
@@ -1453,24 +1398,19 @@ class TreeBuilder:
self._last.text = text
self._data = []
- ##
- # Adds text to the current element.
- #
- # @param data A string. This should be either an 8-bit string
- # containing ASCII text, or a Unicode string.
def data(self, data):
+ """Add text to current element."""
self._data.append(data)
- ##
- # Opens a new element.
- #
- # @param tag The element name.
- # @param attrib A dictionary containing element attributes.
- # @return The opened element.
- # @defreturn Element
def start(self, tag, attrs):
+ """Open new element and return it.
+
+ *tag* is the element name, *attrs* is a dict containing element
+ attributes.
+
+ """
self._flush()
self._last = elem = self._factory(tag, attrs)
if self._elem:
@@ -1479,14 +1419,13 @@ class TreeBuilder:
self._tail = 0
return elem
- ##
- # Closes the current element.
- #
- # @param tag The element name.
- # @return The closed element.
- # @defreturn Element
def end(self, tag):
+ """Close and return current Element.
+
+ *tag* is the element name.
+
+ """
self._flush()
self._last = self._elem.pop()
assert self._last.tag == tag,\
@@ -1495,20 +1434,18 @@ class TreeBuilder:
self._tail = 1
return self._last
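
A minimal sketch of driving TreeBuilder by hand via start/data/end/close as documented above (tags and attributes are invented):

    from xml.etree.ElementTree import TreeBuilder, tostring

    builder = TreeBuilder()
    builder.start("root", {})
    builder.data("hello ")
    builder.start("child", {"id": "1"})
    builder.end("child")
    builder.end("root")
    root = builder.close()   # flush buffers, get the toplevel Element
    print(tostring(root, encoding="unicode"))
    # '<root>hello <child id="1" /></root>'
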
-##
-# Element structure builder for XML source data, based on the
-# <b>expat</b> parser.
-#
-# @keyparam target Target object. If omitted, the builder uses an
-# instance of the standard {@link #TreeBuilder} class.
-# @keyparam html Predefine HTML entities. This flag is not supported
-# by the current implementation.
-# @keyparam encoding Optional encoding. If given, the value overrides
-# the encoding specified in the XML file.
-# @see #ElementTree
-# @see #TreeBuilder
+# also see ElementTree and TreeBuilder
class XMLParser:
+ """Element structure builder for XML source data based on the expat parser.
+
+ *html* is a flag to predefine HTML entities (not currently supported),
+ *target* is an optional target object which defaults to an instance of the
+ standard TreeBuilder class, *encoding* is an optional encoding string
+ which, if given, overrides the encoding specified in the XML file:
+ http://www.iana.org/assignments/character-sets
+
+ """
def __init__(self, html=0, target=None, encoding=None):
try:
@@ -1650,15 +1587,13 @@ class XMLParser:
self.doctype(name, pubid, system[1:-1])
self._doctype = None
- ##
- # (Deprecated) Handles a doctype declaration.
- #
- # @param name Doctype name.
- # @param pubid Public identifier.
- # @param system System identifier.
-
def doctype(self, name, pubid, system):
- """This method of XMLParser is deprecated."""
+ """(Deprecated) Handle doctype declaration
+
+ *name* is the Doctype name, *pubid* is the public identifier,
+ and *system* is the system identifier.
+
+ """
warnings.warn(
"This method of XMLParser is deprecated. Define doctype() "
"method on the TreeBuilder target.",
@@ -1668,24 +1603,15 @@ class XMLParser:
# sentinel, if doctype is redefined in a subclass
__doctype = doctype
- ##
- # Feeds data to the parser.
- #
- # @param data Encoded data.
-
def feed(self, data):
+ """Feed encoded data to parser."""
try:
self.parser.Parse(data, 0)
except self._error as v:
self._raiseerror(v)
- ##
- # Finishes feeding data to the parser.
- #
- # @return An element structure.
- # @defreturn Element
-
def close(self):
+ """Finish feeding data to parser and return element structure."""
try:
self.parser.Parse("", 1) # end of data
except self._error as v:
@@ -1712,7 +1638,9 @@ else:
# Overwrite 'ElementTree.parse' and 'iterparse' to use the C XMLParser
class ElementTree(ElementTree):
+ __doc__ = ElementTree.__doc__
def parse(self, source, parser=None):
+ __doc__ = ElementTree.parse.__doc__
close_source = False
if not hasattr(source, 'read'):
source = open(source, 'rb')
@@ -1734,25 +1662,14 @@ else:
source.close()
class iterparse:
- """Parses an XML section into an element tree incrementally.
-
- Reports what’s going on to the user. 'source' is a filename or file
- object containing XML data. 'events' is a list of events to report back.
- The supported events are the strings "start", "end", "start-ns" and
- "end-ns" (the "ns" events are used to get detailed namespace
- information). If 'events' is omitted, only "end" events are reported.
- 'parser' is an optional parser instance. If not given, the standard
- XMLParser parser is used. Returns an iterator providing
- (event, elem) pairs.
- """
-
+ __doc__ = iterparse.__doc__
root = None
- def __init__(self, file, events=None, parser=None):
+ def __init__(self, source, events=None, parser=None):
self._close_file = False
- if not hasattr(file, 'read'):
- file = open(file, 'rb')
+ if not hasattr(source, 'read'):
+ source = open(source, 'rb')
self._close_file = True
- self._file = file
+ self._file = source
self._events = []
self._index = 0
self._error = None
diff --git a/Lib/xmlrpc/client.py b/Lib/xmlrpc/client.py
index d7b2db3..ff42265 100644
--- a/Lib/xmlrpc/client.py
+++ b/Lib/xmlrpc/client.py
@@ -1045,7 +1045,7 @@ def gzip_decode(data):
gzf = gzip.GzipFile(mode="rb", fileobj=f)
try:
decoded = gzf.read()
- except IOError:
+ except OSError:
raise ValueError("invalid data")
f.close()
gzf.close()
@@ -1130,8 +1130,9 @@ class Transport:
for i in (0, 1):
try:
return self.single_request(host, handler, request_body, verbose)
- except socket.error as e:
- if i or e.errno not in (errno.ECONNRESET, errno.ECONNABORTED, errno.EPIPE):
+ except OSError as e:
+ if i or e.errno not in (errno.ECONNRESET, errno.ECONNABORTED,
+ errno.EPIPE):
raise
except http.client.BadStatusLine: #close after we sent request
if i:
@@ -1385,7 +1386,7 @@ class ServerProxy:
# get the url
type, uri = urllib.parse.splittype(uri)
if type not in ("http", "https"):
- raise IOError("unsupported XML-RPC protocol")
+ raise OSError("unsupported XML-RPC protocol")
self.__host, self.__handler = urllib.parse.splithost(uri)
if not self.__handler:
self.__handler = "/RPC2"
diff --git a/Lib/zipfile.py b/Lib/zipfile.py
index 3448c61..b90af55 100644
--- a/Lib/zipfile.py
+++ b/Lib/zipfile.py
@@ -164,7 +164,7 @@ def _check_zipfile(fp):
try:
if _EndRecData(fp):
return True # file has correct magic number
- except IOError:
+ except OSError:
pass
return False
@@ -180,7 +180,7 @@ def is_zipfile(filename):
else:
with open(filename, "rb") as fp:
result = _check_zipfile(fp)
- except IOError:
+ except OSError:
pass
return result
@@ -190,7 +190,7 @@ def _EndRecData64(fpin, offset, endrec):
"""
try:
fpin.seek(offset - sizeEndCentDir64Locator, 2)
- except IOError:
+ except OSError:
# If the seek fails, the file is not large enough to contain a ZIP64
# end-of-archive record, so just return the end record we were given.
return endrec
@@ -242,7 +242,7 @@ def _EndRecData(fpin):
# file if this is the case).
try:
fpin.seek(-sizeEndCentDir, 2)
- except IOError:
+ except OSError:
return None
data = fpin.read()
if (len(data) == sizeEndCentDir and
@@ -910,7 +910,7 @@ class ZipFile:
modeDict = {'r' : 'rb', 'w': 'wb', 'a' : 'r+b'}
try:
self.fp = io.open(file, modeDict[mode])
- except IOError:
+ except OSError:
if mode == 'a':
mode = key = 'w'
self.fp = io.open(file, modeDict[mode])
@@ -961,7 +961,7 @@ class ZipFile:
fp = self.fp
try:
endrec = _EndRecData(fp)
- except IOError:
+ except OSError:
raise BadZipFile("File is not a zip file")
if not endrec:
raise BadZipFile("File is not a zip file")