Diffstat
-rw-r--r--Doc/c-api/object.rst9
-rw-r--r--Doc/faq/library.rst8
-rw-r--r--Doc/howto/logging-cookbook.rst4
-rw-r--r--Doc/library/2to3.rst4
-rw-r--r--Doc/library/abc.rst21
-rw-r--r--Doc/library/argparse.rst20
-rw-r--r--Doc/library/doctest.rst10
-rw-r--r--Doc/library/exceptions.rst6
-rw-r--r--Doc/library/functions.rst4
-rw-r--r--Doc/library/gc.rst18
-rw-r--r--Doc/library/hashlib.rst10
-rw-r--r--Doc/library/http.server.rst3
-rw-r--r--Doc/library/importlib.rst155
-rw-r--r--Doc/library/json.rst31
-rw-r--r--Doc/library/logging.config.rst51
-rw-r--r--Doc/library/nntplib.rst3
-rw-r--r--Doc/library/operator.rst8
-rw-r--r--Doc/library/os.path.rst1
-rw-r--r--Doc/library/os.rst2
-rw-r--r--Doc/library/poplib.rst27
-rw-r--r--Doc/library/pty.rst3
-rw-r--r--Doc/library/select.rst5
-rw-r--r--Doc/library/shutil.rst22
-rw-r--r--Doc/library/sys.rst18
-rw-r--r--Doc/library/sysconfig.rst2
-rw-r--r--Doc/library/timeit.rst2
-rw-r--r--Doc/library/tkinter.rst26
-rw-r--r--Doc/library/traceback.rst2
-rw-r--r--Doc/library/unicodedata.rst4
-rw-r--r--Doc/library/urllib.error.rst7
-rw-r--r--Doc/library/urllib.request.rst38
-rw-r--r--Doc/library/venv.rst5
-rw-r--r--Doc/library/weakref.rst29
-rw-r--r--Doc/license.rst21
-rw-r--r--Doc/reference/datamodel.rst9
-rw-r--r--Doc/tools/sphinxext/indexsidebar.html2
-rw-r--r--Doc/tools/sphinxext/pyspecific.py2
-rw-r--r--Doc/tutorial/interpreter.rst14
-rw-r--r--Doc/tutorial/modules.rst35
-rw-r--r--Doc/tutorial/stdlib.rst2
-rw-r--r--Doc/tutorial/stdlib2.rst2
-rw-r--r--Doc/using/venv-create.inc29
-rw-r--r--Doc/whatsnew/3.4.rst209
-rw-r--r--Doc/whatsnew/index.rst1
-rw-r--r--Include/Python-ast.h12
-rw-r--r--Include/abstract.h3
-rw-r--r--Include/asdl.h1
-rw-r--r--Include/grammar.h2
-rw-r--r--Include/methodobject.h2
-rw-r--r--Include/object.h41
-rw-r--r--Include/objimpl.h4
-rw-r--r--Include/osdefs.h8
-rw-r--r--Include/patchlevel.h6
-rw-r--r--Include/pyport.h17
-rw-r--r--Include/sliceobject.h3
-rw-r--r--Include/symtable.h1
-rw-r--r--Include/token.h2
-rw-r--r--Include/unicodeobject.h29
-rw-r--r--LICENSE1
-rw-r--r--Lib/_osx_support.py2
-rw-r--r--Lib/_pyio.py2
-rw-r--r--Lib/abc.py6
-rw-r--r--Lib/argparse.py19
-rw-r--r--Lib/ast.py6
-rw-r--r--Lib/asynchat.py4
-rw-r--r--Lib/asyncore.py18
-rw-r--r--Lib/bz2.py44
-rwxr-xr-xLib/cgi.py4
-rw-r--r--Lib/collections/__init__.py3
-rw-r--r--Lib/collections/abc.py3
-rw-r--r--Lib/compileall.py4
-rw-r--r--Lib/concurrent/futures/_base.py3
-rw-r--r--Lib/concurrent/futures/process.py6
-rw-r--r--Lib/concurrent/futures/thread.py2
-rw-r--r--Lib/ctypes/__init__.py6
-rw-r--r--Lib/ctypes/test/test_checkretval.py2
-rw-r--r--Lib/ctypes/test/test_win32.py2
-rw-r--r--Lib/dbm/__init__.py6
-rw-r--r--Lib/dbm/dumb.py4
-rw-r--r--Lib/decimal.py4
-rw-r--r--Lib/difflib.py21
-rw-r--r--Lib/distutils/__init__.py2
-rw-r--r--Lib/distutils/ccompiler.py5
-rw-r--r--Lib/distutils/command/bdist.py3
-rw-r--r--Lib/distutils/command/bdist_dumb.py8
-rw-r--r--Lib/distutils/command/build_ext.py26
-rw-r--r--Lib/distutils/command/install.py15
-rw-r--r--Lib/distutils/command/upload.py2
-rw-r--r--Lib/distutils/core.py2
-rw-r--r--Lib/distutils/dir_util.py2
-rw-r--r--Lib/distutils/emxccompiler.py315
-rw-r--r--Lib/distutils/file_util.py16
-rw-r--r--Lib/distutils/spawn.py24
-rw-r--r--Lib/distutils/sysconfig.py24
-rw-r--r--Lib/distutils/tests/test_bdist_dumb.py2
-rw-r--r--Lib/distutils/tests/test_install.py2
-rw-r--r--Lib/distutils/tests/test_util.py2
-rw-r--r--Lib/distutils/util.py6
-rw-r--r--Lib/doctest.py8
-rw-r--r--Lib/email/_header_value_parser.py3
-rw-r--r--Lib/email/iterators.py6
-rw-r--r--Lib/filecmp.py6
-rw-r--r--Lib/fileinput.py6
-rw-r--r--Lib/fractions.py14
-rw-r--r--Lib/ftplib.py8
-rw-r--r--Lib/functools.py28
-rw-r--r--Lib/genericpath.py6
-rw-r--r--Lib/getpass.py2
-rw-r--r--Lib/glob.py5
-rw-r--r--Lib/hashlib.py15
-rw-r--r--Lib/http/client.py4
-rw-r--r--Lib/http/cookiejar.py3
-rw-r--r--Lib/http/server.py8
-rw-r--r--Lib/idlelib/EditorWindow.py4
-rw-r--r--Lib/idlelib/FileList.py2
-rw-r--r--Lib/idlelib/GrepDialog.py2
-rw-r--r--Lib/idlelib/IOBinding.py2
-rw-r--r--Lib/idlelib/NEWS.txt2
-rw-r--r--Lib/idlelib/PathBrowser.py2
-rw-r--r--Lib/idlelib/PyShell.py12
-rw-r--r--Lib/idlelib/TreeWidget.py4
-rw-r--r--Lib/idlelib/help.txt2
-rw-r--r--Lib/idlelib/idlever.py2
-rw-r--r--Lib/idlelib/rpc.py8
-rw-r--r--Lib/idlelib/run.py4
-rw-r--r--Lib/imaplib.py8
-rw-r--r--Lib/importlib/_bootstrap.py27
-rw-r--r--Lib/importlib/abc.py177
-rw-r--r--Lib/json/__init__.py25
-rw-r--r--Lib/json/decoder.py14
-rw-r--r--Lib/json/encoder.py29
-rw-r--r--Lib/json/tool.py3
-rw-r--r--Lib/lib2to3/btm_utils.py6
-rw-r--r--Lib/lib2to3/fixer_util.py23
-rw-r--r--Lib/lib2to3/fixes/fix_intern.py21
-rw-r--r--Lib/lib2to3/fixes/fix_reload.py28
-rw-r--r--Lib/lib2to3/main.py4
-rw-r--r--Lib/lib2to3/pytree.py9
-rw-r--r--Lib/lib2to3/refactor.py4
-rwxr-xr-xLib/lib2to3/tests/pytree_idempotency.py2
-rw-r--r--Lib/lib2to3/tests/test_fixers.py59
-rw-r--r--Lib/linecache.py4
-rw-r--r--Lib/logging/__init__.py31
-rw-r--r--Lib/logging/config.py74
-rw-r--r--Lib/logging/handlers.py56
-rw-r--r--Lib/lzma.py130
-rw-r--r--Lib/macpath.py2
-rw-r--r--Lib/mailbox.py12
-rw-r--r--Lib/mimetypes.py6
-rw-r--r--Lib/modulefinder.py2
-rw-r--r--Lib/multiprocessing/__init__.py7
-rw-r--r--Lib/multiprocessing/connection.py2
-rw-r--r--Lib/multiprocessing/forking.py24
-rw-r--r--Lib/multiprocessing/queues.py4
-rw-r--r--Lib/nntplib.py2
-rw-r--r--Lib/ntpath.py12
-rw-r--r--Lib/os.py48
-rw-r--r--Lib/os2emxpath.py158
-rw-r--r--Lib/pkgutil.py6
-rw-r--r--Lib/plat-os2emx/IN.py82
-rw-r--r--Lib/plat-os2emx/SOCKET.py106
-rw-r--r--Lib/plat-os2emx/_emx_link.py79
-rw-r--r--Lib/plat-os2emx/grp.py182
-rw-r--r--Lib/plat-os2emx/pwd.py208
-rwxr-xr-xLib/plat-os2emx/regen7
-rwxr-xr-xLib/platform.py24
-rw-r--r--Lib/poplib.py91
-rw-r--r--Lib/posixpath.py6
-rw-r--r--Lib/pty.py7
-rwxr-xr-xLib/pydoc.py2
-rw-r--r--Lib/random.py7
-rw-r--r--Lib/shelve.py9
-rw-r--r--Lib/shutil.py39
-rw-r--r--Lib/site.py25
-rwxr-xr-xLib/smtpd.py4
-rw-r--r--Lib/smtplib.py8
-rw-r--r--Lib/socketserver.py8
-rw-r--r--Lib/ssl.py10
-rw-r--r--Lib/subprocess.py58
-rw-r--r--Lib/sysconfig.py22
-rw-r--r--Lib/tarfile.py15
-rw-r--r--Lib/telnetlib.py6
-rw-r--r--Lib/tempfile.py7
-rw-r--r--Lib/test/json_tests/test_indent.py4
-rw-r--r--Lib/test/mock_socket.py8
-rwxr-xr-xLib/test/regrtest.py364
-rw-r--r--Lib/test/sortperf.py2
-rw-r--r--Lib/test/ssl_servers.py2
-rw-r--r--Lib/test/support.py26
-rw-r--r--Lib/test/test_abc.py13
-rw-r--r--Lib/test/test_argparse.py32
-rw-r--r--Lib/test/test_ast.py13
-rw-r--r--Lib/test/test_asyncore.py6
-rw-r--r--Lib/test/test_builtin.py19
-rw-r--r--Lib/test/test_bytes.py25
-rw-r--r--Lib/test/test_bz2.py136
-rw-r--r--Lib/test/test_cmd_line.py19
-rw-r--r--Lib/test/test_cmd_line_script.py9
-rw-r--r--Lib/test/test_codecs.py4
-rw-r--r--Lib/test/test_complex.py2
-rw-r--r--Lib/test/test_concurrent_futures.py22
-rw-r--r--Lib/test/test_devpoll.py2
-rw-r--r--Lib/test/test_doctest.py47
-rw-r--r--Lib/test/test_enumerate.py10
-rw-r--r--Lib/test/test_epoll.py9
-rw-r--r--Lib/test/test_exceptions.py17
-rw-r--r--Lib/test/test_fcntl.py15
-rw-r--r--Lib/test/test_fileio.py2
-rw-r--r--Lib/test/test_format.py16
-rw-r--r--Lib/test/test_fractions.py32
-rw-r--r--Lib/test/test_ftplib.py6
-rw-r--r--Lib/test/test_functools.py208
-rw-r--r--Lib/test/test_gc.py26
-rw-r--r--Lib/test/test_generators.py21
-rw-r--r--Lib/test/test_genericpath.py24
-rw-r--r--Lib/test/test_hashlib.py127
-rw-r--r--Lib/test/test_httplib.py4
-rw-r--r--Lib/test/test_httpservers.py11
-rw-r--r--Lib/test/test_imp.py14
-rw-r--r--Lib/test/test_importlib/import_/test_fromlist.py2
-rw-r--r--Lib/test/test_importlib/source/test_abc_loader.py518
-rw-r--r--Lib/test/test_importlib/test_abc.py18
-rw-r--r--Lib/test/test_io.py10
-rw-r--r--Lib/test/test_iterlen.py62
-rw-r--r--Lib/test/test_itertools.py5
-rw-r--r--Lib/test/test_kqueue.py2
-rw-r--r--Lib/test/test_logging.py173
-rw-r--r--Lib/test/test_mailbox.py2
-rw-r--r--Lib/test/test_mimetypes.py2
-rw-r--r--Lib/test/test_mmap.py6
-rw-r--r--Lib/test/test_multiprocessing.py172
-rw-r--r--Lib/test/test_nntplib.py2
-rw-r--r--Lib/test/test_openpty.py2
-rw-r--r--Lib/test/test_operator.py25
-rw-r--r--Lib/test/test_os.py128
-rw-r--r--Lib/test/test_pep277.py4
-rw-r--r--Lib/test/test_poll.py21
-rw-r--r--Lib/test/test_poplib.py157
-rw-r--r--Lib/test/test_pty.py2
-rw-r--r--Lib/test/test_random.py33
-rw-r--r--Lib/test/test_range.py2
-rw-r--r--Lib/test/test_select.py4
-rw-r--r--Lib/test/test_set.py2
-rw-r--r--Lib/test/test_shelve.py13
-rw-r--r--Lib/test/test_shutil.py17
-rw-r--r--Lib/test/test_signal.py9
-rw-r--r--Lib/test/test_site.py6
-rw-r--r--Lib/test/test_slice.py114
-rw-r--r--Lib/test/test_smtplib.py12
-rw-r--r--Lib/test/test_socket.py112
-rw-r--r--Lib/test/test_socketserver.py18
-rw-r--r--Lib/test/test_ssl.py28
-rw-r--r--Lib/test/test_subprocess.py18
-rw-r--r--Lib/test/test_sundry.py34
-rw-r--r--Lib/test/test_syntax.py4
-rw-r--r--Lib/test/test_sys.py34
-rw-r--r--Lib/test/test_sysconfig.py44
-rw-r--r--Lib/test/test_tarfile.py4
-rw-r--r--Lib/test/test_tempfile.py12
-rw-r--r--Lib/test/test_thread.py2
-rw-r--r--Lib/test/test_threading.py6
-rw-r--r--Lib/test/test_threadsignals.py2
-rw-r--r--Lib/test/test_unicode.py38
-rw-r--r--Lib/test/test_unicodedata.py2
-rw-r--r--Lib/test/test_urllib.py92
-rw-r--r--Lib/test/test_urllib2.py86
-rw-r--r--Lib/test/test_urllib2net.py2
-rw-r--r--Lib/test/test_venv.py59
-rw-r--r--Lib/test/test_wait3.py12
-rw-r--r--Lib/test/test_weakref.py140
-rw-r--r--Lib/test/test_winreg.py28
-rw-r--r--Lib/test/test_winsound.py2
-rw-r--r--Lib/test/test_xmlrpc.py28
-rw-r--r--Lib/test/test_xmlrpc_net.py4
-rw-r--r--Lib/test/tf_inherit_check.py2
-rw-r--r--Lib/tkinter/__init__.py39
-rw-r--r--Lib/tkinter/filedialog.py4
-rw-r--r--Lib/traceback.py3
-rw-r--r--Lib/unittest/loader.py3
-rw-r--r--Lib/urllib/error.py6
-rw-r--r--Lib/urllib/request.py91
-rw-r--r--Lib/venv/__init__.py23
-rw-r--r--Lib/venv/scripts/posix/activate.csh37
-rw-r--r--Lib/venv/scripts/posix/activate.fish74
-rw-r--r--Lib/weakref.py59
-rw-r--r--Lib/webbrowser.py15
-rw-r--r--Lib/xml/etree/ElementPath.py6
-rw-r--r--Lib/xml/etree/ElementTree.py6
-rw-r--r--Lib/xmlrpc/client.py5
-rw-r--r--Lib/zipfile.py1
-rw-r--r--Mac/Tools/bundlebuilder.py2
-rw-r--r--Makefile.pre.in1
-rw-r--r--Misc/ACKS11
-rw-r--r--Misc/HISTORY4101
-rw-r--r--Misc/NEWS253
-rw-r--r--Misc/RPM/python-3.4.spec (renamed from Misc/RPM/python-3.3.spec)7
-rw-r--r--Misc/TextMate/Python-Dev.tmbundle/Commands/2 to 3 - Module Deletion.tmCommand64
-rw-r--r--Misc/TextMate/Python-Dev.tmbundle/Commands/Build Docs.tmCommand23
-rw-r--r--Misc/TextMate/Python-Dev.tmbundle/Commands/Build.tmCommand25
-rw-r--r--Misc/TextMate/Python-Dev.tmbundle/Commands/Go to Issue.tmCommand20
-rw-r--r--Misc/TextMate/Python-Dev.tmbundle/Commands/Open Docs.tmCommand32
-rw-r--r--Misc/TextMate/Python-Dev.tmbundle/Commands/Open PEP.tmCommand23
-rw-r--r--Misc/TextMate/Python-Dev.tmbundle/Snippets/2 to 3 - Module Deletion (docs).tmSnippet16
-rw-r--r--Misc/TextMate/Python-Dev.tmbundle/info.plist37
-rw-r--r--Misc/Vim/python.vim148
-rw-r--r--Misc/Vim/syntax_test.py62
-rw-r--r--Misc/Vim/vim_syntax.py229
-rw-r--r--Misc/Vim/vimrc87
-rw-r--r--Modules/_ctypes/callproc.c36
-rw-r--r--Modules/_dbmmodule.c4
-rw-r--r--Modules/_hashopenssl.c22
-rw-r--r--Modules/_io/bufferedio.c5
-rw-r--r--Modules/_io/fileio.c7
-rw-r--r--Modules/_io/textio.c18
-rw-r--r--Modules/_json.c66
-rw-r--r--Modules/_lsprof.c4
-rw-r--r--Modules/_multiprocessing/multiprocessing.c25
-rw-r--r--Modules/_multiprocessing/multiprocessing.h6
-rw-r--r--Modules/_multiprocessing/semaphore.c14
-rw-r--r--Modules/_posixsubprocess.c2
-rw-r--r--Modules/_randommodule.c99
-rwxr-xr-xModules/_sha3/cleanup.py49
-rw-r--r--Modules/_sha3/keccak/KeccakF-1600-32-rvk.macros555
-rw-r--r--Modules/_sha3/keccak/KeccakF-1600-32-s1.macros1187
-rw-r--r--Modules/_sha3/keccak/KeccakF-1600-32-s2.macros1187
-rw-r--r--Modules/_sha3/keccak/KeccakF-1600-32.macros26
-rw-r--r--Modules/_sha3/keccak/KeccakF-1600-64.macros728
-rw-r--r--Modules/_sha3/keccak/KeccakF-1600-int-set.h6
-rw-r--r--Modules/_sha3/keccak/KeccakF-1600-interface.h46
-rw-r--r--Modules/_sha3/keccak/KeccakF-1600-opt32-settings.h6
-rw-r--r--Modules/_sha3/keccak/KeccakF-1600-opt32.c524
-rw-r--r--Modules/_sha3/keccak/KeccakF-1600-opt64-settings.h9
-rw-r--r--Modules/_sha3/keccak/KeccakF-1600-opt64.c510
-rw-r--r--Modules/_sha3/keccak/KeccakF-1600-simd128.macros651
-rw-r--r--Modules/_sha3/keccak/KeccakF-1600-simd64.macros517
-rw-r--r--Modules/_sha3/keccak/KeccakF-1600-unrolling.macros124
-rw-r--r--Modules/_sha3/keccak/KeccakF-1600-xop.macros573
-rw-r--r--Modules/_sha3/keccak/KeccakNISTInterface.c83
-rw-r--r--Modules/_sha3/keccak/KeccakNISTInterface.h72
-rw-r--r--Modules/_sha3/keccak/KeccakSponge.c266
-rw-r--r--Modules/_sha3/keccak/KeccakSponge.h76
-rwxr-xr-xModules/_sha3/keccak/brg_endian.h142
-rw-r--r--Modules/_sha3/keccak/crypto_hash.h0
-rw-r--r--Modules/_sha3/sha3module.c583
-rw-r--r--Modules/_sre.c14
-rw-r--r--Modules/_struct.c21
-rw-r--r--Modules/_testimportmultiple.c57
-rw-r--r--Modules/_threadmodule.c2
-rw-r--r--Modules/_tkinter.c653
-rw-r--r--Modules/arraymodule.c7
-rw-r--r--Modules/cjkcodecs/cjkcodecs.h11
-rw-r--r--Modules/faulthandler.c9
-rw-r--r--Modules/fcntlmodule.c6
-rw-r--r--Modules/gcmodule.c64
-rw-r--r--Modules/getpath.c4
-rw-r--r--Modules/hashlib.h33
-rw-r--r--Modules/main.c6
-rw-r--r--Modules/md5module.c2
-rw-r--r--Modules/operator.c27
-rw-r--r--Modules/posixmodule.c870
-rw-r--r--Modules/pwdmodule.c4
-rw-r--r--Modules/readline.c4
-rw-r--r--Modules/selectmodule.c27
-rw-r--r--Modules/sha1module.c2
-rw-r--r--Modules/sha256module.c22
-rw-r--r--Modules/sha512module.c22
-rw-r--r--Modules/signalmodule.c5
-rw-r--r--Modules/socketmodule.c106
-rw-r--r--Modules/socketmodule.h2
-rw-r--r--Modules/timemodule.c55
-rw-r--r--Modules/unicodedata_db.h616
-rw-r--r--Modules/unicodename_db.h22399
-rw-r--r--Modules/zlibmodule.c4
-rw-r--r--Objects/abstract.c77
-rw-r--r--Objects/bytearrayobject.c74
-rw-r--r--Objects/bytesobject.c111
-rw-r--r--Objects/complexobject.c12
-rw-r--r--Objects/descrobject.c8
-rw-r--r--Objects/dictobject.c7
-rw-r--r--Objects/frameobject.c8
-rw-r--r--Objects/iterobject.c11
-rw-r--r--Objects/listobject.c2
-rw-r--r--Objects/longobject.c15
-rw-r--r--Objects/methodobject.c20
-rw-r--r--Objects/object.c20
-rw-r--r--Objects/obmalloc.c28
-rw-r--r--Objects/rangeobject.c205
-rw-r--r--Objects/sliceobject.c191
-rw-r--r--Objects/stringlib/codecs.h6
-rw-r--r--Objects/stringlib/join.h133
-rw-r--r--Objects/typeobject.c23
-rw-r--r--Objects/unicodeobject.c3024
-rw-r--r--Objects/unicodetype_db.h13
-rw-r--r--PC/VC6/build_ssl.py2
-rw-r--r--PC/VC6/pcbuild.dsw15
-rw-r--r--PC/VC6/pythoncore.dsp4
-rw-r--r--PC/VC6/readme.txt4
-rw-r--r--PC/VC6/w9xpopen.dsp97
-rw-r--r--PC/VS7.1/build_ssl.py2
-rw-r--r--PC/VS7.1/pcbuild.sln4
-rw-r--r--PC/VS7.1/python.iss1
-rw-r--r--PC/VS7.1/python20.wse5
-rw-r--r--PC/VS7.1/pythoncore.vcproj24
-rw-r--r--PC/VS7.1/readme.txt4
-rw-r--r--PC/VS7.1/w9xpopen.vcproj121
-rw-r--r--PC/VS8.0/build_ssl.bat4
-rw-r--r--PC/VS8.0/build_ssl.py2
-rw-r--r--PC/VS8.0/kill_python.c2
-rw-r--r--PC/VS8.0/pcbuild.sln5
-rw-r--r--PC/VS8.0/pyproject.vsprops2
-rw-r--r--PC/VS9.0/_sha3.vcproj (renamed from PC/VS8.0/w9xpopen.vcproj)135
-rw-r--r--PC/VS9.0/_testimportmultiple.vcproj (renamed from PC/VS9.0/w9xpopen.vcproj)141
-rw-r--r--PC/VS9.0/kill_python.c2
-rw-r--r--PC/VS9.0/pcbuild.sln44
-rw-r--r--PC/VS9.0/pyproject.vsprops2
-rw-r--r--PC/example_nt/example.vcproj4
-rw-r--r--PC/getpathp.c2
-rw-r--r--PC/os2emx/Makefile672
-rw-r--r--PC/os2emx/README.os2emx663
-rw-r--r--PC/os2emx/config.c164
-rw-r--r--PC/os2emx/dlfcn.c223
-rw-r--r--PC/os2emx/dlfcn.h51
-rw-r--r--PC/os2emx/dllentry.c42
-rw-r--r--PC/os2emx/getpathp.c418
-rw-r--r--PC/os2emx/pyconfig.h332
-rw-r--r--PC/os2emx/python33.def1314
-rw-r--r--PC/os2emx/pythonpm.c124
-rw-r--r--PC/os2vacpp/_tkinter.def8
-rw-r--r--PC/os2vacpp/config.c99
-rw-r--r--PC/os2vacpp/getpathp.c482
-rw-r--r--PC/os2vacpp/makefile1549
-rw-r--r--PC/os2vacpp/makefile.omk1047
-rw-r--r--PC/os2vacpp/pyconfig.h212
-rw-r--r--PC/os2vacpp/python.def479
-rw-r--r--PC/os2vacpp/readme.txt119
-rw-r--r--PC/pyconfig.h4
-rw-r--r--PC/python.mk5
-rw-r--r--PC/python3.def1395
-rw-r--r--PC/python3.mak10
-rw-r--r--PC/python34gen.py (renamed from PC/python33gen.py)8
-rw-r--r--PC/python34stub.def (renamed from PC/python33stub.def)3
-rw-r--r--PC/readme.txt13
-rw-r--r--PC/w9xpopen.c112
-rw-r--r--PC/winreg.c4
-rw-r--r--PCbuild/_sha3.vcxproj (renamed from PCbuild/w9xpopen.vcxproj)165
-rw-r--r--PCbuild/_testimportmultiple.vcxproj218
-rw-r--r--PCbuild/_testimportmultiple.vcxproj.filters (renamed from PCbuild/w9xpopen.vcxproj.filters)6
-rw-r--r--PCbuild/build_ssl.bat4
-rw-r--r--PCbuild/kill_python.c2
-rw-r--r--PCbuild/pcbuild.sln38
-rw-r--r--PCbuild/pyproject.props2
-rw-r--r--PCbuild/python.vcxproj6
-rw-r--r--PCbuild/readme.txt2
-rw-r--r--Parser/Python.asdl4
-rw-r--r--Parser/asdl.py2
-rwxr-xr-xParser/asdl_c.py12
-rw-r--r--Parser/grammar1.c2
-rw-r--r--Parser/tokenizer.c2
-rw-r--r--Python/Python-ast.c73
-rw-r--r--Python/ast.c22
-rw-r--r--Python/bltinmodule.c8
-rw-r--r--Python/ceval.c2114
-rw-r--r--Python/codecs.c6
-rw-r--r--Python/compile.c25
-rw-r--r--Python/dynload_os2.c42
-rw-r--r--Python/dynload_shlib.c36
-rw-r--r--Python/errors.c21
-rw-r--r--Python/fileutils.c244
-rw-r--r--Python/formatter_unicode.c3
-rw-r--r--Python/import.c38
-rw-r--r--Python/importdl.h5
-rw-r--r--Python/importlib.h8102
-rw-r--r--Python/peephole.c46
-rw-r--r--Python/pythonrun.c7
-rw-r--r--Python/symtable.c58
-rw-r--r--Python/sysmodule.c48
-rw-r--r--Python/thread.c4
-rw-r--r--Python/thread_os2.h267
-rw-r--r--README16
-rw-r--r--Tools/freeze/bkfile.py4
-rwxr-xr-xTools/freeze/freeze.py4
-rw-r--r--Tools/importbench/importbench.py31
-rw-r--r--Tools/msi/msi.py6
-rw-r--r--Tools/parser/unparse.py3
-rwxr-xr-xTools/scripts/byext.py2
-rwxr-xr-xTools/scripts/byteyears.py2
-rwxr-xr-xTools/scripts/checkpyc.py4
-rwxr-xr-xTools/scripts/copytime.py4
-rwxr-xr-xTools/scripts/finddiv.py2
-rwxr-xr-xTools/scripts/findlinksto.py2
-rwxr-xr-xTools/scripts/fixcid.py8
-rwxr-xr-xTools/scripts/ftpmirror.py20
-rwxr-xr-xTools/scripts/linktree.py6
-rw-r--r--Tools/scripts/parse_html5_entities.py105
-rwxr-xr-xTools/scripts/pathfix.py10
-rwxr-xr-xTools/scripts/pindent.py6
-rwxr-xr-xTools/scripts/treesync.py2
-rwxr-xr-xTools/scripts/untabify.py4
-rwxr-xr-xTools/scripts/which.py2
-rw-r--r--Tools/scripts/win_add2path.py2
-rwxr-xr-xTools/stringbench/stringbench.py2
-rw-r--r--Tools/unicode/makeunicodedata.py2
-rwxr-xr-xconfigure20
-rw-r--r--configure.ac2
-rw-r--r--setup.py13
505 files changed, 37331 insertions, 35027 deletions
diff --git a/Doc/c-api/object.rst b/Doc/c-api/object.rst
index a47183c..02a0d36 100644
--- a/Doc/c-api/object.rst
+++ b/Doc/c-api/object.rst
@@ -342,6 +342,15 @@ is considered sufficient for this determination.
returned. This is the equivalent to the Python expression ``len(o)``.
+.. c:function:: Py_ssize_t PyObject_LengthHint(PyObject *o, Py_ssize_t default)
+
+ Return an estimated length for the object *o*. First try to return its
+ actual length, then an estimate using ``__length_hint__``, and finally
+ return the default value. On error ``-1`` is returned. This is
+ equivalent to the Python expression ``operator.length_hint(o, default)``.
+
+ .. versionadded:: 3.4
+
.. c:function:: PyObject* PyObject_GetItem(PyObject *o, PyObject *key)
Return element of *o* corresponding to the object *key* or *NULL* on failure.
diff --git a/Doc/faq/library.rst b/Doc/faq/library.rst
index cab2d7b..6a2682f 100644
--- a/Doc/faq/library.rst
+++ b/Doc/faq/library.rst
@@ -209,7 +209,7 @@ using curses, but curses is a fairly large module to learn.
try:
c = sys.stdin.read(1)
print("Got character", repr(c))
- except IOError:
+ except OSError:
pass
finally:
termios.tcsetattr(fd, termios.TCSAFLUSH, oldterm)
@@ -222,7 +222,11 @@ using curses, but curses is a fairly large module to learn.
:func:`termios.tcsetattr` turns off stdin's echoing and disables canonical
mode. :func:`fcntl.fnctl` is used to obtain stdin's file descriptor flags
and modify them for non-blocking mode. Since reading stdin when it is empty
- results in an :exc:`IOError`, this error is caught and ignored.
+ results in an :exc:`OSError`, this error is caught and ignored.
+
+ .. versionchanged:: 3.3
+ *sys.stdin.read* used to raise :exc:`IOError`. Starting from Python 3.3,
+ :exc:`IOError` is an alias for :exc:`OSError`.
Threads
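
A self-contained sketch of the approach this FAQ entry describes (POSIX only, and it needs an interactive terminal; the setup lines are an illustration, not part of the patch), catching :exc:`OSError` as in the updated example::

    import fcntl
    import os
    import sys
    import termios

    fd = sys.stdin.fileno()
    oldterm = termios.tcgetattr(fd)
    oldflags = fcntl.fcntl(fd, fcntl.F_GETFL)

    newattr = termios.tcgetattr(fd)
    newattr[3] = newattr[3] & ~termios.ICANON & ~termios.ECHO  # lflags
    termios.tcsetattr(fd, termios.TCSANOW, newattr)
    fcntl.fcntl(fd, fcntl.F_SETFL, oldflags | os.O_NONBLOCK)
    try:
        try:
            c = sys.stdin.read(1)
            print("Got character", repr(c))
        except OSError:
            pass  # nothing to read yet; the non-blocking read raised
    finally:
        # Always restore the terminal to its original state.
        termios.tcsetattr(fd, termios.TCSAFLUSH, oldterm)
        fcntl.fcntl(fd, fcntl.F_SETFL, oldflags)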
diff --git a/Doc/howto/logging-cookbook.rst b/Doc/howto/logging-cookbook.rst
index 92af0ec..b336a4a 100644
--- a/Doc/howto/logging-cookbook.rst
+++ b/Doc/howto/logging-cookbook.rst
@@ -741,9 +741,7 @@ the basis for code meeting your own specific requirements::
break
logger = logging.getLogger(record.name)
logger.handle(record) # No level or filter logic applied - just do it!
- except (KeyboardInterrupt, SystemExit):
- raise
- except:
+ except Exception:
import sys, traceback
print('Whoops! Problem:', file=sys.stderr)
traceback.print_exc(file=sys.stderr)
diff --git a/Doc/library/2to3.rst b/Doc/library/2to3.rst
index d07aaa1..b324aa6 100644
--- a/Doc/library/2to3.rst
+++ b/Doc/library/2to3.rst
@@ -343,6 +343,10 @@ and off individually. They are described here in more detail.
Handles the move of :func:`reduce` to :func:`functools.reduce`.
+.. 2to3fixer:: reload
+
+ Converts :func:`reload` to :func:`imp.reload`.
+
.. 2to3fixer:: renames
Changes :data:`sys.maxint` to :data:`sys.maxsize`.
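
For instance, the fixer rewrites a call such as ``reload(json)`` into ``imp.reload(json)``; a minimal sketch of the Python 3 spelling it produces (using :mod:`json` purely as an example module)::

    import imp
    import json

    imp.reload(json)   # what 2to3 turns a bare reload(json) into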
diff --git a/Doc/library/abc.rst b/Doc/library/abc.rst
index 6f23596..27abb60 100644
--- a/Doc/library/abc.rst
+++ b/Doc/library/abc.rst
@@ -12,9 +12,9 @@
--------------
This module provides the infrastructure for defining :term:`abstract base
-classes <abstract base class>` (ABCs) in Python, as outlined in :pep:`3119`; see the PEP for why this
-was added to Python. (See also :pep:`3141` and the :mod:`numbers` module
-regarding a type hierarchy for numbers based on ABCs.)
+classes <abstract base class>` (ABCs) in Python, as outlined in :pep:`3119`;
+see the PEP for why this was added to Python. (See also :pep:`3141` and the
+:mod:`numbers` module regarding a type hierarchy for numbers based on ABCs.)
The :mod:`collections` module has some concrete classes that derive from
ABCs; these can, of course, be further derived. In addition the
@@ -23,7 +23,7 @@ a class or instance provides a particular interface, for example, is it
hashable or a mapping.
-This module provides the following class:
+This module provides the following classes:
.. class:: ABCMeta
@@ -127,6 +127,19 @@ This module provides the following class:
available as a method of ``Foo``, so it is provided separately.
+.. class:: ABC
+
+ A helper class that has :class:`ABCMeta` as its metaclass. With this class,
+ an abstract base class can be created by simply deriving from :class:`ABC`,
+ avoiding sometimes confusing metaclass usage.
+
+ Note that the type of :class:`ABC` is still :class:`ABCMeta`, therefore
+ inheriting from :class:`ABC` requires the usual precautions regarding metaclass
+ usage, as multiple inheritance may lead to metaclass conflicts.
+
+ .. versionadded:: 3.4
+
+
The :mod:`abc` module also provides the following decorators:
.. decorator:: abstractmethod
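
A minimal sketch of the new helper class (the ``Serializer`` classes are illustrative only)::

    from abc import ABC, abstractmethod

    class Serializer(ABC):           # no explicit metaclass= needed
        @abstractmethod
        def dumps(self, obj):
            ...

    class JSONSerializer(Serializer):
        def dumps(self, obj):
            import json
            return json.dumps(obj)

    # Serializer() raises TypeError; the concrete subclass works:
    print(JSONSerializer().dumps({"a": 1}))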
diff --git a/Doc/library/argparse.rst b/Doc/library/argparse.rst
index 9f6a1ea..584f4f7 100644
--- a/Doc/library/argparse.rst
+++ b/Doc/library/argparse.rst
@@ -976,9 +976,9 @@ See the section on the default_ keyword argument for information on when the
``type`` argument is applied to default arguments.
To ease the use of various types of files, the argparse module provides the
-factory FileType which takes the ``mode=`` and ``bufsize=`` arguments of the
-:func:`open` function. For example, ``FileType('w')`` can be used to create a
-writable file::
+factory FileType which takes the ``mode=``, ``bufsize=``, ``encoding=`` and
+``errors=`` arguments of the :func:`open` function. For example,
+``FileType('w')`` can be used to create a writable file::
>>> parser = argparse.ArgumentParser()
>>> parser.add_argument('bar', type=argparse.FileType('w'))
@@ -1617,17 +1617,19 @@ Sub-commands
FileType objects
^^^^^^^^^^^^^^^^
-.. class:: FileType(mode='r', bufsize=None)
+.. class:: FileType(mode='r', bufsize=-1, encoding=None, errors=None)
The :class:`FileType` factory creates objects that can be passed to the type
argument of :meth:`ArgumentParser.add_argument`. Arguments that have
- :class:`FileType` objects as their type will open command-line arguments as files
- with the requested modes and buffer sizes::
+ :class:`FileType` objects as their type will open command-line arguments as
+ files with the requested modes, buffer sizes, encodings and error handling
+ (see the :func:`open` function for more details)::
>>> parser = argparse.ArgumentParser()
- >>> parser.add_argument('--output', type=argparse.FileType('wb', 0))
- >>> parser.parse_args(['--output', 'out'])
- Namespace(output=<_io.BufferedWriter name='out'>)
+ >>> parser.add_argument('--raw', type=argparse.FileType('wb', 0))
+ >>> parser.add_argument('out', type=argparse.FileType('w', encoding='UTF-8'))
+ >>> parser.parse_args(['--raw', 'raw.dat', 'file.txt'])
+ Namespace(out=<_io.TextIOWrapper name='file.txt' mode='w' encoding='UTF-8'>, raw=<_io.FileIO name='raw.dat' mode='wb'>)
FileType objects understand the pseudo-argument ``'-'`` and automatically
convert this into ``sys.stdin`` for readable :class:`FileType` objects and
diff --git a/Doc/library/doctest.rst b/Doc/library/doctest.rst
index 222c719..200de45 100644
--- a/Doc/library/doctest.rst
+++ b/Doc/library/doctest.rst
@@ -633,6 +633,16 @@ The second group of options controls how test failures are reported:
the output is suppressed.
+.. data:: FAIL_FAST
+
+ When specified, exit after the first failing example and don't attempt to run
+ the remaining examples. Thus, the number of failures reported will be at most
+ 1. This flag may be useful during debugging, since examples after the first
+ failure won't even produce debugging output.
+
+ .. versionadded:: 3.4
+
+
.. data:: REPORTING_FLAGS
A bitmask or'ing together all the reporting flags above.
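
A short sketch of how the new flag might be used (``mymodule`` is a hypothetical module containing doctests)::

    import doctest
    import mymodule   # hypothetical; substitute a real module

    # Stop at the first failing example instead of reporting all failures.
    doctest.testmod(mymodule, optionflags=doctest.FAIL_FAST)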
diff --git a/Doc/library/exceptions.rst b/Doc/library/exceptions.rst
index 9595221..ece035d 100644
--- a/Doc/library/exceptions.rst
+++ b/Doc/library/exceptions.rst
@@ -260,6 +260,12 @@ The following exceptions are the exceptions that are usually raised.
:exc:`VMSError`, :exc:`socket.error`, :exc:`select.error` and
:exc:`mmap.error` have been merged into :exc:`OSError`.
+ .. versionchanged:: 3.4
+
+ The :attr:`filename` attribute is now the original file name passed to
+ the function, instead of the name encoded to or decoded from the
+ filesystem encoding.
+
.. exception:: OverflowError
diff --git a/Doc/library/functions.rst b/Doc/library/functions.rst
index 07765ce..c52dc06 100644
--- a/Doc/library/functions.rst
+++ b/Doc/library/functions.rst
@@ -543,6 +543,10 @@ are always available. They are listed here in alphabetical order.
:exc:`TypeError` exception is raised if the method is not found or if either
the *format_spec* or the return value are not strings.
+ .. versionadded:: 3.4
+ ``object().__format__(format_spec)`` raises :exc:`TypeError`
+ if *format_spec* is not an empty string.
+
.. _func-frozenset:
.. function:: frozenset([iterable])
diff --git a/Doc/library/gc.rst b/Doc/library/gc.rst
index 41bda1e..95df2f8 100644
--- a/Doc/library/gc.rst
+++ b/Doc/library/gc.rst
@@ -67,6 +67,24 @@ The :mod:`gc` module provides the following functions:
returned.
+.. function:: get_stats()
+
+ Return a list of 3 per-generation dictionaries containing collection
+ statistics since interpreter start. At this moment, each dictionary will
+ contain the following items:
+
+ * ``collections`` is the number of times this generation was collected;
+
+ * ``collected`` is the total number of objects collected inside this
+ generation;
+
+ * ``uncollectable`` is the total number of objects which were found
+ to be uncollectable (and were therefore moved to the :data:`garbage`
+ list) inside this generation.
+
+ .. versionadded:: 3.4
+
+
.. function:: set_threshold(threshold0[, threshold1[, threshold2]])
Set the garbage collection thresholds (the collection frequency). Setting
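
A small sketch of reading the new per-generation statistics::

    import gc

    gc.collect()   # force a collection so the counters are non-trivial
    for generation, stats in enumerate(gc.get_stats()):
        print(generation, stats["collections"], stats["collected"],
              stats["uncollectable"])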
diff --git a/Doc/library/hashlib.rst b/Doc/library/hashlib.rst
index 929d41b..f0fe915 100644
--- a/Doc/library/hashlib.rst
+++ b/Doc/library/hashlib.rst
@@ -51,9 +51,13 @@ concatenation of the data fed to it so far using the :meth:`digest` or
.. index:: single: OpenSSL; (use in module hashlib)
Constructors for hash algorithms that are always present in this module are
-:func:`md5`, :func:`sha1`, :func:`sha224`, :func:`sha256`, :func:`sha384`, and
-:func:`sha512`. Additional algorithms may also be available depending upon the
-OpenSSL library that Python uses on your platform.
+:func:`md5`, :func:`sha1`, :func:`sha224`, :func:`sha256`, :func:`sha384`,
+:func:`sha512`, :func:`sha3_224`, :func:`sha3_256`, :func:`sha3_384`, and
+:func:`sha3_512`. Additional algorithms may also be available depending upon
+the OpenSSL library that Python uses on your platform.
+
+ .. versionchanged:: 3.4
+ Added the sha3 family of hash algorithms.
For example, to obtain the digest of the byte string ``b'Nobody inspects the
spammish repetition'``::
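
As an illustration of the newly guaranteed constructors (a minimal sketch, assuming a build that provides the SHA-3 family as described above)::

    import hashlib

    h = hashlib.sha3_256()
    h.update(b"Nobody inspects the spammish repetition")
    print(h.hexdigest())   # 64 hex digits for the 256-bit digest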
diff --git a/Doc/library/http.server.rst b/Doc/library/http.server.rst
index cbad3ed..0577d5e 100644
--- a/Doc/library/http.server.rst
+++ b/Doc/library/http.server.rst
@@ -177,6 +177,9 @@ of which this module provides three different variants:
complete set of headers is sent, followed by text composed using the
:attr:`error_message_format` class variable.
+ .. versionchanged:: 3.4
+ The error response includes a Content-Length header.
+
.. method:: send_response(code, message=None)
Adds a response header to the headers buffer and logs the accepted
diff --git a/Doc/library/importlib.rst b/Doc/library/importlib.rst
index 083656e..ee9045b 100644
--- a/Doc/library/importlib.rst
+++ b/Doc/library/importlib.rst
@@ -132,8 +132,6 @@ ABC hierarchy::
+-- ExecutionLoader --+
+-- FileLoader
+-- SourceLoader
- +-- PyLoader (deprecated)
- +-- PyPycLoader (deprecated)
.. class:: Finder
@@ -366,10 +364,12 @@ ABC hierarchy::
* :meth:`ResourceLoader.get_data`
* :meth:`ExecutionLoader.get_filename`
Should only return the path to the source file; sourceless
- loading is not supported.
+ loading is not supported (see :class:`SourcelessLoader` if that
+ functionality is required).
The abstract methods defined by this class are to add optional bytecode
- file support. Not implementing these optional methods causes the loader to
+ file support. Not implementing these optional methods (or causing them to
+ raise :exc:`NotImplementedError`) causes the loader to
only work with source code. Implementing the methods allows the loader to
work with source *and* bytecode files; it does not allow for *sourceless*
loading where only bytecode is provided. Bytecode files are an
@@ -409,6 +409,17 @@ ABC hierarchy::
When writing to the path fails because the path is read-only
(:attr:`errno.EACCES`), do not propagate the exception.
+ .. method:: source_to_code(data, path)
+
+ Create a code object from Python source.
+
+ The *data* argument can be whatever the :func:`compile` function
+ supports (i.e. string or bytes). The *path* argument should be
+ the "path" to where the source code originated from, which can be an
+ abstract concept (e.g. location in a zip file).
+
+ .. versionadded:: 3.4
+
.. method:: get_code(fullname)
Concrete implementation of :meth:`InspectLoader.get_code`.
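
A hedged sketch of overriding the new hook via the concrete :class:`importlib.machinery.SourceFileLoader` (the transformation, module name and path are placeholders, and UTF-8 source is assumed)::

    from importlib.machinery import SourceFileLoader

    class PatchedSourceLoader(SourceFileLoader):
        def source_to_code(self, data, path):
            # Tweak the source text before it is compiled to a code object.
            source = data.decode("utf-8").replace("SPAM", "eggs")
            return super().source_to_code(source, path)

    # loader = PatchedSourceLoader("mymod", "/path/to/mymod.py")
    # module = loader.load_module("mymod")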
@@ -430,142 +441,6 @@ ABC hierarchy::
itself does not end in ``__init__``.
-.. class:: PyLoader
-
- An abstract base class inheriting from
- :class:`ExecutionLoader` and
- :class:`ResourceLoader` designed to ease the loading of
- Python source modules (bytecode is not handled; see
- :class:`SourceLoader` for a source/bytecode ABC). A subclass
- implementing this ABC will only need to worry about exposing how the source
- code is stored; all other details for loading Python source code will be
- handled by the concrete implementations of key methods.
-
- .. deprecated:: 3.2
- This class has been deprecated in favor of :class:`SourceLoader` and is
- slated for removal in Python 3.4. See below for how to create a
- subclass that is compatible with Python 3.1 onwards.
-
- If compatibility with Python 3.1 is required, then use the following idiom
- to implement a subclass that will work with Python 3.1 onwards (make sure
- to implement :meth:`ExecutionLoader.get_filename`)::
-
- try:
- from importlib.abc import SourceLoader
- except ImportError:
- from importlib.abc import PyLoader as SourceLoader
-
-
- class CustomLoader(SourceLoader):
- def get_filename(self, fullname):
- """Return the path to the source file."""
- # Implement ...
-
- def source_path(self, fullname):
- """Implement source_path in terms of get_filename."""
- try:
- return self.get_filename(fullname)
- except ImportError:
- return None
-
- def is_package(self, fullname):
- """Implement is_package by looking for an __init__ file
- name as returned by get_filename."""
- filename = os.path.basename(self.get_filename(fullname))
- return os.path.splitext(filename)[0] == '__init__'
-
-
- .. method:: source_path(fullname)
-
- An abstract method that returns the path to the source code for a
- module. Should return ``None`` if there is no source code.
- Raises :exc:`ImportError` if the loader knows it cannot handle the
- module.
-
- .. method:: get_filename(fullname)
-
- A concrete implementation of
- :meth:`importlib.abc.ExecutionLoader.get_filename` that
- relies on :meth:`source_path`. If :meth:`source_path` returns
- ``None``, then :exc:`ImportError` is raised.
-
- .. method:: load_module(fullname)
-
- A concrete implementation of :meth:`importlib.abc.Loader.load_module`
- that loads Python source code. All needed information comes from the
- abstract methods required by this ABC. The only pertinent assumption
- made by this method is that when loading a package
- :attr:`__path__` is set to ``[os.path.dirname(__file__)]``.
-
- .. method:: get_code(fullname)
-
- A concrete implementation of
- :meth:`importlib.abc.InspectLoader.get_code` that creates code objects
- from Python source code, by requesting the source code (using
- :meth:`source_path` and :meth:`get_data`) and compiling it with the
- built-in :func:`compile` function.
-
- .. method:: get_source(fullname)
-
- A concrete implementation of
- :meth:`importlib.abc.InspectLoader.get_source`. Uses
- :meth:`importlib.abc.ResourceLoader.get_data` and :meth:`source_path`
- to get the source code. It tries to guess the source encoding using
- :func:`tokenize.detect_encoding`.
-
-
-.. class:: PyPycLoader
-
- An abstract base class inheriting from :class:`PyLoader`.
- This ABC is meant to help in creating loaders that support both Python
- source and bytecode.
-
- .. deprecated:: 3.2
- This class has been deprecated in favor of :class:`SourceLoader` and to
- properly support :pep:`3147`. If compatibility is required with
- Python 3.1, implement both :class:`SourceLoader` and :class:`PyLoader`;
- instructions on how to do so are included in the documentation for
- :class:`PyLoader`. Do note that this solution will not support
- sourceless/bytecode-only loading; only source *and* bytecode loading.
-
- .. versionchanged:: 3.3
- Updated to parse (but not use) the new source size field in bytecode
- files when reading and to write out the field properly when writing.
-
- .. method:: source_mtime(fullname)
-
- An abstract method which returns the modification time for the source
- code of the specified module. The modification time should be an
- integer. If there is no source code, return ``None``. If the
- module cannot be found then :exc:`ImportError` is raised.
-
- .. method:: bytecode_path(fullname)
-
- An abstract method which returns the path to the bytecode for the
- specified module, if it exists. It returns ``None``
- if no bytecode exists (yet).
- Raises :exc:`ImportError` if the loader knows it cannot handle the
- module.
-
- .. method:: get_filename(fullname)
-
- A concrete implementation of
- :meth:`ExecutionLoader.get_filename` that relies on
- :meth:`PyLoader.source_path` and :meth:`bytecode_path`.
- If :meth:`source_path` returns a path, then that value is returned.
- Else if :meth:`bytecode_path` returns a path, that path will be
- returned. If a path is not available from both methods,
- :exc:`ImportError` is raised.
-
- .. method:: write_bytecode(fullname, bytecode)
-
- An abstract method which has the loader write *bytecode* for future
- use. If the bytecode is written, return ``True``. Return
- ``False`` if the bytecode could not be written. This method
- should not be called if :data:`sys.dont_write_bytecode` is true.
- The *bytecode* argument should be a bytes string or bytes array.
-
-
:mod:`importlib.machinery` -- Importers and path hooks
------------------------------------------------------
diff --git a/Doc/library/json.rst b/Doc/library/json.rst
index bdb6436..25a3358 100644
--- a/Doc/library/json.rst
+++ b/Doc/library/json.rst
@@ -42,8 +42,7 @@ Compact encoding::
Pretty printing::
>>> import json
- >>> print(json.dumps({'4': 5, '6': 7}, sort_keys=True,
- ... indent=4, separators=(',', ': ')))
+ >>> print(json.dumps({'4': 5, '6': 7}, sort_keys=True, indent=4))
{
"4": 5,
"6": 7
@@ -156,15 +155,13 @@ Basic Usage
.. versionchanged:: 3.2
Allow strings for *indent* in addition to integers.
- .. note::
-
- Since the default item separator is ``', '``, the output might include
- trailing whitespace when *indent* is specified. You can use
- ``separators=(',', ': ')`` to avoid this.
+ If specified, *separators* should be an ``(item_separator, key_separator)``
+ tuple. The default is ``(', ', ': ')`` if *indent* is ``None`` and
+ ``(',', ': ')`` otherwise. To get the most compact JSON representation,
+ you should specify ``(',', ':')`` to eliminate whitespace.
- If *separators* is an ``(item_separator, dict_separator)`` tuple, then it
- will be used instead of the default ``(', ', ': ')`` separators. ``(',',
- ':')`` is the most compact JSON representation.
+ .. versionchanged:: 3.4
+ Use ``(',', ': ')`` as default if *indent* is not ``None``.
*default(obj)* is a function that should return a serializable version of
*obj* or raise :exc:`TypeError`. The default simply raises :exc:`TypeError`.
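
A sketch of the behaviour the new defaults describe::

    import json

    data = {"4": 5, "6": 7}

    # indent given, separators omitted: ',' is used, so no trailing spaces
    print(json.dumps(data, sort_keys=True, indent=4))

    # the most compact form still needs explicit separators
    print(json.dumps(data, sort_keys=True, separators=(",", ":")))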
@@ -400,15 +397,13 @@ Encoders and Decoders
.. versionchanged:: 3.2
Allow strings for *indent* in addition to integers.
- .. note::
-
- Since the default item separator is ``', '``, the output might include
- trailing whitespace when *indent* is specified. You can use
- ``separators=(',', ': ')`` to avoid this.
-
If specified, *separators* should be an ``(item_separator, key_separator)``
- tuple. The default is ``(', ', ': ')``. To get the most compact JSON
- representation, you should specify ``(',', ':')`` to eliminate whitespace.
+ tuple. The default is ``(', ', ': ')`` if *indent* is ``None`` and
+ ``(',', ': ')`` otherwise. To get the most compact JSON representation,
+ you should specify ``(',', ':')`` to eliminate whitespace.
+
+ .. versionchanged:: 3.4
+ Use ``(',', ': ')`` as default if *indent* is not ``None``.
If specified, *default* is a function that gets called for objects that can't
otherwise be serialized. It should return a JSON encodable version of the
diff --git a/Doc/library/logging.config.rst b/Doc/library/logging.config.rst
index 1391ed2..16d3294 100644
--- a/Doc/library/logging.config.rst
+++ b/Doc/library/logging.config.rst
@@ -76,11 +76,23 @@ in :mod:`logging` itself) and defining handlers which are declared either in
.. function:: fileConfig(fname, defaults=None, disable_existing_loggers=True)
- Reads the logging configuration from a :mod:`configparser`\-format file
- named *fname*. This function can be called several times from an
- application, allowing an end user to select from various pre-canned
- configurations (if the developer provides a mechanism to present the choices
- and load the chosen configuration).
+ Reads the logging configuration from a :mod:`configparser`\-format file.
+ This function can be called several times from an application, allowing an
+ end user to select from various pre-canned configurations (if the developer
+ provides a mechanism to present the choices and load the chosen
+ configuration).
+
+ :param fname: A filename, or a file-like object, or an instance derived
+ from :class:`~configparser.RawConfigParser`. If a
+ ``RawConfigParser``-derived instance is passed, it is used as
+ is. Otherwise, a :class:`~configparser.ConfigParser` is
+ instantiated, and the configuration read by it from the
+ object passed in ``fname``. If that has a :meth:`readline`
+ method, it is assumed to be a file-like object and read using
+ :meth:`~configparser.ConfigParser.read_file`; otherwise,
+ it is assumed to be a filename and passed to
+ :meth:`~configparser.ConfigParser.read`.
+
:param defaults: Defaults to be passed to the ConfigParser can be specified
in this argument.
@@ -94,8 +106,17 @@ in :mod:`logging` itself) and defining handlers which are declared either in
their ancestors are explicitly named in the
logging configuration.
+ .. versionchanged:: 3.4
+ An instance of a subclass of :class:`~configparser.RawConfigParser` is
+ now accepted as a value for ``fname``. This facilitates:
+
+ * Use of a configuration file where logging configuration is just part
+ of the overall application configuration.
+ * Use of a configuration read from a file, and then modified by the
+ application using it (e.g. based on command-line parameters or other aspects
+ of the runtime environment) before being passed to ``fileConfig``.
-.. function:: listen(port=DEFAULT_LOGGING_CONFIG_PORT)
+.. function:: listen(port=DEFAULT_LOGGING_CONFIG_PORT, verify=None)
Starts up a socket server on the specified port, and listens for new
configurations. If no port is specified, the module's default
@@ -105,6 +126,17 @@ in :mod:`logging` itself) and defining handlers which are declared either in
server, and which you can :meth:`join` when appropriate. To stop the server,
call :func:`stopListening`.
+ The ``verify`` argument, if specified, should be a callable which should
+ verify whether bytes received across the socket are valid and should be
+ processed. This could be done by encrypting and/or signing what is sent
+ across the socket, such that the ``verify`` callable can perform
+ signature verification and/or decryption. The ``verify`` callable is called
+ with a single argument - the bytes received across the socket - and should
+ return the bytes to be processed, or None to indicate that the bytes should
+ be discarded. The returned bytes could be the same as the passed in bytes
+ (e.g. when only verification is done), or they could be completely different
+ (perhaps if decryption were performed).
+
To send a configuration to the socket, read in the configuration file and
send it to the socket as a string of bytes preceded by a four-byte length
string packed in binary using ``struct.pack('>L', n)``.
@@ -121,7 +153,12 @@ in :mod:`logging` itself) and defining handlers which are declared either in
:func:`listen` socket and sending a configuration which runs whatever
code the attacker wants to have executed in the victim's process. This is
especially easy to do if the default port is used, but not hard even if a
- different port is used).
+ different port is used). To avoid the risk of this happening, use the
+ ``verify`` argument to :func:`listen` to prevent unrecognised
+ configurations from being applied.
+
+ .. versionchanged:: 3.4
+ The ``verify`` argument was added.
.. function:: stopListening()
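
A minimal sketch of a ``verify`` callable (the shared-secret scheme and the port number are illustrative, not part of the API)::

    import logging.config

    SECRET = b"s3cret:"   # illustrative shared prefix

    def verify(data):
        # Return the bytes to process, or None to have them discarded.
        if data.startswith(SECRET):
            return data[len(SECRET):]
        return None

    t = logging.config.listen(9999, verify=verify)
    t.start()
    # ... later: logging.config.stopListening(); t.join()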
diff --git a/Doc/library/nntplib.rst b/Doc/library/nntplib.rst
index 87a50b0..5977d2a 100644
--- a/Doc/library/nntplib.rst
+++ b/Doc/library/nntplib.rst
@@ -1,4 +1,3 @@
-
:mod:`nntplib` --- NNTP protocol client
=======================================
@@ -71,7 +70,7 @@ The module itself defines the following classes:
reader-specific commands, such as ``group``. If you get unexpected
:exc:`NNTPPermanentError`\ s, you might need to set *readermode*.
:class:`NNTP` class supports the :keyword:`with` statement to
- unconditionally consume :exc:`socket.error` exceptions and to close the NNTP
+ unconditionally consume :exc:`OSError` exceptions and to close the NNTP
connection when done. Here is a sample of how to use it:
>>> from nntplib import NNTP
diff --git a/Doc/library/operator.rst b/Doc/library/operator.rst
index 3860880..877a790 100644
--- a/Doc/library/operator.rst
+++ b/Doc/library/operator.rst
@@ -235,6 +235,14 @@ their character equivalents.
.. XXX: find a better, readable, example
+.. function:: length_hint(obj, default=0)
+
+ Return an estimated length for the object *obj*. First try to return its
+ actual length, then an estimate using :meth:`object.__length_hint__`, and
+ finally return the default value.
+
+ .. versionadded:: 3.4
+
The :mod:`operator` module also defines tools for generalized attribute and item
lookups. These are useful for making fast field extractors as arguments for
:func:`map`, :func:`sorted`, :meth:`itertools.groupby`, or other functions that
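
A short sketch of the new function::

    import operator

    print(operator.length_hint([1, 2, 3]))        # 3, from len()
    print(operator.length_hint(iter(range(10))))  # 10, via __length_hint__

    def gen():
        yield 1
        yield 2

    print(operator.length_hint(gen(), 42))        # 42: no len(), no hint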
diff --git a/Doc/library/os.path.rst b/Doc/library/os.path.rst
index 13bda9b..d9b7138 100644
--- a/Doc/library/os.path.rst
+++ b/Doc/library/os.path.rst
@@ -37,7 +37,6 @@ applications should use string objects to access all files.
* :mod:`posixpath` for UNIX-style paths
* :mod:`ntpath` for Windows paths
* :mod:`macpath` for old-style MacOS paths
- * :mod:`os2emxpath` for OS/2 EMX paths
.. function:: abspath(path)
diff --git a/Doc/library/os.rst b/Doc/library/os.rst
index 027ad70..d2f7d01 100644
--- a/Doc/library/os.rst
+++ b/Doc/library/os.rst
@@ -54,7 +54,7 @@ Notes on the availability of these functions:
The name of the operating system dependent module imported. The following
names have currently been registered: ``'posix'``, ``'nt'``, ``'mac'``,
- ``'os2'``, ``'ce'``, ``'java'``.
+ ``'ce'``, ``'java'``.
.. seealso::
:attr:`sys.platform` has a finer granularity. :func:`os.uname` gives
diff --git a/Doc/library/poplib.rst b/Doc/library/poplib.rst
index 01f680e..1ac93c9 100644
--- a/Doc/library/poplib.rst
+++ b/Doc/library/poplib.rst
@@ -13,8 +13,11 @@
--------------
This module defines a class, :class:`POP3`, which encapsulates a connection to a
-POP3 server and implements the protocol as defined in :rfc:`1725`. The
-:class:`POP3` class supports both the minimal and optional command sets.
+POP3 server and implements the protocol as defined in :rfc:`1939`. The
+:class:`POP3` class supports both the minimal and optional command sets from
+ :rfc:`1939`. The :class:`POP3` class also supports the ``STLS`` command introduced
+ in :rfc:`2595` to enable encrypted communication on an already established connection.
+
Additionally, this module provides a class :class:`POP3_SSL`, which provides
support for connecting to POP3 servers that use SSL as an underlying protocol
layer.
@@ -97,6 +100,14 @@ An :class:`POP3` instance has the following methods:
Returns the greeting string sent by the POP3 server.
+.. method:: POP3.capa()
+
+ Query the server's capabilities as specified in :rfc:`2449`.
+ Returns a dictionary in the form ``{'name': ['param'...]}``.
+
+ .. versionadded:: 3.4
+
+
.. method:: POP3.user(username)
Send user command, response should indicate that a password is required.
@@ -176,6 +187,18 @@ An :class:`POP3` instance has the following methods:
the unique id for that message in the form ``'response mesgnum uid``, otherwise
result is list ``(response, ['mesgnum uid', ...], octets)``.
+.. method:: POP3.stls(context=None)
+
+ Start a TLS session on the active connection as specified in :rfc:`2595`.
+ This is only allowed before user authentication.
+
+ The *context* parameter is a :class:`ssl.SSLContext` object which allows
+ bundling SSL configuration options, certificates and private keys into
+ a single (potentially long-lived) structure.
+
+ .. versionadded:: 3.4
+
+
Instances of :class:`POP3_SSL` have no additional methods. The interface of this
subclass is identical to its parent.
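
A hedged sketch of the new commands (the host and credentials are placeholders)::

    import poplib

    pop = poplib.POP3("pop.example.com")
    print(pop.capa())    # e.g. {'STLS': [], 'UIDL': [], ...}
    pop.stls()           # upgrade the connection before authenticating
    pop.user("alice")
    pop.pass_("secret")
    pop.quit()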
diff --git a/Doc/library/pty.rst b/Doc/library/pty.rst
index 2b9385b..90baec5 100644
--- a/Doc/library/pty.rst
+++ b/Doc/library/pty.rst
@@ -45,6 +45,9 @@ The :mod:`pty` module defines the following functions:
a file descriptor. The defaults try to read 1024 bytes each time they are
called.
+ .. versionchanged:: 3.4
+ :func:`spawn` now returns the status value from :func:`os.waitpid`
+ on the child process.
Example
-------
diff --git a/Doc/library/select.rst b/Doc/library/select.rst
index 4e60f4a..b73f11e 100644
--- a/Doc/library/select.rst
+++ b/Doc/library/select.rst
@@ -47,11 +47,14 @@ The module defines the following:
to :const:`EPOLL_CLOEXEC`, which causes the epoll descriptor to be closed
automatically when :func:`os.execve` is called. See section
:ref:`epoll-objects` below for the methods supported by epolling objects.
-
+ They also support the :keyword:`with` statement.
.. versionchanged:: 3.3
Added the *flags* parameter.
+ .. versionchanged:: 3.4
+ Support for the :keyword:`with` statement was added.
+
.. function:: poll()
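
A small sketch of the newly supported context-manager form (Linux only, since epoll is Linux-specific)::

    import select
    import socket

    server = socket.socket()
    server.bind(("127.0.0.1", 0))
    server.listen(5)

    with select.epoll() as ep:       # the epoll object is closed on exit
        ep.register(server.fileno(), select.EPOLLIN)
        for fd, event in ep.poll(timeout=0.1):
            print(fd, event)

    server.close()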
diff --git a/Doc/library/shutil.rst b/Doc/library/shutil.rst
index e962112..34d8a63 100644
--- a/Doc/library/shutil.rst
+++ b/Doc/library/shutil.rst
@@ -53,7 +53,7 @@ Directory and files operations
*dst* and return *dst*. *src* and *dst* are path names given as strings.
*dst* must be the complete target file name; look at :func:`shutil.copy`
for a copy that accepts a target directory path. If *src* and *dst*
- specify the same file, :exc:`Error` is raised.
+ specify the same file, :exc:`SameFileError` is raised.
The destination location must be writable; otherwise, an :exc:`OSError`
exception will be raised. If *dst* already exists, it will be replaced.
@@ -69,6 +69,19 @@ Directory and files operations
Added *follow_symlinks* argument.
Now returns *dst*.
+ .. versionchanged:: 3.4
+ Raise :exc:`SameFileError` instead of :exc:`Error`. Since the former is
+ a subclass of the latter, this change is backward compatible.
+
+
+.. exception:: SameFileError
+
+ This exception is raised if source and destination in :func:`copyfile`
+ are the same file.
+
+ .. versionadded:: 3.4
+
+
.. function:: copymode(src, dst, *, follow_symlinks=True)
Copy the permission bits from *src* to *dst*. The file contents, owner, and
@@ -380,11 +393,10 @@ provided by this module. ::
errors.extend(err.args[0])
try:
copystat(src, dst)
- except WindowsError:
- # can't copy file access times on Windows
- pass
except OSError as why:
- errors.extend((src, dst, str(why)))
+ # can't copy file access times on Windows
+ if why.winerror is None:
+ errors.extend((src, dst, str(why)))
if errors:
raise Error(errors)
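
A sketch of the new exception in use (a temporary file stands in for a real path)::

    import shutil
    import tempfile

    with tempfile.NamedTemporaryFile() as f:
        try:
            shutil.copyfile(f.name, f.name)   # same source and destination
        except shutil.SameFileError as exc:
            # SameFileError subclasses shutil.Error, so handlers written
            # against earlier versions keep working.
            print("skipped:", exc)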
diff --git a/Doc/library/sys.rst b/Doc/library/sys.rst
index 93273c4..a906cde 100644
--- a/Doc/library/sys.rst
+++ b/Doc/library/sys.rst
@@ -393,6 +393,21 @@ always available.
.. versionadded:: 3.1
+.. function:: getallocatedblocks()
+
+ Return the number of memory blocks currently allocated by the interpreter,
+ regardless of their size. This function is mainly useful for tracking
+ and debugging memory leaks. Because of the interpreter's internal
+ caches, the result can vary from call to call; you may have to call
+ :func:`_clear_type_cache()` and :func:`gc.collect()` to get more
+ predictable results.
+
+ If a Python build or implementation cannot reasonably compute this
+ information, :func:`getallocatedblocks()` is allowed to return 0 instead.
+
+ .. versionadded:: 3.4
+
+
.. function:: getcheckinterval()
Return the interpreter's "check interval"; see :func:`setcheckinterval`.
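
A short sketch of using the new counter when hunting leaks::

    import gc
    import sys

    gc.collect()
    before = sys.getallocatedblocks()

    data = [str(i) for i in range(10000)]   # allocate something sizable
    del data

    gc.collect()
    print(sys.getallocatedblocks() - before)   # ideally close to zero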
@@ -839,8 +854,6 @@ always available.
Windows ``'win32'``
Windows/Cygwin ``'cygwin'``
Mac OS X ``'darwin'``
- OS/2 ``'os2'``
- OS/2 EMX ``'os2emx'``
================ ===========================
.. versionchanged:: 3.3
@@ -1119,7 +1132,6 @@ always available.
| :const:`name` | Name of the thread implementation: |
| | |
| | * ``'nt'``: Windows threads |
- | | * ``'os2'``: OS/2 threads |
| | * ``'pthread'``: POSIX threads |
| | * ``'solaris'``: Solaris threads |
+------------------+---------------------------------------------------------+
diff --git a/Doc/library/sysconfig.rst b/Doc/library/sysconfig.rst
index c47dcce..535ac54 100644
--- a/Doc/library/sysconfig.rst
+++ b/Doc/library/sysconfig.rst
@@ -83,8 +83,6 @@ Python currently supports seven schemes:
located under the user home directory.
- *nt*: scheme for NT platforms like Windows.
- *nt_user*: scheme for NT platforms, when the *user* option is used.
-- *os2*: scheme for OS/2 platforms.
-- *os2_home*: scheme for OS/2 patforms, when the *user* option is used.
Each scheme is itself composed of a series of paths and each path has a unique
identifier. Python currently uses eight paths:
diff --git a/Doc/library/timeit.rst b/Doc/library/timeit.rst
index a487917..0cc1586 100644
--- a/Doc/library/timeit.rst
+++ b/Doc/library/timeit.rst
@@ -151,7 +151,7 @@ The module defines three convenience functions and a public class:
t = Timer(...) # outside the try/except
try:
t.timeit(...) # or t.repeat(...)
- except:
+ except Exception:
t.print_exc()
The advantage over the standard traceback is that source lines in the
diff --git a/Doc/library/tkinter.rst b/Doc/library/tkinter.rst
index 83a5375..377694f 100644
--- a/Doc/library/tkinter.rst
+++ b/Doc/library/tkinter.rst
@@ -750,32 +750,6 @@ Entry widget indexes (index, view index, etc.)
displayed. You can use these :mod:`tkinter` functions to access these special
points in text widgets:
-.. function:: AtEnd()
- refers to the last position in the text
-
- .. deprecated:: 3.3
-
-.. function:: AtInsert()
- refers to the point where the text cursor is
-
- .. deprecated:: 3.3
-
-.. function:: AtSelFirst()
- indicates the beginning point of the selected text
-
- .. deprecated:: 3.3
-
-.. function:: AtSelLast()
- denotes the last point of the selected text and finally
-
- .. deprecated:: 3.3
-
-.. function:: At(x[, y])
- refers to the character at pixel location *x*, *y* (with *y* not used in the
- case of a text entry widget, which contains a single line of text).
-
- .. deprecated:: 3.3
-
Text widget indexes
The index notation for Text widgets is very rich and is best described in the Tk
man pages.
diff --git a/Doc/library/traceback.rst b/Doc/library/traceback.rst
index 32e5733..0533bea 100644
--- a/Doc/library/traceback.rst
+++ b/Doc/library/traceback.rst
@@ -146,7 +146,7 @@ module. ::
source = input(">>> ")
try:
exec(source, envdir)
- except:
+ except Exception:
print("Exception in user code:")
print("-"*60)
traceback.print_exc(file=sys.stdout)
diff --git a/Doc/library/unicodedata.rst b/Doc/library/unicodedata.rst
index ad70dd9..52acc18 100644
--- a/Doc/library/unicodedata.rst
+++ b/Doc/library/unicodedata.rst
@@ -15,8 +15,8 @@
This module provides access to the Unicode Character Database (UCD) which
defines character properties for all Unicode characters. The data contained in
-this database is compiled from the `UCD version 6.1.0
-<http://www.unicode.org/Public/6.1.0/ucd>`_.
+this database is compiled from the `UCD version 6.2.0
+<http://www.unicode.org/Public/6.2.0/ucd>`_.
The module uses the same names and symbols as defined by Unicode
Standard Annex #44, `"Unicode Character Database"
diff --git a/Doc/library/urllib.error.rst b/Doc/library/urllib.error.rst
index 9c3fe91..7bd04b1 100644
--- a/Doc/library/urllib.error.rst
+++ b/Doc/library/urllib.error.rst
@@ -45,6 +45,13 @@ The following exceptions are raised by :mod:`urllib.error` as appropriate:
This is usually a string explaining the reason for this error.
+ .. attribute:: headers
+
+ The HTTP response headers for the HTTP request that caused the
+ :exc:`HTTPError`.
+
+ .. versionadded:: 3.4
+
.. exception:: ContentTooShortError(msg, content)
This exception is raised when the :func:`urlretrieve` function detects that
diff --git a/Doc/library/urllib.request.rst b/Doc/library/urllib.request.rst
index 21255e5..d5f3f2c 100644
--- a/Doc/library/urllib.request.rst
+++ b/Doc/library/urllib.request.rst
@@ -121,7 +121,7 @@ The :mod:`urllib.request` module defines the following functions:
instances of them or subclasses of them: :class:`ProxyHandler`,
:class:`UnknownHandler`, :class:`HTTPHandler`, :class:`HTTPDefaultErrorHandler`,
:class:`HTTPRedirectHandler`, :class:`FTPHandler`, :class:`FileHandler`,
- :class:`HTTPErrorProcessor`.
+ :class:`HTTPErrorProcessor`, :class:`DataHandler`.
If the Python installation has SSL support (i.e., if the :mod:`ssl` module
can be imported), :class:`HTTPSHandler` will also be added.
@@ -346,6 +346,11 @@ The following classes are provided:
Open local files.
+.. class:: DataHandler()
+
+ Open data URLs.
+
+ .. versionadded:: 3.4
.. class:: FTPHandler()
@@ -403,6 +408,10 @@ request.
The entity body for the request, or None if not specified.
+ .. versionchanged:: 3.4
+ Changing the value of :attr:`Request.data` now deletes the
+ "Content-Length" header if it was previously set or calculated.
+
.. attribute:: Request.unverifiable
boolean, indicates whether the request is unverifiable as defined
@@ -451,6 +460,14 @@ request.
unredirected).
+.. method:: Request.remove_header(header)
+
+ Remove named header from the request instance (both from regular and
+ unredirected headers).
+
+ .. versionadded:: 3.4
+
+
.. method:: Request.get_full_url()
Return the URL given in the constructor.
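A short illustration (not part of the patch) of the two Request changes documented above; the URL and header name are made up, and Python 3.4+ is assumed:

    import urllib.request

    req = urllib.request.Request("http://www.example.com/api",
                                 data=b"payload",
                                 headers={"X-Debug": "1"})   # hypothetical header
    req.remove_header("X-Debug")   # removed from regular and unredirected headers
    req.data = None                # per the note above, any previously set or
                                   # calculated Content-Length header is dropped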
@@ -972,6 +989,21 @@ FileHandler Objects
hostname is given, an :exc:`URLError` is raised.
+.. _data-handler-objects:
+
+DataHandler Objects
+-------------------
+
+.. method:: DataHandler.data_open(req)
+
+ Read a data URL. This kind of URL contains the content encoded in the URL
+ itself. The data URL syntax is specified in :rfc:`2397`. This implementation
+ ignores whitespace in base64-encoded data URLs, so the URL may be wrapped
+ in whatever source file it comes from. Even though some browsers tolerate
+ missing padding at the end of a base64-encoded data URL, this
+ implementation raises a :exc:`ValueError` in that case.
+
+
.. _ftp-handler-objects:
FTPHandler Objects
@@ -1374,7 +1406,9 @@ some point in the future.
pair: FTP; protocol
* Currently, only the following protocols are supported: HTTP (versions 0.9 and
- 1.0), FTP, and local files.
+ 1.0), FTP, local files, and data URLs.
+
+ .. versionchanged:: 3.4 Added support for data URLs.
* The caching feature of :func:`urlretrieve` has been disabled until someone
finds the time to hack proper processing of Expiration time headers.
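A minimal sketch (not part of the patch) of opening an :rfc:`2397` data URL through the default opener, assuming Python 3.4+ where DataHandler is installed by default:

    import urllib.request

    # "aGVsbG8=" is base64 for b"hello"
    resp = urllib.request.urlopen("data:text/plain;base64,aGVsbG8=")
    print(resp.read())   # b'hello'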
diff --git a/Doc/library/venv.rst b/Doc/library/venv.rst
index 2499962..b2ecd93 100644
--- a/Doc/library/venv.rst
+++ b/Doc/library/venv.rst
@@ -75,8 +75,8 @@ creation according to their needs, the :class:`EnvBuilder` class.
* ``system_site_packages`` -- a Boolean value indicating that the system Python
site-packages should be available to the environment (defaults to ``False``).
- * ``clear`` -- a Boolean value which, if True, will delete any existing target
- directory instead of raising an exception (defaults to ``False``).
+ * ``clear`` -- a Boolean value which, if True, will delete the contents of
+ any existing target directory before creating the environment.
* ``symlinks`` -- a Boolean value indicating whether to attempt to symlink the
Python binary (and any necessary DLLs or other binaries,
@@ -88,7 +88,6 @@ creation according to their needs, the :class:`EnvBuilder` class.
upgraded in-place (defaults to ``False``).
-
Creators of third-party virtual environment tools will be free to use the
provided ``EnvBuilder`` class as a base class.
diff --git a/Doc/library/weakref.rst b/Doc/library/weakref.rst
index 224f442..1bf6b58 100644
--- a/Doc/library/weakref.rst
+++ b/Doc/library/weakref.rst
@@ -192,6 +192,35 @@ These methods have the same issues as the :meth:`keyrefs` method of
discarded when no strong reference to it exists any more.
+.. class:: WeakMethod(method)
+
+ A custom :class:`ref` subclass which simulates a weak reference to a bound
+ method (i.e., a method defined on a class and looked up on an instance).
+ Since a bound method is ephemeral, a standard weak reference cannot keep
+ hold of it. :class:`WeakMethod` has special code to recreate the bound
+ method until either the object or the original function dies::
+
+ >>> class C:
+ ... def method(self):
+ ... print("method called!")
+ ...
+ >>> c = C()
+ >>> r = weakref.ref(c.method)
+ >>> r()
+ >>> r = weakref.WeakMethod(c.method)
+ >>> r()
+ <bound method C.method of <__main__.C object at 0x7fc859830220>>
+ >>> r()()
+ method called!
+ >>> del c
+ >>> gc.collect()
+ 0
+ >>> r()
+ >>>
+
+ .. versionadded:: 3.4
+
+
.. data:: ReferenceType
The type object for weak references objects.
diff --git a/Doc/license.rst b/Doc/license.rst
index 6326ce4..e56ca5b 100644
--- a/Doc/license.rst
+++ b/Doc/license.rst
@@ -122,6 +122,8 @@ been GPL-compatible; the table below summarizes the various releases.
+----------------+--------------+------------+------------+-----------------+
| 3.3.0 | 3.2 | 2012 | PSF | yes |
+----------------+--------------+------------+------------+-----------------+
+| 3.4.0 | 3.3.0 | 2014 | PSF | yes |
++----------------+--------------+------------+------------+-----------------+
.. note::
@@ -656,6 +658,25 @@ The :mod:`select` and contains the following notice for the kqueue interface::
SUCH DAMAGE.
+SHA-3
+-----
+
+The modules :mod:`_sha3` and :mod:`hashlib` use the reference
+implementation of Keccak. The files in :file:`Modules/_sha3/keccak/` contain
+the following note::
+
+ The Keccak sponge function, designed by Guido Bertoni, Joan Daemen,
+ Michaël Peeters and Gilles Van Assche. For more information, feedback or
+ questions, please refer to our website: http://keccak.noekeon.org/
+
+ Implementation by the designers,
+ hereby denoted as "the implementer".
+
+ To the extent possible under law, the implementer has waived all copyright
+ and related or neighboring rights to the source code in this file.
+ http://creativecommons.org/publicdomain/zero/1.0/
+
+
strtod and dtoa
---------------
diff --git a/Doc/reference/datamodel.rst b/Doc/reference/datamodel.rst
index 7c51068..9a39eb4 100644
--- a/Doc/reference/datamodel.rst
+++ b/Doc/reference/datamodel.rst
@@ -1815,6 +1815,15 @@ through the container; for mappings, :meth:`__iter__` should be the same as
considered to be false in a Boolean context.
+.. method:: object.__length_hint__(self)
+
+ Called to implement :func:`operator.length_hint`. Should return an estimated
+ length for the object (which may be greater or less than the actual length).
+ The length must be an integer ``>=`` 0. This method is purely an
+ optimization and is never required for correctness.
+
+ .. versionadded:: 3.4
+
.. note::
Slicing is done exclusively with the following three methods. A call like ::
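A brief sketch (not part of the patch) of a class providing the new hook and of querying it via :func:`operator.length_hint`, assuming Python 3.4+; the class name is made up:

    import operator

    class Countdown:
        def __init__(self, n):
            self.n = n
        def __iter__(self):
            return iter(range(self.n, 0, -1))
        def __length_hint__(self):
            return self.n      # an estimate is enough; used only for preallocation

    print(operator.length_hint(Countdown(5)))   # 5
    print(operator.length_hint(object(), 0))    # no hint available: returns default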
diff --git a/Doc/tools/sphinxext/indexsidebar.html b/Doc/tools/sphinxext/indexsidebar.html
index a0ec32f..ed5da0c 100644
--- a/Doc/tools/sphinxext/indexsidebar.html
+++ b/Doc/tools/sphinxext/indexsidebar.html
@@ -3,7 +3,7 @@
<h3>Docs for other versions</h3>
<ul>
<li><a href="http://docs.python.org/2.7/">Python 2.7 (stable)</a></li>
- <li><a href="http://docs.python.org/3.4/">Python 3.4 (in development)</a></li>
+ <li><a href="http://docs.python.org/3.3/">Python 3.3 (stable)</a></li>
<li><a href="http://www.python.org/doc/versions/">Old versions</a></li>
</ul>
diff --git a/Doc/tools/sphinxext/pyspecific.py b/Doc/tools/sphinxext/pyspecific.py
index e8eb703..df6e48a 100644
--- a/Doc/tools/sphinxext/pyspecific.py
+++ b/Doc/tools/sphinxext/pyspecific.py
@@ -10,7 +10,7 @@
"""
ISSUE_URI = 'http://bugs.python.org/issue%s'
-SOURCE_URI = 'http://hg.python.org/cpython/file/3.3/%s'
+SOURCE_URI = 'http://hg.python.org/cpython/file/default/%s'
from docutils import nodes, utils
from sphinx.util.nodes import split_explicit_title
diff --git a/Doc/tutorial/interpreter.rst b/Doc/tutorial/interpreter.rst
index cdc2bf2..c182511 100644
--- a/Doc/tutorial/interpreter.rst
+++ b/Doc/tutorial/interpreter.rst
@@ -10,13 +10,13 @@ Using the Python Interpreter
Invoking the Interpreter
========================
-The Python interpreter is usually installed as :file:`/usr/local/bin/python3.3`
+The Python interpreter is usually installed as :file:`/usr/local/bin/python3.4`
on those machines where it is available; putting :file:`/usr/local/bin` in your
Unix shell's search path makes it possible to start it by typing the command:
.. code-block:: text
- python3.3
+ python3.4
to the shell. [#]_ Since the choice of the directory where the interpreter lives
is an installation option, other places are possible; check with your local
@@ -24,11 +24,11 @@ Python guru or system administrator. (E.g., :file:`/usr/local/python` is a
popular alternative location.)
On Windows machines, the Python installation is usually placed in
-:file:`C:\\Python33`, though you can change this when you're running the
+:file:`C:\\Python34`, though you can change this when you're running the
installer. To add this directory to your path, you can type the following
command into the command prompt in a DOS box::
- set path=%path%;C:\python33
+ set path=%path%;C:\python34
Typing an end-of-file character (:kbd:`Control-D` on Unix, :kbd:`Control-Z` on
Windows) at the primary prompt causes the interpreter to exit with a zero exit
@@ -95,8 +95,8 @@ with the *secondary prompt*, by default three dots (``...``). The interpreter
prints a welcome message stating its version number and a copyright notice
before printing the first prompt::
- $ python3.3
- Python 3.3 (default, Sep 24 2012, 09:25:04)
+ $ python3.4
+ Python 3.4 (default, Sep 24 2012, 09:25:04)
[GCC 4.6.3] on linux2
Type "help", "copyright", "credits" or "license" for more information.
>>>
@@ -149,7 +149,7 @@ Executable Python Scripts
On BSD'ish Unix systems, Python scripts can be made directly executable, like
shell scripts, by putting the line ::
- #! /usr/bin/env python3.3
+ #! /usr/bin/env python3.4
(assuming that the interpreter is on the user's :envvar:`PATH`) at the beginning
of the script and giving the file an executable mode. The ``#!`` must be the
diff --git a/Doc/tutorial/modules.rst b/Doc/tutorial/modules.rst
index c4d86ac..3a3283d 100644
--- a/Doc/tutorial/modules.rst
+++ b/Doc/tutorial/modules.rst
@@ -289,24 +289,23 @@ defines. It returns a sorted list of strings::
>>> dir(fibo)
['__name__', 'fib', 'fib2']
>>> dir(sys) # doctest: +NORMALIZE_WHITESPACE
- ['__displayhook__', '__doc__', '__egginsert', '__excepthook__',
- '__loader__', '__name__', '__package__', '__plen', '__stderr__',
- '__stdin__', '__stdout__', '_clear_type_cache', '_current_frames',
- '_debugmallocstats', '_getframe', '_home', '_mercurial', '_xoptions',
- 'abiflags', 'api_version', 'argv', 'base_exec_prefix', 'base_prefix',
- 'builtin_module_names', 'byteorder', 'call_tracing', 'callstats',
- 'copyright', 'displayhook', 'dont_write_bytecode', 'exc_info',
- 'excepthook', 'exec_prefix', 'executable', 'exit', 'flags', 'float_info',
- 'float_repr_style', 'getcheckinterval', 'getdefaultencoding',
- 'getdlopenflags', 'getfilesystemencoding', 'getobjects', 'getprofile',
- 'getrecursionlimit', 'getrefcount', 'getsizeof', 'getswitchinterval',
- 'gettotalrefcount', 'gettrace', 'hash_info', 'hexversion',
- 'implementation', 'int_info', 'intern', 'maxsize', 'maxunicode',
- 'meta_path', 'modules', 'path', 'path_hooks', 'path_importer_cache',
- 'platform', 'prefix', 'ps1', 'setcheckinterval', 'setdlopenflags',
- 'setprofile', 'setrecursionlimit', 'setswitchinterval', 'settrace',
- 'stderr', 'stdin', 'stdout', 'thread_info', 'version', 'version_info',
- 'warnoptions']
+ ['__displayhook__', '__doc__', '__excepthook__', '__loader__', '__name__',
+ '__package__', '__stderr__', '__stdin__', '__stdout__',
+ '_clear_type_cache', '_current_frames', '_debugmallocstats', '_getframe',
+ '_home', '_mercurial', '_xoptions', 'abiflags', 'api_version', 'argv',
+ 'base_exec_prefix', 'base_prefix', 'builtin_module_names', 'byteorder',
+ 'call_tracing', 'callstats', 'copyright', 'displayhook',
+ 'dont_write_bytecode', 'exc_info', 'excepthook', 'exec_prefix',
+ 'executable', 'exit', 'flags', 'float_info', 'float_repr_style',
+ 'getcheckinterval', 'getdefaultencoding', 'getdlopenflags',
+ 'getfilesystemencoding', 'getobjects', 'getprofile', 'getrecursionlimit',
+ 'getrefcount', 'getsizeof', 'getswitchinterval', 'gettotalrefcount',
+ 'gettrace', 'hash_info', 'hexversion', 'implementation', 'int_info',
+ 'intern', 'maxsize', 'maxunicode', 'meta_path', 'modules', 'path',
+ 'path_hooks', 'path_importer_cache', 'platform', 'prefix', 'ps1',
+ 'setcheckinterval', 'setdlopenflags', 'setprofile', 'setrecursionlimit',
+ 'setswitchinterval', 'settrace', 'stderr', 'stdin', 'stdout',
+ 'thread_info', 'version', 'version_info', 'warnoptions']
Without arguments, :func:`dir` lists the names you have defined currently::
diff --git a/Doc/tutorial/stdlib.rst b/Doc/tutorial/stdlib.rst
index 1ebf792..cb892fd 100644
--- a/Doc/tutorial/stdlib.rst
+++ b/Doc/tutorial/stdlib.rst
@@ -15,7 +15,7 @@ operating system::
>>> import os
>>> os.getcwd() # Return the current working directory
- 'C:\\Python33'
+ 'C:\\Python34'
>>> os.chdir('/server/accesslogs') # Change current working directory
>>> os.system('mkdir today') # Run the command mkdir in the system shell
0
diff --git a/Doc/tutorial/stdlib2.rst b/Doc/tutorial/stdlib2.rst
index 4b6e036..3b9122f 100644
--- a/Doc/tutorial/stdlib2.rst
+++ b/Doc/tutorial/stdlib2.rst
@@ -275,7 +275,7 @@ applications include caching objects that are expensive to create::
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
d['primary'] # entry was automatically removed
- File "C:/python33/lib/weakref.py", line 46, in __getitem__
+ File "C:/python34/lib/weakref.py", line 46, in __getitem__
o = self.data[key]()
KeyError: 'primary'
diff --git a/Doc/using/venv-create.inc b/Doc/using/venv-create.inc
index 5fdbc9b..706ac5d 100644
--- a/Doc/using/venv-create.inc
+++ b/Doc/using/venv-create.inc
@@ -56,20 +56,21 @@ virtualenv will be created, according to the given options, at each
provided path.
Once a venv has been created, it can be "activated" using a script in the
-venv's binary directory. The invocation of the script is platform-specific: on
-a Posix platform, you would typically do::
-
- $ source <venv>/bin/activate
-
-whereas on Windows, you might do::
-
- C:\> <venv>/Scripts/activate
-
-if you are using the ``cmd.exe`` shell, or perhaps::
-
- PS C:\> <venv>/Scripts/Activate.ps1
-
-if you use PowerShell.
+venv's binary directory. The invocation of the script is platform-specific:
+
++-------------+-----------------+-----------------------------------------+
+| Platform | Shell | Command to activate virtual environment |
++=============+=================+=========================================+
+| Posix | bash/zsh | $ source <venv>/bin/activate |
++-------------+-----------------+-----------------------------------------+
+| | fish | $ . <venv>/bin/activate.fish |
++-------------+-----------------+-----------------------------------------+
+| | csh/tcsh | $ source <venv>/bin/activate.csh |
++-------------+-----------------+-----------------------------------------+
+| Windows | cmd.exe | C:\> <venv>/Scripts/activate.bat |
++-------------+-----------------+-----------------------------------------+
+| | PowerShell | PS C:\> <venv>/Scripts/Activate.ps1 |
++-------------+-----------------+-----------------------------------------+
You don't specifically *need* to activate an environment; activation just
prepends the venv's binary directory to your path, so that "python" invokes the
diff --git a/Doc/whatsnew/3.4.rst b/Doc/whatsnew/3.4.rst
new file mode 100644
index 0000000..ab0b163
--- /dev/null
+++ b/Doc/whatsnew/3.4.rst
@@ -0,0 +1,209 @@
+****************************
+ What's New In Python 3.4
+****************************
+
+.. :Author: Someone <email>
+ (uncomment if there is a principal author)
+
+.. Rules for maintenance:
+
+ * Anyone can add text to this document, but the maintainer reserves the
+ right to rewrite any additions. In particular, for obscure or esoteric
+ features, the maintainer may reduce any addition to a simple reference to
+ the new documentation rather than explaining the feature inline.
+
+ * While the maintainer will periodically go through Misc/NEWS
+ and add changes, it's best not to rely on this. We know from experience
+ that any changes that aren't in the What's New documentation around the
+ time of the original release will remain largely unknown to the community
+ for years, even if they're added later. We also know from experience that
+ other priorities can arise, and the maintainer will run out of time to do
+ updates - in such cases, end users will be much better served by partial
+ notifications that at least give a hint about new features to
+ investigate.
+
+ * This is not a complete list of every single change; completeness
+ is the purpose of Misc/NEWS. The What's New should focus on changes that
+ are visible to Python *users* and that *require* a feature release (i.e.
+ most bug fixes should only be recorded in Misc/NEWS)
+
+ * PEPs should not be marked Final until they have an entry in What's New.
+ A placeholder entry that is just a section header and a link to the PEP
+ (e.g. ":pep:`397` has been implemented") is acceptable. If a PEP has been
+ implemented and noted in What's New, don't forget to mark it as Final!
+
+ * If you want to draw your new text to the attention of the
+ maintainer, add 'XXX' to the beginning of the paragraph or
+ section.
+
+ * It's OK to add just a very brief note about a change. For
+ example: "The :ref:`~socket.transmogrify()` function was added to the
+ :mod:`socket` module." The maintainer will research the change and
+ write the necessary text (if appropriate). The advantage of doing this
+ is that even if no more descriptive text is ever added, readers will at
+ least have a notification that the new feature exists and a link to the
+ relevant documentation.
+
+ * You can comment out your additions if you like, but it's not
+ necessary (especially when a final release is some months away).
+
+ * Credit the author of a patch or bugfix. Just the name is
+ sufficient; the e-mail address isn't necessary.
+
+ * It's helpful to add the bug/patch number as a comment:
+
+ The :ref:`~socket.transmogrify()` function was added to the
+ :mod:`socket` module. (Contributed by P.Y. Developer in :issue:`12345`.)
+
+ This saves the maintainer the effort of going through the Mercurial log
+ when researching a change.
+
+ * Cross referencing tip: :ref:`mod.attr` will display as ``mod.attr``,
+ while :ref:`~mod.attr` will display as ``attr``.
+
+This article explains the new features in Python 3.4, compared to 3.3.
+
+.. Python 3.4 was released on TBD.
+
+For full details, see the
+`changelog <http://docs.python.org/3.4/whatsnew/changelog.html>`_.
+
+.. note:: Prerelease users should be aware that this document is currently in
+ draft form. It will be updated substantially as Python 3.4 moves towards
+ release, so it's worth checking back even after reading earlier versions.
+
+
+.. seealso::
+
+ .. :pep:`4XX` - Python 3.4 Release Schedule
+
+
+Summary -- Release highlights
+=============================
+
+.. This section singles out the most important changes in Python 3.4.
+ Brevity is key.
+
+New syntax features:
+
+* None yet.
+
+New library modules:
+
+* None yet.
+
+New built-in features:
+
+* None yet.
+
+Implementation improvements:
+
+* None yet.
+
+Significantly Improved Library Modules:
+
+* SHA-3 (Keccak) support for :mod:`hashlib`.
+
+Security improvements:
+
+* None yet.
+
+Please read on for a comprehensive list of user-facing changes.
+
+
+.. PEP-sized items next.
+
+.. _pep-4XX:
+
+.. PEP 4XX: Example PEP
+.. ====================
+
+
+.. (Implemented by Foo Bar.)
+
+.. .. seealso::
+
+ :pep:`4XX` - Example PEP
+ PEP written by Example Author
+
+
+
+
+Other Language Changes
+======================
+
+Some smaller changes made to the core Python language are:
+
+* Unicode database updated to UCD version 6.2.
+
+
+
+New Modules
+===========
+
+.. module name
+.. -----------
+
+* None yet.
+
+
+Improved Modules
+================
+
+doctest
+-------
+
+Added ``FAIL_FAST`` flag to halt test running as soon as the first failure is
+detected. (Contributed by R. David Murray and Daniel Urban in :issue:`16522`.)
+
+
+Optimizations
+=============
+
+Major performance enhancements have been added:
+
+* The UTF-32 decoder is now 3x to 4x faster.
+
+
+Build and C API Changes
+=======================
+
+Changes to Python's build process and to the C API include:
+
+* None yet.
+
+
+Deprecated
+==========
+
+Unsupported Operating Systems
+-----------------------------
+
+* None yet.
+
+
+Deprecated Python modules, functions and methods
+------------------------------------------------
+
+* None yet.
+
+
+Deprecated functions and types of the C API
+-------------------------------------------
+
+* None yet.
+
+
+Deprecated features
+-------------------
+
+* None yet.
+
+
+Porting to Python 3.4
+=====================
+
+This section lists previously described changes and other bugfixes
+that may require changes to your code.
+
+* Nothing yet.
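As a small illustration of the ``FAIL_FAST`` entry above (not part of the patch, Python 3.4+ assumed), a run that stops at the first failing example instead of reporting every failure:

    import doctest

    def double(n):
        """
        >>> double(2)
        4
        >>> double(3)
        7
        >>> double(4)
        8
        """
        return n * 2

    # The second example fails (6 != 7); with FAIL_FAST the third one
    # is not even attempted.
    doctest.testmod(optionflags=doctest.FAIL_FAST)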
diff --git a/Doc/whatsnew/index.rst b/Doc/whatsnew/index.rst
index bc1206b..29902e4 100644
--- a/Doc/whatsnew/index.rst
+++ b/Doc/whatsnew/index.rst
@@ -11,6 +11,7 @@ anyone wishing to stay up-to-date after a new release.
.. toctree::
:maxdepth: 2
+ 3.4.rst
3.3.rst
3.2.rst
3.1.rst
diff --git a/Include/Python-ast.h b/Include/Python-ast.h
index 7ad6cb3..9d88d5d 100644
--- a/Include/Python-ast.h
+++ b/Include/Python-ast.h
@@ -182,8 +182,9 @@ enum _expr_kind {BoolOp_kind=1, BinOp_kind=2, UnaryOp_kind=3, Lambda_kind=4,
SetComp_kind=9, DictComp_kind=10, GeneratorExp_kind=11,
Yield_kind=12, YieldFrom_kind=13, Compare_kind=14,
Call_kind=15, Num_kind=16, Str_kind=17, Bytes_kind=18,
- Ellipsis_kind=19, Attribute_kind=20, Subscript_kind=21,
- Starred_kind=22, Name_kind=23, List_kind=24, Tuple_kind=25};
+ NameConstant_kind=19, Ellipsis_kind=20, Attribute_kind=21,
+ Subscript_kind=22, Starred_kind=23, Name_kind=24,
+ List_kind=25, Tuple_kind=26};
struct _expr {
enum _expr_kind kind;
union {
@@ -279,6 +280,10 @@ struct _expr {
} Bytes;
struct {
+ singleton value;
+ } NameConstant;
+
+ struct {
expr_ty value;
identifier attr;
expr_context_ty ctx;
@@ -509,6 +514,9 @@ expr_ty _Py_Num(object n, int lineno, int col_offset, PyArena *arena);
expr_ty _Py_Str(string s, int lineno, int col_offset, PyArena *arena);
#define Bytes(a0, a1, a2, a3) _Py_Bytes(a0, a1, a2, a3)
expr_ty _Py_Bytes(bytes s, int lineno, int col_offset, PyArena *arena);
+#define NameConstant(a0, a1, a2, a3) _Py_NameConstant(a0, a1, a2, a3)
+expr_ty _Py_NameConstant(singleton value, int lineno, int col_offset, PyArena
+ *arena);
#define Ellipsis(a0, a1, a2) _Py_Ellipsis(a0, a1, a2)
expr_ty _Py_Ellipsis(int lineno, int col_offset, PyArena *arena);
#define Attribute(a0, a1, a2, a3, a4, a5) _Py_Attribute(a0, a1, a2, a3, a4, a5)
diff --git a/Include/abstract.h b/Include/abstract.h
index 44b5af7..e2b0750 100644
--- a/Include/abstract.h
+++ b/Include/abstract.h
@@ -404,8 +404,9 @@ xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx*/
#define PyObject_Length PyObject_Size
#ifndef Py_LIMITED_API
- PyAPI_FUNC(Py_ssize_t) _PyObject_LengthHint(PyObject *o, Py_ssize_t);
+ PyAPI_FUNC(int) _PyObject_HasLen(PyObject *o);
#endif
+PyAPI_FUNC(Py_ssize_t) PyObject_LengthHint(PyObject *o, Py_ssize_t);
/*
Guess the size of object o using len(o) or o.__length_hint__().
diff --git a/Include/asdl.h b/Include/asdl.h
index 6bf618f..0789ad8 100644
--- a/Include/asdl.h
+++ b/Include/asdl.h
@@ -5,6 +5,7 @@ typedef PyObject * identifier;
typedef PyObject * string;
typedef PyObject * bytes;
typedef PyObject * object;
+typedef PyObject * singleton;
/* It would be nice if the code generated by asdl_c.py was completely
independent of Python, but it is a goal that requires too much work
diff --git a/Include/grammar.h b/Include/grammar.h
index 8426da3..862f6a8 100644
--- a/Include/grammar.h
+++ b/Include/grammar.h
@@ -76,7 +76,7 @@ dfa *PyGrammar_FindDFA(grammar *g, int type);
int addlabel(labellist *ll, int type, char *str);
int findlabel(labellist *ll, int type, char *str);
-char *PyGrammar_LabelRepr(label *lb);
+const char *PyGrammar_LabelRepr(label *lb);
void translatelabels(grammar *g);
void addfirstsets(grammar *g);
diff --git a/Include/methodobject.h b/Include/methodobject.h
index 3cc2ea9..28781d8 100644
--- a/Include/methodobject.h
+++ b/Include/methodobject.h
@@ -46,7 +46,7 @@ struct PyMethodDef {
};
typedef struct PyMethodDef PyMethodDef;
-#define PyCFunction_New(ML, SELF) PyCFunction_NewEx((ML), (SELF), NULL)
+PyAPI_FUNC(PyObject *) PyCFunction_New(PyMethodDef *, PyObject *);
PyAPI_FUNC(PyObject *) PyCFunction_NewEx(PyMethodDef *, PyObject *,
PyObject *);
diff --git a/Include/object.h b/Include/object.h
index 387cadb..ad58450 100644
--- a/Include/object.h
+++ b/Include/object.h
@@ -362,7 +362,7 @@ typedef struct _typeobject {
PyBufferProcs *tp_as_buffer;
/* Flags to define presence of optional/expanded features */
- long tp_flags;
+ unsigned long tp_flags;
const char *tp_doc; /* Documentation string */
@@ -428,7 +428,7 @@ typedef struct{
const char* name;
int basicsize;
int itemsize;
- int flags;
+ unsigned int flags;
PyType_Slot *slots; /* terminated by slot==0. */
} PyType_Spec;
@@ -470,7 +470,7 @@ PyAPI_DATA(PyTypeObject) PyType_Type; /* built-in 'type' */
PyAPI_DATA(PyTypeObject) PyBaseObject_Type; /* built-in 'object' */
PyAPI_DATA(PyTypeObject) PySuper_Type; /* built-in 'super' */
-PyAPI_FUNC(long) PyType_GetFlags(PyTypeObject*);
+PyAPI_FUNC(unsigned long) PyType_GetFlags(PyTypeObject*);
#define PyType_Check(op) \
PyType_FastSubclass(Py_TYPE(op), Py_TPFLAGS_TYPE_SUBCLASS)
@@ -603,44 +603,43 @@ given type object has a specified feature.
*/
/* Set if the type object is dynamically allocated */
-#define Py_TPFLAGS_HEAPTYPE (1L<<9)
+#define Py_TPFLAGS_HEAPTYPE (1UL << 9)
/* Set if the type allows subclassing */
-#define Py_TPFLAGS_BASETYPE (1L<<10)
+#define Py_TPFLAGS_BASETYPE (1UL << 10)
/* Set if the type is 'ready' -- fully initialized */
-#define Py_TPFLAGS_READY (1L<<12)
+#define Py_TPFLAGS_READY (1UL << 12)
/* Set while the type is being 'readied', to prevent recursive ready calls */
-#define Py_TPFLAGS_READYING (1L<<13)
+#define Py_TPFLAGS_READYING (1UL << 13)
/* Objects support garbage collection (see objimp.h) */
-#define Py_TPFLAGS_HAVE_GC (1L<<14)
+#define Py_TPFLAGS_HAVE_GC (1UL << 14)
/* These two bits are preserved for Stackless Python, next after this is 17 */
#ifdef STACKLESS
-#define Py_TPFLAGS_HAVE_STACKLESS_EXTENSION (3L<<15)
+#define Py_TPFLAGS_HAVE_STACKLESS_EXTENSION (3UL << 15)
#else
#define Py_TPFLAGS_HAVE_STACKLESS_EXTENSION 0
#endif
/* Objects support type attribute cache */
-#define Py_TPFLAGS_HAVE_VERSION_TAG (1L<<18)
-#define Py_TPFLAGS_VALID_VERSION_TAG (1L<<19)
+#define Py_TPFLAGS_HAVE_VERSION_TAG (1UL << 18)
+#define Py_TPFLAGS_VALID_VERSION_TAG (1UL << 19)
/* Type is abstract and cannot be instantiated */
-#define Py_TPFLAGS_IS_ABSTRACT (1L<<20)
+#define Py_TPFLAGS_IS_ABSTRACT (1UL << 20)
/* These flags are used to determine if a type is a subclass. */
-#define Py_TPFLAGS_INT_SUBCLASS (1L<<23)
-#define Py_TPFLAGS_LONG_SUBCLASS (1L<<24)
-#define Py_TPFLAGS_LIST_SUBCLASS (1L<<25)
-#define Py_TPFLAGS_TUPLE_SUBCLASS (1L<<26)
-#define Py_TPFLAGS_BYTES_SUBCLASS (1L<<27)
-#define Py_TPFLAGS_UNICODE_SUBCLASS (1L<<28)
-#define Py_TPFLAGS_DICT_SUBCLASS (1L<<29)
-#define Py_TPFLAGS_BASE_EXC_SUBCLASS (1L<<30)
-#define Py_TPFLAGS_TYPE_SUBCLASS (1L<<31)
+#define Py_TPFLAGS_LONG_SUBCLASS (1UL << 24)
+#define Py_TPFLAGS_LIST_SUBCLASS (1UL << 25)
+#define Py_TPFLAGS_TUPLE_SUBCLASS (1UL << 26)
+#define Py_TPFLAGS_BYTES_SUBCLASS (1UL << 27)
+#define Py_TPFLAGS_UNICODE_SUBCLASS (1UL << 28)
+#define Py_TPFLAGS_DICT_SUBCLASS (1UL << 29)
+#define Py_TPFLAGS_BASE_EXC_SUBCLASS (1UL << 30)
+#define Py_TPFLAGS_TYPE_SUBCLASS (1UL << 31)
#define Py_TPFLAGS_DEFAULT ( \
Py_TPFLAGS_HAVE_STACKLESS_EXTENSION | \
diff --git a/Include/objimpl.h b/Include/objimpl.h
index 3d5f509..b577be2 100644
--- a/Include/objimpl.h
+++ b/Include/objimpl.h
@@ -98,6 +98,8 @@ PyAPI_FUNC(void *) PyObject_Malloc(size_t);
PyAPI_FUNC(void *) PyObject_Realloc(void *, size_t);
PyAPI_FUNC(void) PyObject_Free(void *);
+/* This function returns the number of allocated memory blocks, regardless of size */
+PyAPI_FUNC(Py_ssize_t) _Py_GetAllocatedBlocks(void);
/* Macros */
#ifdef WITH_PYMALLOC
@@ -249,7 +251,7 @@ typedef union _gc_head {
union _gc_head *gc_prev;
Py_ssize_t gc_refs;
} gc;
- long double dummy; /* force worst-case alignment */
+ double dummy; /* force worst-case alignment */
} PyGC_Head;
extern PyGC_Head *_PyGC_generation0;
diff --git a/Include/osdefs.h b/Include/osdefs.h
index 05c0c8e..0c2e34b 100644
--- a/Include/osdefs.h
+++ b/Include/osdefs.h
@@ -9,16 +9,10 @@ extern "C" {
/* Mod by chrish: QNX has WATCOM, but isn't DOS */
#if !defined(__QNX__)
-#if defined(MS_WINDOWS) || defined(__BORLANDC__) || defined(__WATCOMC__) || defined(__DJGPP__) || defined(PYOS_OS2)
-#if defined(PYOS_OS2) && defined(PYCC_GCC)
-#define MAXPATHLEN 260
-#define SEP L'/'
-#define ALTSEP L'\\'
-#else
+#if defined(MS_WINDOWS) || defined(__BORLANDC__) || defined(__WATCOMC__) || defined(__DJGPP__)
#define SEP L'\\'
#define ALTSEP L'/'
#define MAXPATHLEN 256
-#endif
#define DELIM L';'
#endif
#endif
diff --git a/Include/patchlevel.h b/Include/patchlevel.h
index b5919a4..a759045 100644
--- a/Include/patchlevel.h
+++ b/Include/patchlevel.h
@@ -17,13 +17,13 @@
/* Version parsed out into numeric values */
/*--start constants--*/
#define PY_MAJOR_VERSION 3
-#define PY_MINOR_VERSION 3
+#define PY_MINOR_VERSION 4
#define PY_MICRO_VERSION 0
-#define PY_RELEASE_LEVEL PY_RELEASE_LEVEL_FINAL
+#define PY_RELEASE_LEVEL PY_RELEASE_LEVEL_ALPHA
#define PY_RELEASE_SERIAL 0
/* Version as a string */
-#define PY_VERSION "3.3.0+"
+#define PY_VERSION "3.4.0a0"
/*--end constants--*/
/* Version as a single 4-byte hex number, e.g. 0x010502B2 == 1.5.2b2.
diff --git a/Include/pyport.h b/Include/pyport.h
index e4e3601..8d38ccb 100644
--- a/Include/pyport.h
+++ b/Include/pyport.h
@@ -392,9 +392,6 @@ typedef size_t Py_uhash_t;
#endif
#ifdef HAVE_SYS_STAT_H
-#if defined(PYOS_OS2) && defined(PYCC_GCC)
-#include <sys/types.h>
-#endif
#include <sys/stat.h>
#elif defined(HAVE_STAT_H)
#include <stat.h>
@@ -881,4 +878,18 @@ extern pid_t forkpty(int *, char *, struct termios *, struct winsize *);
#endif
#endif
+/*
+ * Convenient macros to deal with endianness of the platform. WORDS_BIGENDIAN is
+ * detected by configure and defined in pyconfig.h. The code in pyconfig.h
+ * also takes care of Apple's universal builds.
+ */
+
+#ifdef WORDS_BIGENDIAN
+#define PY_BIG_ENDIAN 1
+#define PY_LITTLE_ENDIAN 0
+#else
+#define PY_BIG_ENDIAN 0
+#define PY_LITTLE_ENDIAN 1
+#endif
+
#endif /* Py_PYPORT_H */
diff --git a/Include/sliceobject.h b/Include/sliceobject.h
index 8bec179..f7ee90c 100644
--- a/Include/sliceobject.h
+++ b/Include/sliceobject.h
@@ -34,6 +34,9 @@ PyAPI_FUNC(PyObject *) PySlice_New(PyObject* start, PyObject* stop,
PyObject* step);
#ifndef Py_LIMITED_API
PyAPI_FUNC(PyObject *) _PySlice_FromIndices(Py_ssize_t start, Py_ssize_t stop);
+PyAPI_FUNC(int) _PySlice_GetLongIndices(PySliceObject *self, PyObject *length,
+ PyObject **start_ptr, PyObject **stop_ptr,
+ PyObject **step_ptr);
#endif
PyAPI_FUNC(int) PySlice_GetIndices(PyObject *r, Py_ssize_t length,
Py_ssize_t *start, Py_ssize_t *stop, Py_ssize_t *step);
diff --git a/Include/symtable.h b/Include/symtable.h
index 6ed3a2b..4d82f0c 100644
--- a/Include/symtable.h
+++ b/Include/symtable.h
@@ -41,6 +41,7 @@ typedef struct _symtable_entry {
PyObject *ste_name; /* string: name of current block */
PyObject *ste_varnames; /* list of function parameters */
PyObject *ste_children; /* list of child blocks */
+ PyObject *ste_directives;/* locations of global and nonlocal statements */
_Py_block_ty ste_type; /* module, class, or function */
int ste_unoptimized; /* false if namespace is optimized */
int ste_nested; /* true if block is nested */
diff --git a/Include/token.h b/Include/token.h
index f7f6504..905022b 100644
--- a/Include/token.h
+++ b/Include/token.h
@@ -75,7 +75,7 @@ extern "C" {
#define ISEOF(x) ((x) == ENDMARKER)
-PyAPI_DATA(char *) _PyParser_TokenNames[]; /* Token names */
+PyAPI_DATA(const char *) _PyParser_TokenNames[]; /* Token names */
PyAPI_FUNC(int) PyToken_OneChar(int);
PyAPI_FUNC(int) PyToken_TwoChars(int, int);
PyAPI_FUNC(int) PyToken_ThreeChars(int, int, int);
diff --git a/Include/unicodeobject.h b/Include/unicodeobject.h
index a8f5b5d..a70585c 100644
--- a/Include/unicodeobject.h
+++ b/Include/unicodeobject.h
@@ -180,9 +180,9 @@ typedef unsigned char Py_UCS1;
} while (0)
/* macros to work with surrogates */
-#define Py_UNICODE_IS_SURROGATE(ch) (0xD800 <= ch && ch <= 0xDFFF)
-#define Py_UNICODE_IS_HIGH_SURROGATE(ch) (0xD800 <= ch && ch <= 0xDBFF)
-#define Py_UNICODE_IS_LOW_SURROGATE(ch) (0xDC00 <= ch && ch <= 0xDFFF)
+#define Py_UNICODE_IS_SURROGATE(ch) (0xD800 <= (ch) && (ch) <= 0xDFFF)
+#define Py_UNICODE_IS_HIGH_SURROGATE(ch) (0xD800 <= (ch) && (ch) <= 0xDBFF)
+#define Py_UNICODE_IS_LOW_SURROGATE(ch) (0xDC00 <= (ch) && (ch) <= 0xDFFF)
/* Join two surrogate characters and return a single Py_UCS4 value. */
#define Py_UNICODE_JOIN_SURROGATES(high, low) \
(((((Py_UCS4)(high) & 0x03FF) << 10) | \
@@ -933,12 +933,28 @@ PyAPI_FUNC(int)
_PyUnicodeWriter_PrepareInternal(_PyUnicodeWriter *writer,
Py_ssize_t length, Py_UCS4 maxchar);
+/* Append a Unicode string.
+ Return 0 on success, raise an exception and return -1 on error. */
PyAPI_FUNC(int)
-_PyUnicodeWriter_WriteStr(_PyUnicodeWriter *writer, PyObject *str);
+_PyUnicodeWriter_WriteStr(_PyUnicodeWriter *writer,
+ PyObject *str /* Unicode string */
+ );
+/* Append a latin1-encoded byte string.
+ Return 0 on success, raise an exception and return -1 on error. */
+PyAPI_FUNC(int)
+_PyUnicodeWriter_WriteCstr(_PyUnicodeWriter *writer,
+ const char *str, /* latin1-encoded byte string */
+ Py_ssize_t len /* length in bytes */
+ );
+
+/* Get the value of the writer as a Unicode string. Clear the
+ buffer of the writer. Raise an exception and return NULL
+ on error. */
PyAPI_FUNC(PyObject *)
_PyUnicodeWriter_Finish(_PyUnicodeWriter *writer);
+/* Deallocate memory of a writer (clear its internal buffer). */
PyAPI_FUNC(void)
_PyUnicodeWriter_Dealloc(_PyUnicodeWriter *writer);
#endif
@@ -1726,7 +1742,7 @@ PyAPI_FUNC(PyObject*) PyUnicode_DecodeLocale(
/* Encode a Unicode object to the current locale encoding. The encoder is
strict if *surrogateescape* is equal to zero, otherwise the
"surrogateescape" error handler is used. Return a bytes object. The string
- cannot contain embedded null characters.. */
+ cannot contain embedded null characters. */
PyAPI_FUNC(PyObject*) PyUnicode_EncodeLocale(
PyObject *unicode,
@@ -1950,7 +1966,8 @@ PyAPI_FUNC(PyObject *) PyUnicode_Replace(
);
/* Compare two strings and return -1, 0, 1 for less than, equal,
- greater than resp. */
+ greater than resp.
+ Raise an exception and return -1 on error. */
PyAPI_FUNC(int) PyUnicode_Compare(
PyObject *left, /* Left string */
diff --git a/LICENSE b/LICENSE
index 88eed1f..d98353f 100644
--- a/LICENSE
+++ b/LICENSE
@@ -75,6 +75,7 @@ the various releases.
3.2.2 3.2.1 2011 PSF yes
3.2.3 3.2.2 2012 PSF yes
3.3.0 3.2 2012 PSF yes
+ 3.4.0 3.3.0 2014 PSF yes
Footnotes:
diff --git a/Lib/_osx_support.py b/Lib/_osx_support.py
index b3aad56..1076778 100644
--- a/Lib/_osx_support.py
+++ b/Lib/_osx_support.py
@@ -38,7 +38,7 @@ def _find_executable(executable, path=None):
paths = path.split(os.pathsep)
base, ext = os.path.splitext(executable)
- if (sys.platform == 'win32' or os.name == 'os2') and (ext != '.exe'):
+ if (sys.platform == 'win32') and (ext != '.exe'):
executable = executable + '.exe'
if not os.path.isfile(executable):
diff --git a/Lib/_pyio.py b/Lib/_pyio.py
index 9cbb364..583eb7f 100644
--- a/Lib/_pyio.py
+++ b/Lib/_pyio.py
@@ -200,7 +200,7 @@ def open(file, mode="r", buffering=-1, encoding=None, errors=None,
buffering = DEFAULT_BUFFER_SIZE
try:
bs = os.fstat(raw.fileno()).st_blksize
- except (os.error, AttributeError):
+ except (OSError, AttributeError):
pass
else:
if bs > 1:
diff --git a/Lib/abc.py b/Lib/abc.py
index 09778e8..e807895 100644
--- a/Lib/abc.py
+++ b/Lib/abc.py
@@ -226,3 +226,9 @@ class ABCMeta(type):
# No dice; update negative cache
cls._abc_negative_cache.add(subclass)
return False
+
+class ABC(metaclass=ABCMeta):
+ """Helper class that provides a standard way to create an ABC using
+ inheritance.
+ """
+ pass
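A minimal usage sketch for the new helper (not part of the patch, Python 3.4+ assumed); subclassing ``ABC`` is equivalent to writing ``metaclass=ABCMeta`` explicitly:

    from abc import ABC, abstractmethod

    class Shape(ABC):                 # same as: class Shape(metaclass=ABCMeta)
        @abstractmethod
        def area(self):
            ...

    class Square(Shape):
        def __init__(self, side):
            self.side = side
        def area(self):
            return self.side * self.side

    print(Square(3).area())   # 9; instantiating Shape() itself raises TypeError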
diff --git a/Lib/argparse.py b/Lib/argparse.py
index f25b1b6..3b1a079 100644
--- a/Lib/argparse.py
+++ b/Lib/argparse.py
@@ -606,8 +606,7 @@ class HelpFormatter(object):
pass
else:
self._indent()
- for subaction in get_subactions():
- yield subaction
+ yield from get_subactions()
self._dedent()
def _split_lines(self, text, width):
@@ -1141,11 +1140,17 @@ class FileType(object):
same values as the builtin open() function.
- bufsize -- The file's desired buffer size. Accepts the same values as
the builtin open() function.
+ - encoding -- The file's encoding. Accepts the same values as the
+ builtin open() function.
+ - errors -- A string indicating how encoding and decoding errors are to
+ be handled. Accepts the same values as the builtin open() function.
"""
- def __init__(self, mode='r', bufsize=-1):
+ def __init__(self, mode='r', bufsize=-1, encoding=None, errors=None):
self._mode = mode
self._bufsize = bufsize
+ self._encoding = encoding
+ self._errors = errors
def __call__(self, string):
# the special argument "-" means sys.std{in,out}
@@ -1160,14 +1165,18 @@ class FileType(object):
# all other arguments are used as file names
try:
- return open(string, self._mode, self._bufsize)
+ return open(string, self._mode, self._bufsize, self._encoding,
+ self._errors)
except IOError as e:
message = _("can't open '%s': %s")
raise ArgumentTypeError(message % (string, e))
def __repr__(self):
args = self._mode, self._bufsize
- args_str = ', '.join(repr(arg) for arg in args if arg != -1)
+ kwargs = [('encoding', self._encoding), ('errors', self._errors)]
+ args_str = ', '.join([repr(arg) for arg in args if arg != -1] +
+ ['%s=%r' % (kw, arg) for kw, arg in kwargs
+ if arg is not None])
return '%s(%s)' % (type(self).__name__, args_str)
# ===========================
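A quick sketch of the new FileType keyword arguments (not part of the patch, Python 3.4+ assumed; the option and file names are made up):

    import argparse

    parser = argparse.ArgumentParser()
    parser.add_argument("--out",
                        type=argparse.FileType("w", encoding="utf-8",
                                               errors="replace"))
    args = parser.parse_args(["--out", "report.txt"])
    args.out.write("done\n")
    args.out.close()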
diff --git a/Lib/ast.py b/Lib/ast.py
index 13f59f9..02c3b28 100644
--- a/Lib/ast.py
+++ b/Lib/ast.py
@@ -42,7 +42,6 @@ def literal_eval(node_or_string):
Python literal structures: strings, bytes, numbers, tuples, lists, dicts,
sets, booleans, and None.
"""
- _safe_names = {'None': None, 'True': True, 'False': False}
if isinstance(node_or_string, str):
node_or_string = parse(node_or_string, mode='eval')
if isinstance(node_or_string, Expression):
@@ -61,9 +60,8 @@ def literal_eval(node_or_string):
elif isinstance(node, Dict):
return dict((_convert(k), _convert(v)) for k, v
in zip(node.keys, node.values))
- elif isinstance(node, Name):
- if node.id in _safe_names:
- return _safe_names[node.id]
+ elif isinstance(node, NameConstant):
+ return node.value
elif isinstance(node, UnaryOp) and \
isinstance(node.op, (UAdd, USub)) and \
isinstance(node.operand, (Num, UnaryOp, BinOp)):
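User-visible behaviour of literal_eval is unchanged by the NameConstant rewrite; True, False and None are now read from the AST node's value instead of a name lookup table. A short check (illustration only):

    import ast

    print(ast.literal_eval("True"))                     # True
    print(ast.literal_eval("{'flag': None, 'n': -3}"))  # {'flag': None, 'n': -3}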
diff --git a/Lib/asynchat.py b/Lib/asynchat.py
index 4e26bb5..bb636ab 100644
--- a/Lib/asynchat.py
+++ b/Lib/asynchat.py
@@ -114,7 +114,7 @@ class async_chat (asyncore.dispatcher):
try:
data = self.recv (self.ac_in_buffer_size)
- except socket.error as why:
+ except OSError as why:
self.handle_error()
return
@@ -243,7 +243,7 @@ class async_chat (asyncore.dispatcher):
# send the data
try:
num_sent = self.send(data)
- except socket.error:
+ except OSError:
self.handle_error()
return
diff --git a/Lib/asyncore.py b/Lib/asyncore.py
index 909d9f6..b15ddeb 100644
--- a/Lib/asyncore.py
+++ b/Lib/asyncore.py
@@ -112,7 +112,7 @@ def readwrite(obj, flags):
obj.handle_expt_event()
if flags & (select.POLLHUP | select.POLLERR | select.POLLNVAL):
obj.handle_close()
- except socket.error as e:
+ except OSError as e:
if e.args[0] not in _DISCONNECTED:
obj.handle_error()
else:
@@ -240,7 +240,7 @@ class dispatcher:
# passed be connected.
try:
self.addr = sock.getpeername()
- except socket.error as err:
+ except OSError as err:
if err.args[0] in (ENOTCONN, EINVAL):
# To handle the case where we got an unconnected
# socket.
@@ -304,7 +304,7 @@ class dispatcher:
self.socket.getsockopt(socket.SOL_SOCKET,
socket.SO_REUSEADDR) | 1
)
- except socket.error:
+ except OSError:
pass
# ==================================================
@@ -345,7 +345,7 @@ class dispatcher:
self.addr = address
self.handle_connect_event()
else:
- raise socket.error(err, errorcode[err])
+ raise OSError(err, errorcode[err])
def accept(self):
# XXX can return either an address pair or None
@@ -353,7 +353,7 @@ class dispatcher:
conn, addr = self.socket.accept()
except TypeError:
return None
- except socket.error as why:
+ except OSError as why:
if why.args[0] in (EWOULDBLOCK, ECONNABORTED, EAGAIN):
return None
else:
@@ -365,7 +365,7 @@ class dispatcher:
try:
result = self.socket.send(data)
return result
- except socket.error as why:
+ except OSError as why:
if why.args[0] == EWOULDBLOCK:
return 0
elif why.args[0] in _DISCONNECTED:
@@ -384,7 +384,7 @@ class dispatcher:
return b''
else:
return data
- except socket.error as why:
+ except OSError as why:
# winsock sometimes raises ENOTCONN
if why.args[0] in _DISCONNECTED:
self.handle_close()
@@ -399,7 +399,7 @@ class dispatcher:
self.del_channel()
try:
self.socket.close()
- except socket.error as why:
+ except OSError as why:
if why.args[0] not in (ENOTCONN, EBADF):
raise
@@ -443,7 +443,7 @@ class dispatcher:
def handle_connect_event(self):
err = self.socket.getsockopt(socket.SOL_SOCKET, socket.SO_ERROR)
if err != 0:
- raise socket.error(err, _strerror(err))
+ raise OSError(err, _strerror(err))
self.handle_connect()
self.connected = True
self.connecting = False
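The socket.error replacements above rely on PEP 3151: since Python 3.3, IOError, os.error and socket.error are all aliases of OSError, so catching OSError covers every case the old names did. A tiny check (illustration only; the path is made up):

    import os, socket

    print(socket.error is OSError)   # True
    print(os.error is OSError)       # True
    try:
        open("/nonexistent/path")
    except OSError as exc:
        print(exc.errno, exc.strerror)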
diff --git a/Lib/bz2.py b/Lib/bz2.py
index c307507..6e6a2b9 100644
--- a/Lib/bz2.py
+++ b/Lib/bz2.py
@@ -9,7 +9,6 @@ __all__ = ["BZ2File", "BZ2Compressor", "BZ2Decompressor",
__author__ = "Nadeem Vawda <nadeem.vawda@gmail.com>"
-import builtins
import io
import warnings
@@ -28,6 +27,8 @@ _MODE_WRITE = 3
_BUFFER_SIZE = 8192
+_builtin_open = open
+
class BZ2File(io.BufferedIOBase):
@@ -43,12 +44,13 @@ class BZ2File(io.BufferedIOBase):
def __init__(self, filename, mode="r", buffering=None, compresslevel=9):
"""Open a bzip2-compressed file.
- If filename is a str or bytes object, is gives the name of the file to
- be opened. Otherwise, it should be a file object, which will be used to
- read or write the compressed data.
+ If filename is a str or bytes object, it gives the name
+ of the file to be opened. Otherwise, it should be a file object,
+ which will be used to read or write the compressed data.
- mode can be 'r' for reading (default), 'w' for (over)writing, or 'a' for
- appending. These can equivalently be given as 'rb', 'wb', and 'ab'.
+ mode can be 'r' for reading (default), 'w' for (over)writing,
+ or 'a' for appending. These can equivalently be given as 'rb',
+ 'wb', and 'ab'.
buffering is ignored. Its use is deprecated.
@@ -90,10 +92,10 @@ class BZ2File(io.BufferedIOBase):
mode_code = _MODE_WRITE
self._compressor = BZ2Compressor(compresslevel)
else:
- raise ValueError("Invalid mode: {!r}".format(mode))
+ raise ValueError("Invalid mode: %r" % (mode,))
if isinstance(filename, (str, bytes)):
- self._fp = builtins.open(filename, mode)
+ self._fp = _builtin_open(filename, mode)
self._closefp = True
self._mode = mode_code
elif hasattr(filename, "read") or hasattr(filename, "write"):
@@ -189,15 +191,17 @@ class BZ2File(io.BufferedIOBase):
if not rawblock:
if self._decompressor.eof:
+ # End-of-stream marker and end of file. We're good.
self._mode = _MODE_READ_EOF
self._size = self._pos
return False
else:
+ # Problem - we were expecting more compressed data.
raise EOFError("Compressed file ended before the "
"end-of-stream marker was reached")
- # Continue to next stream.
if self._decompressor.eof:
+ # Continue to next stream.
self._decompressor = BZ2Decompressor()
self._buffer = self._decompressor.decompress(rawblock)
@@ -412,7 +416,7 @@ class BZ2File(io.BufferedIOBase):
self._read_all(return_data=False)
offset = self._size + offset
else:
- raise ValueError("Invalid value for whence: {}".format(whence))
+ raise ValueError("Invalid value for whence: %s" % (whence,))
# Make it so that offset is the number of bytes to skip forward.
if offset < self._pos:
@@ -436,20 +440,20 @@ def open(filename, mode="rb", compresslevel=9,
encoding=None, errors=None, newline=None):
"""Open a bzip2-compressed file in binary or text mode.
- The filename argument can be an actual filename (a str or bytes object), or
- an existing file object to read from or write to.
+ The filename argument can be an actual filename (a str or bytes
+ object), or an existing file object to read from or write to.
- The mode argument can be "r", "rb", "w", "wb", "a" or "ab" for binary mode,
- or "rt", "wt" or "at" for text mode. The default mode is "rb", and the
- default compresslevel is 9.
+ The mode argument can be "r", "rb", "w", "wb", "a" or "ab" for
+ binary mode, or "rt", "wt" or "at" for text mode. The default mode
+ is "rb", and the default compresslevel is 9.
- For binary mode, this function is equivalent to the BZ2File constructor:
- BZ2File(filename, mode, compresslevel). In this case, the encoding, errors
- and newline arguments must not be provided.
+ For binary mode, this function is equivalent to the BZ2File
+ constructor: BZ2File(filename, mode, compresslevel). In this case,
+ the encoding, errors and newline arguments must not be provided.
For text mode, a BZ2File object is created, and wrapped in an
- io.TextIOWrapper instance with the specified encoding, error handling
- behavior, and line ending(s).
+ io.TextIOWrapper instance with the specified encoding, error
+ handling behavior, and line ending(s).
"""
if "t" in mode:
diff --git a/Lib/cgi.py b/Lib/cgi.py
index e964f0c..8cc48bd 100755
--- a/Lib/cgi.py
+++ b/Lib/cgi.py
@@ -949,8 +949,8 @@ def print_directory():
print("<H3>Current Working Directory:</H3>")
try:
pwd = os.getcwd()
- except os.error as msg:
- print("os.error:", html.escape(str(msg)))
+ except OSError as msg:
+ print("OSError:", html.escape(str(msg)))
else:
print(html.escape(pwd))
print()
diff --git a/Lib/collections/__init__.py b/Lib/collections/__init__.py
index e5f9599..53083e4 100644
--- a/Lib/collections/__init__.py
+++ b/Lib/collections/__init__.py
@@ -228,8 +228,7 @@ class OrderedDict(dict):
'''
if isinstance(other, OrderedDict):
- return len(self)==len(other) and \
- all(map(_eq, self.items(), other.items()))
+ return dict.__eq__(self, other) and all(map(_eq, self, other))
return dict.__eq__(self, other)
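The rewritten __eq__ keeps the documented semantics: comparison between two OrderedDicts is order-sensitive, while comparison with a plain dict ignores order. A short check (illustration only):

    from collections import OrderedDict

    a = OrderedDict([("x", 1), ("y", 2)])
    b = OrderedDict([("y", 2), ("x", 1)])
    print(a == b)         # False: same items, different order
    print(a == dict(b))   # True: order ignored against a regular dict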
diff --git a/Lib/collections/abc.py b/Lib/collections/abc.py
index c23b7dd..18c07bf 100644
--- a/Lib/collections/abc.py
+++ b/Lib/collections/abc.py
@@ -430,8 +430,7 @@ class KeysView(MappingView, Set):
return key in self._mapping
def __iter__(self):
- for key in self._mapping:
- yield key
+ yield from self._mapping
KeysView.register(dict_keys)
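Several hunks in this patch replace explicit re-yield loops with PEP 380's ``yield from``; the two spellings below are equivalent for simple delegation (illustration only):

    def keys_old(mapping):
        for key in mapping:      # pre-3.3 spelling
            yield key

    def keys_new(mapping):
        yield from mapping       # delegation as used in the patch

    data = {"a": 1, "b": 2}
    print(list(keys_old(data)) == list(keys_new(data)))   # True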
diff --git a/Lib/compileall.py b/Lib/compileall.py
index d3cff6a..a47e84f 100644
--- a/Lib/compileall.py
+++ b/Lib/compileall.py
@@ -38,7 +38,7 @@ def compile_dir(dir, maxlevels=10, ddir=None, force=False, rx=None,
print('Listing {!r}...'.format(dir))
try:
names = os.listdir(dir)
- except os.error:
+ except OSError:
print("Can't list {!r}".format(dir))
names = []
names.sort()
@@ -209,7 +209,7 @@ def main():
with (sys.stdin if args.flist=='-' else open(args.flist)) as f:
for line in f:
compile_dests.append(line.strip())
- except EnvironmentError:
+ except OSError:
print("Error reading file list {}".format(args.flist))
return False
diff --git a/Lib/concurrent/futures/_base.py b/Lib/concurrent/futures/_base.py
index 1e098be..883d993 100644
--- a/Lib/concurrent/futures/_base.py
+++ b/Lib/concurrent/futures/_base.py
@@ -198,8 +198,7 @@ def as_completed(fs, timeout=None):
waiter = _create_and_install_waiters(fs, _AS_COMPLETED)
try:
- for future in finished:
- yield future
+ yield from finished
while pending:
if timeout is None:
diff --git a/Lib/concurrent/futures/process.py b/Lib/concurrent/futures/process.py
index 04238a7..3c20b93 100644
--- a/Lib/concurrent/futures/process.py
+++ b/Lib/concurrent/futures/process.py
@@ -40,7 +40,7 @@ Local worker thread:
Process #1..n:
- reads _CallItems from "Call Q", executes the calls, and puts the resulting
- _ResultItems in "Request Q"
+ _ResultItems in "Result Q"
"""
__author__ = 'Brian Quinlan (brian@sweetapp.com)'
@@ -240,6 +240,8 @@ def _queue_management_worker(executor_reference,
"terminated abruptly while the future was "
"running or pending."
))
+ # Delete references to object. See issue16284
+ del work_item
pending_work_items.clear()
# Terminate remaining workers forcibly: the queues or their
# locks may be in a dirty state and block forever.
@@ -264,6 +266,8 @@ def _queue_management_worker(executor_reference,
work_item.future.set_exception(result_item.exception)
else:
work_item.future.set_result(result_item.result)
+ # Delete references to object. See issue16284
+ del work_item
# Check whether we should start shutting down.
executor = executor_reference()
# No more work items can be added if:
diff --git a/Lib/concurrent/futures/thread.py b/Lib/concurrent/futures/thread.py
index 95bb682..f9beb0f 100644
--- a/Lib/concurrent/futures/thread.py
+++ b/Lib/concurrent/futures/thread.py
@@ -63,6 +63,8 @@ def _worker(executor_reference, work_queue):
work_item = work_queue.get(block=True)
if work_item is not None:
work_item.run()
+ # Delete references to object. See issue16284
+ del work_item
continue
executor = executor_reference()
# Exit if:
diff --git a/Lib/ctypes/__init__.py b/Lib/ctypes/__init__.py
index c92e130..e2f75c5 100644
--- a/Lib/ctypes/__init__.py
+++ b/Lib/ctypes/__init__.py
@@ -395,7 +395,7 @@ if _os.name in ("nt", "ce"):
_type_ = "l"
# _check_retval_ is called with the function's result when it
# is used as restype. It checks for the FAILED bit, and
- # raises a WindowsError if it is set.
+ # raises an OSError if it is set.
#
# The _check_retval_ method is implemented in C, so that the
# method definition itself is not included in the traceback
@@ -407,7 +407,7 @@ if _os.name in ("nt", "ce"):
class OleDLL(CDLL):
"""This class represents a dll exporting functions using the
Windows stdcall calling convention, and returning HRESULT.
- HRESULT error values are automatically raised as WindowsError
+ HRESULT error values are automatically raised as OSError
exceptions.
"""
_func_flags_ = _FUNCFLAG_STDCALL
@@ -456,7 +456,7 @@ if _os.name in ("nt", "ce"):
code = GetLastError()
if descr is None:
descr = FormatError(code).strip()
- return WindowsError(None, descr, None, code)
+ return OSError(None, descr, None, code)
if sizeof(c_uint) == sizeof(c_void_p):
c_size_t = c_uint
diff --git a/Lib/ctypes/test/test_checkretval.py b/Lib/ctypes/test/test_checkretval.py
index 01ccc57..19bb813 100644
--- a/Lib/ctypes/test/test_checkretval.py
+++ b/Lib/ctypes/test/test_checkretval.py
@@ -31,7 +31,7 @@ class Test(unittest.TestCase):
pass
else:
def test_oledll(self):
- self.assertRaises(WindowsError,
+ self.assertRaises(OSError,
oledll.oleaut32.CreateTypeLib2,
0, None, None)
diff --git a/Lib/ctypes/test/test_win32.py b/Lib/ctypes/test/test_win32.py
index 128914e..4c85825 100644
--- a/Lib/ctypes/test/test_win32.py
+++ b/Lib/ctypes/test/test_win32.py
@@ -40,7 +40,7 @@ if sys.platform == "win32":
# Call functions with invalid arguments, and make sure
# that access violations are trapped and raise an
# exception.
- self.assertRaises(WindowsError, windll.kernel32.GetModuleHandleA, 32)
+ self.assertRaises(OSError, windll.kernel32.GetModuleHandleA, 32)
def test_noargs(self):
# This is a special case on win32 x64
diff --git a/Lib/dbm/__init__.py b/Lib/dbm/__init__.py
index 813a29d..3bff170 100644
--- a/Lib/dbm/__init__.py
+++ b/Lib/dbm/__init__.py
@@ -106,10 +106,8 @@ def whichdb(filename):
try:
f = io.open(filename + ".pag", "rb")
f.close()
- # dbm linked with gdbm on OS/2 doesn't have .dir file
- if not (ndbm.library == "GNU gdbm" and sys.platform == "os2emx"):
- f = io.open(filename + ".dir", "rb")
- f.close()
+ f = io.open(filename + ".dir", "rb")
+ f.close()
return "dbm.ndbm"
except IOError:
# some dbm emulations based on Berkeley DB generate a .db file
diff --git a/Lib/dbm/dumb.py b/Lib/dbm/dumb.py
index cfb9123..0bb3a89 100644
--- a/Lib/dbm/dumb.py
+++ b/Lib/dbm/dumb.py
@@ -100,12 +100,12 @@ class _Database(collections.MutableMapping):
try:
self._os.unlink(self._bakfile)
- except self._os.error:
+ except OSError:
pass
try:
self._os.rename(self._dirfile, self._bakfile)
- except self._os.error:
+ except OSError:
pass
f = self._io.open(self._dirfile, 'w', encoding="Latin-1")
diff --git a/Lib/decimal.py b/Lib/decimal.py
index 746b34a..25f8fbc 100644
--- a/Lib/decimal.py
+++ b/Lib/decimal.py
@@ -703,8 +703,7 @@ class Decimal(object):
raise TypeError("Cannot convert %r to Decimal" % value)
- # @classmethod, but @decorator is not valid Python 2.3 syntax, so
- # don't use it (see notes on Py2.3 compatibility at top of file)
+ @classmethod
def from_float(cls, f):
"""Converts a float to a decimal number, exactly.
@@ -743,7 +742,6 @@ class Decimal(object):
return result
else:
return cls(result)
- from_float = classmethod(from_float)
def _isnan(self):
"""Returns whether the number is not actually one.
diff --git a/Lib/difflib.py b/Lib/difflib.py
index ae377d7..db5f82e 100644
--- a/Lib/difflib.py
+++ b/Lib/difflib.py
@@ -922,8 +922,7 @@ class Differ:
else:
raise ValueError('unknown tag %r' % (tag,))
- for line in g:
- yield line
+ yield from g
def _dump(self, tag, x, lo, hi):
"""Generate comparison results for a same-tagged range."""
@@ -942,8 +941,7 @@ class Differ:
second = self._dump('+', b, blo, bhi)
for g in first, second:
- for line in g:
- yield line
+ yield from g
def _fancy_replace(self, a, alo, ahi, b, blo, bhi):
r"""
@@ -997,8 +995,7 @@ class Differ:
# no non-identical "pretty close" pair
if eqi is None:
# no identical pair either -- treat it as a straight replace
- for line in self._plain_replace(a, alo, ahi, b, blo, bhi):
- yield line
+ yield from self._plain_replace(a, alo, ahi, b, blo, bhi)
return
# no close pair, but an identical pair -- synch up on that
best_i, best_j, best_ratio = eqi, eqj, 1.0
@@ -1010,8 +1007,7 @@ class Differ:
# identical
# pump out diffs from before the synch point
- for line in self._fancy_helper(a, alo, best_i, b, blo, best_j):
- yield line
+ yield from self._fancy_helper(a, alo, best_i, b, blo, best_j)
# do intraline marking on the synch pair
aelt, belt = a[best_i], b[best_j]
@@ -1033,15 +1029,13 @@ class Differ:
btags += ' ' * lb
else:
raise ValueError('unknown tag %r' % (tag,))
- for line in self._qformat(aelt, belt, atags, btags):
- yield line
+ yield from self._qformat(aelt, belt, atags, btags)
else:
# the synch pair is identical
yield ' ' + aelt
# pump out diffs from after the synch point
- for line in self._fancy_helper(a, best_i+1, ahi, b, best_j+1, bhi):
- yield line
+ yield from self._fancy_helper(a, best_i+1, ahi, b, best_j+1, bhi)
def _fancy_helper(self, a, alo, ahi, b, blo, bhi):
g = []
@@ -1053,8 +1047,7 @@ class Differ:
elif blo < bhi:
g = self._dump('+', b, blo, bhi)
- for line in g:
- yield line
+ yield from g
def _qformat(self, aline, bline, atags, btags):
r"""
diff --git a/Lib/distutils/__init__.py b/Lib/distutils/__init__.py
index 345ac4f..70a72b0 100644
--- a/Lib/distutils/__init__.py
+++ b/Lib/distutils/__init__.py
@@ -13,5 +13,5 @@ used from a setup script as
# Updated automatically by the Python release process.
#
#--start constants--
-__version__ = "3.3.0"
+__version__ = "3.4.0a0"
#--end constants--
diff --git a/Lib/distutils/ccompiler.py b/Lib/distutils/ccompiler.py
index c795c95..911e84d 100644
--- a/Lib/distutils/ccompiler.py
+++ b/Lib/distutils/ccompiler.py
@@ -351,7 +351,7 @@ class CCompiler:
return macros, objects, extra, pp_opts, build
def _get_cc_args(self, pp_opts, debug, before):
- # works for unixccompiler, emxccompiler, cygwinccompiler
+ # works for unixccompiler, cygwinccompiler
cc_args = pp_opts + ['-c']
if debug:
cc_args[:0] = ['-g']
@@ -926,7 +926,6 @@ _default_compilers = (
# on a cygwin built python we can use gcc like an ordinary UNIXish
# compiler
('cygwin.*', 'unix'),
- ('os2emx', 'emx'),
# OS name mappings
('posix', 'unix'),
@@ -968,8 +967,6 @@ compiler_class = { 'unix': ('unixccompiler', 'UnixCCompiler',
"Mingw32 port of GNU C Compiler for Win32"),
'bcpp': ('bcppcompiler', 'BCPPCompiler',
"Borland C++ Compiler"),
- 'emx': ('emxccompiler', 'EMXCCompiler',
- "EMX port of GNU C Compiler for OS/2"),
}
def show_compilers():
diff --git a/Lib/distutils/command/bdist.py b/Lib/distutils/command/bdist.py
index c5188eb..38b169a 100644
--- a/Lib/distutils/command/bdist.py
+++ b/Lib/distutils/command/bdist.py
@@ -52,8 +52,7 @@ class bdist(Command):
# This won't do in reality: will need to distinguish RPM-ish Linux,
# Debian-ish Linux, Solaris, FreeBSD, ..., Windows, Mac OS.
default_format = {'posix': 'gztar',
- 'nt': 'zip',
- 'os2': 'zip'}
+ 'nt': 'zip'}
# Establish the preferred order (for the --help-formats option).
format_commands = ['rpm', 'gztar', 'bztar', 'ztar', 'tar',
diff --git a/Lib/distutils/command/bdist_dumb.py b/Lib/distutils/command/bdist_dumb.py
index 1ab09d1..eefdfea 100644
--- a/Lib/distutils/command/bdist_dumb.py
+++ b/Lib/distutils/command/bdist_dumb.py
@@ -38,8 +38,7 @@ class bdist_dumb(Command):
boolean_options = ['keep-temp', 'skip-build', 'relative']
default_format = { 'posix': 'gztar',
- 'nt': 'zip',
- 'os2': 'zip' }
+ 'nt': 'zip' }
def initialize_options(self):
self.bdist_dir = None
@@ -85,11 +84,6 @@ class bdist_dumb(Command):
archive_basename = "%s.%s" % (self.distribution.get_fullname(),
self.plat_name)
- # OS/2 objects to any ":" characters in a filename (such as when
- # a timestamp is used in a version) so change them to hyphens.
- if os.name == "os2":
- archive_basename = archive_basename.replace(":", "-")
-
pseudoinstall_root = os.path.join(self.dist_dir, archive_basename)
if not self.relative:
archive_root = self.bdist_dir
diff --git a/Lib/distutils/command/build_ext.py b/Lib/distutils/command/build_ext.py
index b1d951e..f7c71b3 100644
--- a/Lib/distutils/command/build_ext.py
+++ b/Lib/distutils/command/build_ext.py
@@ -230,11 +230,6 @@ class build_ext(Command):
self.library_dirs.append(os.path.join(sys.exec_prefix,
'PC', 'VC6'))
- # OS/2 (EMX) doesn't support Debug vs Release builds, but has the
- # import libraries in its "Config" subdirectory
- if os.name == 'os2':
- self.library_dirs.append(os.path.join(sys.exec_prefix, 'Config'))
-
# for extensions under Cygwin and AtheOS Python's library directory must be
# appended to library_dirs
if sys.platform[:6] == 'cygwin' or sys.platform[:6] == 'atheos':
@@ -620,9 +615,6 @@ class build_ext(Command):
return fn
else:
return "swig.exe"
- elif os.name == "os2":
- # assume swig available in the PATH.
- return "swig.exe"
else:
raise DistutilsPlatformError(
"I don't know how to find (much less run) SWIG "
@@ -673,9 +665,6 @@ class build_ext(Command):
"""
from distutils.sysconfig import get_config_var
ext_path = ext_name.split('.')
- # OS/2 has an 8 character module (extension) limit :-(
- if os.name == "os2":
- ext_path[len(ext_path) - 1] = ext_path[len(ext_path) - 1][:8]
# extensions in debug_mode are named 'module_d.pyd' under windows
so_ext = get_config_var('SO')
if os.name == 'nt' and self.debug:
@@ -696,7 +685,7 @@ class build_ext(Command):
def get_libraries(self, ext):
"""Return the list of libraries to link against when building a
shared extension. On most platforms, this is just 'ext.libraries';
- on Windows and OS/2, we add the Python library (eg. python20.dll).
+ on Windows, we add the Python library (eg. python20.dll).
"""
# The python library is always needed on Windows. For MSVC, this
# is redundant, since the library is mentioned in a pragma in
@@ -716,19 +705,6 @@ class build_ext(Command):
return ext.libraries + [pythonlib]
else:
return ext.libraries
- elif sys.platform == "os2emx":
- # EMX/GCC requires the python library explicitly, and I
- # believe VACPP does as well (though not confirmed) - AIM Apr01
- template = "python%d%d"
- # debug versions of the main DLL aren't supported, at least
- # not at this time - AIM Apr01
- #if self.debug:
- # template = template + '_d'
- pythonlib = (template %
- (sys.hexversion >> 24, (sys.hexversion >> 16) & 0xff))
- # don't extend ext.libraries, it may be shared with other
- # extensions, it is a reference to the original list
- return ext.libraries + [pythonlib]
elif sys.platform[:6] == "cygwin":
template = "python%d.%d"
pythonlib = (template %
diff --git a/Lib/distutils/command/install.py b/Lib/distutils/command/install.py
index 0161898..04326a1 100644
--- a/Lib/distutils/command/install.py
+++ b/Lib/distutils/command/install.py
@@ -58,13 +58,6 @@ INSTALL_SCHEMES = {
'data' : '$base',
},
'nt': WINDOWS_SCHEME,
- 'os2': {
- 'purelib': '$base/Lib/site-packages',
- 'platlib': '$base/Lib/site-packages',
- 'headers': '$base/Include/$dist_name',
- 'scripts': '$base/Scripts',
- 'data' : '$base',
- },
}
# user site schemes
@@ -86,14 +79,6 @@ if HAS_USER_SITE:
'data' : '$userbase',
}
- INSTALL_SCHEMES['os2_home'] = {
- 'purelib': '$usersite',
- 'platlib': '$usersite',
- 'headers': '$userbase/include/python$py_version_short/$dist_name',
- 'scripts': '$userbase/bin',
- 'data' : '$userbase',
- }
-
# The keys to an installation scheme; if any new types of files are to be
# installed, be sure to add an entry to every installation scheme above,
# and to SCHEME_KEYS here.
diff --git a/Lib/distutils/command/upload.py b/Lib/distutils/command/upload.py
index 8b36851..88990b2 100644
--- a/Lib/distutils/command/upload.py
+++ b/Lib/distutils/command/upload.py
@@ -186,7 +186,7 @@ class upload(PyPIRCCommand):
http.putheader('Authorization', auth)
http.endheaders()
http.send(body)
- except socket.error as e:
+ except OSError as e:
self.announce(str(e), log.ERROR)
return
diff --git a/Lib/distutils/core.py b/Lib/distutils/core.py
index 260332a..a43d725 100644
--- a/Lib/distutils/core.py
+++ b/Lib/distutils/core.py
@@ -148,7 +148,7 @@ def setup (**attrs):
dist.run_commands()
except KeyboardInterrupt:
raise SystemExit("interrupted")
- except (IOError, os.error) as exc:
+ except (IOError, OSError) as exc:
error = grok_environment_error(exc)
if DEBUG:
diff --git a/Lib/distutils/dir_util.py b/Lib/distutils/dir_util.py
index 2826ff8..7d1c78a 100644
--- a/Lib/distutils/dir_util.py
+++ b/Lib/distutils/dir_util.py
@@ -124,7 +124,7 @@ def copy_tree(src, dst, preserve_mode=1, preserve_times=1,
"cannot copy tree '%s': not a directory" % src)
try:
names = os.listdir(src)
- except os.error as e:
+ except OSError as e:
(errno, errstr) = e
if dry_run:
names = []
diff --git a/Lib/distutils/emxccompiler.py b/Lib/distutils/emxccompiler.py
deleted file mode 100644
index 3675f8d..0000000
--- a/Lib/distutils/emxccompiler.py
+++ /dev/null
@@ -1,315 +0,0 @@
-"""distutils.emxccompiler
-
-Provides the EMXCCompiler class, a subclass of UnixCCompiler that
-handles the EMX port of the GNU C compiler to OS/2.
-"""
-
-# issues:
-#
-# * OS/2 insists that DLLs can have names no longer than 8 characters
-# We put export_symbols in a def-file, as though the DLL can have
-# an arbitrary length name, but truncate the output filename.
-#
-# * only use OMF objects and use LINK386 as the linker (-Zomf)
-#
-# * always build for multithreading (-Zmt) as the accompanying OS/2 port
-# of Python is only distributed with threads enabled.
-#
-# tested configurations:
-#
-# * EMX gcc 2.81/EMX 0.9d fix03
-
-import os,sys,copy
-from distutils.ccompiler import gen_preprocess_options, gen_lib_options
-from distutils.unixccompiler import UnixCCompiler
-from distutils.file_util import write_file
-from distutils.errors import DistutilsExecError, CompileError, UnknownFileError
-from distutils import log
-
-class EMXCCompiler (UnixCCompiler):
-
- compiler_type = 'emx'
- obj_extension = ".obj"
- static_lib_extension = ".lib"
- shared_lib_extension = ".dll"
- static_lib_format = "%s%s"
- shared_lib_format = "%s%s"
- res_extension = ".res" # compiled resource file
- exe_extension = ".exe"
-
- def __init__ (self,
- verbose=0,
- dry_run=0,
- force=0):
-
- UnixCCompiler.__init__ (self, verbose, dry_run, force)
-
- (status, details) = check_config_h()
- self.debug_print("Python's GCC status: %s (details: %s)" %
- (status, details))
- if status is not CONFIG_H_OK:
- self.warn(
- "Python's pyconfig.h doesn't seem to support your compiler. " +
- ("Reason: %s." % details) +
- "Compiling may fail because of undefined preprocessor macros.")
-
- (self.gcc_version, self.ld_version) = \
- get_versions()
- self.debug_print(self.compiler_type + ": gcc %s, ld %s\n" %
- (self.gcc_version,
- self.ld_version) )
-
- # Hard-code GCC because that's what this is all about.
- # XXX optimization, warnings etc. should be customizable.
- self.set_executables(compiler='gcc -Zomf -Zmt -O3 -fomit-frame-pointer -mprobe -Wall',
- compiler_so='gcc -Zomf -Zmt -O3 -fomit-frame-pointer -mprobe -Wall',
- linker_exe='gcc -Zomf -Zmt -Zcrtdll',
- linker_so='gcc -Zomf -Zmt -Zcrtdll -Zdll')
-
- # want the gcc library statically linked (so that we don't have
- # to distribute a version dependent on the compiler we have)
- self.dll_libraries=["gcc"]
-
- # __init__ ()
-
- def _compile(self, obj, src, ext, cc_args, extra_postargs, pp_opts):
- if ext == '.rc':
- # gcc requires '.rc' compiled to binary ('.res') files !!!
- try:
- self.spawn(["rc", "-r", src])
- except DistutilsExecError as msg:
- raise CompileError(msg)
- else: # for other files use the C-compiler
- try:
- self.spawn(self.compiler_so + cc_args + [src, '-o', obj] +
- extra_postargs)
- except DistutilsExecError as msg:
- raise CompileError(msg)
-
- def link (self,
- target_desc,
- objects,
- output_filename,
- output_dir=None,
- libraries=None,
- library_dirs=None,
- runtime_library_dirs=None,
- export_symbols=None,
- debug=0,
- extra_preargs=None,
- extra_postargs=None,
- build_temp=None,
- target_lang=None):
-
- # use separate copies, so we can modify the lists
- extra_preargs = copy.copy(extra_preargs or [])
- libraries = copy.copy(libraries or [])
- objects = copy.copy(objects or [])
-
- # Additional libraries
- libraries.extend(self.dll_libraries)
-
- # handle export symbols by creating a def-file
- # with executables this only works with gcc/ld as linker
- if ((export_symbols is not None) and
- (target_desc != self.EXECUTABLE)):
- # (The linker doesn't do anything if output is up-to-date.
- # So it would probably better to check if we really need this,
- # but for this we had to insert some unchanged parts of
- # UnixCCompiler, and this is not what we want.)
-
- # we want to put some files in the same directory as the
- # object files are, build_temp doesn't help much
- # where are the object files
- temp_dir = os.path.dirname(objects[0])
- # name of dll to give the helper files the same base name
- (dll_name, dll_extension) = os.path.splitext(
- os.path.basename(output_filename))
-
- # generate the filenames for these files
- def_file = os.path.join(temp_dir, dll_name + ".def")
-
- # Generate .def file
- contents = [
- "LIBRARY %s INITINSTANCE TERMINSTANCE" % \
- os.path.splitext(os.path.basename(output_filename))[0],
- "DATA MULTIPLE NONSHARED",
- "EXPORTS"]
- for sym in export_symbols:
- contents.append(' "%s"' % sym)
- self.execute(write_file, (def_file, contents),
- "writing %s" % def_file)
-
- # next add options for def-file and to creating import libraries
- # for gcc/ld the def-file is specified as any other object files
- objects.append(def_file)
-
- #end: if ((export_symbols is not None) and
- # (target_desc != self.EXECUTABLE or self.linker_dll == "gcc")):
-
- # who wants symbols and a many times larger output file
- # should explicitly switch the debug mode on
- # otherwise we let dllwrap/ld strip the output file
- # (On my machine: 10KB < stripped_file < ??100KB
- # unstripped_file = stripped_file + XXX KB
- # ( XXX=254 for a typical python extension))
- if not debug:
- extra_preargs.append("-s")
-
- UnixCCompiler.link(self,
- target_desc,
- objects,
- output_filename,
- output_dir,
- libraries,
- library_dirs,
- runtime_library_dirs,
- None, # export_symbols, we do this in our def-file
- debug,
- extra_preargs,
- extra_postargs,
- build_temp,
- target_lang)
-
- # link ()
-
- # -- Miscellaneous methods -----------------------------------------
-
- # override the object_filenames method from CCompiler to
- # support rc and res-files
- def object_filenames (self,
- source_filenames,
- strip_dir=0,
- output_dir=''):
- if output_dir is None: output_dir = ''
- obj_names = []
- for src_name in source_filenames:
- # use normcase to make sure '.rc' is really '.rc' and not '.RC'
- (base, ext) = os.path.splitext (os.path.normcase(src_name))
- if ext not in (self.src_extensions + ['.rc']):
- raise UnknownFileError("unknown file type '%s' (from '%s')" % \
- (ext, src_name))
- if strip_dir:
- base = os.path.basename (base)
- if ext == '.rc':
- # these need to be compiled to object files
- obj_names.append (os.path.join (output_dir,
- base + self.res_extension))
- else:
- obj_names.append (os.path.join (output_dir,
- base + self.obj_extension))
- return obj_names
-
- # object_filenames ()
-
- # override the find_library_file method from UnixCCompiler
- # to deal with file naming/searching differences
- def find_library_file(self, dirs, lib, debug=0):
- shortlib = '%s.lib' % lib
- longlib = 'lib%s.lib' % lib # this form very rare
-
- # get EMX's default library directory search path
- try:
- emx_dirs = os.environ['LIBRARY_PATH'].split(';')
- except KeyError:
- emx_dirs = []
-
- for dir in dirs + emx_dirs:
- shortlibp = os.path.join(dir, shortlib)
- longlibp = os.path.join(dir, longlib)
- if os.path.exists(shortlibp):
- return shortlibp
- elif os.path.exists(longlibp):
- return longlibp
-
- # Oops, didn't find it in *any* of 'dirs'
- return None
-
-# class EMXCCompiler
-
-
-# Because these compilers aren't configured in Python's pyconfig.h file by
-# default, we should at least warn the user if he is using a unmodified
-# version.
-
-CONFIG_H_OK = "ok"
-CONFIG_H_NOTOK = "not ok"
-CONFIG_H_UNCERTAIN = "uncertain"
-
-def check_config_h():
-
- """Check if the current Python installation (specifically, pyconfig.h)
- appears amenable to building extensions with GCC. Returns a tuple
- (status, details), where 'status' is one of the following constants:
- CONFIG_H_OK
- all is well, go ahead and compile
- CONFIG_H_NOTOK
- doesn't look good
- CONFIG_H_UNCERTAIN
- not sure -- unable to read pyconfig.h
- 'details' is a human-readable string explaining the situation.
-
- Note there are two ways to conclude "OK": either 'sys.version' contains
- the string "GCC" (implying that this Python was built with GCC), or the
- installed "pyconfig.h" contains the string "__GNUC__".
- """
-
- # XXX since this function also checks sys.version, it's not strictly a
- # "pyconfig.h" check -- should probably be renamed...
-
- from distutils import sysconfig
- # if sys.version contains GCC then python was compiled with
- # GCC, and the pyconfig.h file should be OK
- if sys.version.find("GCC") >= 0:
- return (CONFIG_H_OK, "sys.version mentions 'GCC'")
-
- fn = sysconfig.get_config_h_filename()
- try:
- # It would probably better to read single lines to search.
- # But we do this only once, and it is fast enough
- f = open(fn)
- try:
- s = f.read()
- finally:
- f.close()
-
- except IOError as exc:
- # if we can't read this file, we cannot say it is wrong
- # the compiler will complain later about this file as missing
- return (CONFIG_H_UNCERTAIN,
- "couldn't read '%s': %s" % (fn, exc.strerror))
-
- else:
- # "pyconfig.h" contains an "#ifdef __GNUC__" or something similar
- if s.find("__GNUC__") >= 0:
- return (CONFIG_H_OK, "'%s' mentions '__GNUC__'" % fn)
- else:
- return (CONFIG_H_NOTOK, "'%s' does not mention '__GNUC__'" % fn)
-
-
-def get_versions():
- """ Try to find out the versions of gcc and ld.
- If not possible it returns None for it.
- """
- from distutils.version import StrictVersion
- from distutils.spawn import find_executable
- import re
-
- gcc_exe = find_executable('gcc')
- if gcc_exe:
- out = os.popen(gcc_exe + ' -dumpversion','r')
- try:
- out_string = out.read()
- finally:
- out.close()
- result = re.search('(\d+\.\d+\.\d+)', out_string, re.ASCII)
- if result:
- gcc_version = StrictVersion(result.group(1))
- else:
- gcc_version = None
- else:
- gcc_version = None
- # EMX ld has no way of reporting version number, and we use GCC
- # anyway - so we can link OMF DLLs
- ld_version = None
- return (gcc_version, ld_version)
diff --git a/Lib/distutils/file_util.py b/Lib/distutils/file_util.py
index 9bdd14e..f6ed290 100644
--- a/Lib/distutils/file_util.py
+++ b/Lib/distutils/file_util.py
@@ -27,26 +27,26 @@ def _copy_file_contents(src, dst, buffer_size=16*1024):
try:
try:
fsrc = open(src, 'rb')
- except os.error as e:
+ except OSError as e:
raise DistutilsFileError("could not open '%s': %s" % (src, e.strerror))
if os.path.exists(dst):
try:
os.unlink(dst)
- except os.error as e:
+ except OSError as e:
raise DistutilsFileError(
"could not delete '%s': %s" % (dst, e.strerror))
try:
fdst = open(dst, 'wb')
- except os.error as e:
+ except OSError as e:
raise DistutilsFileError(
"could not create '%s': %s" % (dst, e.strerror))
while True:
try:
buf = fsrc.read(buffer_size)
- except os.error as e:
+ except OSError as e:
raise DistutilsFileError(
"could not read from '%s': %s" % (src, e.strerror))
@@ -55,7 +55,7 @@ def _copy_file_contents(src, dst, buffer_size=16*1024):
try:
fdst.write(buf)
- except os.error as e:
+ except OSError as e:
raise DistutilsFileError(
"could not write to '%s': %s" % (dst, e.strerror))
finally:
@@ -193,7 +193,7 @@ def move_file (src, dst,
copy_it = False
try:
os.rename(src, dst)
- except os.error as e:
+ except OSError as e:
(num, msg) = e
if num == errno.EXDEV:
copy_it = True
@@ -205,11 +205,11 @@ def move_file (src, dst,
copy_file(src, dst, verbose=verbose)
try:
os.unlink(src)
- except os.error as e:
+ except OSError as e:
(num, msg) = e
try:
os.unlink(dst)
- except os.error:
+ except OSError:
pass
raise DistutilsFileError(
"couldn't move '%s' to '%s' by copy/delete: "
diff --git a/Lib/distutils/spawn.py b/Lib/distutils/spawn.py
index f58c55f..b1c5a44 100644
--- a/Lib/distutils/spawn.py
+++ b/Lib/distutils/spawn.py
@@ -32,8 +32,6 @@ def spawn(cmd, search_path=1, verbose=0, dry_run=0):
_spawn_posix(cmd, search_path, dry_run=dry_run)
elif os.name == 'nt':
_spawn_nt(cmd, search_path, dry_run=dry_run)
- elif os.name == 'os2':
- _spawn_os2(cmd, search_path, dry_run=dry_run)
else:
raise DistutilsPlatformError(
"don't know how to spawn programs on platform '%s'" % os.name)
@@ -74,26 +72,6 @@ def _spawn_nt(cmd, search_path=1, verbose=0, dry_run=0):
raise DistutilsExecError(
"command '%s' failed with exit status %d" % (cmd[0], rc))
-def _spawn_os2(cmd, search_path=1, verbose=0, dry_run=0):
- executable = cmd[0]
- if search_path:
- # either we find one or it stays the same
- executable = find_executable(executable) or executable
- log.info(' '.join([executable] + cmd[1:]))
- if not dry_run:
- # spawnv for OS/2 EMX requires a full path to the .exe
- try:
- rc = os.spawnv(os.P_WAIT, executable, cmd)
- except OSError as exc:
- # this seems to happen when the command isn't found
- raise DistutilsExecError(
- "command '%s' failed: %s" % (cmd[0], exc.args[-1]))
- if rc != 0:
- # and this reflects the command running but failing
- log.debug("command '%s' failed with exit status %d" % (cmd[0], rc))
- raise DistutilsExecError(
- "command '%s' failed with exit status %d" % (cmd[0], rc))
-
if sys.platform == 'darwin':
from distutils import sysconfig
_cfg_target = None
@@ -180,7 +158,7 @@ def find_executable(executable, path=None):
paths = path.split(os.pathsep)
base, ext = os.path.splitext(executable)
- if (sys.platform == 'win32' or os.name == 'os2') and (ext != '.exe'):
+ if (sys.platform == 'win32') and (ext != '.exe'):
executable = executable + '.exe'
if not os.path.isfile(executable):
diff --git a/Lib/distutils/sysconfig.py b/Lib/distutils/sysconfig.py
index 317640c..3b6549f 100644
--- a/Lib/distutils/sysconfig.py
+++ b/Lib/distutils/sysconfig.py
@@ -110,8 +110,6 @@ def get_python_inc(plat_specific=0, prefix=None):
return os.path.join(prefix, "include", python_dir)
elif os.name == "nt":
return os.path.join(prefix, "include")
- elif os.name == "os2":
- return os.path.join(prefix, "Include")
else:
raise DistutilsPlatformError(
"I don't know where Python installs its C header files "
@@ -153,11 +151,6 @@ def get_python_lib(plat_specific=0, standard_lib=0, prefix=None):
return prefix
else:
return os.path.join(prefix, "Lib", "site-packages")
- elif os.name == "os2":
- if standard_lib:
- return os.path.join(prefix, "Lib")
- else:
- return os.path.join(prefix, "Lib", "site-packages")
else:
raise DistutilsPlatformError(
"I don't know where Python installs its library "
@@ -492,23 +485,6 @@ def _init_nt():
_config_vars = g
-def _init_os2():
- """Initialize the module as appropriate for OS/2"""
- g = {}
- # set basic install directories
- g['LIBDEST'] = get_python_lib(plat_specific=0, standard_lib=1)
- g['BINLIBDEST'] = get_python_lib(plat_specific=1, standard_lib=1)
-
- # XXX hmmm.. a normal install puts include files here
- g['INCLUDEPY'] = get_python_inc(plat_specific=0)
-
- g['SO'] = '.pyd'
- g['EXE'] = ".exe"
-
- global _config_vars
- _config_vars = g
-
-
def get_config_vars(*args):
"""With no arguments, return a dictionary of all configuration
variables relevant for the current platform. Generally this includes
diff --git a/Lib/distutils/tests/test_bdist_dumb.py b/Lib/distutils/tests/test_bdist_dumb.py
index 1037d82..761051c 100644
--- a/Lib/distutils/tests/test_bdist_dumb.py
+++ b/Lib/distutils/tests/test_bdist_dumb.py
@@ -75,8 +75,6 @@ class BuildDumbTestCase(support.TempdirManager,
# see what we have
dist_created = os.listdir(os.path.join(pkg_dir, 'dist'))
base = "%s.%s.zip" % (dist.get_fullname(), cmd.plat_name)
- if os.name == 'os2':
- base = base.replace(':', '-')
self.assertEqual(dist_created, [base])
diff --git a/Lib/distutils/tests/test_install.py b/Lib/distutils/tests/test_install.py
index cb2e1f2..47d630c 100644
--- a/Lib/distutils/tests/test_install.py
+++ b/Lib/distutils/tests/test_install.py
@@ -94,7 +94,7 @@ class InstallTestCase(support.TempdirManager,
self.addCleanup(cleanup)
- for key in ('nt_user', 'unix_user', 'os2_home'):
+ for key in ('nt_user', 'unix_user'):
self.assertIn(key, INSTALL_SCHEMES)
dist = Distribution({'name': 'xx'})
diff --git a/Lib/distutils/tests/test_util.py b/Lib/distutils/tests/test_util.py
index eac9b51..b73cfe9 100644
--- a/Lib/distutils/tests/test_util.py
+++ b/Lib/distutils/tests/test_util.py
@@ -236,7 +236,7 @@ class UtilTestCase(support.EnvironGuard, unittest.TestCase):
self.assertRaises(DistutilsPlatformError,
change_root, 'c:\\root', 'its\\here')
- # XXX platforms to be covered: os2, mac
+ # XXX platforms to be covered: mac
def test_check_environ(self):
util._environ_checked = 0
diff --git a/Lib/distutils/util.py b/Lib/distutils/util.py
index 67d8166..ba09ac4 100644
--- a/Lib/distutils/util.py
+++ b/Lib/distutils/util.py
@@ -154,12 +154,6 @@ def change_root (new_root, pathname):
path = path[1:]
return os.path.join(new_root, path)
- elif os.name == 'os2':
- (drive, path) = os.path.splitdrive(pathname)
- if path[0] == os.sep:
- path = path[1:]
- return os.path.join(new_root, path)
-
else:
raise DistutilsPlatformError("nothing known about platform '%s'" % os.name)
diff --git a/Lib/doctest.py b/Lib/doctest.py
index 3af05fb..16a732d 100644
--- a/Lib/doctest.py
+++ b/Lib/doctest.py
@@ -62,6 +62,7 @@ __all__ = [
'REPORT_NDIFF',
'REPORT_ONLY_FIRST_FAILURE',
'REPORTING_FLAGS',
+ 'FAIL_FAST',
# 1. Utility Functions
# 2. Example & DocTest
'Example',
@@ -150,11 +151,13 @@ REPORT_UDIFF = register_optionflag('REPORT_UDIFF')
REPORT_CDIFF = register_optionflag('REPORT_CDIFF')
REPORT_NDIFF = register_optionflag('REPORT_NDIFF')
REPORT_ONLY_FIRST_FAILURE = register_optionflag('REPORT_ONLY_FIRST_FAILURE')
+FAIL_FAST = register_optionflag('FAIL_FAST')
REPORTING_FLAGS = (REPORT_UDIFF |
REPORT_CDIFF |
REPORT_NDIFF |
- REPORT_ONLY_FIRST_FAILURE)
+ REPORT_ONLY_FIRST_FAILURE |
+ FAIL_FAST)
# Special string markers for use in `want` strings:
BLANKLINE_MARKER = '<BLANKLINE>'
@@ -1342,6 +1345,9 @@ class DocTestRunner:
else:
assert False, ("unknown outcome", outcome)
+ if failures and self.optionflags & FAIL_FAST:
+ break
+
# Restore the option flags (in case they were modified)
self.optionflags = original_optionflags
diff --git a/Lib/email/_header_value_parser.py b/Lib/email/_header_value_parser.py
index 1924ed1..b596462 100644
--- a/Lib/email/_header_value_parser.py
+++ b/Lib/email/_header_value_parser.py
@@ -367,8 +367,7 @@ class TokenList(list):
yield (indent + ' !! invalid element in token '
'list: {!r}'.format(token))
else:
- for line in token._pp(indent+' '):
- yield line
+ yield from token._pp(indent+' ')
if self.defects:
extra = ' Defects: {}'.format(self.defects)
else:
diff --git a/Lib/email/iterators.py b/Lib/email/iterators.py
index 3adc4a0..b5502ee 100644
--- a/Lib/email/iterators.py
+++ b/Lib/email/iterators.py
@@ -26,8 +26,7 @@ def walk(self):
yield self
if self.is_multipart():
for subpart in self.get_payload():
- for subsubpart in subpart.walk():
- yield subsubpart
+ yield from subpart.walk()
@@ -40,8 +39,7 @@ def body_line_iterator(msg, decode=False):
for subpart in msg.walk():
payload = subpart.get_payload(decode=decode)
if isinstance(payload, str):
- for line in StringIO(payload):
- yield line
+ yield from StringIO(payload)
def typed_subpart_iterator(msg, maintype='text', subtype=None):
diff --git a/Lib/filecmp.py b/Lib/filecmp.py
index f5cea1d..5bbd0e8 100644
--- a/Lib/filecmp.py
+++ b/Lib/filecmp.py
@@ -147,12 +147,12 @@ class dircmp:
ok = 1
try:
a_stat = os.stat(a_path)
- except os.error as why:
+ except OSError as why:
# print('Can\'t stat', a_path, ':', why.args[1])
ok = 0
try:
b_stat = os.stat(b_path)
- except os.error as why:
+ except OSError as why:
# print('Can\'t stat', b_path, ':', why.args[1])
ok = 0
@@ -268,7 +268,7 @@ def cmpfiles(a, b, common, shallow=True):
def _cmp(a, b, sh, abs=abs, cmp=cmp):
try:
return not abs(cmp(a, b, sh))
- except os.error:
+ except OSError:
return 2
diff --git a/Lib/fileinput.py b/Lib/fileinput.py
index dbbbb21..5be0318 100644
--- a/Lib/fileinput.py
+++ b/Lib/fileinput.py
@@ -324,8 +324,10 @@ class FileInput:
if self._inplace:
self._backupfilename = (
self._filename + (self._backup or ".bak"))
- try: os.unlink(self._backupfilename)
- except os.error: pass
+ try:
+ os.unlink(self._backupfilename)
+ except OSError:
+ pass
# The next few lines may raise IOError
os.rename(self._filename, self._backupfilename)
self._file = open(self._backupfilename, self._mode)
diff --git a/Lib/fractions.py b/Lib/fractions.py
index 8be52d2..79e83ff 100644
--- a/Lib/fractions.py
+++ b/Lib/fractions.py
@@ -182,8 +182,10 @@ class Fraction(numbers.Rational):
elif not isinstance(f, float):
raise TypeError("%s.from_float() only takes floats, not %r (%s)" %
(cls.__name__, f, type(f).__name__))
- if math.isnan(f) or math.isinf(f):
- raise TypeError("Cannot convert %r to %s." % (f, cls.__name__))
+ if math.isnan(f):
+ raise ValueError("Cannot convert %r to %s." % (f, cls.__name__))
+ if math.isinf(f):
+ raise OverflowError("Cannot convert %r to %s." % (f, cls.__name__))
return cls(*f.as_integer_ratio())
@classmethod
@@ -196,9 +198,11 @@ class Fraction(numbers.Rational):
raise TypeError(
"%s.from_decimal() only takes Decimals, not %r (%s)" %
(cls.__name__, dec, type(dec).__name__))
- if not dec.is_finite():
- # Catches infinities and nans.
- raise TypeError("Cannot convert %s to %s." % (dec, cls.__name__))
+ if dec.is_infinite():
+ raise OverflowError(
+ "Cannot convert %s to %s." % (dec, cls.__name__))
+ if dec.is_nan():
+ raise ValueError("Cannot convert %s to %s." % (dec, cls.__name__))
sign, digits, exp = dec.as_tuple()
digits = int(''.join(map(str, digits)))
if sign:
diff --git a/Lib/ftplib.py b/Lib/ftplib.py
index 5efae95..2641a1d 100644
--- a/Lib/ftplib.py
+++ b/Lib/ftplib.py
@@ -123,7 +123,7 @@ class FTP:
if self.sock is not None:
try:
self.quit()
- except (socket.error, EOFError):
+ except (OSError, EOFError):
pass
finally:
if self.sock is not None:
@@ -295,7 +295,7 @@ class FTP:
try:
sock = socket.socket(af, socktype, proto)
sock.bind(sa)
- except socket.error as _:
+ except OSError as _:
err = _
if sock:
sock.close()
@@ -306,8 +306,8 @@ class FTP:
if err is not None:
raise err
else:
- raise socket.error("getaddrinfo returns an empty list")
- raise socket.error(msg)
+ raise OSError("getaddrinfo returns an empty list")
+ raise OSError(msg)
sock.listen(1)
port = sock.getsockname()[1] # Get proper port
host = self.sock.getsockname()[0] # Get proper host
diff --git a/Lib/functools.py b/Lib/functools.py
index 226a46e..60d2cf1 100644
--- a/Lib/functools.py
+++ b/Lib/functools.py
@@ -11,7 +11,10 @@
__all__ = ['update_wrapper', 'wraps', 'WRAPPER_ASSIGNMENTS', 'WRAPPER_UPDATES',
'total_ordering', 'cmp_to_key', 'lru_cache', 'reduce', 'partial']
-from _functools import partial, reduce
+try:
+ from _functools import reduce
+except ImportError:
+ pass
from collections import namedtuple
try:
from _thread import allocate_lock as Lock
@@ -137,6 +140,29 @@ except ImportError:
################################################################################
+### partial() argument application
+################################################################################
+
+def partial(func, *args, **keywords):
+ """new function with partial application of the given arguments
+ and keywords.
+ """
+ def newfunc(*fargs, **fkeywords):
+ newkeywords = keywords.copy()
+ newkeywords.update(fkeywords)
+ return func(*(args + fargs), **newkeywords)
+ newfunc.func = func
+ newfunc.args = args
+ newfunc.keywords = keywords
+ return newfunc
+
+try:
+ from _functools import partial
+except ImportError:
+ pass
+
+
+################################################################################
### LRU Cache function decorator
################################################################################
diff --git a/Lib/genericpath.py b/Lib/genericpath.py
index 2174187..943fdb3 100644
--- a/Lib/genericpath.py
+++ b/Lib/genericpath.py
@@ -16,7 +16,7 @@ def exists(path):
"""Test whether a path exists. Returns False for broken symbolic links"""
try:
os.stat(path)
- except os.error:
+ except OSError:
return False
return True
@@ -27,7 +27,7 @@ def isfile(path):
"""Test whether a path is a regular file"""
try:
st = os.stat(path)
- except os.error:
+ except OSError:
return False
return stat.S_ISREG(st.st_mode)
@@ -39,7 +39,7 @@ def isdir(s):
"""Return true if the pathname refers to an existing directory."""
try:
st = os.stat(s)
- except os.error:
+ except OSError:
return False
return stat.S_ISDIR(st.st_mode)
diff --git a/Lib/getpass.py b/Lib/getpass.py
index 0044742..53689e9 100644
--- a/Lib/getpass.py
+++ b/Lib/getpass.py
@@ -47,7 +47,7 @@ def unix_getpass(prompt='Password: ', stream=None):
input = tty
if not stream:
stream = tty
- except EnvironmentError as e:
+ except OSError as e:
# If that fails, see if stdin can be controlled.
try:
fd = sys.stdin.fileno()
diff --git a/Lib/glob.py b/Lib/glob.py
index 58888d6..33ee43b 100644
--- a/Lib/glob.py
+++ b/Lib/glob.py
@@ -26,8 +26,7 @@ def iglob(pathname):
return
dirname, basename = os.path.split(pathname)
if not dirname:
- for name in glob1(None, basename):
- yield name
+ yield from glob1(None, basename)
return
# `os.path.split()` returns the argument itself as a dirname if it is a
# drive or UNC path. Prevent an infinite recursion if a drive or UNC path
@@ -56,7 +55,7 @@ def glob1(dirname, pattern):
dirname = os.curdir
try:
names = os.listdir(dirname)
- except os.error:
+ except OSError:
return []
if pattern[0] != '.':
names = [x for x in names if x[0] != '.']
diff --git a/Lib/hashlib.py b/Lib/hashlib.py
index 21454c7..a1bd8b2 100644
--- a/Lib/hashlib.py
+++ b/Lib/hashlib.py
@@ -54,7 +54,8 @@ More condensed:
# This tuple and __get_builtin_constructor() must be modified if a new
# always available algorithm is added.
-__always_supported = ('md5', 'sha1', 'sha224', 'sha256', 'sha384', 'sha512')
+__always_supported = ('md5', 'sha1', 'sha224', 'sha256', 'sha384', 'sha512',
+ 'sha3_224', 'sha3_256', 'sha3_384', 'sha3_512')
algorithms_guaranteed = set(__always_supported)
algorithms_available = set(__always_supported)
@@ -85,6 +86,18 @@ def __get_builtin_constructor(name):
return _sha512.sha512
elif bs == '384':
return _sha512.sha384
+ elif name in {'sha3_224', 'sha3_256', 'sha3_384', 'sha3_512',
+ 'SHA3_224', 'SHA3_256', 'SHA3_384', 'SHA3_512'}:
+ import _sha3
+ bs = name[5:]
+ if bs == '224':
+ return _sha3.sha3_224
+ elif bs == '256':
+ return _sha3.sha3_256
+ elif bs == '384':
+ return _sha3.sha3_384
+ elif bs == '512':
+ return _sha3.sha3_512
except ImportError:
pass # no extension module, this hash is unsupported.
diff --git a/Lib/http/client.py b/Lib/http/client.py
index 6a4496f..5e5b3a3 100644
--- a/Lib/http/client.py
+++ b/Lib/http/client.py
@@ -791,8 +791,8 @@ class HTTPConnection:
if code != 200:
self.close()
- raise socket.error("Tunnel connection failed: %d %s" % (code,
- message.strip()))
+ raise OSError("Tunnel connection failed: %d %s" % (code,
+ message.strip()))
while True:
line = response.fp.readline(_MAXLINE + 1)
if len(line) > _MAXLINE:
diff --git a/Lib/http/cookiejar.py b/Lib/http/cookiejar.py
index 901e762..a77dc3f 100644
--- a/Lib/http/cookiejar.py
+++ b/Lib/http/cookiejar.py
@@ -1193,8 +1193,7 @@ def deepvalues(mapping):
pass
else:
mapping = True
- for subobj in deepvalues(obj):
- yield subobj
+ yield from deepvalues(obj)
if not mapping:
yield obj
diff --git a/Lib/http/server.py b/Lib/http/server.py
index c4ac703..e549982 100644
--- a/Lib/http/server.py
+++ b/Lib/http/server.py
@@ -425,12 +425,14 @@ class BaseHTTPRequestHandler(socketserver.StreamRequestHandler):
# using _quote_html to prevent Cross Site Scripting attacks (see bug #1100201)
content = (self.error_message_format %
{'code': code, 'message': _quote_html(message), 'explain': explain})
+ body = content.encode('UTF-8', 'replace')
self.send_response(code, message)
self.send_header("Content-Type", self.error_content_type)
self.send_header('Connection', 'close')
+ self.send_header('Content-Length', int(len(body)))
self.end_headers()
if self.command != 'HEAD' and code >= 200 and code not in (204, 304):
- self.wfile.write(content.encode('UTF-8', 'replace'))
+ self.wfile.write(body)
def send_response(self, code, message=None):
"""Add the response header to the headers buffer and log the
@@ -730,7 +732,7 @@ class SimpleHTTPRequestHandler(BaseHTTPRequestHandler):
"""
try:
list = os.listdir(path)
- except os.error:
+ except OSError:
self.send_error(404, "No permission to list directory")
return None
list.sort(key=lambda a: a.lower())
@@ -1121,7 +1123,7 @@ class CGIHTTPRequestHandler(SimpleHTTPRequestHandler):
try:
try:
os.setuid(nobody)
- except os.error:
+ except OSError:
pass
os.dup2(self.rfile.fileno(), 0)
os.dup2(self.wfile.fileno(), 1)
diff --git a/Lib/idlelib/EditorWindow.py b/Lib/idlelib/EditorWindow.py
index a153878..2fe9447 100644
--- a/Lib/idlelib/EditorWindow.py
+++ b/Lib/idlelib/EditorWindow.py
@@ -542,7 +542,7 @@ class EditorWindow(object):
if sys.platform[:3] == 'win':
try:
os.startfile(self.help_url)
- except WindowsError as why:
+ except OSError as why:
tkMessageBox.showerror(title='Document Start Failure',
message=str(why), parent=self.text)
else:
@@ -852,7 +852,7 @@ class EditorWindow(object):
if sys.platform[:3] == 'win':
try:
os.startfile(helpfile)
- except WindowsError as why:
+ except OSError as why:
tkMessageBox.showerror(title='Document Start Failure',
message=str(why), parent=self.text)
else:
diff --git a/Lib/idlelib/FileList.py b/Lib/idlelib/FileList.py
index 37a337e..a9989a8 100644
--- a/Lib/idlelib/FileList.py
+++ b/Lib/idlelib/FileList.py
@@ -103,7 +103,7 @@ class FileList:
if not os.path.isabs(filename):
try:
pwd = os.getcwd()
- except os.error:
+ except OSError:
pass
else:
filename = os.path.join(pwd, filename)
diff --git a/Lib/idlelib/GrepDialog.py b/Lib/idlelib/GrepDialog.py
index 27fcc33..1450f7a 100644
--- a/Lib/idlelib/GrepDialog.py
+++ b/Lib/idlelib/GrepDialog.py
@@ -110,7 +110,7 @@ class GrepDialog(SearchDialogBase):
def findfiles(self, dir, base, rec):
try:
names = os.listdir(dir or os.curdir)
- except os.error as msg:
+ except OSError as msg:
print(msg)
return []
list = []
diff --git a/Lib/idlelib/IOBinding.py b/Lib/idlelib/IOBinding.py
index ec50eb2..62c62b0 100644
--- a/Lib/idlelib/IOBinding.py
+++ b/Lib/idlelib/IOBinding.py
@@ -503,7 +503,7 @@ class IOBinding:
else:
try:
pwd = os.getcwd()
- except os.error:
+ except OSError:
pwd = ""
return pwd, ""
diff --git a/Lib/idlelib/NEWS.txt b/Lib/idlelib/NEWS.txt
index f6e8917d..d193b0a 100644
--- a/Lib/idlelib/NEWS.txt
+++ b/Lib/idlelib/NEWS.txt
@@ -1,4 +1,4 @@
-What's New in IDLE 3.3.1?
+What's New in IDLE 3.4.0?
=========================
- Issue #16226: Fix IDLE Path Browser crash.
diff --git a/Lib/idlelib/PathBrowser.py b/Lib/idlelib/PathBrowser.py
index 55bf1aa..ab05c67 100644
--- a/Lib/idlelib/PathBrowser.py
+++ b/Lib/idlelib/PathBrowser.py
@@ -45,7 +45,7 @@ class DirBrowserTreeItem(TreeItem):
def GetSubList(self):
try:
names = os.listdir(self.dir or os.curdir)
- except os.error:
+ except OSError:
return []
packages = []
for name in names:
diff --git a/Lib/idlelib/PyShell.py b/Lib/idlelib/PyShell.py
index 3b78f38..696d363 100644
--- a/Lib/idlelib/PyShell.py
+++ b/Lib/idlelib/PyShell.py
@@ -393,7 +393,7 @@ class ModifiedInterpreter(InteractiveInterpreter):
try:
self.rpcclt = MyRPCClient(addr)
break
- except socket.error as err:
+ except OSError as err:
pass
else:
self.display_port_binding_error()
@@ -1014,7 +1014,10 @@ class PyShell(OutputWindow):
self.close()
return False
else:
- nosub = "==== No Subprocess ===="
+ nosub = ("==== No Subprocess ====\n\n" +
+ "WARNING: Running IDLE without a Subprocess is deprecated\n" +
+ "and will be removed in a later version. See Help/IDLE Help\n" +
+ "for details.\n\n")
sys.displayhook = rpc.displayhook
self.write("Python %s on %s\n%s\n%s" %
@@ -1314,7 +1317,8 @@ USAGE: idle [-deins] [-t title] [file]*
idle [-dns] [-t title] - [arg]*
-h print this help message and exit
- -n run IDLE without a subprocess (see Help/IDLE Help for details)
+ -n run IDLE without a subprocess (DEPRECATED,
+ see Help/IDLE Help for details)
The following options will override the IDLE 'settings' configuration:
@@ -1392,6 +1396,8 @@ def main():
if o == '-i':
enable_shell = True
if o == '-n':
+ print(" Warning: running IDLE without a subprocess is deprecated.",
+ file=sys.stderr)
use_subprocess = False
if o == '-r':
script = a
diff --git a/Lib/idlelib/TreeWidget.py b/Lib/idlelib/TreeWidget.py
index d4e524b..833896c 100644
--- a/Lib/idlelib/TreeWidget.py
+++ b/Lib/idlelib/TreeWidget.py
@@ -382,7 +382,7 @@ class FileTreeItem(TreeItem):
try:
os.rename(self.path, newpath)
self.path = newpath
- except os.error:
+ except OSError:
pass
def GetIconName(self):
@@ -395,7 +395,7 @@ class FileTreeItem(TreeItem):
def GetSubList(self):
try:
names = os.listdir(self.path)
- except os.error:
+ except OSError:
return []
names.sort(key = os.path.normcase)
sublist = []
diff --git a/Lib/idlelib/help.txt b/Lib/idlelib/help.txt
index 815ee40..ef90a3a 100644
--- a/Lib/idlelib/help.txt
+++ b/Lib/idlelib/help.txt
@@ -274,7 +274,7 @@ Command line usage:
Enter idle -h at the command prompt to get a usage message.
-Running without a subprocess:
+Running without a subprocess: (DEPRECATED)
If IDLE is started with the -n command line switch it will run in a
single process and will not create the subprocess which runs the RPC
diff --git a/Lib/idlelib/idlever.py b/Lib/idlelib/idlever.py
index 8d8317d..efe96a4 100644
--- a/Lib/idlelib/idlever.py
+++ b/Lib/idlelib/idlever.py
@@ -1 +1 @@
-IDLE_VERSION = "3.3.0"
+IDLE_VERSION = "3.4.0a0"
diff --git a/Lib/idlelib/rpc.py b/Lib/idlelib/rpc.py
index 77cb3ac..44b3430 100644
--- a/Lib/idlelib/rpc.py
+++ b/Lib/idlelib/rpc.py
@@ -199,7 +199,7 @@ class SocketIO(object):
raise
except KeyboardInterrupt:
raise
- except socket.error:
+ except OSError:
raise
except Exception as ex:
return ("CALLEXC", ex)
@@ -340,7 +340,7 @@ class SocketIO(object):
n = self.sock.send(s[:BUFSIZE])
except (AttributeError, TypeError):
raise IOError("socket no longer exists")
- except socket.error:
+ except OSError:
raise
else:
s = s[n:]
@@ -357,7 +357,7 @@ class SocketIO(object):
return None
try:
s = self.sock.recv(BUFSIZE)
- except socket.error:
+ except OSError:
raise EOFError
if len(s) == 0:
raise EOFError
@@ -537,7 +537,7 @@ class RPCClient(SocketIO):
SocketIO.__init__(self, working_sock)
else:
print("** Invalid host: ", address, file=sys.__stderr__)
- raise socket.error
+ raise OSError
def get_remote_proxy(self, oid):
return RPCProxy(self, oid)
diff --git a/Lib/idlelib/run.py b/Lib/idlelib/run.py
index 9872af4..5d321af 100644
--- a/Lib/idlelib/run.py
+++ b/Lib/idlelib/run.py
@@ -135,8 +135,8 @@ def manage_socket(address):
try:
server = MyRPCServer(address, MyHandler)
break
- except socket.error as err:
- print("IDLE Subprocess: socket error: " + err.args[1] +
+ except OSError as err:
+ print("IDLE Subprocess: OSError: " + err.args[1] +
", retrying....", file=sys.__stderr__)
socket_error = err
else:
diff --git a/Lib/imaplib.py b/Lib/imaplib.py
index 3f8c65a..fba3bca 100644
--- a/Lib/imaplib.py
+++ b/Lib/imaplib.py
@@ -174,7 +174,7 @@ class IMAP4:
except Exception:
try:
self.shutdown()
- except socket.error:
+ except OSError:
pass
raise
@@ -267,7 +267,7 @@ class IMAP4:
self.file.close()
try:
self.sock.shutdown(socket.SHUT_RDWR)
- except socket.error as e:
+ except OSError as e:
# The server might already have closed the connection
if e.errno != errno.ENOTCONN:
raise
@@ -899,7 +899,7 @@ class IMAP4:
try:
self.send(data + CRLF)
- except (socket.error, OSError) as val:
+ except OSError as val:
raise self.abort('socket error: %s' % val)
if literal is None:
@@ -924,7 +924,7 @@ class IMAP4:
try:
self.send(literal)
self.send(CRLF)
- except (socket.error, OSError) as val:
+ except OSError as val:
raise self.abort('socket error: %s' % val)
if not literator:
diff --git a/Lib/importlib/_bootstrap.py b/Lib/importlib/_bootstrap.py
index a18ccc4..3ea798d 100644
--- a/Lib/importlib/_bootstrap.py
+++ b/Lib/importlib/_bootstrap.py
@@ -237,7 +237,7 @@ class _ModuleLock:
self.wakeup.release()
def __repr__(self):
- return "_ModuleLock(%r) at %d" % (self.name, id(self))
+ return "_ModuleLock({!r}) at {}".format(self.name, id(self))
class _DummyModuleLock:
@@ -258,7 +258,7 @@ class _DummyModuleLock:
self.count -= 1
def __repr__(self):
- return "_DummyModuleLock(%r) at %d" % (self.name, id(self))
+ return "_DummyModuleLock({!r}) at {}".format(self.name, id(self))
# The following two functions are for consumption by Python/import.c.
@@ -755,7 +755,7 @@ class WindowsRegistryFinder:
def _open_registry(cls, key):
try:
return _winreg.OpenKey(_winreg.HKEY_CURRENT_USER, key)
- except WindowsError:
+ except OSError:
return _winreg.OpenKey(_winreg.HKEY_LOCAL_MACHINE, key)
@classmethod
@@ -769,7 +769,7 @@ class WindowsRegistryFinder:
try:
with cls._open_registry(key) as hkey:
filepath = _winreg.QueryValue(hkey, "")
- except WindowsError:
+ except OSError:
return None
return filepath
@@ -931,6 +931,14 @@ class SourceLoader(_LoaderBasics):
raise ImportError("Failed to decode source file",
name=fullname) from exc
+ def source_to_code(self, data, path):
+ """Return the code object compiled from source.
+
+ The 'data' argument can be any object type that compile() supports.
+ """
+ return _call_with_frames_removed(compile, data, path, 'exec',
+ dont_inherit=True)
+
def get_code(self, fullname):
"""Concrete implementation of InspectLoader.get_code.
@@ -976,9 +984,7 @@ class SourceLoader(_LoaderBasics):
raise ImportError(msg.format(bytecode_path),
name=fullname, path=bytecode_path)
source_bytes = self.get_data(source_path)
- code_object = _call_with_frames_removed(compile,
- source_bytes, source_path, 'exec',
- dont_inherit=True)
+ code_object = self.source_to_code(source_bytes, source_path)
_verbose_message('code object from {}', source_path)
if (not sys.dont_write_bytecode and bytecode_path is not None and
source_mtime is not None):
@@ -1439,7 +1445,7 @@ class FileFinder:
return path_hook_for_FileFinder
def __repr__(self):
- return "FileFinder(%r)" % (self.path,)
+ return "FileFinder({!r})".format(self.path)
# Import itself ###############################################################
@@ -1714,7 +1720,7 @@ def _setup(sys_module, _imp_module):
builtin_module = sys.modules[builtin_name]
setattr(self_module, builtin_name, builtin_module)
- os_details = ('posix', ['/']), ('nt', ['\\', '/']), ('os2', ['\\', '/'])
+ os_details = ('posix', ['/']), ('nt', ['\\', '/'])
for builtin_os, path_separators in os_details:
# Assumption made in _path_join()
assert all(len(sep) == 1 for sep in path_separators)
@@ -1725,9 +1731,6 @@ def _setup(sys_module, _imp_module):
else:
try:
os_module = BuiltinImporter.load_module(builtin_os)
- # TODO: rip out os2 code after 3.3 is released as per PEP 11
- if builtin_os == 'os2' and 'EMX GCC' in sys.version:
- path_sep = path_separators[1]
break
except ImportError:
continue
diff --git a/Lib/importlib/abc.py b/Lib/importlib/abc.py
index 387567a..11e45f4 100644
--- a/Lib/importlib/abc.py
+++ b/Lib/importlib/abc.py
@@ -225,180 +225,3 @@ class SourceLoader(_bootstrap.SourceLoader, ResourceLoader, ExecutionLoader):
raise NotImplementedError
_register(SourceLoader, machinery.SourceFileLoader)
-
-class PyLoader(SourceLoader):
-
- """Implement the deprecated PyLoader ABC in terms of SourceLoader.
-
- This class has been deprecated! It is slated for removal in Python 3.4.
- If compatibility with Python 3.1 is not needed then implement the
- SourceLoader ABC instead of this class. If Python 3.1 compatibility is
- needed, then use the following idiom to have a single class that is
- compatible with Python 3.1 onwards::
-
- try:
- from importlib.abc import SourceLoader
- except ImportError:
- from importlib.abc import PyLoader as SourceLoader
-
-
- class CustomLoader(SourceLoader):
- def get_filename(self, fullname):
- # Implement ...
-
- def source_path(self, fullname):
- '''Implement source_path in terms of get_filename.'''
- try:
- return self.get_filename(fullname)
- except ImportError:
- return None
-
- def is_package(self, fullname):
- filename = os.path.basename(self.get_filename(fullname))
- return os.path.splitext(filename)[0] == '__init__'
-
- """
-
- @abc.abstractmethod
- def is_package(self, fullname):
- raise NotImplementedError
-
- @abc.abstractmethod
- def source_path(self, fullname):
- """Abstract method. Accepts a str module name and returns the path to
- the source code for the module."""
- raise NotImplementedError
-
- def get_filename(self, fullname):
- """Implement get_filename in terms of source_path.
-
- As get_filename should only return a source file path there is no
- chance of the path not existing but loading still being possible, so
- ImportError should propagate instead of being turned into returning
- None.
-
- """
- warnings.warn("importlib.abc.PyLoader is deprecated and is "
- "slated for removal in Python 3.4; "
- "use SourceLoader instead. "
- "See the importlib documentation on how to be "
- "compatible with Python 3.1 onwards.",
- DeprecationWarning)
- path = self.source_path(fullname)
- if path is None:
- raise ImportError(name=fullname)
- else:
- return path
-
-
-class PyPycLoader(PyLoader):
-
- """Abstract base class to assist in loading source and bytecode by
- requiring only back-end storage methods to be implemented.
-
- This class has been deprecated! Removal is slated for Python 3.4. Implement
- the SourceLoader ABC instead. If Python 3.1 compatibility is needed, see
- PyLoader.
-
- The methods get_code, get_source, and load_module are implemented for the
- user.
-
- """
-
- def get_filename(self, fullname):
- """Return the source or bytecode file path."""
- path = self.source_path(fullname)
- if path is not None:
- return path
- path = self.bytecode_path(fullname)
- if path is not None:
- return path
- raise ImportError("no source or bytecode path available for "
- "{0!r}".format(fullname), name=fullname)
-
- def get_code(self, fullname):
- """Get a code object from source or bytecode."""
- warnings.warn("importlib.abc.PyPycLoader is deprecated and slated for "
- "removal in Python 3.4; use SourceLoader instead. "
- "If Python 3.1 compatibility is required, see the "
- "latest documentation for PyLoader.",
- DeprecationWarning)
- source_timestamp = self.source_mtime(fullname)
- # Try to use bytecode if it is available.
- bytecode_path = self.bytecode_path(fullname)
- if bytecode_path:
- data = self.get_data(bytecode_path)
- try:
- magic = data[:4]
- if len(magic) < 4:
- raise ImportError(
- "bad magic number in {}".format(fullname),
- name=fullname, path=bytecode_path)
- raw_timestamp = data[4:8]
- if len(raw_timestamp) < 4:
- raise EOFError("bad timestamp in {}".format(fullname))
- pyc_timestamp = _bootstrap._r_long(raw_timestamp)
- raw_source_size = data[8:12]
- if len(raw_source_size) != 4:
- raise EOFError("bad file size in {}".format(fullname))
- # Source size is unused as the ABC does not provide a way to
- # get the size of the source ahead of reading it.
- bytecode = data[12:]
- # Verify that the magic number is valid.
- if imp.get_magic() != magic:
- raise ImportError(
- "bad magic number in {}".format(fullname),
- name=fullname, path=bytecode_path)
- # Verify that the bytecode is not stale (only matters when
- # there is source to fall back on.
- if source_timestamp:
- if pyc_timestamp < source_timestamp:
- raise ImportError("bytecode is stale", name=fullname,
- path=bytecode_path)
- except (ImportError, EOFError):
- # If source is available give it a shot.
- if source_timestamp is not None:
- pass
- else:
- raise
- else:
- # Bytecode seems fine, so try to use it.
- return marshal.loads(bytecode)
- elif source_timestamp is None:
- raise ImportError("no source or bytecode available to create code "
- "object for {0!r}".format(fullname),
- name=fullname)
- # Use the source.
- source_path = self.source_path(fullname)
- if source_path is None:
- message = "a source path must exist to load {0}".format(fullname)
- raise ImportError(message, name=fullname)
- source = self.get_data(source_path)
- code_object = compile(source, source_path, 'exec', dont_inherit=True)
- # Generate bytecode and write it out.
- if not sys.dont_write_bytecode:
- data = bytearray(imp.get_magic())
- data.extend(_bootstrap._w_long(source_timestamp))
- data.extend(_bootstrap._w_long(len(source) & 0xFFFFFFFF))
- data.extend(marshal.dumps(code_object))
- self.write_bytecode(fullname, data)
- return code_object
-
- @abc.abstractmethod
- def source_mtime(self, fullname):
- """Abstract method. Accepts a str filename and returns an int
- modification time for the source of the module."""
- raise NotImplementedError
-
- @abc.abstractmethod
- def bytecode_path(self, fullname):
- """Abstract method. Accepts a str filename and returns the str pathname
- to the bytecode for the module."""
- raise NotImplementedError
-
- @abc.abstractmethod
- def write_bytecode(self, fullname, bytecode):
- """Abstract method. Accepts a str filename and bytes object
- representing the bytecode for the module. Returns a boolean
- representing whether the bytecode was written or not."""
- raise NotImplementedError
diff --git a/Lib/json/__init__.py b/Lib/json/__init__.py
index 44f49c4..3b23224 100644
--- a/Lib/json/__init__.py
+++ b/Lib/json/__init__.py
@@ -39,8 +39,7 @@ Compact encoding::
Pretty printing::
>>> import json
- >>> print(json.dumps({'4': 5, '6': 7}, sort_keys=True,
- ... indent=4, separators=(',', ': ')))
+ >>> print(json.dumps({'4': 5, '6': 7}, sort_keys=True, indent=4))
{
"4": 5,
"6": 7
@@ -146,13 +145,12 @@ def dump(obj, fp, skipkeys=False, ensure_ascii=True, check_circular=True,
If ``indent`` is a non-negative integer, then JSON array elements and
object members will be pretty-printed with that indent level. An indent
level of 0 will only insert newlines. ``None`` is the most compact
- representation. Since the default item separator is ``', '``, the
- output might include trailing whitespace when ``indent`` is specified.
- You can use ``separators=(',', ': ')`` to avoid this.
+ representation.
- If ``separators`` is an ``(item_separator, dict_separator)`` tuple
- then it will be used instead of the default ``(', ', ': ')`` separators.
- ``(',', ':')`` is the most compact JSON representation.
+ If specified, ``separators`` should be an ``(item_separator, key_separator)``
+ tuple. The default is ``(', ', ': ')`` if *indent* is ``None`` and
+ ``(',', ': ')`` otherwise. To get the most compact JSON representation,
+ you should specify ``(',', ':')`` to eliminate whitespace.
``default(obj)`` is a function that should return a serializable version
of obj or raise TypeError. The default simply raises TypeError.
@@ -209,13 +207,12 @@ def dumps(obj, skipkeys=False, ensure_ascii=True, check_circular=True,
If ``indent`` is a non-negative integer, then JSON array elements and
object members will be pretty-printed with that indent level. An indent
level of 0 will only insert newlines. ``None`` is the most compact
- representation. Since the default item separator is ``', '``, the
- output might include trailing whitespace when ``indent`` is specified.
- You can use ``separators=(',', ': ')`` to avoid this.
+ representation.
- If ``separators`` is an ``(item_separator, dict_separator)`` tuple
- then it will be used instead of the default ``(', ', ': ')`` separators.
- ``(',', ':')`` is the most compact JSON representation.
+ If specified, ``separators`` should be an ``(item_separator, key_separator)``
+ tuple. The default is ``(', ', ': ')`` if *indent* is ``None`` and
+ ``(',', ': ')`` otherwise. To get the most compact JSON representation,
+ you should specify ``(',', ':')`` to eliminate whitespace.
``default(obj)`` is a function that should return a serializable version
of obj or raise TypeError. The default simply raises TypeError.
diff --git a/Lib/json/decoder.py b/Lib/json/decoder.py
index 07fd696..9b7438c 100644
--- a/Lib/json/decoder.py
+++ b/Lib/json/decoder.py
@@ -1,9 +1,6 @@
"""Implementation of JSONDecoder
"""
-import binascii
import re
-import sys
-import struct
from json import scanner
try:
@@ -15,14 +12,9 @@ __all__ = ['JSONDecoder']
FLAGS = re.VERBOSE | re.MULTILINE | re.DOTALL
-def _floatconstants():
- _BYTES = binascii.unhexlify(b'7FF80000000000007FF0000000000000')
- if sys.byteorder != 'big':
- _BYTES = _BYTES[:8][::-1] + _BYTES[8:][::-1]
- nan, inf = struct.unpack('dd', _BYTES)
- return nan, inf, -inf
-
-NaN, PosInf, NegInf = _floatconstants()
+NaN = float('nan')
+PosInf = float('inf')
+NegInf = float('-inf')
def linecol(doc, pos):
diff --git a/Lib/json/encoder.py b/Lib/json/encoder.py
index e1ed21f..93b5ea7 100644
--- a/Lib/json/encoder.py
+++ b/Lib/json/encoder.py
@@ -125,14 +125,12 @@ class JSONEncoder(object):
If indent is a non-negative integer, then JSON array
elements and object members will be pretty-printed with that
indent level. An indent level of 0 will only insert newlines.
- None is the most compact representation. Since the default
- item separator is ', ', the output might include trailing
- whitespace when indent is specified. You can use
- separators=(',', ': ') to avoid this.
+ None is the most compact representation.
- If specified, separators should be a (item_separator, key_separator)
- tuple. The default is (', ', ': '). To get the most compact JSON
- representation you should specify (',', ':') to eliminate whitespace.
+ If specified, separators should be an (item_separator, key_separator)
+ tuple. The default is (', ', ': ') if *indent* is ``None`` and
+ (',', ': ') otherwise. To get the most compact JSON representation,
+ you should specify (',', ':') to eliminate whitespace.
If specified, default is a function that gets called for objects
that can't otherwise be serialized. It should return a JSON encodable
@@ -148,6 +146,8 @@ class JSONEncoder(object):
self.indent = indent
if separators is not None:
self.item_separator, self.key_separator = separators
+ elif indent is not None:
+ self.item_separator = ','
if default is not None:
self.default = default
@@ -308,8 +308,7 @@ def _make_iterencode(markers, _default, _encoder, _indent, _floatstr,
chunks = _iterencode_dict(value, _current_indent_level)
else:
chunks = _iterencode(value, _current_indent_level)
- for chunk in chunks:
- yield chunk
+ yield from chunks
if newline_indent is not None:
_current_indent_level -= 1
yield '\n' + _indent * _current_indent_level
@@ -384,8 +383,7 @@ def _make_iterencode(markers, _default, _encoder, _indent, _floatstr,
chunks = _iterencode_dict(value, _current_indent_level)
else:
chunks = _iterencode(value, _current_indent_level)
- for chunk in chunks:
- yield chunk
+ yield from chunks
if newline_indent is not None:
_current_indent_level -= 1
yield '\n' + _indent * _current_indent_level
@@ -407,11 +405,9 @@ def _make_iterencode(markers, _default, _encoder, _indent, _floatstr,
elif isinstance(o, float):
yield _floatstr(o)
elif isinstance(o, (list, tuple)):
- for chunk in _iterencode_list(o, _current_indent_level):
- yield chunk
+ yield from _iterencode_list(o, _current_indent_level)
elif isinstance(o, dict):
- for chunk in _iterencode_dict(o, _current_indent_level):
- yield chunk
+ yield from _iterencode_dict(o, _current_indent_level)
else:
if markers is not None:
markerid = id(o)
@@ -419,8 +415,7 @@ def _make_iterencode(markers, _default, _encoder, _indent, _floatstr,
raise ValueError("Circular reference detected")
markers[markerid] = o
o = _default(o)
- for chunk in _iterencode(o, _current_indent_level):
- yield chunk
+ yield from _iterencode(o, _current_indent_level)
if markers is not None:
del markers[markerid]
return _iterencode
diff --git a/Lib/json/tool.py b/Lib/json/tool.py
index 0f108c6..9ab6d65 100644
--- a/Lib/json/tool.py
+++ b/Lib/json/tool.py
@@ -31,8 +31,7 @@ def main():
except ValueError as e:
raise SystemExit(e)
with outfile:
- json.dump(obj, outfile, sort_keys=True,
- indent=4, separators=(',', ': '))
+ json.dump(obj, outfile, sort_keys=True, indent=4)
outfile.write('\n')
diff --git a/Lib/lib2to3/btm_utils.py b/Lib/lib2to3/btm_utils.py
index 2276dc9..339750e 100644
--- a/Lib/lib2to3/btm_utils.py
+++ b/Lib/lib2to3/btm_utils.py
@@ -96,8 +96,7 @@ class MinNode(object):
def leaves(self):
"Generator that returns the leaves of the tree"
for child in self.children:
- for x in child.leaves():
- yield x
+ yield from child.leaves()
if not self.children:
yield self
@@ -277,7 +276,6 @@ def rec_test(sequence, test_func):
sub-iterables"""
for x in sequence:
if isinstance(x, (list, tuple)):
- for y in rec_test(x, test_func):
- yield y
+ yield from rec_test(x, test_func)
else:
yield test_func(x)
diff --git a/Lib/lib2to3/fixer_util.py b/Lib/lib2to3/fixer_util.py
index 60d219f..6e259c5 100644
--- a/Lib/lib2to3/fixer_util.py
+++ b/Lib/lib2to3/fixer_util.py
@@ -129,6 +129,29 @@ def FromImport(package_name, name_leafs):
imp = Node(syms.import_from, children)
return imp
+def ImportAndCall(node, results, names):
+ """Returns an import statement and calls a method
+ of the module:
+
+ import module
+ module.name()"""
+ obj = results["obj"].clone()
+ if obj.type == syms.arglist:
+ newarglist = obj.clone()
+ else:
+ newarglist = Node(syms.arglist, [obj.clone()])
+ after = results["after"]
+ if after:
+ after = [n.clone() for n in after]
+ new = Node(syms.power,
+ Attr(Name(names[0]), Name(names[1])) +
+ [Node(syms.trailer,
+ [results["lpar"].clone(),
+ newarglist,
+ results["rpar"].clone()])] + after)
+ new.prefix = node.prefix
+ return new
+
###########################################################
### Determine whether a node represents a given literal
diff --git a/Lib/lib2to3/fixes/fix_intern.py b/Lib/lib2to3/fixes/fix_intern.py
index 6be11cd..fb2973c 100644
--- a/Lib/lib2to3/fixes/fix_intern.py
+++ b/Lib/lib2to3/fixes/fix_intern.py
@@ -6,9 +6,8 @@
intern(s) -> sys.intern(s)"""
# Local imports
-from .. import pytree
from .. import fixer_base
-from ..fixer_util import Name, Attr, touch_import
+from ..fixer_util import ImportAndCall, touch_import
class FixIntern(fixer_base.BaseFix):
@@ -26,21 +25,7 @@ class FixIntern(fixer_base.BaseFix):
"""
def transform(self, node, results):
- syms = self.syms
- obj = results["obj"].clone()
- if obj.type == syms.arglist:
- newarglist = obj.clone()
- else:
- newarglist = pytree.Node(syms.arglist, [obj.clone()])
- after = results["after"]
- if after:
- after = [n.clone() for n in after]
- new = pytree.Node(syms.power,
- Attr(Name("sys"), Name("intern")) +
- [pytree.Node(syms.trailer,
- [results["lpar"].clone(),
- newarglist,
- results["rpar"].clone()])] + after)
- new.prefix = node.prefix
+ names = ('sys', 'intern')
+ new = ImportAndCall(node, results, names)
touch_import(None, 'sys', node)
return new
diff --git a/Lib/lib2to3/fixes/fix_reload.py b/Lib/lib2to3/fixes/fix_reload.py
new file mode 100644
index 0000000..1855357
--- /dev/null
+++ b/Lib/lib2to3/fixes/fix_reload.py
@@ -0,0 +1,28 @@
+"""Fixer for reload().
+
+reload(s) -> imp.reload(s)"""
+
+# Local imports
+from .. import fixer_base
+from ..fixer_util import ImportAndCall, touch_import
+
+
+class FixReload(fixer_base.BaseFix):
+ BM_compatible = True
+ order = "pre"
+
+ PATTERN = """
+ power< 'reload'
+ trailer< lpar='('
+ ( not(arglist | argument<any '=' any>) obj=any
+ | obj=arglist<(not argument<any '=' any>) any ','> )
+ rpar=')' >
+ after=any*
+ >
+ """
+
+ def transform(self, node, results):
+ names = ('imp', 'reload')
+ new = ImportAndCall(node, results, names)
+ touch_import(None, 'imp', node)
+ return new
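A hypothetical driver exercising the new fixer (the input string is illustrative; calls with keyword or extra arguments are deliberately left unchanged, per the PATTERN above):

from lib2to3.refactor import RefactoringTool

tool = RefactoringTool(['lib2to3.fixes.fix_reload'])
print(tool.refactor_string('reload(mod)\n', '<example>'))
# Expected shape of the output:
#   import imp
#   imp.reload(mod)
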
diff --git a/Lib/lib2to3/main.py b/Lib/lib2to3/main.py
index f9cc18b..93bae90 100644
--- a/Lib/lib2to3/main.py
+++ b/Lib/lib2to3/main.py
@@ -90,11 +90,11 @@ class StdoutRefactoringTool(refactor.MultiprocessRefactoringTool):
if os.path.lexists(backup):
try:
os.remove(backup)
- except os.error as err:
+ except OSError as err:
self.log_message("Can't remove backup %s", backup)
try:
os.rename(filename, backup)
- except os.error as err:
+ except OSError as err:
self.log_message("Can't rename %s to %s", filename, backup)
# Actually write the new file
write = super(StdoutRefactoringTool, self).write_file
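The os.error/OSError substitutions here and in the files below are behaviour-preserving: since Python 3.3 (PEP 3151) the old exception names are plain aliases of OSError. A quick sanity check:

import os, socket

assert os.error is OSError
assert socket.error is OSError          # also aliased since 3.3
assert IOError is OSError
assert EnvironmentError is OSError
# On Windows, WindowsError is an alias of OSError as well.
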
diff --git a/Lib/lib2to3/pytree.py b/Lib/lib2to3/pytree.py
index 17cbf0a..c4a1be3 100644
--- a/Lib/lib2to3/pytree.py
+++ b/Lib/lib2to3/pytree.py
@@ -194,8 +194,7 @@ class Base(object):
def leaves(self):
for child in self.children:
- for x in child.leaves():
- yield x
+ yield from child.leaves()
def depth(self):
if self.parent is None:
@@ -274,16 +273,14 @@ class Node(Base):
def post_order(self):
"""Return a post-order iterator for the tree."""
for child in self.children:
- for node in child.post_order():
- yield node
+ yield from child.post_order()
yield self
def pre_order(self):
"""Return a pre-order iterator for the tree."""
yield self
for child in self.children:
- for node in child.pre_order():
- yield node
+ yield from child.pre_order()
def _prefix_getter(self):
"""
diff --git a/Lib/lib2to3/refactor.py b/Lib/lib2to3/refactor.py
index 201e193..87f686e 100644
--- a/Lib/lib2to3/refactor.py
+++ b/Lib/lib2to3/refactor.py
@@ -534,12 +534,12 @@ class RefactoringTool(object):
"""
try:
f = _open_with_encoding(filename, "w", encoding=encoding)
- except os.error as err:
+ except OSError as err:
self.log_error("Can't create %s: %s", filename, err)
return
try:
f.write(_to_system_newlines(new_text))
- except os.error as err:
+ except OSError as err:
self.log_error("Can't write %s: %s", filename, err)
finally:
f.close()
diff --git a/Lib/lib2to3/tests/pytree_idempotency.py b/Lib/lib2to3/tests/pytree_idempotency.py
index a02bbfe..731c403 100755
--- a/Lib/lib2to3/tests/pytree_idempotency.py
+++ b/Lib/lib2to3/tests/pytree_idempotency.py
@@ -53,7 +53,7 @@ def main():
for dir in sys.path:
try:
names = os.listdir(dir)
- except os.error:
+ except OSError:
continue
print("Scanning", dir, "...", file=sys.stderr)
for name in names:
diff --git a/Lib/lib2to3/tests/test_fixers.py b/Lib/lib2to3/tests/test_fixers.py
index 914b3bf..d7659fa 100644
--- a/Lib/lib2to3/tests/test_fixers.py
+++ b/Lib/lib2to3/tests/test_fixers.py
@@ -282,6 +282,65 @@ class Test_apply(FixerTestCase):
b = """f(*args, **kwds)"""
self.check(a, b)
+class Test_reload(FixerTestCase):
+ fixer = "reload"
+
+ def test(self):
+ b = """reload(a)"""
+ a = """import imp\nimp.reload(a)"""
+ self.check(b, a)
+
+ def test_comment(self):
+ b = """reload( a ) # comment"""
+ a = """import imp\nimp.reload( a ) # comment"""
+ self.check(b, a)
+
+ # PEP 8 comments
+ b = """reload( a ) # comment"""
+ a = """import imp\nimp.reload( a ) # comment"""
+ self.check(b, a)
+
+ def test_space(self):
+ b = """reload( a )"""
+ a = """import imp\nimp.reload( a )"""
+ self.check(b, a)
+
+ b = """reload( a)"""
+ a = """import imp\nimp.reload( a)"""
+ self.check(b, a)
+
+ b = """reload(a )"""
+ a = """import imp\nimp.reload(a )"""
+ self.check(b, a)
+
+ def test_unchanged(self):
+ s = """reload(a=1)"""
+ self.unchanged(s)
+
+ s = """reload(f, g)"""
+ self.unchanged(s)
+
+ s = """reload(f, *h)"""
+ self.unchanged(s)
+
+ s = """reload(f, *h, **i)"""
+ self.unchanged(s)
+
+ s = """reload(f, **i)"""
+ self.unchanged(s)
+
+ s = """reload(*h, **i)"""
+ self.unchanged(s)
+
+ s = """reload(*h)"""
+ self.unchanged(s)
+
+ s = """reload(**i)"""
+ self.unchanged(s)
+
+ s = """reload()"""
+ self.unchanged(s)
+
class Test_intern(FixerTestCase):
fixer = "intern"
diff --git a/Lib/linecache.py b/Lib/linecache.py
index c3f2c3f..71dc93d 100644
--- a/Lib/linecache.py
+++ b/Lib/linecache.py
@@ -59,7 +59,7 @@ def checkcache(filename=None):
continue # no-op for files loaded via a __loader__
try:
stat = os.stat(fullname)
- except os.error:
+ except OSError:
del cache[filename]
continue
if size != stat.st_size or mtime != stat.st_mtime:
@@ -118,7 +118,7 @@ def updatecache(filename, module_globals=None):
try:
stat = os.stat(fullname)
break
- except os.error:
+ except OSError:
pass
else:
return []
diff --git a/Lib/logging/__init__.py b/Lib/logging/__init__.py
index 591f45b..6f8b4fa 100644
--- a/Lib/logging/__init__.py
+++ b/Lib/logging/__init__.py
@@ -67,7 +67,7 @@ else: #pragma: no cover
"""Return the frame object for the caller's stack frame."""
try:
raise Exception
- except:
+ except Exception:
return sys.exc_info()[2].tb_frame.f_back
# _srcfile is only used in conjunction with sys._getframe().
@@ -879,16 +879,27 @@ class Handler(Filterer):
The record which was being processed is passed in to this method.
"""
if raiseExceptions and sys.stderr: # see issue 13807
- ei = sys.exc_info()
+ t, v, tb = sys.exc_info()
try:
- traceback.print_exception(ei[0], ei[1], ei[2],
- None, sys.stderr)
- sys.stderr.write('Logged from file %s, line %s\n' % (
- record.filename, record.lineno))
+ sys.stderr.write('--- Logging error ---\n')
+ traceback.print_exception(t, v, tb, None, sys.stderr)
+ sys.stderr.write('Call stack:\n')
+ # Walk the stack frame up until we're out of logging,
+ # so as to print the calling context.
+ frame = tb.tb_frame
+ while (frame and os.path.dirname(frame.f_code.co_filename) ==
+ __path__[0]):
+ frame = frame.f_back
+ if frame:
+ traceback.print_stack(frame, file=sys.stderr)
+ else:
+ # couldn't find the right stack frame, for some reason
+ sys.stderr.write('Logged from file %s, line %s\n' % (
+ record.filename, record.lineno))
except IOError: #pragma: no cover
pass # see issue 5971
finally:
- del ei
+ del t, v, tb
class StreamHandler(Handler):
"""
@@ -938,9 +949,7 @@ class StreamHandler(Handler):
stream.write(msg)
stream.write(self.terminator)
self.flush()
- except (KeyboardInterrupt, SystemExit): #pragma: no cover
- raise
- except:
+ except Exception:
self.handleError(record)
class FileHandler(StreamHandler):
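handleError() now prints a '--- Logging error ---' banner, the exception, and the calling stack (skipping logging's own frames) instead of only the file and line of the record, and emit() routes any Exception there. A minimal way to see it, assuming a stream whose write() always fails:

import logging

class BrokenStream:
    def write(self, data):
        raise RuntimeError('stream is broken')
    def flush(self):
        pass

logging.basicConfig(stream=BrokenStream())
logging.error('this record cannot be emitted')
# With logging.raiseExceptions left at its default (True), the failure
# is reported on sys.stderr via the new handleError() output.
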
@@ -1837,7 +1846,7 @@ def shutdown(handlerList=_handlerList):
pass
finally:
h.release()
- except:
+ except: # ignore everything, as we're shutting down
if raiseExceptions:
raise
#else, swallow
diff --git a/Lib/logging/config.py b/Lib/logging/config.py
index 5ef5c91..5eae183 100644
--- a/Lib/logging/config.py
+++ b/Lib/logging/config.py
@@ -1,4 +1,4 @@
-# Copyright 2001-2010 by Vinay Sajip. All Rights Reserved.
+# Copyright 2001-2012 by Vinay Sajip. All Rights Reserved.
#
# Permission to use, copy, modify, and distribute this software and its
# documentation for any purpose and without fee is hereby granted,
@@ -19,7 +19,7 @@ Configuration functions for the logging package for Python. The core package
is based on PEP 282 and comments thereto in comp.lang.python, and influenced
by Apache's log4j system.
-Copyright (C) 2001-2010 Vinay Sajip. All Rights Reserved.
+Copyright (C) 2001-2012 Vinay Sajip. All Rights Reserved.
To use, simply 'import logging' and log away!
"""
@@ -61,11 +61,14 @@ def fileConfig(fname, defaults=None, disable_existing_loggers=True):
"""
import configparser
- cp = configparser.ConfigParser(defaults)
- if hasattr(fname, 'readline'):
- cp.read_file(fname)
+ if isinstance(fname, configparser.RawConfigParser):
+ cp = fname
else:
- cp.read(fname)
+ cp = configparser.ConfigParser(defaults)
+ if hasattr(fname, 'readline'):
+ cp.read_file(fname)
+ else:
+ cp.read(fname)
formatters = _create_formatters(cp)
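fileConfig() now also accepts an already-populated RawConfigParser (or subclass) instance, not just a path or a file object. A sketch using the standard fileConfig section layout:

import configparser
import logging.config

INI = """\
[loggers]
keys=root

[handlers]
keys=console

[formatters]
keys=plain

[logger_root]
level=INFO
handlers=console

[handler_console]
class=StreamHandler
args=(sys.stderr,)
formatter=plain

[formatter_plain]
format=%(levelname)s:%(message)s
"""

cp = configparser.ConfigParser()
cp.read_string(INI)
logging.config.fileConfig(cp)    # new: pass the parser directly
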
@@ -707,6 +710,7 @@ class DictConfigurator(BaseConfigurator):
'address' in config:
config['address'] = self.as_tuple(config['address'])
factory = klass
+ props = config.pop('.', None)
kwargs = dict([(k, config[k]) for k in config if valid_ident(k)])
try:
result = factory(**kwargs)
@@ -725,6 +729,9 @@ class DictConfigurator(BaseConfigurator):
result.setLevel(logging._checkLevel(level))
if filters:
self.add_filters(result, filters)
+ if props:
+ for name, value in props.items():
+ setattr(result, name, value)
return result
def add_handlers(self, logger, handlers):
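As the new code implies, a handler configuration may now carry a '.' key whose value is a dict of attributes to set on the constructed object after creation. A sketch using dictConfig; the 'owner' attribute name is made up for illustration:

import logging
import logging.config

config = {
    'version': 1,
    'handlers': {
        'console': {
            'class': 'logging.StreamHandler',
            '.': {'owner': 'ops-team'},     # applied via setattr()
        },
    },
    'root': {'level': 'INFO', 'handlers': ['console']},
}

logging.config.dictConfig(config)
handler = logging.getLogger().handlers[0]
assert handler.owner == 'ops-team'
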
@@ -773,7 +780,7 @@ def dictConfig(config):
dictConfigClass(config).configure()
-def listen(port=DEFAULT_LOGGING_CONFIG_PORT):
+def listen(port=DEFAULT_LOGGING_CONFIG_PORT, verify=None):
"""
Start up a socket server on the specified port, and listen for new
configurations.
@@ -782,6 +789,15 @@ def listen(port=DEFAULT_LOGGING_CONFIG_PORT):
Returns a Thread object on which you can call start() to start the server,
and which you can join() when appropriate. To stop the server, call
stopListening().
+
+ Use the ``verify`` argument to verify any bytes received across the wire
+ from a client. If specified, it should be a callable which receives a
+ single argument - the bytes of configuration data received across the
+ network - and it should return either ``None``, to indicate that the
+ passed in bytes could not be verified and should be discarded, or a
+ byte string which is then passed to the configuration machinery as
+ normal. Note that you can return transformed bytes, e.g. by decrypting
+ the bytes passed in.
"""
if not thread: #pragma: no cover
raise NotImplementedError("listen() needs threading to work")
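A sketch of a verify callable matching this contract; the shared-secret prefix is purely illustrative:

SECRET = b'sekrit:'

def verify_config(data):
    # Accept only payloads carrying the shared prefix, and strip it.
    if data.startswith(SECRET):
        return data[len(SECRET):]    # transformed bytes are processed
    return None                      # anything else is discarded

# t = logging.config.listen(verify=verify_config)
# t.start()
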
@@ -809,25 +825,26 @@ def listen(port=DEFAULT_LOGGING_CONFIG_PORT):
chunk = self.connection.recv(slen)
while len(chunk) < slen:
chunk = chunk + conn.recv(slen - len(chunk))
- chunk = chunk.decode("utf-8")
- try:
- import json
- d =json.loads(chunk)
- assert isinstance(d, dict)
- dictConfig(d)
- except:
- #Apply new configuration.
-
- file = io.StringIO(chunk)
+ if self.server.verify is not None:
+ chunk = self.server.verify(chunk)
+ if chunk is not None: # verified, can process
+ chunk = chunk.decode("utf-8")
try:
- fileConfig(file)
- except (KeyboardInterrupt, SystemExit): #pragma: no cover
- raise
- except:
- traceback.print_exc()
+ import json
+ d = json.loads(chunk)
+ assert isinstance(d, dict)
+ dictConfig(d)
+ except Exception:
+ #Apply new configuration.
+
+ file = io.StringIO(chunk)
+ try:
+ fileConfig(file)
+ except Exception:
+ traceback.print_exc()
if self.server.ready:
self.server.ready.set()
- except socket.error as e:
+ except OSError as e:
if not isinstance(e.args, tuple):
raise
else:
@@ -843,13 +860,14 @@ def listen(port=DEFAULT_LOGGING_CONFIG_PORT):
allow_reuse_address = 1
def __init__(self, host='localhost', port=DEFAULT_LOGGING_CONFIG_PORT,
- handler=None, ready=None):
+ handler=None, ready=None, verify=None):
ThreadingTCPServer.__init__(self, (host, port), handler)
logging._acquireLock()
self.abort = 0
logging._releaseLock()
self.timeout = 1
self.ready = ready
+ self.verify = verify
def serve_until_stopped(self):
import select
@@ -867,16 +885,18 @@ def listen(port=DEFAULT_LOGGING_CONFIG_PORT):
class Server(threading.Thread):
- def __init__(self, rcvr, hdlr, port):
+ def __init__(self, rcvr, hdlr, port, verify):
super(Server, self).__init__()
self.rcvr = rcvr
self.hdlr = hdlr
self.port = port
+ self.verify = verify
self.ready = threading.Event()
def run(self):
server = self.rcvr(port=self.port, handler=self.hdlr,
- ready=self.ready)
+ ready=self.ready,
+ verify=self.verify)
if self.port == 0:
self.port = server.server_address[1]
self.ready.set()
@@ -886,7 +906,7 @@ def listen(port=DEFAULT_LOGGING_CONFIG_PORT):
logging._releaseLock()
server.serve_until_stopped()
- return Server(ConfigSocketReceiver, ConfigStreamHandler, port)
+ return Server(ConfigSocketReceiver, ConfigStreamHandler, port, verify)
def stopListening():
"""
diff --git a/Lib/logging/handlers.py b/Lib/logging/handlers.py
index f286cd6..1491b47 100644
--- a/Lib/logging/handlers.py
+++ b/Lib/logging/handlers.py
@@ -72,9 +72,7 @@ class BaseRotatingHandler(logging.FileHandler):
if self.shouldRollover(record):
self.doRollover()
logging.FileHandler.emit(self, record)
- except (KeyboardInterrupt, SystemExit): #pragma: no cover
- raise
- except:
+ except Exception:
self.handleError(record)
def rotation_filename(self, default_name):
@@ -496,15 +494,7 @@ class SocketHandler(logging.Handler):
A factory method which allows subclasses to define the precise
type of socket they want.
"""
- s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
- if hasattr(s, 'settimeout'):
- s.settimeout(timeout)
- try:
- s.connect((self.host, self.port))
- return s
- except socket.error:
- s.close()
- raise
+ return socket.create_connection((self.host, self.port), timeout=timeout)
def createSocket(self):
"""
@@ -524,7 +514,7 @@ class SocketHandler(logging.Handler):
try:
self.sock = self.makeSocket()
self.retryTime = None # next time, no delay before trying
- except socket.error:
+ except OSError:
#Creation failed, so set the retry time and return.
if self.retryTime is None:
self.retryPeriod = self.retryStart
@@ -548,16 +538,8 @@ class SocketHandler(logging.Handler):
#but are still unable to connect.
if self.sock:
try:
- if hasattr(self.sock, "sendall"):
- self.sock.sendall(s)
- else: #pragma: no cover
- sentsofar = 0
- left = len(s)
- while left > 0:
- sent = self.sock.send(s[sentsofar:])
- sentsofar = sentsofar + sent
- left = left - sent
- except socket.error: #pragma: no cover
+ self.sock.sendall(s)
+ except OSError: #pragma: no cover
self.sock.close()
self.sock = None # so we can call createSocket next time
@@ -607,9 +589,7 @@ class SocketHandler(logging.Handler):
try:
s = self.makePickle(record)
self.send(s)
- except (KeyboardInterrupt, SystemExit): #pragma: no cover
- raise
- except:
+ except Exception:
self.handleError(record)
def close(self):
@@ -795,7 +775,7 @@ class SysLogHandler(logging.Handler):
self.socket = socket.socket(socket.AF_UNIX, self.socktype)
try:
self.socket.connect(address)
- except socket.error:
+ except OSError:
self.socket.close()
raise
@@ -862,16 +842,14 @@ class SysLogHandler(logging.Handler):
if self.unixsocket:
try:
self.socket.send(msg)
- except socket.error:
+ except OSError:
self._connect_unixsocket(self.address)
self.socket.send(msg)
elif self.socktype == socket.SOCK_DGRAM:
self.socket.sendto(msg, self.address)
else:
self.socket.sendall(msg)
- except (KeyboardInterrupt, SystemExit): #pragma: no cover
- raise
- except:
+ except Exception:
self.handleError(record)
class SMTPHandler(logging.Handler):
@@ -949,9 +927,7 @@ class SMTPHandler(logging.Handler):
smtp.login(self.username, self.password)
smtp.sendmail(self.fromaddr, self.toaddrs, msg)
smtp.quit()
- except (KeyboardInterrupt, SystemExit): #pragma: no cover
- raise
- except:
+ except Exception:
self.handleError(record)
class NTEventLogHandler(logging.Handler):
@@ -1036,9 +1012,7 @@ class NTEventLogHandler(logging.Handler):
type = self.getEventType(record)
msg = self.format(record)
self._welu.ReportEvent(self.appname, id, cat, type, [msg])
- except (KeyboardInterrupt, SystemExit): #pragma: no cover
- raise
- except:
+ except Exception:
self.handleError(record)
def close(self):
@@ -1123,9 +1097,7 @@ class HTTPHandler(logging.Handler):
if self.method == "POST":
h.send(data.encode('utf-8'))
h.getresponse() #can't do anything with the result
- except (KeyboardInterrupt, SystemExit): #pragma: no cover
- raise
- except:
+ except Exception:
self.handleError(record)
class BufferingHandler(logging.Handler):
@@ -1305,9 +1277,7 @@ class QueueHandler(logging.Handler):
"""
try:
self.enqueue(self.prepare(record))
- except (KeyboardInterrupt, SystemExit): #pragma: no cover
- raise
- except:
+ except Exception:
self.handleError(record)
if threading:
diff --git a/Lib/lzma.py b/Lib/lzma.py
index 1a1b065..b2e2f7e 100644
--- a/Lib/lzma.py
+++ b/Lib/lzma.py
@@ -55,7 +55,7 @@ class LZMAFile(io.BufferedIOBase):
be an existing file object to read from or write to.
mode can be "r" for reading (default), "w" for (over)writing, or
- "a" for appending. These can equivalently be given as "rb", "wb",
+ "a" for appending. These can equivalently be given as "rb", "wb"
and "ab" respectively.
format specifies the container format to use for the file.
@@ -110,7 +110,8 @@ class LZMAFile(io.BufferedIOBase):
# stream will need a separate decompressor object.
self._init_args = {"format":format, "filters":filters}
self._decompressor = LZMADecompressor(**self._init_args)
- self._buffer = None
+ self._buffer = b""
+ self._buffer_offset = 0
elif mode in ("w", "wb", "a", "ab"):
if format is None:
format = FORMAT_XZ
@@ -143,7 +144,7 @@ class LZMAFile(io.BufferedIOBase):
try:
if self._mode in (_MODE_READ, _MODE_READ_EOF):
self._decompressor = None
- self._buffer = None
+ self._buffer = b""
elif self._mode == _MODE_WRITE:
self._fp.write(self._compressor.flush())
self._compressor = None
@@ -187,15 +188,18 @@ class LZMAFile(io.BufferedIOBase):
raise ValueError("I/O operation on closed file")
def _check_can_read(self):
- if not self.readable():
+ if self._mode not in (_MODE_READ, _MODE_READ_EOF):
+ self._check_not_closed()
raise io.UnsupportedOperation("File not open for reading")
def _check_can_write(self):
- if not self.writable():
+ if self._mode != _MODE_WRITE:
+ self._check_not_closed()
raise io.UnsupportedOperation("File not open for writing")
def _check_can_seek(self):
- if not self.readable():
+ if self._mode not in (_MODE_READ, _MODE_READ_EOF):
+ self._check_not_closed()
raise io.UnsupportedOperation("Seeking is only supported "
"on files open for reading")
if not self._fp.seekable():
@@ -204,16 +208,13 @@ class LZMAFile(io.BufferedIOBase):
# Fill the readahead buffer if it is empty. Returns False on EOF.
def _fill_buffer(self):
+ if self._mode == _MODE_READ_EOF:
+ return False
# Depending on the input data, our call to the decompressor may not
# return any data. In this case, try again after reading another block.
- while True:
- if self._buffer:
- return True
-
- if self._decompressor.unused_data:
- rawblock = self._decompressor.unused_data
- else:
- rawblock = self._fp.read(_BUFFER_SIZE)
+ while self._buffer_offset == len(self._buffer):
+ rawblock = (self._decompressor.unused_data or
+ self._fp.read(_BUFFER_SIZE))
if not rawblock:
if self._decompressor.eof:
@@ -229,30 +230,48 @@ class LZMAFile(io.BufferedIOBase):
self._decompressor = LZMADecompressor(**self._init_args)
self._buffer = self._decompressor.decompress(rawblock)
+ self._buffer_offset = 0
+ return True
# Read data until EOF.
# If return_data is false, consume the data without returning it.
def _read_all(self, return_data=True):
+ # The loop assumes that _buffer_offset is 0. Ensure that this is true.
+ self._buffer = self._buffer[self._buffer_offset:]
+ self._buffer_offset = 0
+
blocks = []
while self._fill_buffer():
if return_data:
blocks.append(self._buffer)
self._pos += len(self._buffer)
- self._buffer = None
+ self._buffer = b""
if return_data:
return b"".join(blocks)
# Read a block of up to n bytes.
# If return_data is false, consume the data without returning it.
def _read_block(self, n, return_data=True):
+ # If we have enough data buffered, return immediately.
+ end = self._buffer_offset + n
+ if end <= len(self._buffer):
+ data = self._buffer[self._buffer_offset : end]
+ self._buffer_offset = end
+ self._pos += len(data)
+ return data if return_data else None
+
+ # The loop assumes that _buffer_offset is 0. Ensure that this is true.
+ self._buffer = self._buffer[self._buffer_offset:]
+ self._buffer_offset = 0
+
blocks = []
while n > 0 and self._fill_buffer():
if n < len(self._buffer):
data = self._buffer[:n]
- self._buffer = self._buffer[n:]
+ self._buffer_offset = n
else:
data = self._buffer
- self._buffer = None
+ self._buffer = b""
if return_data:
blocks.append(data)
self._pos += len(data)
@@ -267,9 +286,9 @@ class LZMAFile(io.BufferedIOBase):
The exact number of bytes returned is unspecified.
"""
self._check_can_read()
- if self._mode == _MODE_READ_EOF or not self._fill_buffer():
+ if not self._fill_buffer():
return b""
- return self._buffer
+ return self._buffer[self._buffer_offset:]
def read(self, size=-1):
"""Read up to size uncompressed bytes from the file.
@@ -278,7 +297,7 @@ class LZMAFile(io.BufferedIOBase):
Returns b"" if the file is already at EOF.
"""
self._check_can_read()
- if self._mode == _MODE_READ_EOF or size == 0:
+ if size == 0:
return b""
elif size < 0:
return self._read_all()
@@ -295,18 +314,40 @@ class LZMAFile(io.BufferedIOBase):
# this does not give enough data for the decompressor to make progress.
# In this case we make multiple reads, to avoid returning b"".
self._check_can_read()
- if (size == 0 or self._mode == _MODE_READ_EOF or
- not self._fill_buffer()):
+ if (size == 0 or
+ # Only call _fill_buffer() if the buffer is actually empty.
+ # This gives a significant speedup if *size* is small.
+ (self._buffer_offset == len(self._buffer) and not self._fill_buffer())):
return b""
- if 0 < size < len(self._buffer):
- data = self._buffer[:size]
- self._buffer = self._buffer[size:]
+ if size > 0:
+ data = self._buffer[self._buffer_offset :
+ self._buffer_offset + size]
+ self._buffer_offset += len(data)
else:
- data = self._buffer
- self._buffer = None
+ data = self._buffer[self._buffer_offset:]
+ self._buffer = b""
+ self._buffer_offset = 0
self._pos += len(data)
return data
+ def readline(self, size=-1):
+ """Read a line of uncompressed bytes from the file.
+
+ The terminating newline (if present) is retained. If size is
+ non-negative, no more than size bytes will be read (in which
+ case the line may be incomplete). Returns b'' if already at EOF.
+ """
+ self._check_can_read()
+ # Shortcut for the common case - the whole line is in the buffer.
+ if size < 0:
+ end = self._buffer.find(b"\n", self._buffer_offset) + 1
+ if end > 0:
+ line = self._buffer[self._buffer_offset : end]
+ self._buffer_offset = end
+ self._pos += len(line)
+ return line
+ return io.BufferedIOBase.readline(self, size)
+
def write(self, data):
"""Write a bytes object to the file.
@@ -326,7 +367,8 @@ class LZMAFile(io.BufferedIOBase):
self._mode = _MODE_READ
self._pos = 0
self._decompressor = LZMADecompressor(**self._init_args)
- self._buffer = None
+ self._buffer = b""
+ self._buffer_offset = 0
def seek(self, offset, whence=0):
"""Change the file position.
@@ -365,8 +407,7 @@ class LZMAFile(io.BufferedIOBase):
offset -= self._pos
# Read and discard data until we reach the desired position.
- if self._mode != _MODE_READ_EOF:
- self._read_block(offset, return_data=False)
+ self._read_block(offset, return_data=False)
return self._pos
@@ -381,23 +422,24 @@ def open(filename, mode="rb", *,
encoding=None, errors=None, newline=None):
"""Open an LZMA-compressed file in binary or text mode.
- filename can be either an actual file name (given as a str or bytes object),
- in which case the named file is opened, or it can be an existing file object
- to read from or write to.
+ filename can be either an actual file name (given as a str or bytes
+ object), in which case the named file is opened, or it can be an
+ existing file object to read from or write to.
- The mode argument can be "r", "rb" (default), "w", "wb", "a", or "ab" for
- binary mode, or "rt", "wt" or "at" for text mode.
+ The mode argument can be "r", "rb" (default), "w", "wb", "a" or "ab"
+ for binary mode, or "rt", "wt" or "at" for text mode.
- The format, check, preset and filters arguments specify the compression
- settings, as for LZMACompressor, LZMADecompressor and LZMAFile.
+ The format, check, preset and filters arguments specify the
+ compression settings, as for LZMACompressor, LZMADecompressor and
+ LZMAFile.
- For binary mode, this function is equivalent to the LZMAFile constructor:
- LZMAFile(filename, mode, ...). In this case, the encoding, errors and
- newline arguments must not be provided.
+ For binary mode, this function is equivalent to the LZMAFile
+ constructor: LZMAFile(filename, mode, ...). In this case, the
+ encoding, errors and newline arguments must not be provided.
For text mode, a LZMAFile object is created, and wrapped in an
- io.TextIOWrapper instance with the specified encoding, error handling
- behavior, and line ending(s).
+ io.TextIOWrapper instance with the specified encoding, error
+ handling behavior, and line ending(s).
"""
if "t" in mode:
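A short usage sketch for the documented modes (the file name is arbitrary); iterating the binary file exercises the new LZMAFile.readline() fast path added above:

import lzma

with lzma.open('example.xz', 'wb') as f:
    f.write(b'first line\nsecond line\n')

with lzma.open('example.xz', 'rb') as f:
    for line in f:            # io.IOBase iteration calls readline()
        print(line)

# Text mode works the same way, wrapped in io.TextIOWrapper:
with lzma.open('example.xz', 'rt', encoding='ascii') as f:
    print(f.read())
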
@@ -427,7 +469,7 @@ def compress(data, format=FORMAT_XZ, check=-1, preset=None, filters=None):
Refer to LZMACompressor's docstring for a description of the
optional arguments *format*, *check*, *preset* and *filters*.
- For incremental compression, use an LZMACompressor object instead.
+ For incremental compression, use an LZMACompressor instead.
"""
comp = LZMACompressor(format, check, preset, filters)
return comp.compress(data) + comp.flush()
@@ -439,7 +481,7 @@ def decompress(data, format=FORMAT_AUTO, memlimit=None, filters=None):
Refer to LZMADecompressor's docstring for a description of the
optional arguments *format*, *check* and *filters*.
- For incremental decompression, use a LZMADecompressor object instead.
+ For incremental decompression, use an LZMADecompressor instead.
"""
results = []
while True:
diff --git a/Lib/macpath.py b/Lib/macpath.py
index 1615d91..d34f9e94 100644
--- a/Lib/macpath.py
+++ b/Lib/macpath.py
@@ -127,7 +127,7 @@ def lexists(path):
try:
st = os.lstat(path)
- except os.error:
+ except OSError:
return False
return True
diff --git a/Lib/mailbox.py b/Lib/mailbox.py
index d3bf3fd..01f3551 100644
--- a/Lib/mailbox.py
+++ b/Lib/mailbox.py
@@ -22,9 +22,6 @@ import email.generator
import io
import contextlib
try:
- if sys.platform == 'os2emx':
- # OS/2 EMX fcntl() not adequate
- raise ImportError
import fcntl
except ImportError:
fcntl = None
@@ -631,8 +628,7 @@ class _singlefileMailbox(Mailbox):
def iterkeys(self):
"""Return an iterator over keys."""
self._lookup()
- for key in self._toc.keys():
- yield key
+ yield from self._toc.keys()
def __contains__(self, key):
"""Return True if the keyed message exists, False otherwise."""
@@ -711,8 +707,7 @@ class _singlefileMailbox(Mailbox):
try:
os.rename(new_file.name, self._path)
except OSError as e:
- if e.errno == errno.EEXIST or \
- (os.name == 'os2' and e.errno == errno.EACCES):
+ if e.errno == errno.EEXIST:
os.remove(self._path)
os.rename(new_file.name, self._path)
else:
@@ -2097,8 +2092,7 @@ def _lock_file(f, dotlock=True):
os.rename(pre_lock.name, f.name + '.lock')
dotlock_done = True
except OSError as e:
- if e.errno == errno.EEXIST or \
- (os.name == 'os2' and e.errno == errno.EACCES):
+ if e.errno == errno.EEXIST:
os.remove(pre_lock.name)
raise ExternalClashError('dot lock unavailable: %s' %
f.name)
diff --git a/Lib/mimetypes.py b/Lib/mimetypes.py
index 3f0bd0e..40faf8c 100644
--- a/Lib/mimetypes.py
+++ b/Lib/mimetypes.py
@@ -243,7 +243,7 @@ class MimeTypes:
while True:
try:
ctype = _winreg.EnumKey(mimedb, i)
- except EnvironmentError:
+ except OSError:
break
else:
yield ctype
@@ -256,7 +256,7 @@ class MimeTypes:
with _winreg.OpenKey(mimedb, ctype) as key:
suffix, datatype = _winreg.QueryValueEx(key,
'Extension')
- except EnvironmentError:
+ except OSError:
continue
if datatype != _winreg.REG_SZ:
continue
@@ -378,12 +378,14 @@ def _default_mime_types():
'.taz': '.tar.gz',
'.tz': '.tar.gz',
'.tbz2': '.tar.bz2',
+ '.txz': '.tar.xz',
}
encodings_map = {
'.gz': 'gzip',
'.Z': 'compress',
'.bz2': 'bzip2',
+ '.xz': 'xz',
}
# Before adding new types, make sure they are either registered with IANA,
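With the new entries, .xz is reported as an encoding and .txz expands like the other tarball shorthands. Illustrative checks:

import mimetypes

print(mimetypes.guess_type('backup.tar.xz'))   # ('application/x-tar', 'xz')
print(mimetypes.guess_type('backup.txz'))      # ('application/x-tar', 'xz')
print(mimetypes.guess_type('notes.txt.xz'))    # ('text/plain', 'xz')
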
diff --git a/Lib/modulefinder.py b/Lib/modulefinder.py
index f90a432..4996d7a 100644
--- a/Lib/modulefinder.py
+++ b/Lib/modulefinder.py
@@ -229,7 +229,7 @@ class ModuleFinder:
for dir in m.__path__:
try:
names = os.listdir(dir)
- except os.error:
+ except OSError:
self.msg(2, "can't list directory", dir)
continue
for name in names:
diff --git a/Lib/multiprocessing/__init__.py b/Lib/multiprocessing/__init__.py
index 1f3e67c..efad532 100644
--- a/Lib/multiprocessing/__init__.py
+++ b/Lib/multiprocessing/__init__.py
@@ -40,6 +40,13 @@ from multiprocessing.process import Process, current_process, active_children
from multiprocessing.util import SUBDEBUG, SUBWARNING
#
+# Alias for main module -- will be reset by bootstrapping child processes
+#
+
+if '__main__' in sys.modules:
+ sys.modules['__mp_main__'] = sys.modules['__main__']
+
+#
# Exceptions
#
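The alias gives child processes a stable name for the parent's main module without re-running its 'if __name__ == "__main__"' block on platforms that re-import it (e.g. Windows). The usual guarded pattern, for illustration:

import multiprocessing

def work(x):
    return x * x

if __name__ == '__main__':                  # executed only in the parent
    with multiprocessing.Pool(2) as pool:
        print(pool.map(work, range(5)))     # [0, 1, 4, 9, 16]
# In the children this file is loaded as __mp_main__, so the guarded
# block above is not executed again.
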
diff --git a/Lib/multiprocessing/connection.py b/Lib/multiprocessing/connection.py
index fbbd5d9..391d6b2 100644
--- a/Lib/multiprocessing/connection.py
+++ b/Lib/multiprocessing/connection.py
@@ -676,7 +676,7 @@ if sys.platform == 'win32':
0, _winapi.NULL, _winapi.OPEN_EXISTING,
_winapi.FILE_FLAG_OVERLAPPED, _winapi.NULL
)
- except WindowsError as e:
+ except OSError as e:
if e.winerror not in (_winapi.ERROR_SEM_TIMEOUT,
_winapi.ERROR_PIPE_BUSY) or _check_timeout(t):
raise
diff --git a/Lib/multiprocessing/forking.py b/Lib/multiprocessing/forking.py
index c5501a2..d3e7a10 100644
--- a/Lib/multiprocessing/forking.py
+++ b/Lib/multiprocessing/forking.py
@@ -111,7 +111,7 @@ if sys.platform != 'win32':
if self.returncode is None:
try:
pid, sts = os.waitpid(self.pid, flag)
- except os.error:
+ except OSError:
# Child process not yet created. See #1731717
# e.errno == errno.ECHILD == 10
return None
@@ -442,27 +442,17 @@ def prepare(data):
dirs = [os.path.dirname(main_path)]
assert main_name not in sys.modules, main_name
+ sys.modules.pop('__mp_main__', None)
file, path_name, etc = imp.find_module(main_name, dirs)
try:
- # We would like to do "imp.load_module('__main__', ...)"
- # here. However, that would cause 'if __name__ ==
- # "__main__"' clauses to be executed.
+ # We should not do 'imp.load_module("__main__", ...)'
+ # since that would execute 'if __name__ == "__main__"'
+ # clauses, potentially causing a pseudo fork bomb.
main_module = imp.load_module(
- '__parents_main__', file, path_name, etc
+ '__mp_main__', file, path_name, etc
)
finally:
if file:
file.close()
- sys.modules['__main__'] = main_module
- main_module.__name__ = '__main__'
-
- # Try to make the potentially picklable objects in
- # sys.modules['__main__'] realize they are in the main
- # module -- somewhat ugly.
- for obj in list(main_module.__dict__.values()):
- try:
- if obj.__module__ == '__parents_main__':
- obj.__module__ = '__main__'
- except Exception:
- pass
+ sys.modules['__main__'] = sys.modules['__mp_main__'] = main_module
diff --git a/Lib/multiprocessing/queues.py b/Lib/multiprocessing/queues.py
index 37271fb..f6f02b6 100644
--- a/Lib/multiprocessing/queues.py
+++ b/Lib/multiprocessing/queues.py
@@ -243,10 +243,14 @@ class Queue(object):
if wacquire is None:
send(obj)
+ # Delete references to object. See issue16284
+ del obj
else:
wacquire()
try:
send(obj)
+ # Delete references to object. See issue16284
+ del obj
finally:
wrelease()
except IndexError:
diff --git a/Lib/nntplib.py b/Lib/nntplib.py
index 2de6ebd..ffab2c2 100644
--- a/Lib/nntplib.py
+++ b/Lib/nntplib.py
@@ -359,7 +359,7 @@ class _NNTPBase:
if is_connected():
try:
self.quit()
- except (socket.error, EOFError):
+ except (OSError, EOFError):
pass
finally:
if is_connected():
diff --git a/Lib/ntpath.py b/Lib/ntpath.py
index 826be87..2fbb6d69 100644
--- a/Lib/ntpath.py
+++ b/Lib/ntpath.py
@@ -30,9 +30,6 @@ altsep = '/'
defpath = '.;C:\\bin'
if 'ce' in sys.builtin_module_names:
defpath = '\\Windows'
-elif 'os2' in sys.builtin_module_names:
- # OS/2 w/ VACPP
- altsep = '/'
devnull = 'nul'
def _get_empty(path):
@@ -320,12 +317,11 @@ def dirname(p):
def islink(path):
"""Test whether a path is a symbolic link.
- This will always return false for Windows prior to 6.0
- and for OS/2.
+ This will always return false for Windows prior to 6.0.
"""
try:
st = os.lstat(path)
- except (os.error, AttributeError):
+ except (OSError, AttributeError):
return False
return stat.S_ISLNK(st.st_mode)
@@ -335,7 +331,7 @@ def lexists(path):
"""Test whether a path exists. Returns True for broken symbolic links"""
try:
st = os.lstat(path)
- except (os.error, WindowsError):
+ except OSError:
return False
return True
@@ -588,7 +584,7 @@ else: # use native Windows method on Windows
if path: # Empty path must return current working directory.
try:
path = _getfullpathname(path)
- except WindowsError:
+ except OSError:
pass # Bad path - return unchanged.
elif isinstance(path, bytes):
path = os.getcwdb()
diff --git a/Lib/os.py b/Lib/os.py
index 84eeaeb..dbebf89 100644
--- a/Lib/os.py
+++ b/Lib/os.py
@@ -1,9 +1,9 @@
r"""OS routines for Mac, NT, or Posix depending on what system we're on.
This exports:
- - all functions from posix, nt, os2, or ce, e.g. unlink, stat, etc.
+ - all functions from posix, nt or ce, e.g. unlink, stat, etc.
- os.path is either posixpath or ntpath
- - os.name is either 'posix', 'nt', 'os2' or 'ce'.
+ - os.name is either 'posix', 'nt' or 'ce'.
- os.curdir is a string representing the current directory ('.' or ':')
- os.pardir is a string representing the parent directory ('..' or '::')
- os.sep is the (or a most common) pathname separator ('/' or ':' or '\\')
@@ -81,30 +81,6 @@ elif 'nt' in _names:
except ImportError:
pass
-elif 'os2' in _names:
- name = 'os2'
- linesep = '\r\n'
- from os2 import *
- try:
- from os2 import _exit
- __all__.append('_exit')
- except ImportError:
- pass
- if sys.version.find('EMX GCC') == -1:
- import ntpath as path
- else:
- import os2emxpath as path
- from _emx_link import link
-
- import os2
- __all__.extend(_get_exports_list(os2))
- del os2
-
- try:
- from os2 import _have_functions
- except ImportError:
- pass
-
elif 'ce' in _names:
name = 'ce'
linesep = '\r\n'
@@ -299,7 +275,7 @@ def removedirs(name):
while head and tail:
try:
rmdir(head)
- except error:
+ except OSError:
break
head, tail = path.split(head)
@@ -326,7 +302,7 @@ def renames(old, new):
if head and tail:
try:
removedirs(head)
- except error:
+ except OSError:
pass
__all__.extend(["makedirs", "removedirs", "renames"])
@@ -362,7 +338,7 @@ def walk(top, topdown=True, onerror=None, followlinks=False):
By default errors from the os.listdir() call are ignored. If
optional arg 'onerror' is specified, it should be a function; it
- will be called with one argument, an os.error instance. It can
+ will be called with one argument, an OSError instance. It can
report the error to continue with the walk, or raise the exception
to abort the walk. Note that the filename is available as the
filename attribute of the exception object.
@@ -396,10 +372,10 @@ def walk(top, topdown=True, onerror=None, followlinks=False):
# minor reason when (say) a thousand readable directories are still
# left to visit. That logic is copied here.
try:
- # Note that listdir and error are globals in this module due
+ # Note that listdir is global in this module due
# to earlier import-*.
names = listdir(top)
- except error as err:
+ except OSError as err:
if onerror is not None:
onerror(err)
return
@@ -501,7 +477,7 @@ if {open, stat} <= supports_dir_fd and {listdir, stat} <= supports_fd:
try:
orig_st = stat(name, dir_fd=topfd, follow_symlinks=follow_symlinks)
dirfd = open(name, O_RDONLY, dir_fd=topfd)
- except error as err:
+ except OSError as err:
if onerror is not None:
onerror(err)
return
@@ -596,7 +572,7 @@ def _execvpe(file, args, env=None):
fullname = path.join(dir, file)
try:
exec_func(fullname, *argrest)
- except error as e:
+ except OSError as e:
last_exc = e
tb = sys.exc_info()[2]
if (e.errno != errno.ENOENT and e.errno != errno.ENOTDIR
@@ -715,7 +691,7 @@ else:
__all__.append("unsetenv")
def _createenviron():
- if name in ('os2', 'nt'):
+ if name == 'nt':
# Where Env Var Names Must Be UPPERCASE
def check_str(value):
if not isinstance(value, str):
@@ -755,7 +731,7 @@ def getenv(key, default=None):
key, default and the result are str."""
return environ.get(key, default)
-supports_bytes_environ = name not in ('os2', 'nt')
+supports_bytes_environ = (name != 'nt')
__all__.extend(("getenv", "supports_bytes_environ"))
if supports_bytes_environ:
@@ -854,7 +830,7 @@ if _exists("fork") and not _exists("spawnv") and _exists("execv"):
elif WIFEXITED(sts):
return WEXITSTATUS(sts)
else:
- raise error("Not stopped, signaled or exited???")
+ raise OSError("Not stopped, signaled or exited???")
def spawnv(mode, file, args):
"""spawnv(mode, file, args) -> integer
diff --git a/Lib/os2emxpath.py b/Lib/os2emxpath.py
deleted file mode 100644
index 0ccbf8a..0000000
--- a/Lib/os2emxpath.py
+++ /dev/null
@@ -1,158 +0,0 @@
-# Module 'os2emxpath' -- common operations on OS/2 pathnames
-"""Common pathname manipulations, OS/2 EMX version.
-
-Instead of importing this module directly, import os and refer to this
-module as os.path.
-"""
-
-import os
-import stat
-from genericpath import *
-from ntpath import (expanduser, expandvars, isabs, islink, splitdrive,
- splitext, split)
-
-__all__ = ["normcase","isabs","join","splitdrive","split","splitext",
- "basename","dirname","commonprefix","getsize","getmtime",
- "getatime","getctime", "islink","exists","lexists","isdir","isfile",
- "ismount","expanduser","expandvars","normpath","abspath",
- "splitunc","curdir","pardir","sep","pathsep","defpath","altsep",
- "extsep","devnull","realpath","supports_unicode_filenames"]
-
-# strings representing various path-related bits and pieces
-curdir = '.'
-pardir = '..'
-extsep = '.'
-sep = '/'
-altsep = '\\'
-pathsep = ';'
-defpath = '.;C:\\bin'
-devnull = 'nul'
-
-# Normalize the case of a pathname and map slashes to backslashes.
-# Other normalizations (such as optimizing '../' away) are not done
-# (this is done by normpath).
-
-def normcase(s):
- """Normalize case of pathname.
-
- Makes all characters lowercase and all altseps into seps."""
- if not isinstance(s, (bytes, str)):
- raise TypeError("normcase() argument must be str or bytes, "
- "not '{}'".format(s.__class__.__name__))
- return s.replace('\\', '/').lower()
-
-
-# Join two (or more) paths.
-
-def join(a, *p):
- """Join two or more pathname components, inserting sep as needed"""
- path = a
- for b in p:
- if isabs(b):
- path = b
- elif path == '' or path[-1:] in '/\\:':
- path = path + b
- else:
- path = path + '/' + b
- return path
-
-
-# Parse UNC paths
-def splitunc(p):
- """Split a pathname into UNC mount point and relative path specifiers.
-
- Return a 2-tuple (unc, rest); either part may be empty.
- If unc is not empty, it has the form '//host/mount' (or similar
- using backslashes). unc+rest is always the input path.
- Paths containing drive letters never have an UNC part.
- """
- if p[1:2] == ':':
- return '', p # Drive letter present
- firstTwo = p[0:2]
- if firstTwo == '/' * 2 or firstTwo == '\\' * 2:
- # is a UNC path:
- # vvvvvvvvvvvvvvvvvvvv equivalent to drive letter
- # \\machine\mountpoint\directories...
- # directory ^^^^^^^^^^^^^^^
- normp = normcase(p)
- index = normp.find('/', 2)
- if index == -1:
- ##raise RuntimeError, 'illegal UNC path: "' + p + '"'
- return ("", p)
- index = normp.find('/', index + 1)
- if index == -1:
- index = len(p)
- return p[:index], p[index:]
- return '', p
-
-
-# Return the tail (basename) part of a path.
-
-def basename(p):
- """Returns the final component of a pathname"""
- return split(p)[1]
-
-
-# Return the head (dirname) part of a path.
-
-def dirname(p):
- """Returns the directory component of a pathname"""
- return split(p)[0]
-
-
-# alias exists to lexists
-lexists = exists
-
-
-# Is a path a directory?
-
-# Is a path a mount point? Either a root (with or without drive letter)
-# or an UNC path with at most a / or \ after the mount point.
-
-def ismount(path):
- """Test whether a path is a mount point (defined as root of drive)"""
- unc, rest = splitunc(path)
- if unc:
- return rest in ("", "/", "\\")
- p = splitdrive(path)[1]
- return len(p) == 1 and p[0] in '/\\'
-
-
-# Normalize a path, e.g. A//B, A/./B and A/foo/../B all become A/B.
-
-def normpath(path):
- """Normalize path, eliminating double slashes, etc."""
- path = path.replace('\\', '/')
- prefix, path = splitdrive(path)
- while path[:1] == '/':
- prefix = prefix + '/'
- path = path[1:]
- comps = path.split('/')
- i = 0
- while i < len(comps):
- if comps[i] == '.':
- del comps[i]
- elif comps[i] == '..' and i > 0 and comps[i-1] not in ('', '..'):
- del comps[i-1:i+1]
- i = i - 1
- elif comps[i] == '' and i > 0 and comps[i-1] != '':
- del comps[i]
- else:
- i = i + 1
- # If the path is now empty, substitute '.'
- if not prefix and not comps:
- comps.append('.')
- return prefix + '/'.join(comps)
-
-
-# Return an absolute path.
-def abspath(path):
- """Return the absolute version of a path"""
- if not isabs(path):
- path = join(os.getcwd(), path)
- return normpath(path)
-
-# realpath is a no-op on systems without islink support
-realpath = abspath
-
-supports_unicode_filenames = False
diff --git a/Lib/pkgutil.py b/Lib/pkgutil.py
index a5de04d..01eeefb 100644
--- a/Lib/pkgutil.py
+++ b/Lib/pkgutil.py
@@ -121,8 +121,7 @@ def walk_packages(path=None, prefix='', onerror=None):
# don't traverse path items we've seen before
path = [p for p in path if not seen(p)]
- for item in walk_packages(path, name+'.', onerror):
- yield item
+ yield from walk_packages(path, name+'.', onerror)
def iter_modules(path=None, prefix=''):
@@ -456,8 +455,7 @@ def iter_importers(fullname=""):
if path is None:
return
else:
- for importer in sys.meta_path:
- yield importer
+ yield from sys.meta_path
path = sys.path
for item in path:
yield get_importer(item)
diff --git a/Lib/plat-os2emx/IN.py b/Lib/plat-os2emx/IN.py
deleted file mode 100644
index 753ae24..0000000
--- a/Lib/plat-os2emx/IN.py
+++ /dev/null
@@ -1,82 +0,0 @@
-# Generated by h2py from f:/emx/include/netinet/in.h
-
-# Included from sys/param.h
-PAGE_SIZE = 0x1000
-HZ = 100
-MAXNAMLEN = 260
-MAXPATHLEN = 260
-def htonl(X): return _swapl(X)
-
-def ntohl(X): return _swapl(X)
-
-def htons(X): return _swaps(X)
-
-def ntohs(X): return _swaps(X)
-
-IPPROTO_IP = 0
-IPPROTO_ICMP = 1
-IPPROTO_IGMP = 2
-IPPROTO_GGP = 3
-IPPROTO_TCP = 6
-IPPROTO_EGP = 8
-IPPROTO_PUP = 12
-IPPROTO_UDP = 17
-IPPROTO_IDP = 22
-IPPROTO_TP = 29
-IPPROTO_EON = 80
-IPPROTO_RAW = 255
-IPPROTO_MAX = 256
-IPPORT_RESERVED = 1024
-IPPORT_USERRESERVED = 5000
-def IN_CLASSA(i): return (((int)(i) & 0x80000000) == 0)
-
-IN_CLASSA_NET = 0xff000000
-IN_CLASSA_NSHIFT = 24
-IN_CLASSA_HOST = 0x00ffffff
-IN_CLASSA_MAX = 128
-def IN_CLASSB(i): return (((int)(i) & 0xc0000000) == 0x80000000)
-
-IN_CLASSB_NET = 0xffff0000
-IN_CLASSB_NSHIFT = 16
-IN_CLASSB_HOST = 0x0000ffff
-IN_CLASSB_MAX = 65536
-def IN_CLASSC(i): return (((int)(i) & 0xe0000000) == 0xc0000000)
-
-IN_CLASSC_NET = 0xffffff00
-IN_CLASSC_NSHIFT = 8
-IN_CLASSC_HOST = 0x000000ff
-def IN_CLASSD(i): return (((int)(i) & 0xf0000000) == 0xe0000000)
-
-IN_CLASSD_NET = 0xf0000000
-IN_CLASSD_NSHIFT = 28
-IN_CLASSD_HOST = 0x0fffffff
-def IN_MULTICAST(i): return IN_CLASSD(i)
-
-def IN_EXPERIMENTAL(i): return (((int)(i) & 0xe0000000) == 0xe0000000)
-
-def IN_BADCLASS(i): return (((int)(i) & 0xf0000000) == 0xf0000000)
-
-INADDR_ANY = 0x00000000
-INADDR_LOOPBACK = 0x7f000001
-INADDR_BROADCAST = 0xffffffff
-INADDR_NONE = 0xffffffff
-INADDR_UNSPEC_GROUP = 0xe0000000
-INADDR_ALLHOSTS_GROUP = 0xe0000001
-INADDR_MAX_LOCAL_GROUP = 0xe00000ff
-IN_LOOPBACKNET = 127
-IP_OPTIONS = 1
-IP_MULTICAST_IF = 2
-IP_MULTICAST_TTL = 3
-IP_MULTICAST_LOOP = 4
-IP_ADD_MEMBERSHIP = 5
-IP_DROP_MEMBERSHIP = 6
-IP_HDRINCL = 2
-IP_TOS = 3
-IP_TTL = 4
-IP_RECVOPTS = 5
-IP_RECVRETOPTS = 6
-IP_RECVDSTADDR = 7
-IP_RETOPTS = 8
-IP_DEFAULT_MULTICAST_TTL = 1
-IP_DEFAULT_MULTICAST_LOOP = 1
-IP_MAX_MEMBERSHIPS = 20
diff --git a/Lib/plat-os2emx/SOCKET.py b/Lib/plat-os2emx/SOCKET.py
deleted file mode 100644
index dac594a..0000000
--- a/Lib/plat-os2emx/SOCKET.py
+++ /dev/null
@@ -1,106 +0,0 @@
-# Generated by h2py from f:/emx/include/sys/socket.h
-
-# Included from sys/types.h
-FD_SETSIZE = 256
-
-# Included from sys/uio.h
-FREAD = 1
-FWRITE = 2
-SOCK_STREAM = 1
-SOCK_DGRAM = 2
-SOCK_RAW = 3
-SOCK_RDM = 4
-SOCK_SEQPACKET = 5
-SO_DEBUG = 0x0001
-SO_ACCEPTCONN = 0x0002
-SO_REUSEADDR = 0x0004
-SO_KEEPALIVE = 0x0008
-SO_DONTROUTE = 0x0010
-SO_BROADCAST = 0x0020
-SO_USELOOPBACK = 0x0040
-SO_LINGER = 0x0080
-SO_OOBINLINE = 0x0100
-SO_L_BROADCAST = 0x0200
-SO_RCV_SHUTDOWN = 0x0400
-SO_SND_SHUTDOWN = 0x0800
-SO_SNDBUF = 0x1001
-SO_RCVBUF = 0x1002
-SO_SNDLOWAT = 0x1003
-SO_RCVLOWAT = 0x1004
-SO_SNDTIMEO = 0x1005
-SO_RCVTIMEO = 0x1006
-SO_ERROR = 0x1007
-SO_TYPE = 0x1008
-SO_OPTIONS = 0x1010
-SOL_SOCKET = 0xffff
-AF_UNSPEC = 0
-AF_UNIX = 1
-AF_INET = 2
-AF_IMPLINK = 3
-AF_PUP = 4
-AF_CHAOS = 5
-AF_NS = 6
-AF_NBS = 7
-AF_ISO = 7
-AF_OSI = AF_ISO
-AF_ECMA = 8
-AF_DATAKIT = 9
-AF_CCITT = 10
-AF_SNA = 11
-AF_DECnet = 12
-AF_DLI = 13
-AF_LAT = 14
-AF_HYLINK = 15
-AF_APPLETALK = 16
-AF_NB = 17
-AF_NETBIOS = AF_NB
-AF_OS2 = AF_UNIX
-AF_MAX = 18
-PF_UNSPEC = AF_UNSPEC
-PF_UNIX = AF_UNIX
-PF_INET = AF_INET
-PF_IMPLINK = AF_IMPLINK
-PF_PUP = AF_PUP
-PF_CHAOS = AF_CHAOS
-PF_NS = AF_NS
-PF_NBS = AF_NBS
-PF_ISO = AF_ISO
-PF_OSI = AF_ISO
-PF_ECMA = AF_ECMA
-PF_DATAKIT = AF_DATAKIT
-PF_CCITT = AF_CCITT
-PF_SNA = AF_SNA
-PF_DECnet = AF_DECnet
-PF_DLI = AF_DLI
-PF_LAT = AF_LAT
-PF_HYLINK = AF_HYLINK
-PF_APPLETALK = AF_APPLETALK
-PF_NB = AF_NB
-PF_NETBIOS = AF_NB
-PF_OS2 = AF_UNIX
-PF_MAX = AF_MAX
-SOMAXCONN = 5
-MSG_OOB = 0x1
-MSG_PEEK = 0x2
-MSG_DONTROUTE = 0x4
-MSG_EOR = 0x8
-MSG_TRUNC = 0x10
-MSG_CTRUNC = 0x20
-MSG_WAITALL = 0x40
-MSG_MAXIOVLEN = 16
-SCM_RIGHTS = 0x01
-MT_FREE = 0
-MT_DATA = 1
-MT_HEADER = 2
-MT_SOCKET = 3
-MT_PCB = 4
-MT_RTABLE = 5
-MT_HTABLE = 6
-MT_ATABLE = 7
-MT_SONAME = 8
-MT_ZOMBIE = 9
-MT_SOOPTS = 10
-MT_FTABLE = 11
-MT_RIGHTS = 12
-MT_IFADDR = 13
-MAXSOCKETS = 2048
diff --git a/Lib/plat-os2emx/_emx_link.py b/Lib/plat-os2emx/_emx_link.py
deleted file mode 100644
index 01e6b54..0000000
--- a/Lib/plat-os2emx/_emx_link.py
+++ /dev/null
@@ -1,79 +0,0 @@
-# _emx_link.py
-
-# Written by Andrew I MacIntyre, December 2002.
-
-"""_emx_link.py is a simplistic emulation of the Unix link(2) library routine
-for creating so-called hard links. It is intended to be imported into
-the os module in place of the unimplemented (on OS/2) Posix link()
-function (os.link()).
-
-We do this on OS/2 by implementing a file copy, with link(2) semantics:-
- - the target cannot already exist;
- - we hope that the actual file open (if successful) is actually
- atomic...
-
-Limitations of this approach/implementation include:-
- - no support for correct link counts (EMX stat(target).st_nlink
- is always 1);
- - thread safety undefined;
- - default file permissions (r+w) used, can't be over-ridden;
- - implemented in Python so comparatively slow, especially for large
- source files;
- - need sufficient free disk space to store the copy.
-
-Behaviour:-
- - any exception should propagate to the caller;
- - want target to be an exact copy of the source, so use binary mode;
- - returns None, same as os.link() which is implemented in posixmodule.c;
- - target removed in the event of a failure where possible;
- - given the motivation to write this emulation came from trying to
- support a Unix resource lock implementation, where minimal overhead
- during creation of the target is desirable and the files are small,
- we read a source block before attempting to create the target so that
- we're ready to immediately write some data into it.
-"""
-
-import os
-import errno
-
-__all__ = ['link']
-
-def link(source, target):
- """link(source, target) -> None
-
- Attempt to hard link the source file to the target file name.
- On OS/2, this creates a complete copy of the source file.
- """
-
- s = os.open(source, os.O_RDONLY | os.O_BINARY)
- if os.isatty(s):
- raise OSError(errno.EXDEV, 'Cross-device link')
- data = os.read(s, 1024)
-
- try:
- t = os.open(target, os.O_WRONLY | os.O_BINARY | os.O_CREAT | os.O_EXCL)
- except OSError:
- os.close(s)
- raise
-
- try:
- while data:
- os.write(t, data)
- data = os.read(s, 1024)
- except OSError:
- os.close(s)
- os.close(t)
- os.unlink(target)
- raise
-
- os.close(s)
- os.close(t)
-
-if __name__ == '__main__':
- import sys
- try:
- link(sys.argv[1], sys.argv[2])
- except IndexError:
- print('Usage: emx_link <source> <target>')
- except OSError:
- print('emx_link: %s' % str(sys.exc_info()[1]))
diff --git a/Lib/plat-os2emx/grp.py b/Lib/plat-os2emx/grp.py
deleted file mode 100644
index ee63ef8..0000000
--- a/Lib/plat-os2emx/grp.py
+++ /dev/null
@@ -1,182 +0,0 @@
-# this module is an OS/2 oriented replacement for the grp standard
-# extension module.
-
-# written by Andrew MacIntyre, April 2001.
-# updated July 2003, adding field accessor support
-
-# note that this implementation checks whether ":" or ";" as used as
-# the field separator character.
-
-"""Replacement for grp standard extension module, intended for use on
-OS/2 and similar systems which don't normally have an /etc/group file.
-
-The standard Unix group database is an ASCII text file with 4 fields per
-record (line), separated by a colon:
- - group name (string)
- - group password (optional encrypted string)
- - group id (integer)
- - group members (comma delimited list of userids, with no spaces)
-
-Note that members are only included in the group file for groups that
-aren't their primary groups.
-(see the section 8.2 of the Python Library Reference)
-
-This implementation differs from the standard Unix implementation by
-allowing use of the platform's native path separator character - ';' on OS/2,
-DOS and MS-Windows - as the field separator in addition to the Unix
-standard ":".
-
-The module looks for the group database at the following locations
-(in order first to last):
- - ${ETC_GROUP} (or %ETC_GROUP%)
- - ${ETC}/group (or %ETC%/group)
- - ${PYTHONHOME}/Etc/group (or %PYTHONHOME%/Etc/group)
-
-Classes
--------
-
-None
-
-Functions
----------
-
-getgrgid(gid) - return the record for group-id gid as a 4-tuple
-
-getgrnam(name) - return the record for group 'name' as a 4-tuple
-
-getgrall() - return a list of 4-tuples, each tuple being one record
- (NOTE: the order is arbitrary)
-
-Attributes
-----------
-
-group_file - the path of the group database file
-
-"""
-
-import os
-
-# try and find the group file
-__group_path = []
-if 'ETC_GROUP' in os.environ:
- __group_path.append(os.environ['ETC_GROUP'])
-if 'ETC' in os.environ:
- __group_path.append('%s/group' % os.environ['ETC'])
-if 'PYTHONHOME' in os.environ:
- __group_path.append('%s/Etc/group' % os.environ['PYTHONHOME'])
-
-group_file = None
-for __i in __group_path:
- try:
- __f = open(__i, 'r')
- __f.close()
- group_file = __i
- break
- except:
- pass
-
-# decide what field separator we can try to use - Unix standard, with
-# the platform's path separator as an option. No special field conversion
-# handlers are required for the group file.
-__field_sep = [':']
-if os.pathsep:
- if os.pathsep != ':':
- __field_sep.append(os.pathsep)
-
-# helper routine to identify which separator character is in use
-def __get_field_sep(record):
- fs = None
- for c in __field_sep:
- # there should be 3 delimiter characters (for 4 fields)
- if record.count(c) == 3:
- fs = c
- break
- if fs:
- return fs
- else:
- raise KeyError('>> group database fields not delimited <<')
-
-# class to match the new record field name accessors.
-# the resulting object is intended to behave like a read-only tuple,
-# with each member also accessible by a field name.
-class Group:
- def __init__(self, name, passwd, gid, mem):
- self.__dict__['gr_name'] = name
- self.__dict__['gr_passwd'] = passwd
- self.__dict__['gr_gid'] = gid
- self.__dict__['gr_mem'] = mem
- self.__dict__['_record'] = (self.gr_name, self.gr_passwd,
- self.gr_gid, self.gr_mem)
-
- def __len__(self):
- return 4
-
- def __getitem__(self, key):
- return self._record[key]
-
- def __setattr__(self, name, value):
- raise AttributeError('attribute read-only: %s' % name)
-
- def __repr__(self):
- return str(self._record)
-
- def __cmp__(self, other):
- this = str(self._record)
- if this == other:
- return 0
- elif this < other:
- return -1
- else:
- return 1
-
-
-# read the whole file, parsing each entry into tuple form
-# with dictionaries to speed recall by GID or group name
-def __read_group_file():
- if group_file:
- group = open(group_file, 'r')
- else:
- raise KeyError('>> no group database <<')
- gidx = {}
- namx = {}
- sep = None
- while 1:
- entry = group.readline().strip()
- if len(entry) > 3:
- if sep is None:
- sep = __get_field_sep(entry)
- fields = entry.split(sep)
- fields[2] = int(fields[2])
- fields[3] = [f.strip() for f in fields[3].split(',')]
- record = Group(*fields)
- if fields[2] not in gidx:
- gidx[fields[2]] = record
- if fields[0] not in namx:
- namx[fields[0]] = record
- elif len(entry) > 0:
- pass # skip empty or malformed records
- else:
- break
- group.close()
- if len(gidx) == 0:
- raise KeyError
- return (gidx, namx)
-
-# return the group database entry by GID
-def getgrgid(gid):
- g, n = __read_group_file()
- return g[gid]
-
-# return the group database entry by group name
-def getgrnam(name):
- g, n = __read_group_file()
- return n[name]
-
-# return all the group database entries
-def getgrall():
- g, n = __read_group_file()
- return g.values()
-
-# test harness
-if __name__ == '__main__':
- getgrall()
diff --git a/Lib/plat-os2emx/pwd.py b/Lib/plat-os2emx/pwd.py
deleted file mode 100644
index 2cb077f..0000000
--- a/Lib/plat-os2emx/pwd.py
+++ /dev/null
@@ -1,208 +0,0 @@
-# this module is an OS/2 oriented replacement for the pwd standard
-# extension module.
-
-# written by Andrew MacIntyre, April 2001.
-# updated July 2003, adding field accessor support
-
-# note that this implementation checks whether ":" or ";" as used as
-# the field separator character. Path conversions are are applied when
-# the database uses ":" as the field separator character.
-
-"""Replacement for pwd standard extension module, intended for use on
-OS/2 and similar systems which don't normally have an /etc/passwd file.
-
-The standard Unix password database is an ASCII text file with 7 fields
-per record (line), separated by a colon:
- - user name (string)
- - password (encrypted string, or "*" or "")
- - user id (integer)
- - group id (integer)
- - description (usually user's name)
- - home directory (path to user's home directory)
- - shell (path to the user's login shell)
-
-(see the section 8.1 of the Python Library Reference)
-
-This implementation differs from the standard Unix implementation by
-allowing use of the platform's native path separator character - ';' on OS/2,
-DOS and MS-Windows - as the field separator in addition to the Unix
-standard ":". Additionally, when ":" is the separator path conversions
-are applied to deal with any munging of the drive letter reference.
-
-The module looks for the password database at the following locations
-(in order first to last):
- - ${ETC_PASSWD} (or %ETC_PASSWD%)
- - ${ETC}/passwd (or %ETC%/passwd)
- - ${PYTHONHOME}/Etc/passwd (or %PYTHONHOME%/Etc/passwd)
-
-Classes
--------
-
-None
-
-Functions
----------
-
-getpwuid(uid) - return the record for user-id uid as a 7-tuple
-
-getpwnam(name) - return the record for user 'name' as a 7-tuple
-
-getpwall() - return a list of 7-tuples, each tuple being one record
- (NOTE: the order is arbitrary)
-
-Attributes
-----------
-
-passwd_file - the path of the password database file
-
-"""
-
-import os
-
-# try and find the passwd file
-__passwd_path = []
-if 'ETC_PASSWD' in os.environ:
- __passwd_path.append(os.environ['ETC_PASSWD'])
-if 'ETC' in os.environ:
- __passwd_path.append('%s/passwd' % os.environ['ETC'])
-if 'PYTHONHOME' in os.environ:
- __passwd_path.append('%s/Etc/passwd' % os.environ['PYTHONHOME'])
-
-passwd_file = None
-for __i in __passwd_path:
- try:
- __f = open(__i, 'r')
- __f.close()
- passwd_file = __i
- break
- except:
- pass
-
-# path conversion handlers
-def __nullpathconv(path):
- return path.replace(os.altsep, os.sep)
-
-def __unixpathconv(path):
- # two known drive letter variations: "x;" and "$x"
- if path[0] == '$':
- conv = path[1] + ':' + path[2:]
- elif path[1] == ';':
- conv = path[0] + ':' + path[2:]
- else:
- conv = path
- return conv.replace(os.altsep, os.sep)
-
-# decide what field separator we can try to use - Unix standard, with
-# the platform's path separator as an option. No special field conversion
-# handler is required when using the platform's path separator as field
-# separator, but are required for the home directory and shell fields when
-# using the standard Unix (":") field separator.
-__field_sep = {':': __unixpathconv}
-if os.pathsep:
- if os.pathsep != ':':
- __field_sep[os.pathsep] = __nullpathconv
-
-# helper routine to identify which separator character is in use
-def __get_field_sep(record):
- fs = None
- for c in __field_sep.keys():
- # there should be 6 delimiter characters (for 7 fields)
- if record.count(c) == 6:
- fs = c
- break
- if fs:
- return fs
- else:
- raise KeyError('>> passwd database fields not delimited <<')
-
-# class to match the new record field name accessors.
-# the resulting object is intended to behave like a read-only tuple,
-# with each member also accessible by a field name.
-class Passwd:
- def __init__(self, name, passwd, uid, gid, gecos, dir, shell):
- self.__dict__['pw_name'] = name
- self.__dict__['pw_passwd'] = passwd
- self.__dict__['pw_uid'] = uid
- self.__dict__['pw_gid'] = gid
- self.__dict__['pw_gecos'] = gecos
- self.__dict__['pw_dir'] = dir
- self.__dict__['pw_shell'] = shell
- self.__dict__['_record'] = (self.pw_name, self.pw_passwd,
- self.pw_uid, self.pw_gid,
- self.pw_gecos, self.pw_dir,
- self.pw_shell)
-
- def __len__(self):
- return 7
-
- def __getitem__(self, key):
- return self._record[key]
-
- def __setattr__(self, name, value):
- raise AttributeError('attribute read-only: %s' % name)
-
- def __repr__(self):
- return str(self._record)
-
- def __cmp__(self, other):
- this = str(self._record)
- if this == other:
- return 0
- elif this < other:
- return -1
- else:
- return 1
-
-
-# read the whole file, parsing each entry into tuple form
-# with dictionaries to speed recall by UID or passwd name
-def __read_passwd_file():
- if passwd_file:
- passwd = open(passwd_file, 'r')
- else:
- raise KeyError('>> no password database <<')
- uidx = {}
- namx = {}
- sep = None
- while True:
- entry = passwd.readline().strip()
- if len(entry) > 6:
- if sep is None:
- sep = __get_field_sep(entry)
- fields = entry.split(sep)
- for i in (2, 3):
- fields[i] = int(fields[i])
- for i in (5, 6):
- fields[i] = __field_sep[sep](fields[i])
- record = Passwd(*fields)
- if fields[2] not in uidx:
- uidx[fields[2]] = record
- if fields[0] not in namx:
- namx[fields[0]] = record
- elif len(entry) > 0:
- pass # skip empty or malformed records
- else:
- break
- passwd.close()
- if len(uidx) == 0:
- raise KeyError
- return (uidx, namx)
-
-# return the passwd database entry by UID
-def getpwuid(uid):
- u, n = __read_passwd_file()
- return u[uid]
-
-# return the passwd database entry by passwd name
-def getpwnam(name):
- u, n = __read_passwd_file()
- return n[name]
-
-# return all the passwd database entries
-def getpwall():
- u, n = __read_passwd_file()
- return n.values()
-
-# test harness
-if __name__ == '__main__':
- getpwall()
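Both removed OS/2 replacement modules rely on the same trick for telling whether a record uses ":" or ";" as its field separator: a record with N fields contains exactly N-1 separator characters. A minimal sketch of that idea, with a hypothetical helper name and a sample 7-field record:

    def detect_separator(record, candidates=(':', ';'), fields=7):
        # a record with N fields contains N-1 separator characters
        for sep in candidates:
            if record.count(sep) == fields - 1:
                return sep
        raise KeyError('record fields not delimited')

    # drive-letter colons do not confuse the count, so ';' wins here
    detect_separator('root;*;0;0;admin;C:/home/root;C:/os2/cmd.exe')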
diff --git a/Lib/plat-os2emx/regen b/Lib/plat-os2emx/regen
deleted file mode 100755
index 3ecd2a8..0000000
--- a/Lib/plat-os2emx/regen
+++ /dev/null
@@ -1,7 +0,0 @@
-#! /bin/sh
-export INCLUDE=$C_INCLUDE_PATH
-set -v
-python.exe ../../Tools/scripts/h2py.py $C_INCLUDE_PATH/fcntl.h
-python.exe ../../Tools/scripts/h2py.py $C_INCLUDE_PATH/sys/socket.h
-python.exe ../../Tools/scripts/h2py.py -i '(u_long)' $C_INCLUDE_PATH/netinet/in.h
-#python.exe ../../Tools/scripts/h2py.py $C_INCLUDE_PATH/termios.h
diff --git a/Lib/platform.py b/Lib/platform.py
index 2b8a24a..143138d 100755
--- a/Lib/platform.py
+++ b/Lib/platform.py
@@ -122,7 +122,7 @@ try:
except AttributeError:
# os.devnull was added in Python 2.4, so emulate it for earlier
# Python versions
- if sys.platform in ('dos','win32','win16','os2'):
+ if sys.platform in ('dos','win32','win16'):
# Use the old CP/M NUL as device name
DEV_NULL = 'NUL'
else:
@@ -316,7 +316,7 @@ def linux_distribution(distname='', version='', id='',
"""
try:
etc = os.listdir('/etc')
- except os.error:
+ except OSError:
# Probably not a Unix system
return distname,version,id
etc.sort()
@@ -403,13 +403,13 @@ _ver_output = re.compile(r'(?:([\w ]+) ([\w.]+) '
def _syscmd_ver(system='', release='', version='',
- supported_platforms=('win32','win16','dos','os2')):
+ supported_platforms=('win32','win16','dos')):
""" Tries to figure out the OS version used and returns
a tuple (system,release,version).
It uses the "ver" shell command for this which is known
- to exists on Windows, DOS and OS/2. XXX Others too ?
+ to exist on Windows and DOS. XXX Others too ?
In case this fails, the given parameters are used as
defaults.
@@ -424,10 +424,10 @@ def _syscmd_ver(system='', release='', version='',
pipe = popen(cmd)
info = pipe.read()
if pipe.close():
- raise os.error('command failed')
+ raise OSError('command failed')
# XXX How can I suppress shell errors from being written
# to stderr ?
- except os.error as why:
+ except OSError as why:
#print 'Command %s failed: %s' % (cmd,why)
continue
except IOError as why:
@@ -581,7 +581,7 @@ def win32_ver(release='',version='',csd='',ptype=''):
# Discard any type that isn't REG_SZ
if type == REG_SZ and name.find("Server") != -1:
product_type = VER_NT_SERVER
- except WindowsError:
+ except OSError:
# Use default of VER_NT_WORKSTATION
pass
@@ -882,7 +882,7 @@ def _node(default=''):
return default
try:
return socket.gethostname()
- except socket.error:
+ except OSError:
# Still not working...
return default
@@ -901,12 +901,12 @@ def _syscmd_uname(option,default=''):
""" Interface to the system's uname command.
"""
- if sys.platform in ('dos','win32','win16','os2'):
+ if sys.platform in ('dos','win32','win16'):
# XXX Others too ?
return default
try:
f = os.popen('uname %s 2> %s' % (option, DEV_NULL))
- except (AttributeError,os.error):
+ except (AttributeError, OSError):
return default
output = f.read().strip()
rc = f.close()
@@ -924,7 +924,7 @@ def _syscmd_file(target,default=''):
default in case the command should fail.
"""
- if sys.platform in ('dos','win32','win16','os2'):
+ if sys.platform in ('dos','win32','win16'):
# XXX Others too ?
return default
target = _follow_symlinks(target)
@@ -932,7 +932,7 @@ def _syscmd_file(target,default=''):
proc = subprocess.Popen(['file', target],
stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
- except (AttributeError,os.error):
+ except (AttributeError, OSError):
return default
output = proc.communicate()[0].decode('latin-1')
rc = proc.wait()
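The blanket replacements in this file lean on PEP 3151: since Python 3.3, os.error, IOError and socket.error are simply aliases of OSError, so catching OSError covers every former spelling. A quick check, assuming a 3.3+ interpreter:

    import os
    import socket

    # all of these name the same class in Python 3.3 and later
    assert os.error is OSError
    assert IOError is OSError
    assert socket.error is OSError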
diff --git a/Lib/poplib.py b/Lib/poplib.py
index d42d9dd..0f12ae2 100644
--- a/Lib/poplib.py
+++ b/Lib/poplib.py
@@ -13,7 +13,15 @@ Based on the J. Myers POP3 draft, Jan. 96
# Imports
-import re, socket
+import errno
+import re
+import socket
+
+try:
+ import ssl
+ HAVE_SSL = True
+except ImportError:
+ HAVE_SSL = False
__all__ = ["POP3","error_proto"]
@@ -55,6 +63,8 @@ class POP3:
APOP name digest apop(name, digest)
TOP msg n top(msg, n)
UIDL [msg] uidl(msg = None)
+ CAPA capa()
+ STLS stls()
Raises one exception: 'error_proto'.
@@ -81,6 +91,7 @@ class POP3:
timeout=socket._GLOBAL_DEFAULT_TIMEOUT):
self.host = host
self.port = port
+ self._tls_established = False
self.sock = self._create_socket(timeout)
self.file = self.sock.makefile('rb')
self._debugging = 0
@@ -259,7 +270,14 @@ class POP3:
if self.file is not None:
self.file.close()
if self.sock is not None:
- self.sock.close()
+ try:
+ self.sock.shutdown(socket.SHUT_RDWR)
+ except OSError as e:
+ # The server might already have closed the connection
+ if e.errno != errno.ENOTCONN:
+ raise
+ finally:
+ self.sock.close()
self.file = self.sock = None
#__del__ = quit
@@ -315,21 +333,71 @@ class POP3:
return self._shortcmd('UIDL %s' % which)
return self._longcmd('UIDL')
-try:
- import ssl
-except ImportError:
- pass
-else:
+
+ def capa(self):
+ """Return server capabilities (RFC 2449) as a dictionary
+ >>> c=poplib.POP3('localhost')
+ >>> c.capa()
+ {'IMPLEMENTATION': ['Cyrus', 'POP3', 'server', 'v2.2.12'],
+ 'TOP': [], 'LOGIN-DELAY': ['0'], 'AUTH-RESP-CODE': [],
+ 'EXPIRE': ['NEVER'], 'USER': [], 'STLS': [], 'PIPELINING': [],
+ 'UIDL': [], 'RESP-CODES': []}
+ >>>
+
+ Really, according to RFC 2449, the Cyrus folks should avoid
+ having the implementation split into multiple arguments...
+ """
+ def _parsecap(line):
+ lst = line.decode('ascii').split()
+ return lst[0], lst[1:]
+
+ caps = {}
+ try:
+ resp = self._longcmd('CAPA')
+ rawcaps = resp[1]
+ for capline in rawcaps:
+ capnm, capargs = _parsecap(capline)
+ caps[capnm] = capargs
+ except error_proto as _err:
+ raise error_proto('-ERR CAPA not supported by server')
+ return caps
+
+
+ def stls(self, context=None):
+ """Start a TLS session on the active connection as specified in RFC 2595.
+
+ context - an ssl.SSLContext
+ """
+ if not HAVE_SSL:
+ raise error_proto('-ERR TLS support missing')
+ if self._tls_established:
+ raise error_proto('-ERR TLS session already established')
+ caps = self.capa()
+ if 'STLS' not in caps:
+ raise error_proto('-ERR STLS not supported by server')
+ if context is None:
+ context = ssl.SSLContext(ssl.PROTOCOL_SSLv23)
+ context.options |= ssl.OP_NO_SSLv2
+ resp = self._shortcmd('STLS')
+ self.sock = context.wrap_socket(self.sock)
+ self.file = self.sock.makefile('rb')
+ self._tls_established = True
+ return resp
+
+
+if HAVE_SSL:
class POP3_SSL(POP3):
"""POP3 client class over SSL connection
- Instantiate with: POP3_SSL(hostname, port=995, keyfile=None, certfile=None)
+ Instantiate with: POP3_SSL(hostname, port=995, keyfile=None, certfile=None,
+ context=None)
hostname - the hostname of the pop3 over ssl server
port - port number
keyfile - PEM formatted file that contains your private key
certfile - PEM formatted certificate chain file
+ context - an ssl.SSLContext
See the methods of the parent class POP3 for more documentation.
"""
@@ -355,6 +423,13 @@ else:
sock = ssl.wrap_socket(sock, self.keyfile, self.certfile)
return sock
+ def stls(self, keyfile=None, certfile=None, context=None):
+ """The method unconditionally raises an exception since the
+ STLS command doesn't make any sense on an already established
+ SSL/TLS session.
+ """
+ raise error_proto('-ERR TLS session already established')
+
__all__.append("POP3_SSL")
if __name__ == "__main__":
diff --git a/Lib/posixpath.py b/Lib/posixpath.py
index cb93796..eb5f45f 100644
--- a/Lib/posixpath.py
+++ b/Lib/posixpath.py
@@ -162,7 +162,7 @@ def islink(path):
"""Test whether a path is a symbolic link"""
try:
st = os.lstat(path)
- except (os.error, AttributeError):
+ except (OSError, AttributeError):
return False
return stat.S_ISLNK(st.st_mode)
@@ -172,7 +172,7 @@ def lexists(path):
"""Test whether a path exists. Returns True for broken symbolic links"""
try:
os.lstat(path)
- except os.error:
+ except OSError:
return False
return True
@@ -220,7 +220,7 @@ def ismount(path):
else:
parent = join(path, '..')
s2 = os.lstat(parent)
- except os.error:
+ except OSError:
return False # It doesn't exist -- so not a mount point :-)
dev1 = s1.st_dev
dev2 = s2.st_dev
diff --git a/Lib/pty.py b/Lib/pty.py
index 3ccf619..786ce04 100644
--- a/Lib/pty.py
+++ b/Lib/pty.py
@@ -57,17 +57,17 @@ def _open_terminal():
try:
tty_name, master_fd = sgi._getpty(os.O_RDWR, 0o666, 0)
except IOError as msg:
- raise os.error(msg)
+ raise OSError(msg)
return master_fd, tty_name
for x in 'pqrstuvwxyzPQRST':
for y in '0123456789abcdef':
pty_name = '/dev/pty' + x + y
try:
fd = os.open(pty_name, os.O_RDWR)
- except os.error:
+ except OSError:
continue
return (fd, '/dev/tty' + x + y)
- raise os.error('out of pty devices')
+ raise OSError('out of pty devices')
def slave_open(tty_name):
"""slave_open(tty_name) -> slave_fd
@@ -178,3 +178,4 @@ def spawn(argv, master_read=_read, stdin_read=_read):
tty.tcsetattr(STDIN_FILENO, tty.TCSAFLUSH, mode)
os.close(master_fd)
+ return os.waitpid(pid, 0)[1]
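With the added return statement, pty.spawn() now reports the child's exit status instead of discarding it. A small sketch, assuming a POSIX platform where the 'true' utility exists:

    import os
    import pty

    status = pty.spawn(['true'])        # run a child on a new pseudo-terminal
    if os.WIFEXITED(status):
        print('exit code:', os.WEXITSTATUS(status))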
diff --git a/Lib/pydoc.py b/Lib/pydoc.py
index fa531e9..490eb21 100755
--- a/Lib/pydoc.py
+++ b/Lib/pydoc.py
@@ -1393,7 +1393,7 @@ def getpager():
return lambda text: pipepager(text, os.environ['PAGER'])
if os.environ.get('TERM') in ('dumb', 'emacs'):
return plainpager
- if sys.platform == 'win32' or sys.platform.startswith('os2'):
+ if sys.platform == 'win32':
return lambda text: tempfilepager(plain(text), 'more <')
if hasattr(os, 'system') and os.system('(less) 2>/dev/null') == 0:
return lambda text: pipepager(text, 'less')
diff --git a/Lib/random.py b/Lib/random.py
index 6388f29..2ad3809 100644
--- a/Lib/random.py
+++ b/Lib/random.py
@@ -252,10 +252,11 @@ class Random(_random.Random):
return seq[i]
def shuffle(self, x, random=None, int=int):
- """x, random=random.random -> shuffle list x in place; return None.
+ """Shuffle list x in place, and return None.
- Optional arg random is a 0-argument function returning a random
- float in [0.0, 1.0); by default, the standard random.random.
+ Optional argument random is a 0-argument function returning a
+ random float in [0.0, 1.0); if it is the default None, the
+ standard random.random will be used.
"""
randbelow = self._randbelow
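The reworded docstring describes the optional random argument; a short illustration of both forms, using a seeded generator so the result is reproducible:

    import random

    deck = list(range(10))
    rng = random.Random(42)
    rng.shuffle(deck)                         # uses the generator's own random()
    random.shuffle(deck, random=rng.random)   # explicit 0-argument function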
diff --git a/Lib/shelve.py b/Lib/shelve.py
index cc1815e..cef580e 100644
--- a/Lib/shelve.py
+++ b/Lib/shelve.py
@@ -61,7 +61,7 @@ from io import BytesIO
import collections
-__all__ = ["Shelf","BsdDbShelf","DbfilenameShelf","open"]
+__all__ = ["Shelf", "BsdDbShelf", "DbfilenameShelf", "open"]
class _ClosedDict(collections.MutableMapping):
'Marker for a closed dict. Access attempts raise a ValueError.'
@@ -131,6 +131,12 @@ class Shelf(collections.MutableMapping):
except KeyError:
pass
+ def __enter__(self):
+ return self
+
+ def __exit__(self, type, value, traceback):
+ self.close()
+
def close(self):
self.sync()
try:
@@ -147,6 +153,7 @@ class Shelf(collections.MutableMapping):
def __del__(self):
if not hasattr(self, 'writeback'):
# __init__ didn't succeed, so don't bother closing
+ # see http://bugs.python.org/issue1339007 for details
return
self.close()
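The new __enter__/__exit__ methods let a Shelf be used as a context manager, so close() no longer has to be called by hand. A minimal sketch, assuming a writable working directory and a hypothetical file name:

    import shelve

    with shelve.open('cache.db') as db:   # closed automatically on exit
        db['answer'] = 42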
diff --git a/Lib/shutil.py b/Lib/shutil.py
index 9c66008..b1e3017 100644
--- a/Lib/shutil.py
+++ b/Lib/shutil.py
@@ -39,17 +39,20 @@ __all__ = ["copyfileobj", "copyfile", "copymode", "copystat", "copy", "copy2",
"ignore_patterns", "chown", "which"]
# disk_usage is added later, if available on the platform
-class Error(EnvironmentError):
+class Error(OSError):
pass
-class SpecialFileError(EnvironmentError):
+class SameFileError(Error):
+ """Raised when source and destination are the same file."""
+
+class SpecialFileError(OSError):
"""Raised when trying to do a kind of operation (e.g. copying) which is
not supported on a special file (e.g. a named pipe)"""
-class ExecError(EnvironmentError):
+class ExecError(OSError):
"""Raised when a command could not be executed"""
-class ReadError(EnvironmentError):
+class ReadError(OSError):
"""Raised when an archive cannot be read"""
class RegistryError(Exception):
@@ -57,11 +60,6 @@ class RegistryError(Exception):
and unpacking registries fails"""
-try:
- WindowsError
-except NameError:
- WindowsError = None
-
def copyfileobj(fsrc, fdst, length=16*1024):
"""copy data from file-like object fsrc to file-like object fdst"""
while 1:
@@ -90,7 +88,7 @@ def copyfile(src, dst, *, follow_symlinks=True):
"""
if _samefile(src, dst):
- raise Error("`%s` and `%s` are the same file" % (src, dst))
+ raise SameFileError("{!r} and {!r} are the same file".format(src, dst))
for fn in [src, dst]:
try:
@@ -215,6 +213,9 @@ def copy(src, dst, *, follow_symlinks=True):
If follow_symlinks is false, symlinks won't be followed. This
resembles GNU's "cp -P src dst".
+ If source and destination are the same file, a SameFileError will be
+ raised.
+
"""
if os.path.isdir(dst):
dst = os.path.join(dst, os.path.basename(src))
@@ -323,15 +324,13 @@ def copytree(src, dst, symlinks=False, ignore=None, copy_function=copy2,
# continue with other files
except Error as err:
errors.extend(err.args[0])
- except EnvironmentError as why:
+ except OSError as why:
errors.append((srcname, dstname, str(why)))
try:
copystat(src, dst)
except OSError as why:
- if WindowsError is not None and isinstance(why, WindowsError):
- # Copying file access times may fail on Windows
- pass
- else:
+ # Copying file access times may fail on Windows
+ if why.winerror is None:
errors.append((src, dst, str(why)))
if errors:
raise Error(errors)
@@ -350,24 +349,24 @@ def _rmtree_unsafe(path, onerror):
names = []
try:
names = os.listdir(path)
- except os.error:
+ except OSError:
onerror(os.listdir, path, sys.exc_info())
for name in names:
fullname = os.path.join(path, name)
try:
mode = os.lstat(fullname).st_mode
- except os.error:
+ except OSError:
mode = 0
if stat.S_ISDIR(mode):
_rmtree_unsafe(fullname, onerror)
else:
try:
os.unlink(fullname)
- except os.error:
+ except OSError:
onerror(os.unlink, fullname, sys.exc_info())
try:
os.rmdir(path)
- except os.error:
+ except OSError:
onerror(os.rmdir, path, sys.exc_info())
# Version using fd-based APIs to protect against races
@@ -458,7 +457,7 @@ def rmtree(path, ignore_errors=False, onerror=None):
_rmtree_safe_fd(fd, path, onerror)
try:
os.rmdir(path)
- except os.error:
+ except OSError:
onerror(os.rmdir, path, sys.exc_info())
else:
try:
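Callers can now tell the "same file" case apart from other copy failures, since SameFileError still derives from the old Error/OSError hierarchy. A brief sketch, with hypothetical paths:

    import shutil

    try:
        shutil.copy('data.txt', 'data.txt')   # hypothetical paths
    except shutil.SameFileError:
        pass          # source and destination are the same file; nothing to do
    except OSError as exc:
        print('copy failed:', exc)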
diff --git a/Lib/site.py b/Lib/site.py
index b751006..982dbc8 100644
--- a/Lib/site.py
+++ b/Lib/site.py
@@ -196,7 +196,7 @@ def addsitedir(sitedir, known_paths=None):
known_paths.add(sitedircase)
try:
names = os.listdir(sitedir)
- except os.error:
+ except OSError:
return
names = [name for name in names if name.endswith(".pth")]
for name in sorted(names):
@@ -300,9 +300,7 @@ def getsitepackages(prefixes=None):
continue
seen.add(prefix)
- if sys.platform in ('os2emx', 'riscos'):
- sitepackages.append(os.path.join(prefix, "Lib", "site-packages"))
- elif os.sep == '/':
+ if os.sep == '/':
sitepackages.append(os.path.join(prefix, "lib",
"python" + sys.version[:3],
"site-packages"))
@@ -329,23 +327,6 @@ def addsitepackages(known_paths, prefixes=None):
return known_paths
-def setBEGINLIBPATH():
- """The OS/2 EMX port has optional extension modules that do double duty
- as DLLs (and must use the .DLL file extension) for other extensions.
- The library search path needs to be amended so these will be found
- during module import. Use BEGINLIBPATH so that these are at the start
- of the library search path.
-
- """
- dllpath = os.path.join(sys.prefix, "Lib", "lib-dynload")
- libpath = os.environ['BEGINLIBPATH'].split(';')
- if libpath[-1]:
- libpath.append(dllpath)
- else:
- libpath[-1] = dllpath
- os.environ['BEGINLIBPATH'] = ';'.join(libpath)
-
-
def setquit():
"""Define new builtins 'quit' and 'exit'.
@@ -593,8 +574,6 @@ def main():
ENABLE_USER_SITE = check_enableusersite()
known_paths = addusersitepackages(known_paths)
known_paths = addsitepackages(known_paths)
- if sys.platform == 'os2emx':
- setBEGINLIBPATH()
setquit()
setcopyright()
sethelper()
diff --git a/Lib/smtpd.py b/Lib/smtpd.py
index 778d6d6..1e239a1 100755
--- a/Lib/smtpd.py
+++ b/Lib/smtpd.py
@@ -137,7 +137,7 @@ class SMTPChannel(asynchat.async_chat):
self.num_bytes = 0
try:
self.peer = conn.getpeername()
- except socket.error as err:
+ except OSError as err:
# a race condition may occur if the other end is closing
# before we can get the peername
self.close()
@@ -668,7 +668,7 @@ class PureProxy(SMTPServer):
except smtplib.SMTPRecipientsRefused as e:
print('got SMTPRecipientsRefused', file=DEBUGSTREAM)
refused = e.recipients
- except (socket.error, smtplib.SMTPException) as e:
+ except (OSError, smtplib.SMTPException) as e:
print('got', e.__class__, file=DEBUGSTREAM)
# All recipients were refused. If the exception had an associated
# error code, use it. Otherwise,fake it with a non-triggering
diff --git a/Lib/smtplib.py b/Lib/smtplib.py
index c949d77..44a144c 100644
--- a/Lib/smtplib.py
+++ b/Lib/smtplib.py
@@ -309,7 +309,7 @@ class SMTP:
try:
port = int(port)
except ValueError:
- raise socket.error("nonnumeric port")
+ raise OSError("nonnumeric port")
if not port:
port = self.default_port
if self.debuglevel > 0:
@@ -330,7 +330,7 @@ class SMTP:
s = s.encode("ascii")
try:
self.sock.sendall(s)
- except socket.error:
+ except OSError:
self.close()
raise SMTPServerDisconnected('Server not connected')
else:
@@ -363,7 +363,7 @@ class SMTP:
while 1:
try:
line = self.file.readline()
- except socket.error as e:
+ except OSError as e:
self.close()
raise SMTPServerDisconnected("Connection unexpectedly closed: "
+ str(e))
@@ -920,7 +920,7 @@ class LMTP(SMTP):
self.sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
self.file = None
self.sock.connect(host)
- except socket.error:
+ except OSError:
if self.debuglevel > 0:
print('connect fail:', host, file=stderr)
if self.sock:
diff --git a/Lib/socketserver.py b/Lib/socketserver.py
index a21318d..abc4f02 100644
--- a/Lib/socketserver.py
+++ b/Lib/socketserver.py
@@ -299,7 +299,7 @@ class BaseServer:
"""
try:
request, client_address = self.get_request()
- except socket.error:
+ except OSError:
return
if self.verify_request(request, client_address):
try:
@@ -479,7 +479,7 @@ class TCPServer(BaseServer):
#explicitly shutdown. socket.close() merely releases
#the socket and waits for GC to perform the actual close.
request.shutdown(socket.SHUT_WR)
- except socket.error:
+ except OSError:
pass #some platforms may raise ENOTCONN here
self.close_request(request)
@@ -532,7 +532,7 @@ class ForkingMixIn:
# children.
try:
pid, status = os.waitpid(0, 0)
- except os.error:
+ except OSError:
pid = None
if pid not in self.active_children: continue
self.active_children.remove(pid)
@@ -545,7 +545,7 @@ class ForkingMixIn:
for child in self.active_children:
try:
pid, status = os.waitpid(child, os.WNOHANG)
- except os.error:
+ except OSError:
pid = None
if not pid: continue
try:
diff --git a/Lib/ssl.py b/Lib/ssl.py
index 5e5a5ce..acbc700 100644
--- a/Lib/ssl.py
+++ b/Lib/ssl.py
@@ -109,12 +109,14 @@ else:
_PROTOCOL_NAMES[PROTOCOL_SSLv2] = "SSLv2"
from socket import getnameinfo as _getnameinfo
-from socket import error as socket_error
from socket import socket, AF_INET, SOCK_STREAM, create_connection
import base64 # for DER-to-PEM translation
import traceback
import errno
+
+socket_error = OSError # keep that public name in module namespace
+
if _ssl.HAS_TLS_UNIQUE:
CHANNEL_BINDING_TYPES = ['tls-unique']
else:
@@ -279,7 +281,7 @@ class SSLSocket(socket):
# see if it's connected
try:
sock.getpeername()
- except socket_error as e:
+ except OSError as e:
if e.errno != errno.ENOTCONN:
raise
else:
@@ -305,7 +307,7 @@ class SSLSocket(socket):
raise ValueError("do_handshake_on_connect should not be specified for non-blocking sockets")
self.do_handshake()
- except socket_error as x:
+ except OSError as x:
self.close()
raise x
@@ -533,7 +535,7 @@ class SSLSocket(socket):
self.do_handshake()
self._connected = True
return rc
- except socket_error:
+ except OSError:
self._sslobj = None
raise
diff --git a/Lib/subprocess.py b/Lib/subprocess.py
index aa3e217..fd51048 100644
--- a/Lib/subprocess.py
+++ b/Lib/subprocess.py
@@ -396,8 +396,6 @@ if mswindows:
hStdOutput = None
hStdError = None
wShowWindow = 0
- class pywintypes:
- error = IOError
else:
import select
_has_poll = hasattr(select, 'poll')
@@ -821,7 +819,7 @@ class Popen(object):
for f in filter(None, (self.stdin, self.stdout, self.stderr)):
try:
f.close()
- except EnvironmentError:
+ except OSError:
pass # Ignore EBADF or other errors.
# Make sure the child pipes are closed as well.
@@ -835,7 +833,7 @@ class Popen(object):
for fd in to_close:
try:
os.close(fd)
- except EnvironmentError:
+ except OSError:
pass
raise
@@ -1031,23 +1029,6 @@ class Popen(object):
return Handle(h)
- def _find_w9xpopen(self):
- """Find and return absolut path to w9xpopen.exe"""
- w9xpopen = os.path.join(
- os.path.dirname(_winapi.GetModuleFileName(0)),
- "w9xpopen.exe")
- if not os.path.exists(w9xpopen):
- # Eeek - file-not-found - possibly an embedding
- # situation - see if we can locate it in sys.exec_prefix
- w9xpopen = os.path.join(os.path.dirname(sys.base_exec_prefix),
- "w9xpopen.exe")
- if not os.path.exists(w9xpopen):
- raise RuntimeError("Cannot locate w9xpopen.exe, which is "
- "needed for Popen to work with your "
- "shell or platform.")
- return w9xpopen
-
-
def _execute_child(self, args, executable, preexec_fn, close_fds,
pass_fds, cwd, env,
startupinfo, creationflags, shell,
@@ -1076,21 +1057,6 @@ class Popen(object):
startupinfo.wShowWindow = _winapi.SW_HIDE
comspec = os.environ.get("COMSPEC", "cmd.exe")
args = '{} /c "{}"'.format (comspec, args)
- if (_winapi.GetVersion() >= 0x80000000 or
- os.path.basename(comspec).lower() == "command.com"):
- # Win9x, or using command.com on NT. We need to
- # use the w9xpopen intermediate program. For more
- # information, see KB Q150956
- # (http://web.archive.org/web/20011105084002/http://support.microsoft.com/support/kb/articles/Q150/9/56.asp)
- w9xpopen = self._find_w9xpopen()
- args = '"%s" %s' % (w9xpopen, args)
- # Not passing CREATE_NEW_CONSOLE has been known to
- # cause random failures on win9x. Specifically a
- # dialog: "Your program accessed mem currently in
- # use at xxx" and a hopeful warning about the
- # stability of your system. Cost is Ctrl+C won't
- # kill children.
- creationflags |= _winapi.CREATE_NEW_CONSOLE
# Start the process
try:
@@ -1102,12 +1068,6 @@ class Popen(object):
env,
cwd,
startupinfo)
- except pywintypes.error as e:
- # Translate pywintypes.error to WindowsError, which is
- # a subclass of OSError. FIXME: We should really
- # translate errno using _sys_errlist (or similar), but
- # how can this be done from Python?
- raise WindowsError(*e.args)
finally:
# Child is launched. Close the parent's copy of those pipe
# handles that only the child should have open. You need
@@ -1412,13 +1372,13 @@ class Popen(object):
exception_name, hex_errno, err_msg = (
errpipe_data.split(b':', 2))
except ValueError:
- exception_name = b'RuntimeError'
+ exception_name = b'SubprocessError'
hex_errno = b'0'
err_msg = (b'Bad exception data from child: ' +
repr(errpipe_data))
child_exception_type = getattr(
builtins, exception_name.decode('ascii'),
- RuntimeError)
+ SubprocessError)
err_msg = err_msg.decode(errors="surrogatepass")
if issubclass(child_exception_type, OSError) and hex_errno:
errno_num = int(hex_errno, 16)
@@ -1448,11 +1408,11 @@ class Popen(object):
self.returncode = _WEXITSTATUS(sts)
else:
# Should never happen
- raise RuntimeError("Unknown child exit status!")
+ raise SubprocessError("Unknown child exit status!")
def _internal_poll(self, _deadstate=None, _waitpid=os.waitpid,
- _WNOHANG=os.WNOHANG, _os_error=os.error, _ECHILD=errno.ECHILD):
+ _WNOHANG=os.WNOHANG, _ECHILD=errno.ECHILD):
"""Check if child process has terminated. Returns returncode
attribute.
@@ -1465,7 +1425,7 @@ class Popen(object):
pid, sts = _waitpid(self.pid, _WNOHANG)
if pid == self.pid:
self._handle_exitstatus(sts)
- except _os_error as e:
+ except OSError as e:
if _deadstate is not None:
self.returncode = _deadstate
elif e.errno == _ECHILD:
@@ -1622,7 +1582,7 @@ class Popen(object):
raise TimeoutExpired(self.args, orig_timeout)
try:
ready = poller.poll(timeout)
- except select.error as e:
+ except OSError as e:
if e.args[0] == errno.EINTR:
continue
raise
@@ -1690,7 +1650,7 @@ class Popen(object):
(rlist, wlist, xlist) = \
select.select(self._read_set, self._write_set, [],
timeout)
- except select.error as e:
+ except OSError as e:
if e.args[0] == errno.EINTR:
continue
raise
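The fallback exception for errors reported by the child is now SubprocessError rather than RuntimeError; because CalledProcessError and TimeoutExpired already derive from it, one handler can cover the whole family. A brief sketch, assuming a POSIX 'false' utility:

    import subprocess

    try:
        subprocess.check_call(['false'])          # exits with status 1
    except subprocess.SubprocessError as exc:     # CalledProcessError is a subclass
        print('child failed:', exc)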
diff --git a/Lib/sysconfig.py b/Lib/sysconfig.py
index 71da1db..295a0a6 100644
--- a/Lib/sysconfig.py
+++ b/Lib/sysconfig.py
@@ -52,25 +52,6 @@ _INSTALL_SCHEMES = {
'scripts': '{base}/Scripts',
'data': '{base}',
},
- 'os2': {
- 'stdlib': '{installed_base}/Lib',
- 'platstdlib': '{base}/Lib',
- 'purelib': '{base}/Lib/site-packages',
- 'platlib': '{base}/Lib/site-packages',
- 'include': '{installed_base}/Include',
- 'platinclude': '{installed_base}/Include',
- 'scripts': '{base}/Scripts',
- 'data': '{base}',
- },
- 'os2_home': {
- 'stdlib': '{userbase}/lib/python{py_version_short}',
- 'platstdlib': '{userbase}/lib/python{py_version_short}',
- 'purelib': '{userbase}/lib/python{py_version_short}/site-packages',
- 'platlib': '{userbase}/lib/python{py_version_short}/site-packages',
- 'include': '{userbase}/include/python{py_version_short}',
- 'scripts': '{userbase}/bin',
- 'data': '{userbase}',
- },
'nt_user': {
'stdlib': '{userbase}/Python{py_version_nodot}',
'platstdlib': '{userbase}/Python{py_version_nodot}',
@@ -210,7 +191,6 @@ def _getuserbase():
def joinuser(*args):
return os.path.expanduser(os.path.join(*args))
- # what about 'os2emx', 'riscos' ?
if os.name == "nt":
base = os.environ.get("APPDATA") or "~"
if env_base:
@@ -551,7 +531,7 @@ def get_config_vars(*args):
# sys.abiflags may not be defined on all platforms.
_CONFIG_VARS['abiflags'] = ''
- if os.name in ('nt', 'os2'):
+ if os.name == 'nt':
_init_non_posix(_CONFIG_VARS)
if os.name == 'posix':
_init_posix(_CONFIG_VARS)
diff --git a/Lib/tarfile.py b/Lib/tarfile.py
index 11b4b68..a0f132e 100644
--- a/Lib/tarfile.py
+++ b/Lib/tarfile.py
@@ -56,9 +56,9 @@ except ImportError:
# os.symlink on Windows prior to 6.0 raises NotImplementedError
symlink_exception = (AttributeError, NotImplementedError)
try:
- # WindowsError (1314) will be raised if the caller does not hold the
+ # OSError (winerror=1314) will be raised if the caller does not hold the
# SeCreateSymbolicLinkPrivilege privilege
- symlink_exception += (WindowsError,)
+ symlink_exception += (OSError,)
except NameError:
pass
@@ -2022,7 +2022,7 @@ class TarFile(object):
try:
self._extract_member(tarinfo, os.path.join(path, tarinfo.name),
set_attrs=set_attrs)
- except EnvironmentError as e:
+ except OSError as e:
if self.errorlevel > 0:
raise
else:
@@ -2211,9 +2211,8 @@ class TarFile(object):
if tarinfo.issym() and hasattr(os, "lchown"):
os.lchown(targetpath, u, g)
else:
- if sys.platform != "os2emx":
- os.chown(targetpath, u, g)
- except EnvironmentError as e:
+ os.chown(targetpath, u, g)
+ except OSError as e:
raise ExtractError("could not change owner")
def chmod(self, tarinfo, targetpath):
@@ -2222,7 +2221,7 @@ class TarFile(object):
if hasattr(os, 'chmod'):
try:
os.chmod(targetpath, tarinfo.mode)
- except EnvironmentError as e:
+ except OSError as e:
raise ExtractError("could not change mode")
def utime(self, tarinfo, targetpath):
@@ -2232,7 +2231,7 @@ class TarFile(object):
return
try:
os.utime(targetpath, (tarinfo.mtime, tarinfo.mtime))
- except EnvironmentError as e:
+ except OSError as e:
raise ExtractError("could not change modification time")
#--------------------------------------------------------------------------
diff --git a/Lib/telnetlib.py b/Lib/telnetlib.py
index a59693e..b4b7cd4 100644
--- a/Lib/telnetlib.py
+++ b/Lib/telnetlib.py
@@ -273,7 +273,7 @@ class Telnet:
"""Write a string to the socket, doubling any IAC characters.
Can block if the connection is blocked. May raise
- socket.error if the connection is closed.
+ OSError if the connection is closed.
"""
if IAC in buffer:
@@ -313,7 +313,7 @@ class Telnet:
while i < 0 and not self.eof:
try:
ready = poller.poll(call_timeout)
- except select.error as e:
+ except OSError as e:
if e.errno == errno.EINTR:
if timeout is not None:
elapsed = time() - time_start
@@ -683,7 +683,7 @@ class Telnet:
while not m and not self.eof:
try:
ready = poller.poll(call_timeout)
- except select.error as e:
+ except OSError as e:
if e.errno == errno.EINTR:
if timeout is not None:
elapsed = time() - time_start
diff --git a/Lib/tempfile.py b/Lib/tempfile.py
index 47c60f4..90b3c20 100644
--- a/Lib/tempfile.py
+++ b/Lib/tempfile.py
@@ -665,7 +665,6 @@ class TemporaryDirectory(object):
_islink = staticmethod(_os.path.islink)
_remove = staticmethod(_os.remove)
_rmdir = staticmethod(_os.rmdir)
- _os_error = OSError
_warn = _warnings.warn
def _rmtree(self, path):
@@ -675,16 +674,16 @@ class TemporaryDirectory(object):
fullname = self._path_join(path, name)
try:
isdir = self._isdir(fullname) and not self._islink(fullname)
- except self._os_error:
+ except OSError:
isdir = False
if isdir:
self._rmtree(fullname)
else:
try:
self._remove(fullname)
- except self._os_error:
+ except OSError:
pass
try:
self._rmdir(path)
- except self._os_error:
+ except OSError:
pass
diff --git a/Lib/test/json_tests/test_indent.py b/Lib/test/json_tests/test_indent.py
index 4c70646..4eb4f89 100644
--- a/Lib/test/json_tests/test_indent.py
+++ b/Lib/test/json_tests/test_indent.py
@@ -32,6 +32,8 @@ class TestIndent:
d1 = self.dumps(h)
d2 = self.dumps(h, indent=2, sort_keys=True, separators=(',', ': '))
d3 = self.dumps(h, indent='\t', sort_keys=True, separators=(',', ': '))
+ d4 = self.dumps(h, indent=2, sort_keys=True)
+ d5 = self.dumps(h, indent='\t', sort_keys=True)
h1 = self.loads(d1)
h2 = self.loads(d2)
@@ -42,6 +44,8 @@ class TestIndent:
self.assertEqual(h3, h)
self.assertEqual(d2, expect.expandtabs(2))
self.assertEqual(d3, expect)
+ self.assertEqual(d4, d2)
+ self.assertEqual(d5, d3)
def test_indent0(self):
h = {3: 1}
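The added assertions check that spelling out separators=(',', ': ') is now redundant whenever indent is given, since that becomes the default and avoids trailing whitespace at line ends. A quick illustration:

    import json

    data = {'b': 2, 'a': 1}
    out = json.dumps(data, indent=2, sort_keys=True)
    assert out == json.dumps(data, indent=2, sort_keys=True,
                             separators=(',', ': '))
    assert not any(line.endswith(' ') for line in out.splitlines())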
diff --git a/Lib/test/mock_socket.py b/Lib/test/mock_socket.py
index d09e78c..8ef0ec8 100644
--- a/Lib/test/mock_socket.py
+++ b/Lib/test/mock_socket.py
@@ -140,12 +140,8 @@ def gethostbyname(name):
return ""
-class gaierror(Exception):
- pass
-
-
-class error(Exception):
- pass
+gaierror = socket_module.gaierror
+error = socket_module.error
# Constants
diff --git a/Lib/test/regrtest.py b/Lib/test/regrtest.py
index 8f3b689..5d9d82d 100755
--- a/Lib/test/regrtest.py
+++ b/Lib/test/regrtest.py
@@ -615,7 +615,7 @@ def main(tests=None, testdir=None, verbose=0, quiet=False,
sys.exit(2)
from queue import Queue
from subprocess import Popen, PIPE
- debug_output_pat = re.compile(r"\[\d+ refs\]$")
+ debug_output_pat = re.compile(r"\[\d+ refs, \d+ blocks\]$")
output = Queue()
pending = MultiprocessTests(tests)
opt_args = support.args_from_interpreter_flags()
@@ -763,20 +763,6 @@ def main(tests=None, testdir=None, verbose=0, quiet=False,
print(count(len(skipped), "test"), "skipped:")
printlist(skipped)
- e = _ExpectedSkips()
- plat = sys.platform
- if e.isvalid():
- surprise = set(skipped) - e.getexpected() - set(resource_denieds)
- if surprise:
- print(count(len(surprise), "skip"), \
- "unexpected on", plat + ":")
- printlist(surprise)
- else:
- print("Those skips are all expected on", plat + ".")
- else:
- print("Ask someone to teach regrtest.py about which tests are")
- print("expected to get skipped on", plat + ".")
-
if verbose2 and bad:
print("Re-running failed tests in verbose mode")
for test in bad:
@@ -1210,8 +1196,7 @@ def runtest_inner(test, verbose, quiet,
abstest = 'test.' + test
with saved_test_environment(test, verbose, quiet) as environment:
start_time = time.time()
- the_package = __import__(abstest, globals(), locals(), [])
- the_module = getattr(the_package, test)
+ the_module = importlib.import_module(abstest)
# If the test has a test_main, that will run the appropriate
# tests. If not, use normal unittest test loading.
test_runner = getattr(the_module, "test_main", None)
@@ -1335,33 +1320,50 @@ def dash_R(the_module, test, indirect_test, huntrleaks):
del sys.modules[the_module.__name__]
exec('import ' + the_module.__name__)
- deltas = []
nwarmup, ntracked, fname = huntrleaks
fname = os.path.join(support.SAVEDCWD, fname)
repcount = nwarmup + ntracked
+ rc_deltas = [0] * repcount
+ alloc_deltas = [0] * repcount
+
print("beginning", repcount, "repetitions", file=sys.stderr)
print(("1234567890"*(repcount//10 + 1))[:repcount], file=sys.stderr)
sys.stderr.flush()
- dash_R_cleanup(fs, ps, pic, zdc, abcs)
for i in range(repcount):
- rc_before = sys.gettotalrefcount()
run_the_test()
+ alloc_after, rc_after = dash_R_cleanup(fs, ps, pic, zdc, abcs)
sys.stderr.write('.')
sys.stderr.flush()
- dash_R_cleanup(fs, ps, pic, zdc, abcs)
- rc_after = sys.gettotalrefcount()
if i >= nwarmup:
- deltas.append(rc_after - rc_before)
+ rc_deltas[i] = rc_after - rc_before
+ alloc_deltas[i] = alloc_after - alloc_before
+ alloc_before, rc_before = alloc_after, rc_after
print(file=sys.stderr)
- if any(deltas):
- msg = '%s leaked %s references, sum=%s' % (test, deltas, sum(deltas))
- print(msg, file=sys.stderr)
- sys.stderr.flush()
- with open(fname, "a") as refrep:
- print(msg, file=refrep)
- refrep.flush()
- return True
- return False
+ # These checkers return False on success, True on failure
+ def check_rc_deltas(deltas):
+ return any(deltas)
+ def check_alloc_deltas(deltas):
+ # At least 1/3rd of 0s
+ if 3 * deltas.count(0) < len(deltas):
+ return True
+ # Nothing other than 1s, 0s and -1s
+ if not set(deltas) <= {1,0,-1}:
+ return True
+ return False
+ failed = False
+ for deltas, item_name, checker in [
+ (rc_deltas, 'references', check_rc_deltas),
+ (alloc_deltas, 'memory blocks', check_alloc_deltas)]:
+ if checker(deltas):
+ msg = '%s leaked %s %s, sum=%s' % (
+ test, deltas[nwarmup:], item_name, sum(deltas))
+ print(msg, file=sys.stderr)
+ sys.stderr.flush()
+ with open(fname, "a") as refrep:
+ print(msg, file=refrep)
+ refrep.flush()
+ failed = True
+ return failed
def dash_R_cleanup(fs, ps, pic, zdc, abcs):
import gc, copyreg
@@ -1427,8 +1429,11 @@ def dash_R_cleanup(fs, ps, pic, zdc, abcs):
else:
ctypes._reset_cache()
- # Collect cyclic trash.
+ # Collect cyclic trash and read memory statistics immediately after.
+ func1 = sys.getallocatedblocks
+ func2 = sys.gettotalrefcount
gc.collect()
+ return func1(), func2()
def warm_caches():
# char cache
@@ -1471,299 +1476,6 @@ def printlist(x, width=70, indent=4):
print(fill(' '.join(str(elt) for elt in sorted(x)), width,
initial_indent=blanks, subsequent_indent=blanks))
-# Map sys.platform to a string containing the basenames of tests
-# expected to be skipped on that platform.
-#
-# Special cases:
-# test_pep277
-# The _ExpectedSkips constructor adds this to the set of expected
-# skips if not os.path.supports_unicode_filenames.
-# test_timeout
-# Controlled by test_timeout.skip_expected. Requires the network
-# resource and a socket module.
-#
-# Tests that are expected to be skipped everywhere except on one platform
-# are also handled separately.
-
-_expectations = (
- ('win32',
- """
- test__locale
- test_crypt
- test_curses
- test_dbm
- test_devpoll
- test_fcntl
- test_fork1
- test_epoll
- test_dbm_gnu
- test_dbm_ndbm
- test_grp
- test_ioctl
- test_largefile
- test_kqueue
- test_openpty
- test_ossaudiodev
- test_pipes
- test_poll
- test_posix
- test_pty
- test_pwd
- test_resource
- test_signal
- test_syslog
- test_threadsignals
- test_wait3
- test_wait4
- """),
- ('linux',
- """
- test_curses
- test_devpoll
- test_largefile
- test_kqueue
- test_ossaudiodev
- """),
- ('unixware',
- """
- test_epoll
- test_largefile
- test_kqueue
- test_minidom
- test_openpty
- test_pyexpat
- test_sax
- test_sundry
- """),
- ('openunix',
- """
- test_epoll
- test_largefile
- test_kqueue
- test_minidom
- test_openpty
- test_pyexpat
- test_sax
- test_sundry
- """),
- ('sco_sv',
- """
- test_asynchat
- test_fork1
- test_epoll
- test_gettext
- test_largefile
- test_locale
- test_kqueue
- test_minidom
- test_openpty
- test_pyexpat
- test_queue
- test_sax
- test_sundry
- test_thread
- test_threaded_import
- test_threadedtempfile
- test_threading
- """),
- ('darwin',
- """
- test__locale
- test_curses
- test_devpoll
- test_epoll
- test_dbm_gnu
- test_gdb
- test_largefile
- test_locale
- test_minidom
- test_ossaudiodev
- test_poll
- """),
- ('sunos',
- """
- test_curses
- test_dbm
- test_epoll
- test_kqueue
- test_dbm_gnu
- test_gzip
- test_openpty
- test_zipfile
- test_zlib
- """),
- ('hp-ux',
- """
- test_curses
- test_epoll
- test_dbm_gnu
- test_gzip
- test_largefile
- test_locale
- test_kqueue
- test_minidom
- test_openpty
- test_pyexpat
- test_sax
- test_zipfile
- test_zlib
- """),
- ('cygwin',
- """
- test_curses
- test_dbm
- test_devpoll
- test_epoll
- test_ioctl
- test_kqueue
- test_largefile
- test_locale
- test_ossaudiodev
- test_socketserver
- """),
- ('os2emx',
- """
- test_audioop
- test_curses
- test_epoll
- test_kqueue
- test_largefile
- test_mmap
- test_openpty
- test_ossaudiodev
- test_pty
- test_resource
- test_signal
- """),
- ('freebsd',
- """
- test_devpoll
- test_epoll
- test_dbm_gnu
- test_locale
- test_ossaudiodev
- test_pep277
- test_pty
- test_socketserver
- test_tcl
- test_tk
- test_ttk_guionly
- test_ttk_textonly
- test_timeout
- test_urllibnet
- test_multiprocessing
- """),
- ('aix',
- """
- test_bz2
- test_epoll
- test_dbm_gnu
- test_gzip
- test_kqueue
- test_ossaudiodev
- test_tcl
- test_tk
- test_ttk_guionly
- test_ttk_textonly
- test_zipimport
- test_zlib
- """),
- ('openbsd',
- """
- test_ctypes
- test_devpoll
- test_epoll
- test_dbm_gnu
- test_locale
- test_normalization
- test_ossaudiodev
- test_pep277
- test_tcl
- test_tk
- test_ttk_guionly
- test_ttk_textonly
- test_multiprocessing
- """),
- ('netbsd',
- """
- test_ctypes
- test_curses
- test_devpoll
- test_epoll
- test_dbm_gnu
- test_locale
- test_ossaudiodev
- test_pep277
- test_tcl
- test_tk
- test_ttk_guionly
- test_ttk_textonly
- test_multiprocessing
- """),
-)
-
-class _ExpectedSkips:
- def __init__(self):
- import os.path
- from test import test_timeout
-
- self.valid = False
- expected = None
- for item in _expectations:
- if sys.platform.startswith(item[0]):
- expected = item[1]
- break
- if expected is not None:
- self.expected = set(expected.split())
-
- # These are broken tests, for now skipped on every platform.
- # XXX Fix these!
- self.expected.add('test_nis')
-
- # expected to be skipped on every platform, even Linux
- if not os.path.supports_unicode_filenames:
- self.expected.add('test_pep277')
-
- # doctest, profile and cProfile tests fail when the codec for the
- # fs encoding isn't built in because PyUnicode_Decode() adds two
- # calls into Python.
- encs = ("utf-8", "latin-1", "ascii", "mbcs", "utf-16", "utf-32")
- if sys.getfilesystemencoding().lower() not in encs:
- self.expected.add('test_profile')
- self.expected.add('test_cProfile')
- self.expected.add('test_doctest')
-
- if test_timeout.skip_expected:
- self.expected.add('test_timeout')
-
- if sys.platform != "win32":
- # test_sqlite is only reliable on Windows where the library
- # is distributed with Python
- WIN_ONLY = {"test_unicode_file", "test_winreg",
- "test_winsound", "test_startfile",
- "test_sqlite", "test_msilib"}
- self.expected |= WIN_ONLY
-
- if sys.platform != 'sunos5':
- self.expected.add('test_nis')
-
- if support.python_is_optimized():
- self.expected.add("test_gdb")
-
- self.valid = True
-
- def isvalid(self):
- "Return true iff _ExpectedSkips knows about the current platform."
- return self.valid
-
- def getexpected(self):
- """Return set of test names we expect to skip on current platform.
-
- self.isvalid() must be true.
- """
-
- assert self.isvalid()
- return self.expected
def _make_temp_dir_for_build(TEMPDIR):
# When tests are run from the Python build directory, it is best practice
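The leak hunter now tracks sys.getallocatedblocks() alongside sys.gettotalrefcount(); unlike the refcount total, the block counter is available in every 3.4 build, so it can be sampled directly. A minimal sketch of the sampling idea used above:

    import gc
    import sys

    def snapshot():
        # collect cyclic garbage, then read the allocator statistics
        gc.collect()
        return sys.getallocatedblocks()

    before = snapshot()
    data = [object() for _ in range(1000)]
    del data
    print('block delta:', snapshot() - before)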
diff --git a/Lib/test/sortperf.py b/Lib/test/sortperf.py
index af7c0b4..a370ed9 100644
--- a/Lib/test/sortperf.py
+++ b/Lib/test/sortperf.py
@@ -35,7 +35,7 @@ def randfloats(n):
if fp:
try:
os.unlink(fn)
- except os.error:
+ except OSError:
pass
except IOError as msg:
print("can't write", fn, ":", msg)
diff --git a/Lib/test/ssl_servers.py b/Lib/test/ssl_servers.py
index 8686153..f4a0bed 100644
--- a/Lib/test/ssl_servers.py
+++ b/Lib/test/ssl_servers.py
@@ -35,7 +35,7 @@ class HTTPSServer(_HTTPServer):
try:
sock, addr = self.socket.accept()
sslconn = self.context.wrap_socket(sock, server_side=True)
- except socket.error as e:
+ except OSError as e:
# socket errors are silenced by the caller, print them here
if support.verbose:
sys.stderr.write("Got an error:\n%s\n" % e)
diff --git a/Lib/test/support.py b/Lib/test/support.py
index d0a37ea..d776b99 100644
--- a/Lib/test/support.py
+++ b/Lib/test/support.py
@@ -93,7 +93,8 @@ def _ignore_deprecated_imports(ignore=True):
"""Context manager to suppress package and module deprecation
warnings when importing them.
- If ignore is False, this context manager has no effect."""
+ If ignore is False, this context manager has no effect.
+ """
if ignore:
with warnings.catch_warnings():
warnings.filterwarnings("ignore", ".+ (module|package)",
@@ -103,23 +104,29 @@ def _ignore_deprecated_imports(ignore=True):
yield
-def import_module(name, deprecated=False):
+def import_module(name, deprecated=False, *, required_on=()):
"""Import and return the module to be tested, raising SkipTest if
it is not available.
If deprecated is True, any module or package deprecation messages
- will be suppressed."""
+ will be suppressed. If a module is required on a platform but optional for
+ others, set required_on to an iterable of platform prefixes which will be
+ compared against sys.platform.
+ """
with _ignore_deprecated_imports(deprecated):
try:
return importlib.import_module(name)
except ImportError as msg:
+ if sys.platform.startswith(tuple(required_on)):
+ raise
raise unittest.SkipTest(str(msg))
def _save_and_remove_module(name, orig_modules):
"""Helper function to save and remove a module from sys.modules
- Raise ImportError if the module can't be imported."""
+ Raise ImportError if the module can't be imported.
+ """
# try to import the module and raise an error if it can't be imported
if name not in sys.modules:
__import__(name)
@@ -489,7 +496,7 @@ def find_unused_port(family=socket.AF_INET, socktype=socket.SOCK_STREAM):
the SO_REUSEADDR socket option having different semantics on Windows versus
Unix/Linux. On Unix, you can't have two AF_INET SOCK_STREAM sockets bind,
listen and then accept connections on identical host/ports. An EADDRINUSE
- socket.error will be raised at some point (depending on the platform and
+ OSError will be raised at some point (depending on the platform and
the order bind and listen were called on each socket).
However, on Windows, if SO_REUSEADDR is set on the sockets, no EADDRINUSE
@@ -563,7 +570,7 @@ def _is_ipv6_enabled():
sock = socket.socket(socket.AF_INET6, socket.SOCK_STREAM)
sock.bind(('::1', 0))
return True
- except (socket.error, socket.gaierror):
+ except OSError:
pass
finally:
if sock:
@@ -710,6 +717,9 @@ for name in (
b'\xae\xd5'
# undecodable from UTF-8 (UNIX and Mac OS X)
b'\xed\xb2\x80', b'\xed\xb4\x80',
+ # undecodable from shift_jis, cp869, cp874, cp932, cp1250, cp1251, cp1252,
+ # cp1253, cp1254, cp1255, cp1257, cp1258
+ b'\x81\x98',
):
try:
name.decode(TESTFN_ENCODING)
@@ -1088,7 +1098,7 @@ class TransientResource(object):
# with the Internet connection manifest themselves as exceptions.
# XXX deprecate these and use transient_internet() instead
time_out = TransientResource(IOError, errno=errno.ETIMEDOUT)
-socket_peer_reset = TransientResource(socket.error, errno=errno.ECONNRESET)
+socket_peer_reset = TransientResource(OSError, errno=errno.ECONNRESET)
ioerror_peer_reset = TransientResource(IOError, errno=errno.ECONNRESET)
@@ -1762,7 +1772,7 @@ def strip_python_stderr(stderr):
This will typically be run on the result of the communicate() method
of a subprocess.Popen object.
"""
- stderr = re.sub(br"\[\d+ refs\]\r?\n?", b"", stderr).strip()
+ stderr = re.sub(br"\[\d+ refs, \d+ blocks\]\r?\n?", b"", stderr).strip()
return stderr
def args_from_interpreter_flags():
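The new required_on keyword turns a missing module into a hard failure on the named platforms instead of a quiet skip. A short sketch of how a test might use it; the choice of module and platform prefix here is only illustrative:

    from test import support

    # skip everywhere except Linux, where a missing fcntl would be a real bug
    fcntl = support.import_module('fcntl', required_on=['linux'])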
diff --git a/Lib/test/test_abc.py b/Lib/test/test_abc.py
index 653c957..3498524 100644
--- a/Lib/test/test_abc.py
+++ b/Lib/test/test_abc.py
@@ -96,6 +96,19 @@ class TestLegacyAPI(unittest.TestCase):
class TestABC(unittest.TestCase):
+ def test_ABC_helper(self):
+ # create an ABC using the helper class and perform basic checks
+ class C(abc.ABC):
+ @classmethod
+ @abc.abstractmethod
+ def foo(cls): return cls.__name__
+ self.assertEqual(type(C), abc.ABCMeta)
+ self.assertRaises(TypeError, C)
+ class D(C):
+ @classmethod
+ def foo(cls): return super().foo()
+ self.assertEqual(D.foo(), 'D')
+
def test_abstractmethod_basics(self):
@abc.abstractmethod
def foo(self): pass
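The test exercises the new abc.ABC helper class, which lets a class become abstract through plain inheritance instead of spelling out metaclass=ABCMeta. A minimal sketch with hypothetical class names:

    import abc

    class Serializer(abc.ABC):              # equivalent to metaclass=abc.ABCMeta
        @abc.abstractmethod
        def dumps(self, obj): ...

    class JSONSerializer(Serializer):
        def dumps(self, obj):
            import json
            return json.dumps(obj)

    JSONSerializer().dumps({'ok': True})    # instantiable once dumps is concrete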
diff --git a/Lib/test/test_argparse.py b/Lib/test/test_argparse.py
index c06c940..00cde2e 100644
--- a/Lib/test/test_argparse.py
+++ b/Lib/test/test_argparse.py
@@ -14,6 +14,7 @@ import argparse
from io import StringIO
from test import support
+from unittest import mock
class StdIOBuffer(StringIO):
pass
@@ -1421,6 +1422,19 @@ class TestFileTypeRepr(TestCase):
type = argparse.FileType('wb', 1)
self.assertEqual("FileType('wb', 1)", repr(type))
+ def test_r_latin(self):
+ type = argparse.FileType('r', encoding='latin_1')
+ self.assertEqual("FileType('r', encoding='latin_1')", repr(type))
+
+ def test_w_big5_ignore(self):
+ type = argparse.FileType('w', encoding='big5', errors='ignore')
+ self.assertEqual("FileType('w', encoding='big5', errors='ignore')",
+ repr(type))
+
+ def test_r_1_replace(self):
+ type = argparse.FileType('r', 1, errors='replace')
+ self.assertEqual("FileType('r', 1, errors='replace')", repr(type))
+
class RFile(object):
seen = {}
@@ -1557,6 +1571,24 @@ class TestFileTypeWB(TempDirMixin, ParserTestCase):
]
+class TestFileTypeOpenArgs(TestCase):
+ """Test that open (the builtin) is correctly called"""
+
+ def test_open_args(self):
+ FT = argparse.FileType
+ cases = [
+ (FT('rb'), ('rb', -1, None, None)),
+ (FT('w', 1), ('w', 1, None, None)),
+ (FT('w', errors='replace'), ('w', -1, None, 'replace')),
+ (FT('wb', encoding='big5'), ('wb', -1, 'big5', None)),
+ (FT('w', 0, 'l1', 'strict'), ('w', 0, 'l1', 'strict')),
+ ]
+ with mock.patch('builtins.open') as m:
+ for type, args in cases:
+ type('foo')
+ m.assert_called_with('foo', *args)
+
+
class TestTypeCallable(ParserTestCase):
"""Test some callables as option/argument types"""
diff --git a/Lib/test/test_ast.py b/Lib/test/test_ast.py
index dc24126..36907b4 100644
--- a/Lib/test/test_ast.py
+++ b/Lib/test/test_ast.py
@@ -928,6 +928,9 @@ class ASTValidatorTests(unittest.TestCase):
def test_tuple(self):
self._sequence(ast.Tuple)
+ def test_nameconstant(self):
+ self.expr(ast.NameConstant(4), "singleton must be True, False, or None")
+
def test_stdlib_validates(self):
stdlib = os.path.dirname(ast.__file__)
tests = [fn for fn in os.listdir(stdlib) if fn.endswith(".py")]
@@ -959,13 +962,13 @@ def main():
#### EVERYTHING BELOW IS GENERATED #####
exec_results = [
-('Module', [('Expr', (1, 0), ('Name', (1, 0), 'None', ('Load',)))]),
+('Module', [('Expr', (1, 0), ('NameConstant', (1, 0), None))]),
('Module', [('FunctionDef', (1, 0), 'f', ('arguments', [], None, None, [], None, None, [], []), [('Pass', (1, 9))], [], None)]),
('Module', [('FunctionDef', (1, 0), 'f', ('arguments', [('arg', 'a', None)], None, None, [], None, None, [], []), [('Pass', (1, 10))], [], None)]),
('Module', [('FunctionDef', (1, 0), 'f', ('arguments', [('arg', 'a', None)], None, None, [], None, None, [('Num', (1, 8), 0)], []), [('Pass', (1, 12))], [], None)]),
('Module', [('FunctionDef', (1, 0), 'f', ('arguments', [], 'args', None, [], None, None, [], []), [('Pass', (1, 14))], [], None)]),
('Module', [('FunctionDef', (1, 0), 'f', ('arguments', [], None, None, [], 'kwargs', None, [], []), [('Pass', (1, 17))], [], None)]),
-('Module', [('FunctionDef', (1, 0), 'f', ('arguments', [('arg', 'a', None), ('arg', 'b', None), ('arg', 'c', None), ('arg', 'd', None), ('arg', 'e', None)], 'args', None, [], 'kwargs', None, [('Num', (1, 11), 1), ('Name', (1, 16), 'None', ('Load',)), ('List', (1, 24), [], ('Load',)), ('Dict', (1, 30), [], [])], []), [('Pass', (1, 52))], [], None)]),
+('Module', [('FunctionDef', (1, 0), 'f', ('arguments', [('arg', 'a', None), ('arg', 'b', None), ('arg', 'c', None), ('arg', 'd', None), ('arg', 'e', None)], 'args', None, [], 'kwargs', None, [('Num', (1, 11), 1), ('NameConstant', (1, 16), None), ('List', (1, 24), [], ('Load',)), ('Dict', (1, 30), [], [])], []), [('Pass', (1, 52))], [], None)]),
('Module', [('ClassDef', (1, 0), 'C', [], [], None, None, [('Pass', (1, 8))], [])]),
('Module', [('ClassDef', (1, 0), 'C', [('Name', (1, 8), 'object', ('Load',))], [], None, None, [('Pass', (1, 17))], [])]),
('Module', [('FunctionDef', (1, 0), 'f', ('arguments', [], None, None, [], None, None, [], []), [('Return', (1, 8), ('Num', (1, 15), 1))], [], None)]),
@@ -1002,14 +1005,14 @@ single_results = [
('Interactive', [('Expr', (1, 0), ('BinOp', (1, 0), ('Num', (1, 0), 1), ('Add',), ('Num', (1, 2), 2)))]),
]
eval_results = [
-('Expression', ('Name', (1, 0), 'None', ('Load',))),
+('Expression', ('NameConstant', (1, 0), None)),
('Expression', ('BoolOp', (1, 0), ('And',), [('Name', (1, 0), 'a', ('Load',)), ('Name', (1, 6), 'b', ('Load',))])),
('Expression', ('BinOp', (1, 0), ('Name', (1, 0), 'a', ('Load',)), ('Add',), ('Name', (1, 4), 'b', ('Load',)))),
('Expression', ('UnaryOp', (1, 0), ('Not',), ('Name', (1, 4), 'v', ('Load',)))),
-('Expression', ('Lambda', (1, 0), ('arguments', [], None, None, [], None, None, [], []), ('Name', (1, 7), 'None', ('Load',)))),
+('Expression', ('Lambda', (1, 0), ('arguments', [], None, None, [], None, None, [], []), ('NameConstant', (1, 7), None))),
('Expression', ('Dict', (1, 0), [('Num', (1, 2), 1)], [('Num', (1, 4), 2)])),
('Expression', ('Dict', (1, 0), [], [])),
-('Expression', ('Set', (1, 0), [('Name', (1, 1), 'None', ('Load',))])),
+('Expression', ('Set', (1, 0), [('NameConstant', (1, 1), None)])),
('Expression', ('Dict', (1, 0), [('Num', (2, 6), 1)], [('Num', (4, 10), 2)])),
('Expression', ('ListComp', (1, 1), ('Name', (1, 1), 'a', ('Load',)), [('comprehension', ('Name', (1, 7), 'b', ('Store',)), ('Name', (1, 12), 'c', ('Load',)), [('Name', (1, 17), 'd', ('Load',))])])),
('Expression', ('GeneratorExp', (1, 1), ('Name', (1, 1), 'a', ('Load',)), [('comprehension', ('Name', (1, 7), 'b', ('Store',)), ('Name', (1, 12), 'c', ('Load',)), [('Name', (1, 17), 'd', ('Load',))])])),
diff --git a/Lib/test/test_asyncore.py b/Lib/test/test_asyncore.py
index 42a2525..bdb3dc0 100644
--- a/Lib/test/test_asyncore.py
+++ b/Lib/test/test_asyncore.py
@@ -756,7 +756,7 @@ class BaseTestAPI(unittest.TestCase):
s2 = asyncore.dispatcher()
s2.create_socket(self.family)
# EADDRINUSE indicates the socket was correctly bound
- self.assertRaises(socket.error, s2.bind, (self.addr[0], port))
+ self.assertRaises(OSError, s2.bind, (self.addr[0], port))
def test_set_reuse_addr(self):
if HAS_UNIX_SOCKETS and self.family == socket.AF_UNIX:
@@ -764,7 +764,7 @@ class BaseTestAPI(unittest.TestCase):
sock = socket.socket(self.family)
try:
sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
- except socket.error:
+ except OSError:
unittest.skip("SO_REUSEADDR not supported on this platform")
else:
# if SO_REUSEADDR succeeded for sock we expect asyncore
@@ -797,7 +797,7 @@ class BaseTestAPI(unittest.TestCase):
struct.pack('ii', 1, 0))
try:
s.connect(server.address)
- except socket.error:
+ except OSError:
pass
finally:
s.close()
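These s/socket.error/OSError/ substitutions lean on PEP 3151, which collapsed the old IO-related exception names into OSError in Python 3.3, so catching OSError is equivalent. A quick check, assuming Python 3.3 or newer:

    import socket

    print(socket.error is OSError)       # True -- socket.error is only an alias now
    print(IOError is OSError)            # True
    print(EnvironmentError is OSError)   # True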
diff --git a/Lib/test/test_builtin.py b/Lib/test/test_builtin.py
index bf005c5..036f4f2 100644
--- a/Lib/test/test_builtin.py
+++ b/Lib/test/test_builtin.py
@@ -462,6 +462,11 @@ class BuiltinTest(unittest.TestCase):
self.assertRaises(TypeError, eval, ())
self.assertRaises(SyntaxError, eval, bom[:2] + b'a')
+ class X:
+ def __getitem__(self, key):
+ raise ValueError
+ self.assertRaises(ValueError, eval, "foo", {}, X())
+
def test_general_eval(self):
# Tests that general mappings can be used for the locals argument
@@ -1474,17 +1479,11 @@ class BuiltinTest(unittest.TestCase):
# --------------------------------------------------------------------
# Issue #7994: object.__format__ with a non-empty format string is
# deprecated
- def test_deprecated_format_string(obj, fmt_str, should_raise_warning):
- with warnings.catch_warnings(record=True) as w:
- warnings.simplefilter("always", DeprecationWarning)
- format(obj, fmt_str)
- if should_raise_warning:
- self.assertEqual(len(w), 1)
- self.assertIsInstance(w[0].message, DeprecationWarning)
- self.assertIn('object.__format__ with a non-empty format '
- 'string', str(w[0].message))
+ def test_deprecated_format_string(obj, fmt_str, should_raise):
+ if should_raise:
+ self.assertRaises(TypeError, format, obj, fmt_str)
else:
- self.assertEqual(len(w), 0)
+ format(obj, fmt_str)
fmt_strs = ['', 's']
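The rewritten helper reflects that passing a non-empty format string to the default object.__format__ went from a DeprecationWarning to a TypeError in Python 3.4. A small sketch of the new behaviour (the class name is only an illustration):

    class Plain:
        pass

    print(format(Plain(), ""))       # empty spec still falls back to str()
    try:
        format(Plain(), "10s")       # non-empty spec with the default __format__
    except TypeError as exc:
        print("rejected:", exc)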
diff --git a/Lib/test/test_bytes.py b/Lib/test/test_bytes.py
index f88c242..0a6d446 100644
--- a/Lib/test/test_bytes.py
+++ b/Lib/test/test_bytes.py
@@ -288,8 +288,22 @@ class BaseBytesTest(unittest.TestCase):
self.assertEqual(self.type2test(b"").join(lst), b"abc")
self.assertEqual(self.type2test(b"").join(tuple(lst)), b"abc")
self.assertEqual(self.type2test(b"").join(iter(lst)), b"abc")
- self.assertEqual(self.type2test(b".").join([b"ab", b"cd"]), b"ab.cd")
- # XXX more...
+ dot_join = self.type2test(b".:").join
+ self.assertEqual(dot_join([b"ab", b"cd"]), b"ab.:cd")
+ self.assertEqual(dot_join([memoryview(b"ab"), b"cd"]), b"ab.:cd")
+ self.assertEqual(dot_join([b"ab", memoryview(b"cd")]), b"ab.:cd")
+ self.assertEqual(dot_join([bytearray(b"ab"), b"cd"]), b"ab.:cd")
+ self.assertEqual(dot_join([b"ab", bytearray(b"cd")]), b"ab.:cd")
+ # Stress it with many items
+ seq = [b"abc"] * 1000
+ expected = b"abc" + b".:abc" * 999
+ self.assertEqual(dot_join(seq), expected)
+ # Error handling and cleanup when some item in the middle of the
+ # sequence has the wrong type.
+ with self.assertRaises(TypeError):
+ dot_join([bytearray(b"ab"), "cd", b"ef"])
+ with self.assertRaises(TypeError):
+ dot_join([memoryview(b"ab"), "cd", b"ef"])
def test_count(self):
b = self.type2test(b'mississippi')
@@ -759,7 +773,7 @@ class ByteArrayTest(BaseBytesTest):
finally:
try:
os.remove(tfn)
- except os.error:
+ except OSError:
pass
def test_reverse(self):
@@ -1273,6 +1287,11 @@ class BytearrayPEP3137Test(unittest.TestCase,
self.assertEqual(val, newval)
self.assertTrue(val is not newval,
expr+' returned val on a mutable object')
+ sep = self.marshal(b'')
+ newval = sep.join([val])
+ self.assertEqual(val, newval)
+ self.assertIsNot(val, newval)
+
class FixedStringTest(test.string_tests.BaseTest):
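The extra join cases exercise the fact that bytes.join accepts any buffer-protocol object, not just bytes. For example:

    sep = b".:"
    print(sep.join([b"ab", memoryview(b"cd"), bytearray(b"ef")]))  # b'ab.:cd.:ef'
    # mixing in a str raises TypeError, which is what the new negative cases check:
    # sep.join([b"ab", "cd", b"ef"])  -> TypeError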
diff --git a/Lib/test/test_bz2.py b/Lib/test/test_bz2.py
index f4e81db..24a1813 100644
--- a/Lib/test/test_bz2.py
+++ b/Lib/test/test_bz2.py
@@ -1,6 +1,6 @@
#!/usr/bin/env python3
from test import support
-from test.support import TESTFN, bigmemtest, _4G
+from test.support import bigmemtest, _4G
import unittest
from io import BytesIO
@@ -18,10 +18,10 @@ except ImportError:
bz2 = support.import_module('bz2')
from bz2 import BZ2File, BZ2Compressor, BZ2Decompressor
-has_cmdline_bunzip2 = sys.platform not in ("win32", "os2emx")
class BaseTest(unittest.TestCase):
"Base for other testcases."
+
TEXT_LINES = [
b'root:x:0:0:root:/root:/bin/bash\n',
b'bin:x:1:1:bin:/bin:\n',
@@ -49,13 +49,17 @@ class BaseTest(unittest.TestCase):
DATA = b'BZh91AY&SY.\xc8N\x18\x00\x01>_\x80\x00\x10@\x02\xff\xf0\x01\x07n\x00?\xe7\xff\xe00\x01\x99\xaa\x00\xc0\x03F\x86\x8c#&\x83F\x9a\x03\x06\xa6\xd0\xa6\x93M\x0fQ\xa7\xa8\x06\x804hh\x12$\x11\xa4i4\xf14S\xd2<Q\xb5\x0fH\xd3\xd4\xdd\xd5\x87\xbb\xf8\x94\r\x8f\xafI\x12\xe1\xc9\xf8/E\x00pu\x89\x12]\xc9\xbbDL\nQ\x0e\t1\x12\xdf\xa0\xc0\x97\xac2O9\x89\x13\x94\x0e\x1c7\x0ed\x95I\x0c\xaaJ\xa4\x18L\x10\x05#\x9c\xaf\xba\xbc/\x97\x8a#C\xc8\xe1\x8cW\xf9\xe2\xd0\xd6M\xa7\x8bXa<e\x84t\xcbL\xb3\xa7\xd9\xcd\xd1\xcb\x84.\xaf\xb3\xab\xab\xad`n}\xa0lh\tE,\x8eZ\x15\x17VH>\x88\xe5\xcd9gd6\x0b\n\xe9\x9b\xd5\x8a\x99\xf7\x08.K\x8ev\xfb\xf7xw\xbb\xdf\xa1\x92\xf1\xdd|/";\xa2\xba\x9f\xd5\xb1#A\xb6\xf6\xb3o\xc9\xc5y\\\xebO\xe7\x85\x9a\xbc\xb6f8\x952\xd5\xd7"%\x89>V,\xf7\xa6z\xe2\x9f\xa3\xdf\x11\x11"\xd6E)I\xa9\x13^\xca\xf3r\xd0\x03U\x922\xf26\xec\xb6\xed\x8b\xc3U\x13\x9d\xc5\x170\xa4\xfa^\x92\xacDF\x8a\x97\xd6\x19\xfe\xdd\xb8\xbd\x1a\x9a\x19\xa3\x80ankR\x8b\xe5\xd83]\xa9\xc6\x08\x82f\xf6\xb9"6l$\xb8j@\xc0\x8a\xb0l1..\xbak\x83ls\x15\xbc\xf4\xc1\x13\xbe\xf8E\xb8\x9d\r\xa8\x9dk\x84\xd3n\xfa\xacQ\x07\xb1%y\xaav\xb4\x08\xe0z\x1b\x16\xf5\x04\xe9\xcc\xb9\x08z\x1en7.G\xfc]\xc9\x14\xe1B@\xbb!8`'
def setUp(self):
- self.filename = TESTFN
+ self.filename = support.TESTFN
def tearDown(self):
if os.path.isfile(self.filename):
os.unlink(self.filename)
- if has_cmdline_bunzip2:
+ if sys.platform == "win32":
+ # bunzip2 isn't available on Windows.
+ def decompress(self, data):
+ return bz2.decompress(data)
+ else:
def decompress(self, data):
pop = subprocess.Popen("bunzip2", shell=True,
stdin=subprocess.PIPE,
@@ -69,31 +73,21 @@ class BaseTest(unittest.TestCase):
ret = bz2.decompress(data)
return ret
- else:
- # bunzip2 isn't available to run on Windows.
- def decompress(self, data):
- return bz2.decompress(data)
class BZ2FileTest(BaseTest):
- "Test BZ2File type miscellaneous methods."
+ "Test the BZ2File class."
def createTempFile(self, streams=1):
with open(self.filename, "wb") as f:
f.write(self.DATA * streams)
def testBadArgs(self):
- with self.assertRaises(TypeError):
- BZ2File(123.456)
- with self.assertRaises(ValueError):
- BZ2File("/dev/null", "z")
- with self.assertRaises(ValueError):
- BZ2File("/dev/null", "rx")
- with self.assertRaises(ValueError):
- BZ2File("/dev/null", "rbt")
- with self.assertRaises(ValueError):
- BZ2File("/dev/null", compresslevel=0)
- with self.assertRaises(ValueError):
- BZ2File("/dev/null", compresslevel=10)
+ self.assertRaises(TypeError, BZ2File, 123.456)
+ self.assertRaises(ValueError, BZ2File, "/dev/null", "z")
+ self.assertRaises(ValueError, BZ2File, "/dev/null", "rx")
+ self.assertRaises(ValueError, BZ2File, "/dev/null", "rbt")
+ self.assertRaises(ValueError, BZ2File, "/dev/null", compresslevel=0)
+ self.assertRaises(ValueError, BZ2File, "/dev/null", compresslevel=10)
def testRead(self):
self.createTempFile()
@@ -214,9 +208,8 @@ class BZ2FileTest(BaseTest):
self.createTempFile()
bz2f = BZ2File(self.filename)
bz2f.close()
- self.assertRaises(ValueError, bz2f.__next__)
- # This call will deadlock if the above .__next__ call failed to
- # release the lock.
+ self.assertRaises(ValueError, next, bz2f)
+ # This call will deadlock if the above call failed to release the lock.
self.assertRaises(ValueError, bz2f.readlines)
def testWrite(self):
@@ -379,7 +372,7 @@ class BZ2FileTest(BaseTest):
bz2f.close()
self.assertRaises(ValueError, bz2f.seekable)
- bz2f = BZ2File(BytesIO(), mode="w")
+ bz2f = BZ2File(BytesIO(), "w")
try:
self.assertFalse(bz2f.seekable())
finally:
@@ -405,7 +398,7 @@ class BZ2FileTest(BaseTest):
bz2f.close()
self.assertRaises(ValueError, bz2f.readable)
- bz2f = BZ2File(BytesIO(), mode="w")
+ bz2f = BZ2File(BytesIO(), "w")
try:
self.assertFalse(bz2f.readable())
finally:
@@ -422,7 +415,7 @@ class BZ2FileTest(BaseTest):
bz2f.close()
self.assertRaises(ValueError, bz2f.writable)
- bz2f = BZ2File(BytesIO(), mode="w")
+ bz2f = BZ2File(BytesIO(), "w")
try:
self.assertTrue(bz2f.writable())
finally:
@@ -476,7 +469,7 @@ class BZ2FileTest(BaseTest):
# Issue #7205: Using a BZ2File from several threads shouldn't deadlock.
data = b"1" * 2**20
nthreads = 10
- with bz2.BZ2File(self.filename, 'wb') as f:
+ with BZ2File(self.filename, 'wb') as f:
def comp():
for i in range(5):
f.write(data)
@@ -487,28 +480,27 @@ class BZ2FileTest(BaseTest):
t.join()
def testWithoutThreading(self):
- bz2 = support.import_fresh_module("bz2", blocked=("threading",))
- with bz2.BZ2File(self.filename, "wb") as f:
+ module = support.import_fresh_module("bz2", blocked=("threading",))
+ with module.BZ2File(self.filename, "wb") as f:
f.write(b"abc")
- with bz2.BZ2File(self.filename, "rb") as f:
+ with module.BZ2File(self.filename, "rb") as f:
self.assertEqual(f.read(), b"abc")
def testMixedIterationAndReads(self):
self.createTempFile()
linelen = len(self.TEXT_LINES[0])
halflen = linelen // 2
- with bz2.BZ2File(self.filename) as bz2f:
+ with BZ2File(self.filename) as bz2f:
bz2f.read(halflen)
self.assertEqual(next(bz2f), self.TEXT_LINES[0][halflen:])
self.assertEqual(bz2f.read(), self.TEXT[linelen:])
- with bz2.BZ2File(self.filename) as bz2f:
+ with BZ2File(self.filename) as bz2f:
bz2f.readline()
self.assertEqual(next(bz2f), self.TEXT_LINES[1])
self.assertEqual(bz2f.readline(), self.TEXT_LINES[2])
- with bz2.BZ2File(self.filename) as bz2f:
+ with BZ2File(self.filename) as bz2f:
bz2f.readlines()
- with self.assertRaises(StopIteration):
- next(bz2f)
+ self.assertRaises(StopIteration, next, bz2f)
self.assertEqual(bz2f.readlines(), [])
def testMultiStreamOrdering(self):
@@ -576,6 +568,7 @@ class BZ2FileTest(BaseTest):
bz2f.seek(-150, 1)
self.assertEqual(bz2f.read(), self.TEXT[500-150:])
+
class BZ2CompressorTest(BaseTest):
def testCompress(self):
bz2c = BZ2Compressor()
@@ -688,97 +681,102 @@ class CompressDecompressTest(BaseTest):
class OpenTest(BaseTest):
+ "Test the open function."
+
+ def open(self, *args, **kwargs):
+ return bz2.open(*args, **kwargs)
+
def test_binary_modes(self):
- with bz2.open(self.filename, "wb") as f:
+ with self.open(self.filename, "wb") as f:
f.write(self.TEXT)
with open(self.filename, "rb") as f:
- file_data = bz2.decompress(f.read())
+ file_data = self.decompress(f.read())
self.assertEqual(file_data, self.TEXT)
- with bz2.open(self.filename, "rb") as f:
+ with self.open(self.filename, "rb") as f:
self.assertEqual(f.read(), self.TEXT)
- with bz2.open(self.filename, "ab") as f:
+ with self.open(self.filename, "ab") as f:
f.write(self.TEXT)
with open(self.filename, "rb") as f:
- file_data = bz2.decompress(f.read())
+ file_data = self.decompress(f.read())
self.assertEqual(file_data, self.TEXT * 2)
def test_implicit_binary_modes(self):
# Test implicit binary modes (no "b" or "t" in mode string).
- with bz2.open(self.filename, "w") as f:
+ with self.open(self.filename, "w") as f:
f.write(self.TEXT)
with open(self.filename, "rb") as f:
- file_data = bz2.decompress(f.read())
+ file_data = self.decompress(f.read())
self.assertEqual(file_data, self.TEXT)
- with bz2.open(self.filename, "r") as f:
+ with self.open(self.filename, "r") as f:
self.assertEqual(f.read(), self.TEXT)
- with bz2.open(self.filename, "a") as f:
+ with self.open(self.filename, "a") as f:
f.write(self.TEXT)
with open(self.filename, "rb") as f:
- file_data = bz2.decompress(f.read())
+ file_data = self.decompress(f.read())
self.assertEqual(file_data, self.TEXT * 2)
def test_text_modes(self):
text = self.TEXT.decode("ascii")
text_native_eol = text.replace("\n", os.linesep)
- with bz2.open(self.filename, "wt") as f:
+ with self.open(self.filename, "wt") as f:
f.write(text)
with open(self.filename, "rb") as f:
- file_data = bz2.decompress(f.read()).decode("ascii")
+ file_data = self.decompress(f.read()).decode("ascii")
self.assertEqual(file_data, text_native_eol)
- with bz2.open(self.filename, "rt") as f:
+ with self.open(self.filename, "rt") as f:
self.assertEqual(f.read(), text)
- with bz2.open(self.filename, "at") as f:
+ with self.open(self.filename, "at") as f:
f.write(text)
with open(self.filename, "rb") as f:
- file_data = bz2.decompress(f.read()).decode("ascii")
+ file_data = self.decompress(f.read()).decode("ascii")
self.assertEqual(file_data, text_native_eol * 2)
def test_fileobj(self):
- with bz2.open(BytesIO(self.DATA), "r") as f:
+ with self.open(BytesIO(self.DATA), "r") as f:
self.assertEqual(f.read(), self.TEXT)
- with bz2.open(BytesIO(self.DATA), "rb") as f:
+ with self.open(BytesIO(self.DATA), "rb") as f:
self.assertEqual(f.read(), self.TEXT)
text = self.TEXT.decode("ascii")
- with bz2.open(BytesIO(self.DATA), "rt") as f:
+ with self.open(BytesIO(self.DATA), "rt") as f:
self.assertEqual(f.read(), text)
def test_bad_params(self):
# Test invalid parameter combinations.
- with self.assertRaises(ValueError):
- bz2.open(self.filename, "wbt")
- with self.assertRaises(ValueError):
- bz2.open(self.filename, "rb", encoding="utf-8")
- with self.assertRaises(ValueError):
- bz2.open(self.filename, "rb", errors="ignore")
- with self.assertRaises(ValueError):
- bz2.open(self.filename, "rb", newline="\n")
+ self.assertRaises(ValueError,
+ self.open, self.filename, "wbt")
+ self.assertRaises(ValueError,
+ self.open, self.filename, "rb", encoding="utf-8")
+ self.assertRaises(ValueError,
+ self.open, self.filename, "rb", errors="ignore")
+ self.assertRaises(ValueError,
+ self.open, self.filename, "rb", newline="\n")
def test_encoding(self):
# Test non-default encoding.
text = self.TEXT.decode("ascii")
text_native_eol = text.replace("\n", os.linesep)
- with bz2.open(self.filename, "wt", encoding="utf-16-le") as f:
+ with self.open(self.filename, "wt", encoding="utf-16-le") as f:
f.write(text)
with open(self.filename, "rb") as f:
- file_data = bz2.decompress(f.read()).decode("utf-16-le")
+ file_data = self.decompress(f.read()).decode("utf-16-le")
self.assertEqual(file_data, text_native_eol)
- with bz2.open(self.filename, "rt", encoding="utf-16-le") as f:
+ with self.open(self.filename, "rt", encoding="utf-16-le") as f:
self.assertEqual(f.read(), text)
def test_encoding_error_handler(self):
# Test with non-default encoding error handler.
- with bz2.open(self.filename, "wb") as f:
+ with self.open(self.filename, "wb") as f:
f.write(b"foo\xffbar")
- with bz2.open(self.filename, "rt", encoding="ascii", errors="ignore") \
+ with self.open(self.filename, "rt", encoding="ascii", errors="ignore") \
as f:
self.assertEqual(f.read(), "foobar")
def test_newline(self):
# Test with explicit newline (universal newline mode disabled).
text = self.TEXT.decode("ascii")
- with bz2.open(self.filename, "wt", newline="\n") as f:
+ with self.open(self.filename, "wt", newline="\n") as f:
f.write(text)
- with bz2.open(self.filename, "rt", newline="\r") as f:
+ with self.open(self.filename, "rt", newline="\r") as f:
self.assertEqual(f.readlines(), [text])
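The OpenTest changes route everything through a self.open() helper so subclasses can swap the module under test; the underlying API is bz2.open() (Python 3.3+), which supports both binary and text modes. A minimal usage sketch (the file name is only an example):

    import bz2
    import os

    with bz2.open("example.bz2", "wt", encoding="utf-8", newline="\n") as f:
        f.write("hello\n")
    with bz2.open("example.bz2", "rt", encoding="utf-8") as f:
        print(f.read())          # 'hello\n'
    os.remove("example.bz2")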
diff --git a/Lib/test/test_cmd_line.py b/Lib/test/test_cmd_line.py
index a89d7e4..6684e51 100644
--- a/Lib/test/test_cmd_line.py
+++ b/Lib/test/test_cmd_line.py
@@ -213,6 +213,23 @@ class CmdLineTest(unittest.TestCase):
self.assertIn(path1.encode('ascii'), out)
self.assertIn(path2.encode('ascii'), out)
+ def test_empty_PYTHONPATH_issue16309(self):
+ # On Posix, it is documented that setting PATH to the
+ # empty string is equivalent to not setting PATH at all,
+ # which is an exception to the rule that in a string like
+ # "/bin::/usr/bin" the empty string in the middle gets
+ # interpreted as '.'
+ code = """if 1:
+ import sys
+ path = ":".join(sys.path)
+ path = path.encode("ascii", "backslashreplace")
+ sys.stdout.buffer.write(path)"""
+ rc1, out1, err1 = assert_python_ok('-c', code, PYTHONPATH="")
+ rc2, out2, err2 = assert_python_ok('-c', code)
+ # according to the Posix specification, the outputs should be equal
+ # for an empty and an unset PYTHONPATH
+ self.assertEqual(out1, out2)
+
def test_displayhook_unencodable(self):
for encoding in ('ascii', 'latin-1', 'utf-8'):
env = os.environ.copy()
@@ -290,7 +307,7 @@ class CmdLineTest(unittest.TestCase):
rc, out, err = assert_python_ok('-c', code)
self.assertEqual(b'', out)
self.assertRegex(err.decode('ascii', 'ignore'),
- 'Exception OSError: .* ignored')
+ 'Exception ignored in.*\nOSError: .*')
def test_closed_stdout(self):
# Issue #13444: if stdout has been explicitly closed, we should
diff --git a/Lib/test/test_cmd_line_script.py b/Lib/test/test_cmd_line_script.py
index f066204..6051e18 100644
--- a/Lib/test/test_cmd_line_script.py
+++ b/Lib/test/test_cmd_line_script.py
@@ -367,11 +367,10 @@ class CmdLineTest(unittest.TestCase):
# Mac OS X denies the creation of a file with an invalid UTF-8 name.
# Windows allows creating a file with an arbitrary bytes name, but
# Python cannot pass an undecodable bytes argument to a subprocess.
- #if (support.TESTFN_UNDECODABLE
- #and sys.platform not in ('win32', 'darwin')):
- # name = os.fsdecode(support.TESTFN_UNDECODABLE)
- #elif support.TESTFN_NONASCII:
- if support.TESTFN_NONASCII:
+ if (support.TESTFN_UNDECODABLE
+ and sys.platform not in ('win32', 'darwin')):
+ name = os.fsdecode(support.TESTFN_UNDECODABLE)
+ elif support.TESTFN_NONASCII:
name = support.TESTFN_NONASCII
else:
self.skipTest("need support.TESTFN_NONASCII")
diff --git a/Lib/test/test_codecs.py b/Lib/test/test_codecs.py
index f2a1ae3..be85a4c 100644
--- a/Lib/test/test_codecs.py
+++ b/Lib/test/test_codecs.py
@@ -2035,8 +2035,8 @@ class CodePageTest(unittest.TestCase):
def test_invalid_code_page(self):
self.assertRaises(ValueError, codecs.code_page_encode, -1, 'a')
self.assertRaises(ValueError, codecs.code_page_decode, -1, b'a')
- self.assertRaises(WindowsError, codecs.code_page_encode, 123, 'a')
- self.assertRaises(WindowsError, codecs.code_page_decode, 123, b'a')
+ self.assertRaises(OSError, codecs.code_page_encode, 123, 'a')
+ self.assertRaises(OSError, codecs.code_page_decode, 123, b'a')
def test_code_page_name(self):
self.assertRaisesRegex(UnicodeEncodeError, 'cp932',
diff --git a/Lib/test/test_complex.py b/Lib/test/test_complex.py
index 6b34ddc..2a85bf4 100644
--- a/Lib/test/test_complex.py
+++ b/Lib/test/test_complex.py
@@ -221,6 +221,8 @@ class ComplexTest(unittest.TestCase):
self.assertRaises(TypeError, complex, OS(None))
self.assertRaises(TypeError, complex, NS(None))
self.assertRaises(TypeError, complex, {})
+ self.assertRaises(TypeError, complex, NS(1.5))
+ self.assertRaises(TypeError, complex, NS(1))
self.assertAlmostEqual(complex("1+10j"), 1+10j)
self.assertAlmostEqual(complex(10), 10+0j)
diff --git a/Lib/test/test_concurrent_futures.py b/Lib/test/test_concurrent_futures.py
index 6ae450d..4ad3309 100644
--- a/Lib/test/test_concurrent_futures.py
+++ b/Lib/test/test_concurrent_futures.py
@@ -15,6 +15,7 @@ import sys
import threading
import time
import unittest
+import weakref
from concurrent import futures
from concurrent.futures._base import (
@@ -52,6 +53,11 @@ def sleep_and_print(t, msg):
sys.stdout.flush()
+class MyObject(object):
+ def my_method(self):
+ pass
+
+
class ExecutorMixin:
worker_count = 5
@@ -396,6 +402,22 @@ class ExecutorTest(unittest.TestCase):
self.executor.map(str, [2] * (self.worker_count + 1))
self.executor.shutdown()
+ @test.support.cpython_only
+ def test_no_stale_references(self):
+ # Issue #16284: check that the executors don't unnecessarily hang onto
+ # references.
+ my_object = MyObject()
+ my_object_collected = threading.Event()
+ my_object_callback = weakref.ref(
+ my_object, lambda obj: my_object_collected.set())
+ # Deliberately discarding the future.
+ self.executor.submit(my_object.my_method)
+ del my_object
+
+ collected = my_object_collected.wait(timeout=5.0)
+ self.assertTrue(collected,
+ "Stale reference not collected within timeout.")
+
class ThreadPoolExecutorTest(ThreadPoolMixin, ExecutorTest):
def test_map_submits_without_iteration(self):
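test_no_stale_references relies on a weakref callback to prove the executor drops its reference to the submitted callable once the work item has run (Issue #16284). A rough standalone illustration, assuming CPython-style prompt collection:

    import threading
    import weakref
    from concurrent.futures import ThreadPoolExecutor

    class MyObject:
        def my_method(self):
            pass

    obj = MyObject()
    collected = threading.Event()
    _ref = weakref.ref(obj, lambda _: collected.set())

    with ThreadPoolExecutor(max_workers=1) as executor:
        executor.submit(obj.my_method)   # future deliberately discarded
        del obj                          # only the executor still references obj
    print(collected.wait(timeout=5.0))   # True once the reference is released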
diff --git a/Lib/test/test_devpoll.py b/Lib/test/test_devpoll.py
index bef4e18..ec350cd 100644
--- a/Lib/test/test_devpoll.py
+++ b/Lib/test/test_devpoll.py
@@ -8,7 +8,7 @@ from test.support import TESTFN, run_unittest
try:
select.devpoll
except AttributeError:
- raise unittest.SkipTest("select.devpoll not defined -- skipping test_devpoll")
+ raise unittest.SkipTest("select.devpoll not defined")
def find_ready_matching(ready, flag):
diff --git a/Lib/test/test_doctest.py b/Lib/test/test_doctest.py
index 8f8c7c7..3e36f19 100644
--- a/Lib/test/test_doctest.py
+++ b/Lib/test/test_doctest.py
@@ -1409,8 +1409,40 @@ However, output from `report_start` is not suppressed:
2
TestResults(failed=3, attempted=5)
-For the purposes of REPORT_ONLY_FIRST_FAILURE, unexpected exceptions
-count as failures:
+The FAIL_FAST flag causes the runner to exit after the first failing example,
+so subsequent examples are not even attempted:
+
+ >>> flags = doctest.FAIL_FAST
+ >>> doctest.DocTestRunner(verbose=False, optionflags=flags).run(test)
+ ... # doctest: +ELLIPSIS
+ **********************************************************************
+ File ..., line 5, in f
+ Failed example:
+ print(2) # first failure
+ Expected:
+ 200
+ Got:
+ 2
+ TestResults(failed=1, attempted=2)
+
+Specifying both FAIL_FAST and REPORT_ONLY_FIRST_FAILURE is equivalent to
+FAIL_FAST only:
+
+ >>> flags = doctest.FAIL_FAST | doctest.REPORT_ONLY_FIRST_FAILURE
+ >>> doctest.DocTestRunner(verbose=False, optionflags=flags).run(test)
+ ... # doctest: +ELLIPSIS
+ **********************************************************************
+ File ..., line 5, in f
+ Failed example:
+ print(2) # first failure
+ Expected:
+ 200
+ Got:
+ 2
+ TestResults(failed=1, attempted=2)
+
+For the purposes of both REPORT_ONLY_FIRST_FAILURE and FAIL_FAST, unexpected
+exceptions count as failures:
>>> def f(x):
... r'''
@@ -1437,6 +1469,17 @@ count as failures:
...
ValueError: 2
TestResults(failed=3, attempted=5)
+ >>> flags = doctest.FAIL_FAST
+ >>> doctest.DocTestRunner(verbose=False, optionflags=flags).run(test)
+ ... # doctest: +ELLIPSIS
+ **********************************************************************
+ File ..., line 5, in f
+ Failed example:
+ raise ValueError(2) # first failure
+ Exception raised:
+ ...
+ ValueError: 2
+ TestResults(failed=1, attempted=2)
New option flags can also be registered, via register_optionflag(). Here
we reach into doctest's internals a bit.
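The new blocks document doctest.FAIL_FAST (added in Python 3.4), which makes the runner stop after the first failing example instead of merely suppressing later failure reports. A condensed sketch:

    import doctest

    def sample():
        """
        >>> print(2)    # fails first; FAIL_FAST stops the run here
        200
        >>> print(3)    # never attempted
        300
        """

    doctest.run_docstring_examples(sample, {}, optionflags=doctest.FAIL_FAST)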
diff --git a/Lib/test/test_enumerate.py b/Lib/test/test_enumerate.py
index 2e904cf..a2d18d0 100644
--- a/Lib/test/test_enumerate.py
+++ b/Lib/test/test_enumerate.py
@@ -1,4 +1,5 @@
import unittest
+import operator
import sys
import pickle
@@ -168,15 +169,12 @@ class TestReversed(unittest.TestCase, PickleTest):
x = range(1)
self.assertEqual(type(reversed(x)), type(iter(x)))
- @support.cpython_only
def test_len(self):
- # This is an implementation detail, not an interface requirement
- from test.test_iterlen import len
for s in ('hello', tuple('hello'), list('hello'), range(5)):
- self.assertEqual(len(reversed(s)), len(s))
+ self.assertEqual(operator.length_hint(reversed(s)), len(s))
r = reversed(s)
list(r)
- self.assertEqual(len(r), 0)
+ self.assertEqual(operator.length_hint(r), 0)
class SeqWithWeirdLen:
called = False
def __len__(self):
@@ -187,7 +185,7 @@ class TestReversed(unittest.TestCase, PickleTest):
def __getitem__(self, index):
return index
r = reversed(SeqWithWeirdLen())
- self.assertRaises(ZeroDivisionError, len, r)
+ self.assertRaises(ZeroDivisionError, operator.length_hint, r)
def test_gc(self):
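The reversed() tests now go through operator.length_hint() (new in Python 3.4) instead of relying on the CPython-only __len__ of reversed iterators. For instance:

    import operator

    r = reversed("hello")
    print(operator.length_hint(r))   # 5
    list(r)                          # exhaust the iterator
    print(operator.length_hint(r))   # 0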
diff --git a/Lib/test/test_epoll.py b/Lib/test/test_epoll.py
index 7f9547f..a3d4e2d 100644
--- a/Lib/test/test_epoll.py
+++ b/Lib/test/test_epoll.py
@@ -56,7 +56,7 @@ class TestEPoll(unittest.TestCase):
client.setblocking(False)
try:
client.connect(('127.0.0.1', self.serverSocket.getsockname()[1]))
- except socket.error as e:
+ except OSError as e:
self.assertEqual(e.args[0], errno.EINPROGRESS)
else:
raise AssertionError("Connect should have raised EINPROGRESS")
@@ -87,6 +87,13 @@ class TestEPoll(unittest.TestCase):
self.assertRaises(TypeError, select.epoll, ['foo'])
self.assertRaises(TypeError, select.epoll, {})
+ def test_context_manager(self):
+ with select.epoll(16) as ep:
+ self.assertGreater(ep.fileno(), 0)
+ self.assertFalse(ep.closed)
+ self.assertTrue(ep.closed)
+ self.assertRaises(ValueError, ep.fileno)
+
def test_add(self):
server, client = self._connected_pair()
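test_context_manager covers the context-manager support added to select.epoll in Python 3.4. A short sketch (Linux only, since select.epoll is not available elsewhere):

    import select

    with select.epoll() as ep:              # closed automatically on exit
        print(ep.fileno() > 0, ep.closed)   # True False
    print(ep.closed)                        # True
    # ep.fileno() would now raise ValueError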
diff --git a/Lib/test/test_exceptions.py b/Lib/test/test_exceptions.py
index 1ad7f97..6fa1f5b 100644
--- a/Lib/test/test_exceptions.py
+++ b/Lib/test/test_exceptions.py
@@ -8,8 +8,8 @@ import weakref
import errno
from test.support import (TESTFN, captured_output, check_impl_detail,
- cpython_only, gc_collect, run_unittest, no_tracing,
- unlink)
+ check_warnings, cpython_only, gc_collect, run_unittest,
+ no_tracing, unlink)
class NaiveException(Exception):
def __init__(self, x):
@@ -255,11 +255,11 @@ class ExceptionTests(unittest.TestCase):
'errno' : 'foo', 'strerror' : 'bar'}),
(IOError, ('foo', 'bar', 'baz', 'quux'),
{'args' : ('foo', 'bar', 'baz', 'quux')}),
- (EnvironmentError, ('errnoStr', 'strErrorStr', 'filenameStr'),
+ (OSError, ('errnoStr', 'strErrorStr', 'filenameStr'),
{'args' : ('errnoStr', 'strErrorStr'),
'strerror' : 'strErrorStr', 'errno' : 'errnoStr',
'filename' : 'filenameStr'}),
- (EnvironmentError, (1, 'strErrorStr', 'filenameStr'),
+ (OSError, (1, 'strErrorStr', 'filenameStr'),
{'args' : (1, 'strErrorStr'), 'errno' : 1,
'strerror' : 'strErrorStr', 'filename' : 'filenameStr'}),
(SyntaxError, (), {'msg' : None, 'text' : None,
@@ -409,7 +409,7 @@ class ExceptionTests(unittest.TestCase):
self.assertIsNone(e.__context__)
self.assertIsNone(e.__cause__)
- class MyException(EnvironmentError):
+ class MyException(OSError):
pass
e = MyException()
@@ -947,9 +947,10 @@ class ImportErrorTests(unittest.TestCase):
def test_non_str_argument(self):
# Issue #15778
- arg = b'abc'
- exc = ImportError(arg)
- self.assertEqual(str(arg), str(exc))
+ with check_warnings(('', BytesWarning), quiet=True):
+ arg = b'abc'
+ exc = ImportError(arg)
+ self.assertEqual(str(arg), str(exc))
def test_main():
diff --git a/Lib/test/test_fcntl.py b/Lib/test/test_fcntl.py
index 29af99c..6d9ee0b 100644
--- a/Lib/test/test_fcntl.py
+++ b/Lib/test/test_fcntl.py
@@ -1,7 +1,4 @@
"""Test program for the fcntl C module.
-
-OS/2+EMX doesn't support the file locking operations.
-
"""
import os
import struct
@@ -35,8 +32,6 @@ def get_lockdata():
fcntl.F_WRLCK, 0)
elif sys.platform in ['aix3', 'aix4', 'hp-uxB', 'unixware7']:
lockdata = struct.pack('hhlllii', fcntl.F_WRLCK, 0, 0, 0, 0, 0, 0)
- elif sys.platform in ['os2emx']:
- lockdata = None
else:
lockdata = struct.pack('hh'+start_len+'hh', fcntl.F_WRLCK, 0, 0, 0, 0, 0)
if lockdata:
@@ -62,18 +57,16 @@ class TestFcntl(unittest.TestCase):
rv = fcntl.fcntl(self.f.fileno(), fcntl.F_SETFL, os.O_NONBLOCK)
if verbose:
print('Status from fcntl with O_NONBLOCK: ', rv)
- if sys.platform not in ['os2emx']:
- rv = fcntl.fcntl(self.f.fileno(), fcntl.F_SETLKW, lockdata)
- if verbose:
- print('String from fcntl with F_SETLKW: ', repr(rv))
+ rv = fcntl.fcntl(self.f.fileno(), fcntl.F_SETLKW, lockdata)
+ if verbose:
+ print('String from fcntl with F_SETLKW: ', repr(rv))
self.f.close()
def test_fcntl_file_descriptor(self):
# again, but pass the file rather than numeric descriptor
self.f = open(TESTFN, 'wb')
rv = fcntl.fcntl(self.f, fcntl.F_SETFL, os.O_NONBLOCK)
- if sys.platform not in ['os2emx']:
- rv = fcntl.fcntl(self.f, fcntl.F_SETLKW, lockdata)
+ rv = fcntl.fcntl(self.f, fcntl.F_SETLKW, lockdata)
self.f.close()
def test_fcntl_64_bit(self):
diff --git a/Lib/test/test_fileio.py b/Lib/test/test_fileio.py
index 0847961..9615c3a 100644
--- a/Lib/test/test_fileio.py
+++ b/Lib/test/test_fileio.py
@@ -285,7 +285,7 @@ class OtherFileTests(unittest.TestCase):
if sys.platform != "win32":
try:
f = _FileIO("/dev/tty", "a")
- except EnvironmentError:
+ except OSError:
# When run in a cron job there just aren't any
# ttys, so skip the test. This also handles other
# OS'es that don't support /dev/tty.
diff --git a/Lib/test/test_format.py b/Lib/test/test_format.py
index b6e2540..e6b0d20 100644
--- a/Lib/test/test_format.py
+++ b/Lib/test/test_format.py
@@ -307,6 +307,22 @@ class FormatTest(unittest.TestCase):
finally:
locale.setlocale(locale.LC_ALL, oldloc)
+ @support.cpython_only
+ def test_optimisations(self):
+ text = "abcde" # 5 characters
+
+ self.assertIs("%s" % text, text)
+ self.assertIs("%.5s" % text, text)
+ self.assertIs("%.10s" % text, text)
+ self.assertIs("%1s" % text, text)
+ self.assertIs("%5s" % text, text)
+
+ self.assertIs("{0}".format(text), text)
+ self.assertIs("{0:s}".format(text), text)
+ self.assertIs("{0:.5s}".format(text), text)
+ self.assertIs("{0:.10s}".format(text), text)
+ self.assertIs("{0:1s}".format(text), text)
+ self.assertIs("{0:5s}".format(text), text)
def test_main():
diff --git a/Lib/test/test_fractions.py b/Lib/test/test_fractions.py
index 1fad921..3336532 100644
--- a/Lib/test/test_fractions.py
+++ b/Lib/test/test_fractions.py
@@ -146,9 +146,10 @@ class FractionTest(unittest.TestCase):
self.assertEqual((0, 1), _components(F(-0.0)))
self.assertEqual((3602879701896397, 36028797018963968),
_components(F(0.1)))
- self.assertRaises(TypeError, F, float('nan'))
- self.assertRaises(TypeError, F, float('inf'))
- self.assertRaises(TypeError, F, float('-inf'))
+ # bug 16469: error types should be consistent with float -> int
+ self.assertRaises(ValueError, F, float('nan'))
+ self.assertRaises(OverflowError, F, float('inf'))
+ self.assertRaises(OverflowError, F, float('-inf'))
def testInitFromDecimal(self):
self.assertEqual((11, 10),
@@ -157,10 +158,11 @@ class FractionTest(unittest.TestCase):
_components(F(Decimal('3.5e-2'))))
self.assertEqual((0, 1),
_components(F(Decimal('.000e20'))))
- self.assertRaises(TypeError, F, Decimal('nan'))
- self.assertRaises(TypeError, F, Decimal('snan'))
- self.assertRaises(TypeError, F, Decimal('inf'))
- self.assertRaises(TypeError, F, Decimal('-inf'))
+ # bug 16469: error types should be consistent with decimal -> int
+ self.assertRaises(ValueError, F, Decimal('nan'))
+ self.assertRaises(ValueError, F, Decimal('snan'))
+ self.assertRaises(OverflowError, F, Decimal('inf'))
+ self.assertRaises(OverflowError, F, Decimal('-inf'))
def testFromString(self):
self.assertEqual((5, 1), _components(F("5")))
@@ -248,14 +250,15 @@ class FractionTest(unittest.TestCase):
inf = 1e1000
nan = inf - inf
+ # bug 16469: error types should be consistent with float -> int
self.assertRaisesMessage(
- TypeError, "Cannot convert inf to Fraction.",
+ OverflowError, "Cannot convert inf to Fraction.",
F.from_float, inf)
self.assertRaisesMessage(
- TypeError, "Cannot convert -inf to Fraction.",
+ OverflowError, "Cannot convert -inf to Fraction.",
F.from_float, -inf)
self.assertRaisesMessage(
- TypeError, "Cannot convert nan to Fraction.",
+ ValueError, "Cannot convert nan to Fraction.",
F.from_float, nan)
def testFromDecimal(self):
@@ -268,17 +271,18 @@ class FractionTest(unittest.TestCase):
self.assertEqual(1 - F(1, 10**30),
F.from_decimal(Decimal("0." + "9" * 30)))
+ # bug 16469: error types should be consistent with decimal -> int
self.assertRaisesMessage(
- TypeError, "Cannot convert Infinity to Fraction.",
+ OverflowError, "Cannot convert Infinity to Fraction.",
F.from_decimal, Decimal("inf"))
self.assertRaisesMessage(
- TypeError, "Cannot convert -Infinity to Fraction.",
+ OverflowError, "Cannot convert -Infinity to Fraction.",
F.from_decimal, Decimal("-inf"))
self.assertRaisesMessage(
- TypeError, "Cannot convert NaN to Fraction.",
+ ValueError, "Cannot convert NaN to Fraction.",
F.from_decimal, Decimal("nan"))
self.assertRaisesMessage(
- TypeError, "Cannot convert sNaN to Fraction.",
+ ValueError, "Cannot convert sNaN to Fraction.",
F.from_decimal, Decimal("snan"))
def testLimitDenominator(self):
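Per bug 16469, Fraction now raises the same exception types as int() does for non-finite floats and decimals: ValueError for NaN and OverflowError for infinities. A quick check on Python 3.4+:

    from fractions import Fraction

    for bad in (float("nan"), float("inf"), float("-inf")):
        try:
            Fraction(bad)
        except (ValueError, OverflowError) as exc:
            print(type(exc).__name__, exc)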
diff --git a/Lib/test/test_ftplib.py b/Lib/test/test_ftplib.py
index 824b7c1..861d670 100644
--- a/Lib/test/test_ftplib.py
+++ b/Lib/test/test_ftplib.py
@@ -321,7 +321,7 @@ if ssl is not None:
elif err.args[0] == ssl.SSL_ERROR_EOF:
return self.handle_close()
raise
- except socket.error as err:
+ except OSError as err:
if err.args[0] == errno.ECONNABORTED:
return self.handle_close()
else:
@@ -335,7 +335,7 @@ if ssl is not None:
if err.args[0] in (ssl.SSL_ERROR_WANT_READ,
ssl.SSL_ERROR_WANT_WRITE):
return
- except socket.error as err:
+ except OSError as err:
# Any "socket error" corresponds to a SSL_ERROR_SYSCALL return
# from OpenSSL's SSL_shutdown(), corresponding to a
# closed socket condition. See also:
@@ -676,7 +676,7 @@ class TestFTPClass(TestCase):
return False
try:
self.client.sendcmd('noop')
- except (socket.error, EOFError):
+ except (OSError, EOFError):
return False
return True
diff --git a/Lib/test/test_functools.py b/Lib/test/test_functools.py
index c4910a7..ee1f5db 100644
--- a/Lib/test/test_functools.py
+++ b/Lib/test/test_functools.py
@@ -1,4 +1,3 @@
-import functools
import collections
import sys
import unittest
@@ -7,17 +6,31 @@ from weakref import proxy
import pickle
from random import choice
-@staticmethod
-def PythonPartial(func, *args, **keywords):
- 'Pure Python approximation of partial()'
- def newfunc(*fargs, **fkeywords):
- newkeywords = keywords.copy()
- newkeywords.update(fkeywords)
- return func(*(args + fargs), **newkeywords)
- newfunc.func = func
- newfunc.args = args
- newfunc.keywords = keywords
- return newfunc
+import functools
+
+original_functools = functools
+py_functools = support.import_fresh_module('functools', blocked=['_functools'])
+c_functools = support.import_fresh_module('functools', fresh=['_functools'])
+
+class BaseTest(unittest.TestCase):
+
+ """Base class required for testing C and Py implementations."""
+
+ def setUp(self):
+
+ # The module must be explicitly set so that the proper
+ # interaction between the c module and the python module
+ # can be controlled.
+ self.partial = self.module.partial
+ super(BaseTest, self).setUp()
+
+class BaseTestC(BaseTest):
+ module = c_functools
+
+class BaseTestPy(BaseTest):
+ module = py_functools
+
+PythonPartial = py_functools.partial
def capture(*args, **kw):
"""capture all positional and keyword arguments"""
@@ -27,31 +40,32 @@ def signature(part):
""" return the signature of a partial object """
return (part.func, part.args, part.keywords, part.__dict__)
-class TestPartial(unittest.TestCase):
+class TestPartial(object):
- thetype = functools.partial
+ partial = functools.partial
def test_basic_examples(self):
- p = self.thetype(capture, 1, 2, a=10, b=20)
+ p = self.partial(capture, 1, 2, a=10, b=20)
+ self.assertTrue(callable(p))
self.assertEqual(p(3, 4, b=30, c=40),
((1, 2, 3, 4), dict(a=10, b=30, c=40)))
- p = self.thetype(map, lambda x: x*10)
+ p = self.partial(map, lambda x: x*10)
self.assertEqual(list(p([1,2,3,4])), [10, 20, 30, 40])
def test_attributes(self):
- p = self.thetype(capture, 1, 2, a=10, b=20)
+ p = self.partial(capture, 1, 2, a=10, b=20)
# attributes should be readable
self.assertEqual(p.func, capture)
self.assertEqual(p.args, (1, 2))
self.assertEqual(p.keywords, dict(a=10, b=20))
# attributes should not be writable
- if not isinstance(self.thetype, type):
+ if not isinstance(self.partial, type):
return
self.assertRaises(AttributeError, setattr, p, 'func', map)
self.assertRaises(AttributeError, setattr, p, 'args', (1, 2))
self.assertRaises(AttributeError, setattr, p, 'keywords', dict(a=1, b=2))
- p = self.thetype(hex)
+ p = self.partial(hex)
try:
del p.__dict__
except TypeError:
@@ -60,9 +74,9 @@ class TestPartial(unittest.TestCase):
self.fail('partial object allowed __dict__ to be deleted')
def test_argument_checking(self):
- self.assertRaises(TypeError, self.thetype) # need at least a func arg
+ self.assertRaises(TypeError, self.partial) # need at least a func arg
try:
- self.thetype(2)()
+ self.partial(2)()
except TypeError:
pass
else:
@@ -73,7 +87,7 @@ class TestPartial(unittest.TestCase):
def func(a=10, b=20):
return a
d = {'a':3}
- p = self.thetype(func, a=5)
+ p = self.partial(func, a=5)
self.assertEqual(p(**d), 3)
self.assertEqual(d, {'a':3})
p(b=7)
@@ -82,20 +96,20 @@ class TestPartial(unittest.TestCase):
def test_arg_combinations(self):
# exercise special code paths for zero args in either partial
# object or the caller
- p = self.thetype(capture)
+ p = self.partial(capture)
self.assertEqual(p(), ((), {}))
self.assertEqual(p(1,2), ((1,2), {}))
- p = self.thetype(capture, 1, 2)
+ p = self.partial(capture, 1, 2)
self.assertEqual(p(), ((1,2), {}))
self.assertEqual(p(3,4), ((1,2,3,4), {}))
def test_kw_combinations(self):
# exercise special code paths for no keyword args in
# either the partial object or the caller
- p = self.thetype(capture)
+ p = self.partial(capture)
self.assertEqual(p(), ((), {}))
self.assertEqual(p(a=1), ((), {'a':1}))
- p = self.thetype(capture, a=1)
+ p = self.partial(capture, a=1)
self.assertEqual(p(), ((), {'a':1}))
self.assertEqual(p(b=2), ((), {'a':1, 'b':2}))
# keyword args in the call override those in the partial object
@@ -104,7 +118,7 @@ class TestPartial(unittest.TestCase):
def test_positional(self):
# make sure positional arguments are captured correctly
for args in [(), (0,), (0,1), (0,1,2), (0,1,2,3)]:
- p = self.thetype(capture, *args)
+ p = self.partial(capture, *args)
expected = args + ('x',)
got, empty = p('x')
self.assertTrue(expected == got and empty == {})
@@ -112,14 +126,14 @@ class TestPartial(unittest.TestCase):
def test_keyword(self):
# make sure keyword arguments are captured correctly
for a in ['a', 0, None, 3.5]:
- p = self.thetype(capture, a=a)
+ p = self.partial(capture, a=a)
expected = {'a':a,'x':None}
empty, got = p(x=None)
self.assertTrue(expected == got and empty == ())
def test_no_side_effects(self):
# make sure there are no side effects that affect subsequent calls
- p = self.thetype(capture, 0, a=1)
+ p = self.partial(capture, 0, a=1)
args1, kw1 = p(1, b=2)
self.assertTrue(args1 == (0,1) and kw1 == {'a':1,'b':2})
args2, kw2 = p()
@@ -128,13 +142,13 @@ class TestPartial(unittest.TestCase):
def test_error_propagation(self):
def f(x, y):
x / y
- self.assertRaises(ZeroDivisionError, self.thetype(f, 1, 0))
- self.assertRaises(ZeroDivisionError, self.thetype(f, 1), 0)
- self.assertRaises(ZeroDivisionError, self.thetype(f), 1, 0)
- self.assertRaises(ZeroDivisionError, self.thetype(f, y=0), 1)
+ self.assertRaises(ZeroDivisionError, self.partial(f, 1, 0))
+ self.assertRaises(ZeroDivisionError, self.partial(f, 1), 0)
+ self.assertRaises(ZeroDivisionError, self.partial(f), 1, 0)
+ self.assertRaises(ZeroDivisionError, self.partial(f, y=0), 1)
def test_weakref(self):
- f = self.thetype(int, base=16)
+ f = self.partial(int, base=16)
p = proxy(f)
self.assertEqual(f.func, p.func)
f = None
@@ -142,9 +156,9 @@ class TestPartial(unittest.TestCase):
def test_with_bound_and_unbound_methods(self):
data = list(map(str, range(10)))
- join = self.thetype(str.join, '')
+ join = self.partial(str.join, '')
self.assertEqual(join(data), '0123456789')
- join = self.thetype(''.join)
+ join = self.partial(''.join)
self.assertEqual(join(data), '0123456789')
def test_repr(self):
@@ -152,49 +166,57 @@ class TestPartial(unittest.TestCase):
args_repr = ', '.join(repr(a) for a in args)
kwargs = {'a': object(), 'b': object()}
kwargs_repr = ', '.join("%s=%r" % (k, v) for k, v in kwargs.items())
- if self.thetype is functools.partial:
+ if self.partial is functools.partial:
name = 'functools.partial'
else:
- name = self.thetype.__name__
+ name = self.partial.__name__
- f = self.thetype(capture)
+ f = self.partial(capture)
self.assertEqual('{}({!r})'.format(name, capture),
repr(f))
- f = self.thetype(capture, *args)
+ f = self.partial(capture, *args)
self.assertEqual('{}({!r}, {})'.format(name, capture, args_repr),
repr(f))
- f = self.thetype(capture, **kwargs)
+ f = self.partial(capture, **kwargs)
self.assertEqual('{}({!r}, {})'.format(name, capture, kwargs_repr),
repr(f))
- f = self.thetype(capture, *args, **kwargs)
+ f = self.partial(capture, *args, **kwargs)
self.assertEqual('{}({!r}, {}, {})'.format(name, capture, args_repr, kwargs_repr),
repr(f))
def test_pickle(self):
- f = self.thetype(signature, 'asdf', bar=True)
+ f = self.partial(signature, 'asdf', bar=True)
f.add_something_to__dict__ = True
f_copy = pickle.loads(pickle.dumps(f))
self.assertEqual(signature(f), signature(f_copy))
-class PartialSubclass(functools.partial):
+class TestPartialC(BaseTestC, TestPartial):
pass
-class TestPartialSubclass(TestPartial):
+class TestPartialPy(BaseTestPy, TestPartial):
- thetype = PartialSubclass
+ def test_pickle(self):
+ raise unittest.SkipTest("Python implementation of partial isn't picklable")
+
+ def test_repr(self):
+ raise unittest.SkipTest("Python implementation of partial uses own repr")
+
+class TestPartialCSubclass(BaseTestC, TestPartial):
+
+ class PartialSubclass(c_functools.partial):
+ pass
-class TestPythonPartial(TestPartial):
+ partial = staticmethod(PartialSubclass)
- thetype = PythonPartial
+class TestPartialPySubclass(TestPartialPy):
- # the python version hasn't a nice repr
- def test_repr(self): pass
+ class PartialSubclass(c_functools.partial):
+ pass
- # the python version isn't picklable
- def test_pickle(self): pass
+ partial = staticmethod(PartialSubclass)
class TestUpdateWrapper(unittest.TestCase):
@@ -320,7 +342,7 @@ class TestWraps(TestUpdateWrapper):
self.assertEqual(wrapper.__qualname__, f.__qualname__)
self.assertEqual(wrapper.attr, 'This is also a test')
- @unittest.skipIf(not sys.flags.optimize <= 1,
+ @unittest.skipIf(sys.flags.optimize >= 2,
"Docstrings are omitted with -O2 and above")
def test_default_update_doc(self):
wrapper, _ = self._default_update()
@@ -441,24 +463,28 @@ class TestReduce(unittest.TestCase):
d = {"one": 1, "two": 2, "three": 3}
self.assertEqual(self.func(add, d), "".join(d.keys()))
-class TestCmpToKey(unittest.TestCase):
+class TestCmpToKey(object):
def test_cmp_to_key(self):
def cmp1(x, y):
return (x > y) - (x < y)
- key = functools.cmp_to_key(cmp1)
+ key = self.cmp_to_key(cmp1)
self.assertEqual(key(3), key(3))
self.assertGreater(key(3), key(1))
+ self.assertGreaterEqual(key(3), key(3))
+
def cmp2(x, y):
return int(x) - int(y)
- key = functools.cmp_to_key(cmp2)
+ key = self.cmp_to_key(cmp2)
self.assertEqual(key(4.0), key('4'))
self.assertLess(key(2), key('35'))
+ self.assertLessEqual(key(2), key('35'))
+ self.assertNotEqual(key(2), key('35'))
def test_cmp_to_key_arguments(self):
def cmp1(x, y):
return (x > y) - (x < y)
- key = functools.cmp_to_key(mycmp=cmp1)
+ key = self.cmp_to_key(mycmp=cmp1)
self.assertEqual(key(obj=3), key(obj=3))
self.assertGreater(key(obj=3), key(obj=1))
with self.assertRaises((TypeError, AttributeError)):
@@ -466,10 +492,10 @@ class TestCmpToKey(unittest.TestCase):
with self.assertRaises((TypeError, AttributeError)):
1 < key(3) # lhs is not a K object
with self.assertRaises(TypeError):
- key = functools.cmp_to_key() # too few args
+ key = self.cmp_to_key() # too few args
with self.assertRaises(TypeError):
- key = functools.cmp_to_key(cmp1, None) # too many args
- key = functools.cmp_to_key(cmp1)
+ key = self.module.cmp_to_key(cmp1, None) # too many args
+ key = self.cmp_to_key(cmp1)
with self.assertRaises(TypeError):
key() # too few args
with self.assertRaises(TypeError):
@@ -478,7 +504,7 @@ class TestCmpToKey(unittest.TestCase):
def test_bad_cmp(self):
def cmp1(x, y):
raise ZeroDivisionError
- key = functools.cmp_to_key(cmp1)
+ key = self.cmp_to_key(cmp1)
with self.assertRaises(ZeroDivisionError):
key(3) > key(1)
@@ -493,13 +519,13 @@ class TestCmpToKey(unittest.TestCase):
def test_obj_field(self):
def cmp1(x, y):
return (x > y) - (x < y)
- key = functools.cmp_to_key(mycmp=cmp1)
+ key = self.cmp_to_key(mycmp=cmp1)
self.assertEqual(key(50).obj, 50)
def test_sort_int(self):
def mycmp(x, y):
return y - x
- self.assertEqual(sorted(range(5), key=functools.cmp_to_key(mycmp)),
+ self.assertEqual(sorted(range(5), key=self.cmp_to_key(mycmp)),
[4, 3, 2, 1, 0])
def test_sort_int_str(self):
@@ -507,18 +533,24 @@ class TestCmpToKey(unittest.TestCase):
x, y = int(x), int(y)
return (x > y) - (x < y)
values = [5, '3', 7, 2, '0', '1', 4, '10', 1]
- values = sorted(values, key=functools.cmp_to_key(mycmp))
+ values = sorted(values, key=self.cmp_to_key(mycmp))
self.assertEqual([int(value) for value in values],
[0, 1, 1, 2, 3, 4, 5, 7, 10])
def test_hash(self):
def mycmp(x, y):
return y - x
- key = functools.cmp_to_key(mycmp)
+ key = self.cmp_to_key(mycmp)
k = key(10)
self.assertRaises(TypeError, hash, k)
self.assertNotIsInstance(k, collections.Hashable)
+class TestCmpToKeyC(BaseTestC, TestCmpToKey):
+ cmp_to_key = c_functools.cmp_to_key
+
+class TestCmpToKeyPy(BaseTestPy, TestCmpToKey):
+ cmp_to_key = staticmethod(py_functools.cmp_to_key)
+
class TestTotalOrdering(unittest.TestCase):
def test_total_ordering_lt(self):
@@ -623,7 +655,7 @@ class TestLRU(unittest.TestCase):
def test_lru(self):
def orig(x, y):
- return 3*x+y
+ return 3 * x + y
f = functools.lru_cache(maxsize=20)(orig)
hits, misses, maxsize, currsize = f.cache_info()
self.assertEqual(maxsize, 20)
@@ -728,7 +760,7 @@ class TestLRU(unittest.TestCase):
# Verify that user_function exceptions get passed through without
# creating a hard-to-read chained exception.
# http://bugs.python.org/issue13177
- for maxsize in (None, 100):
+ for maxsize in (None, 128):
@functools.lru_cache(maxsize)
def func(i):
return 'abc'[i]
@@ -741,7 +773,7 @@ class TestLRU(unittest.TestCase):
func(15)
def test_lru_with_types(self):
- for maxsize in (None, 100):
+ for maxsize in (None, 128):
@functools.lru_cache(maxsize=maxsize, typed=True)
def square(x):
return x * x
@@ -756,14 +788,46 @@ class TestLRU(unittest.TestCase):
self.assertEqual(square.cache_info().hits, 4)
self.assertEqual(square.cache_info().misses, 4)
+ def test_lru_with_keyword_args(self):
+ @functools.lru_cache()
+ def fib(n):
+ if n < 2:
+ return n
+ return fib(n=n-1) + fib(n=n-2)
+ self.assertEqual(
+ [fib(n=number) for number in range(16)],
+ [0, 1, 1, 2, 3, 5, 8, 13, 21, 34, 55, 89, 144, 233, 377, 610]
+ )
+ self.assertEqual(fib.cache_info(),
+ functools._CacheInfo(hits=28, misses=16, maxsize=128, currsize=16))
+ fib.cache_clear()
+ self.assertEqual(fib.cache_info(),
+ functools._CacheInfo(hits=0, misses=0, maxsize=128, currsize=0))
+
+ def test_lru_with_keyword_args_maxsize_none(self):
+ @functools.lru_cache(maxsize=None)
+ def fib(n):
+ if n < 2:
+ return n
+ return fib(n=n-1) + fib(n=n-2)
+ self.assertEqual([fib(n=number) for number in range(16)],
+ [0, 1, 1, 2, 3, 5, 8, 13, 21, 34, 55, 89, 144, 233, 377, 610])
+ self.assertEqual(fib.cache_info(),
+ functools._CacheInfo(hits=28, misses=16, maxsize=None, currsize=16))
+ fib.cache_clear()
+ self.assertEqual(fib.cache_info(),
+ functools._CacheInfo(hits=0, misses=0, maxsize=None, currsize=0))
+
def test_main(verbose=None):
test_classes = (
- TestPartial,
- TestPartialSubclass,
- TestPythonPartial,
+ TestPartialC,
+ TestPartialPy,
+ TestPartialCSubclass,
+ TestPartialPySubclass,
TestUpdateWrapper,
TestTotalOrdering,
- TestCmpToKey,
+ TestCmpToKeyC,
+ TestCmpToKeyPy,
TestWraps,
TestReduce,
TestLRU,
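The new keyword-argument tests pin down lru_cache bookkeeping; the CacheInfo numbers they assert can be reproduced directly (a positional variant gives the same counts):

    from functools import lru_cache

    @lru_cache(maxsize=None)
    def fib(n):
        return n if n < 2 else fib(n - 1) + fib(n - 2)

    [fib(n) for n in range(16)]
    print(fib.cache_info())   # CacheInfo(hits=28, misses=16, maxsize=None, currsize=16)
    fib.cache_clear()
    print(fib.cache_info())   # CacheInfo(hits=0, misses=0, maxsize=None, currsize=0)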
diff --git a/Lib/test/test_gc.py b/Lib/test/test_gc.py
index c59b72e..85dbc97 100644
--- a/Lib/test/test_gc.py
+++ b/Lib/test/test_gc.py
@@ -610,6 +610,32 @@ class GCTests(unittest.TestCase):
stderr = run_command(code % "gc.DEBUG_SAVEALL")
self.assertNotIn(b"uncollectable objects at shutdown", stderr)
+ def test_get_stats(self):
+ stats = gc.get_stats()
+ self.assertEqual(len(stats), 3)
+ for st in stats:
+ self.assertIsInstance(st, dict)
+ self.assertEqual(set(st),
+ {"collected", "collections", "uncollectable"})
+ self.assertGreaterEqual(st["collected"], 0)
+ self.assertGreaterEqual(st["collections"], 0)
+ self.assertGreaterEqual(st["uncollectable"], 0)
+ # Check that collection counts are incremented correctly
+ if gc.isenabled():
+ self.addCleanup(gc.enable)
+ gc.disable()
+ old = gc.get_stats()
+ gc.collect(0)
+ new = gc.get_stats()
+ self.assertEqual(new[0]["collections"], old[0]["collections"] + 1)
+ self.assertEqual(new[1]["collections"], old[1]["collections"])
+ self.assertEqual(new[2]["collections"], old[2]["collections"])
+ gc.collect(2)
+ new = gc.get_stats()
+ self.assertEqual(new[0]["collections"], old[0]["collections"] + 1)
+ self.assertEqual(new[1]["collections"], old[1]["collections"])
+ self.assertEqual(new[2]["collections"], old[2]["collections"] + 1)
+
class GCCallbackTests(unittest.TestCase):
def setUp(self):
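gc.get_stats() is new in Python 3.4 and returns one dict per collection generation, which is what the added test iterates over:

    import gc

    for generation, stats in enumerate(gc.get_stats()):
        print(generation, stats["collections"], stats["collected"],
              stats["uncollectable"])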
diff --git a/Lib/test/test_generators.py b/Lib/test/test_generators.py
index 06f67c2..f5c1a7d 100644
--- a/Lib/test/test_generators.py
+++ b/Lib/test/test_generators.py
@@ -1728,9 +1728,7 @@ Our ill-behaved code should be invoked during GC:
>>> g = f()
>>> next(g)
>>> del g
->>> sys.stderr.getvalue().startswith(
-... "Exception RuntimeError: 'generator ignored GeneratorExit' in "
-... )
+>>> "RuntimeError: generator ignored GeneratorExit" in sys.stderr.getvalue()
True
>>> sys.stderr = old
@@ -1840,22 +1838,23 @@ to test.
... sys.stderr = io.StringIO()
... class Leaker:
... def __del__(self):
-... raise RuntimeError
+... def invoke(message):
+... raise RuntimeError(message)
+... invoke("test")
...
... l = Leaker()
... del l
... err = sys.stderr.getvalue().strip()
-... err.startswith(
-... "Exception RuntimeError: RuntimeError() in <"
-... )
-... err.endswith("> ignored")
-... len(err.splitlines())
+... "Exception ignored in" in err
+... "RuntimeError: test" in err
+... "Traceback" in err
+... "in invoke" in err
... finally:
... sys.stderr = old
True
True
-1
-
+True
+True
These refleak tests should perhaps be in a testfile of their own,
diff --git a/Lib/test/test_genericpath.py b/Lib/test/test_genericpath.py
index 3eadd58..084dbb2 100644
--- a/Lib/test/test_genericpath.py
+++ b/Lib/test/test_genericpath.py
@@ -308,19 +308,19 @@ class CommonTest(GenericTest):
for path in ('', 'fuu', 'f\xf9\xf9', '/fuu', 'U:\\'):
self.assertIsInstance(abspath(path), str)
- @unittest.skipIf(sys.platform == 'darwin',
- "Mac OS X denies the creation of a directory with an invalid utf8 name")
def test_nonascii_abspath(self):
- name = b'\xe7w\xf0'
- if sys.platform == 'win32':
- try:
- os.fsdecode(name)
- except UnicodeDecodeError:
- self.skipTest("the filename %a is not decodable "
- "from the ANSI code page %s"
- % (name, sys.getfilesystemencoding()))
-
- # Test non-ASCII, non-UTF8 bytes in the path.
+ if (support.TESTFN_UNDECODABLE
+ # Mac OS X denies the creation of a directory with an invalid
+ # UTF-8 name. Windows allows creating a directory with an
+ # arbitrary bytes name, but fails to enter this directory
+ # (when the bytes name is used).
+ and sys.platform not in ('win32', 'darwin')):
+ name = support.TESTFN_UNDECODABLE
+ elif support.TESTFN_NONASCII:
+ name = support.TESTFN_NONASCII
+ else:
+ self.skipTest("need support.TESTFN_NONASCII")
+
with warnings.catch_warnings():
warnings.simplefilter("ignore", DeprecationWarning)
with support.temp_cwd(name):
diff --git a/Lib/test/test_hashlib.py b/Lib/test/test_hashlib.py
index 32f85e9..54201a1 100644
--- a/Lib/test/test_hashlib.py
+++ b/Lib/test/test_hashlib.py
@@ -36,7 +36,10 @@ def hexstr(s):
class HashLibTestCase(unittest.TestCase):
supported_hash_names = ( 'md5', 'MD5', 'sha1', 'SHA1',
'sha224', 'SHA224', 'sha256', 'SHA256',
- 'sha384', 'SHA384', 'sha512', 'SHA512' )
+ 'sha384', 'SHA384', 'sha512', 'SHA512',
+ 'sha3_224', 'sha3_256', 'sha3_384',
+ 'sha3_512', 'SHA3_224', 'SHA3_256',
+ 'SHA3_384', 'SHA3_512' )
# Issue #14693: fallback modules are always compiled under POSIX
_warn_on_extension_import = os.name == 'posix' or COMPILED_WITH_PYDEBUG
@@ -93,6 +96,12 @@ class HashLibTestCase(unittest.TestCase):
if _sha512:
self.constructors_to_test['sha384'].add(_sha512.sha384)
self.constructors_to_test['sha512'].add(_sha512.sha512)
+ _sha3 = self._conditional_import_module('_sha3')
+ if _sha3:
+ self.constructors_to_test['sha3_224'].add(_sha3.sha3_224)
+ self.constructors_to_test['sha3_256'].add(_sha3.sha3_256)
+ self.constructors_to_test['sha3_384'].add(_sha3.sha3_384)
+ self.constructors_to_test['sha3_512'].add(_sha3.sha3_512)
super(HashLibTestCase, self).__init__(*args, **kwargs)
@@ -158,6 +167,7 @@ class HashLibTestCase(unittest.TestCase):
self.assertEqual(m1.digest(), m2.digest())
def check(self, name, data, digest):
+ digest = digest.lower()
constructors = self.constructors_to_test[name]
# 2 is for hashlib.name(...) and hashlib.new(name, ...)
self.assertGreaterEqual(len(constructors), 2)
@@ -183,6 +193,10 @@ class HashLibTestCase(unittest.TestCase):
self.check_no_unicode('sha256')
self.check_no_unicode('sha384')
self.check_no_unicode('sha512')
+ self.check_no_unicode('sha3_224')
+ self.check_no_unicode('sha3_256')
+ self.check_no_unicode('sha3_384')
+ self.check_no_unicode('sha3_512')
def test_case_md5_0(self):
self.check('md5', b'', 'd41d8cd98f00b204e9800998ecf8427e')
@@ -318,11 +332,122 @@ class HashLibTestCase(unittest.TestCase):
"e718483d0ce769644e2e42c7bc15b4638e1f98b13b2044285632a803afa973eb"+
"de0ff244877ea60a4cb0432ce577c31beb009c5c2c49aa2e4eadb217ad8cc09b")
+ # SHA-3 family
+ def test_case_sha3_224_0(self):
+ self.check('sha3_224', b"",
+ "F71837502BA8E10837BDD8D365ADB85591895602FC552B48B7390ABD")
+
+ def test_case_sha3_224_1(self):
+ self.check('sha3_224', bytes.fromhex("CC"),
+ "A9CAB59EB40A10B246290F2D6086E32E3689FAF1D26B470C899F2802")
+
+ def test_case_sha3_224_2(self):
+ self.check('sha3_224', bytes.fromhex("41FB"),
+ "615BA367AFDC35AAC397BC7EB5D58D106A734B24986D5D978FEFD62C")
+
+ def test_case_sha3_224_3(self):
+ self.check('sha3_224', bytes.fromhex(
+ "433C5303131624C0021D868A30825475E8D0BD3052A022180398F4CA4423B9"+
+ "8214B6BEAAC21C8807A2C33F8C93BD42B092CC1B06CEDF3224D5ED1EC29784"+
+ "444F22E08A55AA58542B524B02CD3D5D5F6907AFE71C5D7462224A3F9D9E53"+
+ "E7E0846DCBB4CE"),
+ "62B10F1B6236EBC2DA72957742A8D4E48E213B5F8934604BFD4D2C3A")
+
+ @bigmemtest(size=_4G + 5, memuse=1)
+ def test_case_sha3_224_huge(self, size):
+ if size == _4G + 5:
+ try:
+ self.check('sha3_224', b'A'*size,
+ '58ef60057c9dddb6a87477e9ace5a26f0d9db01881cf9b10a9f8c224')
+ except OverflowError:
+ pass # 32-bit arch
+
+
+ def test_case_sha3_256_0(self):
+ self.check('sha3_256', b"",
+ "C5D2460186F7233C927E7DB2DCC703C0E500B653CA82273B7BFAD8045D85A470")
+
+ def test_case_sha3_256_1(self):
+ self.check('sha3_256', bytes.fromhex("CC"),
+ "EEAD6DBFC7340A56CAEDC044696A168870549A6A7F6F56961E84A54BD9970B8A")
+
+ def test_case_sha3_256_2(self):
+ self.check('sha3_256', bytes.fromhex("41FB"),
+ "A8EACEDA4D47B3281A795AD9E1EA2122B407BAF9AABCB9E18B5717B7873537D2")
+
+ def test_case_sha3_256_3(self):
+ self.check('sha3_256', bytes.fromhex(
+ "433C5303131624C0021D868A30825475E8D0BD3052A022180398F4CA4423B9"+
+ "8214B6BEAAC21C8807A2C33F8C93BD42B092CC1B06CEDF3224D5ED1EC29784"+
+ "444F22E08A55AA58542B524B02CD3D5D5F6907AFE71C5D7462224A3F9D9E53"+
+ "E7E0846DCBB4CE"),
+ "CE87A5173BFFD92399221658F801D45C294D9006EE9F3F9D419C8D427748DC41")
+
+
+ def test_case_sha3_384_0(self):
+ self.check('sha3_384', b"",
+ "2C23146A63A29ACF99E73B88F8C24EAA7DC60AA771780CCC006AFBFA8FE2479B"+
+ "2DD2B21362337441AC12B515911957FF")
+
+ def test_case_sha3_384_1(self):
+ self.check('sha3_384', bytes.fromhex("CC"),
+ "1B84E62A46E5A201861754AF5DC95C4A1A69CAF4A796AE405680161E29572641"+
+ "F5FA1E8641D7958336EE7B11C58F73E9")
+
+ def test_case_sha3_384_2(self):
+ self.check('sha3_384', bytes.fromhex("41FB"),
+ "495CCE2714CD72C8C53C3363D22C58B55960FE26BE0BF3BBC7A3316DD563AD1D"+
+ "B8410E75EEFEA655E39D4670EC0B1792")
+
+ def test_case_sha3_384_3(self):
+ self.check('sha3_384', bytes.fromhex(
+ "433C5303131624C0021D868A30825475E8D0BD3052A022180398F4CA4423B9"+
+ "8214B6BEAAC21C8807A2C33F8C93BD42B092CC1B06CEDF3224D5ED1EC29784"+
+ "444F22E08A55AA58542B524B02CD3D5D5F6907AFE71C5D7462224A3F9D9E53"+
+ "E7E0846DCBB4CE"),
+ "135114508DD63E279E709C26F7817C0482766CDE49132E3EDF2EEDD8996F4E35"+
+ "96D184100B384868249F1D8B8FDAA2C9")
+
+
+ def test_case_sha3_512_0(self):
+ self.check('sha3_512', b"",
+ "0EAB42DE4C3CEB9235FC91ACFFE746B29C29A8C366B7C60E4E67C466F36A4304"+
+ "C00FA9CAF9D87976BA469BCBE06713B435F091EF2769FB160CDAB33D3670680E")
+
+ def test_case_sha3_512_1(self):
+ self.check('sha3_512', bytes.fromhex("CC"),
+ "8630C13CBD066EA74BBE7FE468FEC1DEE10EDC1254FB4C1B7C5FD69B646E4416"+
+ "0B8CE01D05A0908CA790DFB080F4B513BC3B6225ECE7A810371441A5AC666EB9")
+
+ def test_case_sha3_512_2(self):
+ self.check('sha3_512', bytes.fromhex("41FB"),
+ "551DA6236F8B96FCE9F97F1190E901324F0B45E06DBBB5CDB8355D6ED1DC34B3"+
+ "F0EAE7DCB68622FF232FA3CECE0D4616CDEB3931F93803662A28DF1CD535B731")
+
+ def test_case_sha3_512_3(self):
+ self.check('sha3_512', bytes.fromhex(
+ "433C5303131624C0021D868A30825475E8D0BD3052A022180398F4CA4423B9"+
+ "8214B6BEAAC21C8807A2C33F8C93BD42B092CC1B06CEDF3224D5ED1EC29784"+
+ "444F22E08A55AA58542B524B02CD3D5D5F6907AFE71C5D7462224A3F9D9E53"+
+ "E7E0846DCBB4CE"),
+ "527D28E341E6B14F4684ADB4B824C496C6482E51149565D3D17226828884306B"+
+ "51D6148A72622C2B75F5D3510B799D8BDC03EAEDE453676A6EC8FE03A1AD0EAB")
+
+
def test_gil(self):
# Check things work fine with an input larger than the size required
# for multithreaded operation (which is hardwired to 2048).
gil_minsize = 2048
+ for name in self.supported_hash_names:
+ m = hashlib.new(name)
+ m.update(b'1')
+ m.update(b'#' * gil_minsize)
+ m.update(b'1')
+
+ m = hashlib.new(name, b'x' * gil_minsize)
+ m.update(b'1')
+
m = hashlib.md5()
m.update(b'1')
m.update(b'#' * gil_minsize)
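For reference, the hard-coded digests in the sha3 cases above appear to follow the draft Keccak submission that the early sha3 support was built on; the finalized FIPS 202 SHA-3 adopted in later releases produces different values, so the sketch below avoids asserting any particular sha3 digest. It only illustrates the incremental-update pattern the GIL test exercises; GIL_MINSIZE and digest_in_chunks are illustrative names, and the 2048-byte figure is taken from the test comment, not from a documented hashlib constant.

import hashlib

# 2048 bytes is the threshold quoted in the test comment above; it is an
# implementation detail, not a documented hashlib constant.
GIL_MINSIZE = 2048

def digest_in_chunks(name, chunks):
    # Feeding data incrementally yields the same digest as hashing the
    # concatenation in a single call, which is what the GIL test relies on.
    h = hashlib.new(name)
    for chunk in chunks:
        h.update(chunk)
    return h.hexdigest()

parts = [b'1', b'#' * GIL_MINSIZE, b'1']
assert digest_in_chunks('md5', parts) == hashlib.md5(b''.join(parts)).hexdigest()
if 'sha3_256' in hashlib.algorithms_available:  # sha3 support depends on the build
    print(digest_in_chunks('sha3_256', parts))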
diff --git a/Lib/test/test_httplib.py b/Lib/test/test_httplib.py
index 5ebcfcb..9c53e9c 100644
--- a/Lib/test/test_httplib.py
+++ b/Lib/test/test_httplib.py
@@ -45,7 +45,7 @@ class EPipeSocket(FakeSocket):
def sendall(self, data):
if self.pipe_trigger in data:
- raise socket.error(errno.EPIPE, "gotcha")
+ raise OSError(errno.EPIPE, "gotcha")
self.data += data
def close(self):
@@ -515,7 +515,7 @@ class BasicTest(TestCase):
b"Content-Length")
conn = client.HTTPConnection("example.com")
conn.sock = sock
- self.assertRaises(socket.error,
+ self.assertRaises(OSError,
lambda: conn.request("PUT", "/url", "body"))
resp = conn.getresponse()
self.assertEqual(401, resp.status)
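The socket.error to OSError substitutions here (and the EnvironmentError and IOError ones later in this patch) lean on PEP 3151, which merged those names into OSError in Python 3.3. A minimal sketch of that equivalence, assuming a 3.3+ interpreter:

import errno
import socket

# PEP 3151 (Python 3.3) folded the legacy exception names into OSError;
# socket.error, IOError and EnvironmentError are now plain aliases, which is
# why the tests can raise and catch OSError directly.
assert socket.error is OSError
assert IOError is OSError
assert EnvironmentError is OSError

try:
    raise OSError(errno.EPIPE, "gotcha")
except socket.error as exc:  # same class, either spelling works
    assert exc.errno == errno.EPIPE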
diff --git a/Lib/test/test_httpservers.py b/Lib/test/test_httpservers.py
index 75133c9..c5db620 100644
--- a/Lib/test/test_httpservers.py
+++ b/Lib/test/test_httpservers.py
@@ -92,6 +92,9 @@ class BaseHTTPServerTestCase(BaseTestCase):
def do_KEYERROR(self):
self.send_error(999)
+ def do_NOTFOUND(self):
+ self.send_error(404)
+
def do_CUSTOM(self):
self.send_response(999)
self.send_header('Content-Type', 'text/html')
@@ -211,6 +214,14 @@ class BaseHTTPServerTestCase(BaseTestCase):
self.assertEqual(res.getheader('X-Special'), 'Dängerous Mind')
self.assertEqual(res.read(), 'Ärger mit Unicode'.encode('utf-8'))
+ def test_error_content_length(self):
+ # Issue #16088: standard error responses should have a content-length
+ self.con.request('NOTFOUND', '/')
+ res = self.con.getresponse()
+ self.assertEqual(res.status, 404)
+ data = res.read()
+ self.assertEqual(int(res.getheader('Content-Length')), len(data))
+
class SimpleHTTPServerTestCase(BaseTestCase):
class request_handler(NoLogRequestHandler, SimpleHTTPRequestHandler):
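For context, test_error_content_length checks that a standard error response produced by send_error() carries a Content-Length header matching its body. Below is a self-contained sketch of the same check outside the test harness, on a release that includes this fix; Handler, the ephemeral port, and the single-request thread are illustrative choices, not part of the patch.

import threading
import http.client
from http.server import BaseHTTPRequestHandler, HTTPServer

class Handler(BaseHTTPRequestHandler):  # illustrative handler, not from the patch
    def do_GET(self):
        self.send_error(404)            # standard error response

    def log_message(self, *args):
        pass                            # keep the sketch quiet

server = HTTPServer(('127.0.0.1', 0), Handler)  # bind to an ephemeral port
threading.Thread(target=server.handle_request, daemon=True).start()

conn = http.client.HTTPConnection('127.0.0.1', server.server_port)
conn.request('GET', '/')
resp = conn.getresponse()
body = resp.read()
assert resp.status == 404
assert int(resp.getheader('Content-Length')) == len(body)
conn.close()
server.server_close()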
diff --git a/Lib/test/test_imp.py b/Lib/test/test_imp.py
index 65c9f25..52c4399 100644
--- a/Lib/test/test_imp.py
+++ b/Lib/test/test_imp.py
@@ -217,6 +217,20 @@ class ImportTests(unittest.TestCase):
mod = imp.load_module(example, *x)
self.assertEqual(mod.__name__, example)
+ def test_issue16421_multiple_modules_in_one_dll(self):
+ # Issue 16421: loading several modules from the same compiled file fails
+ m = '_testimportmultiple'
+ fileobj, pathname, description = imp.find_module(m)
+ fileobj.close()
+ mod0 = imp.load_dynamic(m, pathname)
+ mod1 = imp.load_dynamic('_testimportmultiple_foo', pathname)
+ mod2 = imp.load_dynamic('_testimportmultiple_bar', pathname)
+ self.assertEqual(mod0.__name__, m)
+ self.assertEqual(mod1.__name__, '_testimportmultiple_foo')
+ self.assertEqual(mod2.__name__, '_testimportmultiple_bar')
+ with self.assertRaises(ImportError):
+ imp.load_dynamic('nonexistent', pathname)
+
def test_load_dynamic_ImportError_path(self):
# Issue #1559549 added `name` and `path` attributes to ImportError
# in order to provide better detail. Issue #10854 implemented those
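The new test above loads several extension modules from one shared library via imp.load_dynamic(). A guarded sketch of that call pattern follows; note that imp is deprecated in favour of importlib in later releases, and _testimportmultiple is a CPython test-only extension that may not be present, so nothing here is assumed to succeed.

import imp  # deprecated in later releases; shown only to mirror the test above

try:
    fileobj, pathname, description = imp.find_module('_testimportmultiple')
except ImportError:
    pathname = None  # test-only extension not available on this build
else:
    if fileobj is not None:
        fileobj.close()

if pathname is not None:
    main = imp.load_dynamic('_testimportmultiple', pathname)
    foo = imp.load_dynamic('_testimportmultiple_foo', pathname)  # second module, same file
    print(main.__name__, foo.__name__)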
diff --git a/Lib/test/test_importlib/import_/test_fromlist.py b/Lib/test/test_importlib/import_/test_fromlist.py
index 27b2270..c16c337 100644
--- a/Lib/test/test_importlib/import_/test_fromlist.py
+++ b/Lib/test/test_importlib/import_/test_fromlist.py
@@ -80,7 +80,7 @@ class HandlingFromlist(unittest.TestCase):
with util.import_state(meta_path=[importer]):
with self.assertRaises(ImportError) as exc:
import_util.import_('pkg', fromlist=['mod'])
- self.assertEquals('i_do_not_exist', exc.exception.name)
+ self.assertEqual('i_do_not_exist', exc.exception.name)
def test_empty_string(self):
with util.mock_modules('pkg.__init__', 'pkg.mod') as importer:
diff --git a/Lib/test/test_importlib/source/test_abc_loader.py b/Lib/test/test_importlib/source/test_abc_loader.py
index 0d912b6..cce7b07 100644
--- a/Lib/test/test_importlib/source/test_abc_loader.py
+++ b/Lib/test/test_importlib/source/test_abc_loader.py
@@ -70,483 +70,9 @@ class SourceLoaderMock(SourceOnlyLoaderMock):
return path == self.bytecode_path
-class PyLoaderMock(abc.PyLoader):
-
- # Globals that should be defined for all modules.
- source = (b"_ = '::'.join([__name__, __file__, __package__, "
- b"repr(__loader__)])")
-
- def __init__(self, data):
- """Take a dict of 'module_name: path' pairings.
-
- Paths should have no file extension, allowing packages to be denoted by
- ending in '__init__'.
-
- """
- self.module_paths = data
- self.path_to_module = {val:key for key,val in data.items()}
-
- def get_data(self, path):
- if path not in self.path_to_module:
- raise IOError
- return self.source
-
- def is_package(self, name):
- filename = os.path.basename(self.get_filename(name))
- return os.path.splitext(filename)[0] == '__init__'
-
- def source_path(self, name):
- try:
- return self.module_paths[name]
- except KeyError:
- raise ImportError
-
- def get_filename(self, name):
- """Silence deprecation warning."""
- with warnings.catch_warnings(record=True) as w:
- warnings.simplefilter("always")
- path = super().get_filename(name)
- assert len(w) == 1
- assert issubclass(w[0].category, DeprecationWarning)
- return path
-
- def module_repr(self):
- return '<module>'
-
-
-class PyLoaderCompatMock(PyLoaderMock):
-
- """Mock that matches what is suggested to have a loader that is compatible
- from Python 3.1 onwards."""
-
- def get_filename(self, fullname):
- try:
- return self.module_paths[fullname]
- except KeyError:
- raise ImportError
-
- def source_path(self, fullname):
- try:
- return self.get_filename(fullname)
- except ImportError:
- return None
-
-
-class PyPycLoaderMock(abc.PyPycLoader, PyLoaderMock):
-
- default_mtime = 1
-
- def __init__(self, source, bc={}):
- """Initialize mock.
-
- 'bc' is a dict keyed on a module's name. The value is dict with
- possible keys of 'path', 'mtime', 'magic', and 'bc'. Except for 'path',
- each of those keys control if any part of created bytecode is to
- deviate from default values.
-
- """
- super().__init__(source)
- self.module_bytecode = {}
- self.path_to_bytecode = {}
- self.bytecode_to_path = {}
- for name, data in bc.items():
- self.path_to_bytecode[data['path']] = name
- self.bytecode_to_path[name] = data['path']
- magic = data.get('magic', imp.get_magic())
- mtime = importlib._w_long(data.get('mtime', self.default_mtime))
- source_size = importlib._w_long(len(self.source) & 0xFFFFFFFF)
- if 'bc' in data:
- bc = data['bc']
- else:
- bc = self.compile_bc(name)
- self.module_bytecode[name] = magic + mtime + source_size + bc
-
- def compile_bc(self, name):
- source_path = self.module_paths.get(name, '<test>') or '<test>'
- code = compile(self.source, source_path, 'exec')
- return marshal.dumps(code)
-
- def source_mtime(self, name):
- if name in self.module_paths:
- return self.default_mtime
- elif name in self.module_bytecode:
- return None
- else:
- raise ImportError
-
- def bytecode_path(self, name):
- try:
- return self.bytecode_to_path[name]
- except KeyError:
- if name in self.module_paths:
- return None
- else:
- raise ImportError
-
- def write_bytecode(self, name, bytecode):
- self.module_bytecode[name] = bytecode
- return True
-
- def get_data(self, path):
- if path in self.path_to_module:
- return super().get_data(path)
- elif path in self.path_to_bytecode:
- name = self.path_to_bytecode[path]
- return self.module_bytecode[name]
- else:
- raise IOError
-
- def is_package(self, name):
- try:
- return super().is_package(name)
- except TypeError:
- return '__init__' in self.bytecode_to_path[name]
-
- def get_code(self, name):
- with warnings.catch_warnings(record=True) as w:
- warnings.simplefilter("always")
- code_object = super().get_code(name)
- assert len(w) == 1
- assert issubclass(w[0].category, DeprecationWarning)
- return code_object
-
-class PyLoaderTests(testing_abc.LoaderTests):
-
- """Tests for importlib.abc.PyLoader."""
-
- mocker = PyLoaderMock
-
- def eq_attrs(self, ob, **kwargs):
- for attr, val in kwargs.items():
- found = getattr(ob, attr)
- self.assertEqual(found, val,
- "{} attribute: {} != {}".format(attr, found, val))
-
- def test_module(self):
- name = '<module>'
- path = os.path.join('', 'path', 'to', 'module')
- mock = self.mocker({name: path})
- with util.uncache(name):
- module = mock.load_module(name)
- self.assertIn(name, sys.modules)
- self.eq_attrs(module, __name__=name, __file__=path, __package__='',
- __loader__=mock)
- self.assertTrue(not hasattr(module, '__path__'))
- return mock, name
-
- def test_package(self):
- name = '<pkg>'
- path = os.path.join('path', 'to', name, '__init__')
- mock = self.mocker({name: path})
- with util.uncache(name):
- module = mock.load_module(name)
- self.assertIn(name, sys.modules)
- self.eq_attrs(module, __name__=name, __file__=path,
- __path__=[os.path.dirname(path)], __package__=name,
- __loader__=mock)
- return mock, name
-
- def test_lacking_parent(self):
- name = 'pkg.mod'
- path = os.path.join('path', 'to', 'pkg', 'mod')
- mock = self.mocker({name: path})
- with util.uncache(name):
- module = mock.load_module(name)
- self.assertIn(name, sys.modules)
- self.eq_attrs(module, __name__=name, __file__=path, __package__='pkg',
- __loader__=mock)
- self.assertFalse(hasattr(module, '__path__'))
- return mock, name
-
- def test_module_reuse(self):
- name = 'mod'
- path = os.path.join('path', 'to', 'mod')
- module = imp.new_module(name)
- mock = self.mocker({name: path})
- with util.uncache(name):
- sys.modules[name] = module
- loaded_module = mock.load_module(name)
- self.assertIs(loaded_module, module)
- self.assertIs(sys.modules[name], module)
- return mock, name
-
- def test_state_after_failure(self):
- name = "mod"
- module = imp.new_module(name)
- module.blah = None
- mock = self.mocker({name: os.path.join('path', 'to', 'mod')})
- mock.source = b"1/0"
- with util.uncache(name):
- sys.modules[name] = module
- with self.assertRaises(ZeroDivisionError):
- mock.load_module(name)
- self.assertIs(sys.modules[name], module)
- self.assertTrue(hasattr(module, 'blah'))
- return mock
-
- def test_unloadable(self):
- name = "mod"
- mock = self.mocker({name: os.path.join('path', 'to', 'mod')})
- mock.source = b"1/0"
- with util.uncache(name):
- with self.assertRaises(ZeroDivisionError):
- mock.load_module(name)
- self.assertNotIn(name, sys.modules)
- return mock
-
-
-class PyLoaderCompatTests(PyLoaderTests):
-
- """Test that the suggested code to make a loader that is compatible from
- Python 3.1 forward works."""
-
- mocker = PyLoaderCompatMock
-
-
-class PyLoaderInterfaceTests(unittest.TestCase):
-
- """Tests for importlib.abc.PyLoader to make sure that when source_path()
- doesn't return a path everything works as expected."""
-
- def test_no_source_path(self):
- # No source path should lead to ImportError.
- name = 'mod'
- mock = PyLoaderMock({})
- with util.uncache(name), self.assertRaises(ImportError):
- mock.load_module(name)
-
- def test_source_path_is_None(self):
- name = 'mod'
- mock = PyLoaderMock({name: None})
- with util.uncache(name), self.assertRaises(ImportError):
- mock.load_module(name)
-
- def test_get_filename_with_source_path(self):
- # get_filename() should return what source_path() returns.
- name = 'mod'
- path = os.path.join('path', 'to', 'source')
- mock = PyLoaderMock({name: path})
- with util.uncache(name):
- self.assertEqual(mock.get_filename(name), path)
-
- def test_get_filename_no_source_path(self):
- # get_filename() should raise ImportError if source_path returns None.
- name = 'mod'
- mock = PyLoaderMock({name: None})
- with util.uncache(name), self.assertRaises(ImportError):
- mock.get_filename(name)
-
-
-class PyPycLoaderTests(PyLoaderTests):
-
- """Tests for importlib.abc.PyPycLoader."""
-
- mocker = PyPycLoaderMock
-
- @source_util.writes_bytecode_files
- def verify_bytecode(self, mock, name):
- assert name in mock.module_paths
- self.assertIn(name, mock.module_bytecode)
- magic = mock.module_bytecode[name][:4]
- self.assertEqual(magic, imp.get_magic())
- mtime = importlib._r_long(mock.module_bytecode[name][4:8])
- self.assertEqual(mtime, 1)
- source_size = mock.module_bytecode[name][8:12]
- self.assertEqual(len(mock.source) & 0xFFFFFFFF,
- importlib._r_long(source_size))
- bc = mock.module_bytecode[name][12:]
- self.assertEqual(bc, mock.compile_bc(name))
-
- def test_module(self):
- mock, name = super().test_module()
- self.verify_bytecode(mock, name)
-
- def test_package(self):
- mock, name = super().test_package()
- self.verify_bytecode(mock, name)
-
- def test_lacking_parent(self):
- mock, name = super().test_lacking_parent()
- self.verify_bytecode(mock, name)
-
- def test_module_reuse(self):
- mock, name = super().test_module_reuse()
- self.verify_bytecode(mock, name)
-
- def test_state_after_failure(self):
- super().test_state_after_failure()
-
- def test_unloadable(self):
- super().test_unloadable()
-
-
-class PyPycLoaderInterfaceTests(unittest.TestCase):
-
- """Test for the interface of importlib.abc.PyPycLoader."""
-
- def get_filename_check(self, src_path, bc_path, expect):
- name = 'mod'
- mock = PyPycLoaderMock({name: src_path}, {name: {'path': bc_path}})
- with util.uncache(name):
- assert mock.source_path(name) == src_path
- assert mock.bytecode_path(name) == bc_path
- self.assertEqual(mock.get_filename(name), expect)
-
- def test_filename_with_source_bc(self):
- # When source and bytecode paths present, return the source path.
- self.get_filename_check('source_path', 'bc_path', 'source_path')
-
- def test_filename_with_source_no_bc(self):
- # With source but no bc, return source path.
- self.get_filename_check('source_path', None, 'source_path')
-
- def test_filename_with_no_source_bc(self):
- # With not source but bc, return the bc path.
- self.get_filename_check(None, 'bc_path', 'bc_path')
-
- def test_filename_with_no_source_or_bc(self):
- # With no source or bc, raise ImportError.
- name = 'mod'
- mock = PyPycLoaderMock({name: None}, {name: {'path': None}})
- with util.uncache(name), self.assertRaises(ImportError):
- mock.get_filename(name)
-
-
-class SkipWritingBytecodeTests(unittest.TestCase):
-
- """Test that bytecode is properly handled based on
- sys.dont_write_bytecode."""
-
- @source_util.writes_bytecode_files
- def run_test(self, dont_write_bytecode):
- name = 'mod'
- mock = PyPycLoaderMock({name: os.path.join('path', 'to', 'mod')})
- sys.dont_write_bytecode = dont_write_bytecode
- with util.uncache(name):
- mock.load_module(name)
- self.assertIsNot(name in mock.module_bytecode, dont_write_bytecode)
-
- def test_no_bytecode_written(self):
- self.run_test(True)
-
- def test_bytecode_written(self):
- self.run_test(False)
-
-
-class RegeneratedBytecodeTests(unittest.TestCase):
-
- """Test that bytecode is regenerated as expected."""
-
- @source_util.writes_bytecode_files
- def test_different_magic(self):
- # A different magic number should lead to new bytecode.
- name = 'mod'
- bad_magic = b'\x00\x00\x00\x00'
- assert bad_magic != imp.get_magic()
- mock = PyPycLoaderMock({name: os.path.join('path', 'to', 'mod')},
- {name: {'path': os.path.join('path', 'to',
- 'mod.bytecode'),
- 'magic': bad_magic}})
- with util.uncache(name):
- mock.load_module(name)
- self.assertIn(name, mock.module_bytecode)
- magic = mock.module_bytecode[name][:4]
- self.assertEqual(magic, imp.get_magic())
-
- @source_util.writes_bytecode_files
- def test_old_mtime(self):
- # Bytecode with an older mtime should be regenerated.
- name = 'mod'
- old_mtime = PyPycLoaderMock.default_mtime - 1
- mock = PyPycLoaderMock({name: os.path.join('path', 'to', 'mod')},
- {name: {'path': 'path/to/mod.bytecode', 'mtime': old_mtime}})
- with util.uncache(name):
- mock.load_module(name)
- self.assertIn(name, mock.module_bytecode)
- mtime = importlib._r_long(mock.module_bytecode[name][4:8])
- self.assertEqual(mtime, PyPycLoaderMock.default_mtime)
-
-
-class BadBytecodeFailureTests(unittest.TestCase):
-
- """Test import failures when there is no source and parts of the bytecode
- is bad."""
-
- def test_bad_magic(self):
- # A bad magic number should lead to an ImportError.
- name = 'mod'
- bad_magic = b'\x00\x00\x00\x00'
- bc = {name:
- {'path': os.path.join('path', 'to', 'mod'),
- 'magic': bad_magic}}
- mock = PyPycLoaderMock({name: None}, bc)
- with util.uncache(name), self.assertRaises(ImportError) as cm:
- mock.load_module(name)
- self.assertEqual(cm.exception.name, name)
-
- def test_no_bytecode(self):
- # Missing code object bytecode should lead to an EOFError.
- name = 'mod'
- bc = {name: {'path': os.path.join('path', 'to', 'mod'), 'bc': b''}}
- mock = PyPycLoaderMock({name: None}, bc)
- with util.uncache(name), self.assertRaises(EOFError):
- mock.load_module(name)
-
- def test_bad_bytecode(self):
- # Malformed code object bytecode should lead to a ValueError.
- name = 'mod'
- bc = {name: {'path': os.path.join('path', 'to', 'mod'), 'bc': b'1234'}}
- mock = PyPycLoaderMock({name: None}, bc)
- with util.uncache(name), self.assertRaises(ValueError):
- mock.load_module(name)
-
-
def raise_ImportError(*args, **kwargs):
raise ImportError
-class MissingPathsTests(unittest.TestCase):
-
- """Test what happens when a source or bytecode path does not exist (either
- from *_path returning None or raising ImportError)."""
-
- def test_source_path_None(self):
- # Bytecode should be used when source_path returns None, along with
- # __file__ being set to the bytecode path.
- name = 'mod'
- bytecode_path = 'path/to/mod'
- mock = PyPycLoaderMock({name: None}, {name: {'path': bytecode_path}})
- with util.uncache(name):
- module = mock.load_module(name)
- self.assertEqual(module.__file__, bytecode_path)
-
- # Testing for bytecode_path returning None handled by all tests where no
- # bytecode initially exists.
-
- def test_all_paths_None(self):
- # If all *_path methods return None, raise ImportError.
- name = 'mod'
- mock = PyPycLoaderMock({name: None})
- with util.uncache(name), self.assertRaises(ImportError) as cm:
- mock.load_module(name)
- self.assertEqual(cm.exception.name, name)
-
- def test_source_path_ImportError(self):
- # An ImportError from source_path should trigger an ImportError.
- name = 'mod'
- mock = PyPycLoaderMock({}, {name: {'path': os.path.join('path', 'to',
- 'mod')}})
- with util.uncache(name), self.assertRaises(ImportError):
- mock.load_module(name)
-
- def test_bytecode_path_ImportError(self):
- # An ImportError from bytecode_path should trigger an ImportError.
- name = 'mod'
- mock = PyPycLoaderMock({name: os.path.join('path', 'to', 'mod')})
- bad_meth = types.MethodType(raise_ImportError, mock)
- mock.bytecode_path = bad_meth
- with util.uncache(name), self.assertRaises(ImportError) as cm:
- mock.load_module(name)
-
class SourceLoaderTestHarness(unittest.TestCase):
@@ -622,6 +148,11 @@ class SourceOnlyLoaderTests(SourceLoaderTestHarness):
code_object = self.loader.get_code(self.name)
self.verify_code(code_object)
+ def test_source_to_code(self):
+ # Verify the compiled code object.
+ code = self.loader.source_to_code(self.loader.source, self.path)
+ self.verify_code(code)
+
def test_load_module(self):
# Loading a module should set __name__, __loader__, __package__,
# __path__ (for packages), __file__, and __cached__.
@@ -801,6 +332,7 @@ class AbstractMethodImplTests(unittest.TestCase):
class Loader(abc.Loader):
def load_module(self, fullname):
super().load_module(fullname)
+
def module_repr(self, module):
super().module_repr(module)
@@ -825,20 +357,6 @@ class AbstractMethodImplTests(unittest.TestCase):
class SourceLoader(ResourceLoader, ExecutionLoader, abc.SourceLoader):
pass
- class PyLoader(ResourceLoader, InspectLoader, abc.PyLoader):
- def source_path(self, _):
- super().source_path(_)
-
- class PyPycLoader(PyLoader, abc.PyPycLoader):
- def bytecode_path(self, _):
- super().bytecode_path(_)
-
- def source_mtime(self, _):
- super().source_mtime(_)
-
- def write_bytecode(self, _, _2):
- super().write_bytecode(_, _2)
-
def raises_NotImplementedError(self, ins, *args):
for method_name in args:
method = getattr(ins, method_name)
@@ -877,29 +395,15 @@ class AbstractMethodImplTests(unittest.TestCase):
# Required abstractmethods.
self.raises_NotImplementedError(ins, 'get_filename', 'get_data')
# Optional abstractmethods.
- self.raises_NotImplementedError(ins,'path_stats', 'set_data')
-
- def test_PyLoader(self):
- self.raises_NotImplementedError(self.PyLoader(), 'source_path',
- 'get_data', 'is_package')
-
- def test_PyPycLoader(self):
- self.raises_NotImplementedError(self.PyPycLoader(), 'source_path',
- 'source_mtime', 'bytecode_path',
- 'write_bytecode')
+ self.raises_NotImplementedError(ins, 'path_stats', 'set_data')
def test_main():
from test.support import run_unittest
- run_unittest(PyLoaderTests, PyLoaderCompatTests,
- PyLoaderInterfaceTests,
- PyPycLoaderTests, PyPycLoaderInterfaceTests,
- SkipWritingBytecodeTests, RegeneratedBytecodeTests,
- BadBytecodeFailureTests, MissingPathsTests,
- SourceOnlyLoaderTests,
- SourceLoaderBytecodeTests,
- SourceLoaderGetSourceTests,
- AbstractMethodImplTests)
+ run_unittest(SourceOnlyLoaderTests,
+ SourceLoaderBytecodeTests,
+ SourceLoaderGetSourceTests,
+ AbstractMethodImplTests)
if __name__ == '__main__':
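The PyLoaderMock/PyPycLoaderMock machinery and its tests are removed above because the PyLoader and PyPycLoader ABCs, deprecated since 3.3, are being dropped. A rough sketch of the replacement pattern, assuming a custom loader now builds on importlib.abc.SourceLoader, which only requires get_filename() and get_data(); InMemoryLoader and its path scheme are invented for illustration.

import importlib.abc

class InMemoryLoader(importlib.abc.SourceLoader):
    """Serve Python source from a dict; only get_filename/get_data are needed."""

    def __init__(self, sources):
        self._sources = sources  # {fullname: source bytes}

    def get_filename(self, fullname):
        if fullname not in self._sources:
            raise ImportError(fullname)
        return '<in-memory>/{}.py'.format(fullname)

    def get_data(self, path):
        name = path.rsplit('/', 1)[-1][:-3]  # strip the fake directory and '.py'
        try:
            return self._sources[name]
        except KeyError:
            raise OSError(path)

loader = InMemoryLoader({'demo': b'x = 42\n'})
code = loader.get_code('demo')       # inherited from SourceLoader
namespace = {}
exec(code, namespace)
assert namespace['x'] == 42
assert loader.get_source('demo') == 'x = 42\n'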
diff --git a/Lib/test/test_importlib/test_abc.py b/Lib/test/test_importlib/test_abc.py
index c620c37..a8d8c2e 100644
--- a/Lib/test/test_importlib/test_abc.py
+++ b/Lib/test/test_importlib/test_abc.py
@@ -43,11 +43,6 @@ class PathEntryFinder(InheritanceTests, unittest.TestCase):
subclasses = [machinery.FileFinder]
-class Loader(InheritanceTests, unittest.TestCase):
-
- subclasses = [abc.PyLoader]
-
-
class ResourceLoader(InheritanceTests, unittest.TestCase):
superclasses = [abc.Loader]
@@ -56,14 +51,13 @@ class ResourceLoader(InheritanceTests, unittest.TestCase):
class InspectLoader(InheritanceTests, unittest.TestCase):
superclasses = [abc.Loader]
- subclasses = [abc.PyLoader, machinery.BuiltinImporter,
+ subclasses = [machinery.BuiltinImporter,
machinery.FrozenImporter, machinery.ExtensionFileLoader]
class ExecutionLoader(InheritanceTests, unittest.TestCase):
superclasses = [abc.InspectLoader]
- subclasses = [abc.PyLoader]
class FileLoader(InheritanceTests, unittest.TestCase):
@@ -78,16 +72,6 @@ class SourceLoader(InheritanceTests, unittest.TestCase):
subclasses = [machinery.SourceFileLoader]
-class PyLoader(InheritanceTests, unittest.TestCase):
-
- superclasses = [abc.Loader, abc.ResourceLoader, abc.ExecutionLoader]
-
-
-class PyPycLoader(InheritanceTests, unittest.TestCase):
-
- superclasses = [abc.PyLoader]
-
-
def test_main():
from test.support import run_unittest
classes = []
diff --git a/Lib/test/test_io.py b/Lib/test/test_io.py
index 6931e0a..28d3e0f 100644
--- a/Lib/test/test_io.py
+++ b/Lib/test/test_io.py
@@ -832,6 +832,14 @@ class SizeofTest:
bufio = self.tp(rawio, buffer_size=bufsize2)
self.assertEqual(sys.getsizeof(bufio), size + bufsize2)
+ @support.cpython_only
+ def test_buffer_freeing(self):
+ bufsize = 4096
+ rawio = self.MockRawIO()
+ bufio = self.tp(rawio, buffer_size=bufsize)
+ size = sys.getsizeof(bufio) - bufsize
+ bufio.close()
+ self.assertEqual(sys.getsizeof(bufio), size)
class BufferedReaderTest(unittest.TestCase, CommonBufferedTests):
read_mode = "rb"
@@ -2845,7 +2853,7 @@ class MiscIOTest(unittest.TestCase):
for fd in fds:
try:
os.close(fd)
- except EnvironmentError as e:
+ except OSError as e:
if e.errno != errno.EBADF:
raise
self.addCleanup(cleanup_fds)
diff --git a/Lib/test/test_iterlen.py b/Lib/test/test_iterlen.py
index 7469a31..57f7101 100644
--- a/Lib/test/test_iterlen.py
+++ b/Lib/test/test_iterlen.py
@@ -45,31 +45,21 @@ import unittest
from test import support
from itertools import repeat
from collections import deque
-from builtins import len as _len
+from operator import length_hint
n = 10
-def len(obj):
- try:
- return _len(obj)
- except TypeError:
- try:
- # note: this is an internal undocumented API,
- # don't rely on it in your own programs
- return obj.__length_hint__()
- except AttributeError:
- raise TypeError
class TestInvariantWithoutMutations(unittest.TestCase):
def test_invariant(self):
it = self.it
for i in reversed(range(1, n+1)):
- self.assertEqual(len(it), i)
+ self.assertEqual(length_hint(it), i)
next(it)
- self.assertEqual(len(it), 0)
+ self.assertEqual(length_hint(it), 0)
self.assertRaises(StopIteration, next, it)
- self.assertEqual(len(it), 0)
+ self.assertEqual(length_hint(it), 0)
class TestTemporarilyImmutable(TestInvariantWithoutMutations):
@@ -78,12 +68,12 @@ class TestTemporarilyImmutable(TestInvariantWithoutMutations):
# length immutability during iteration
it = self.it
- self.assertEqual(len(it), n)
+ self.assertEqual(length_hint(it), n)
next(it)
- self.assertEqual(len(it), n-1)
+ self.assertEqual(length_hint(it), n-1)
self.mutate()
self.assertRaises(RuntimeError, next, it)
- self.assertEqual(len(it), 0)
+ self.assertEqual(length_hint(it), 0)
## ------- Concrete Type Tests -------
@@ -92,10 +82,6 @@ class TestRepeat(TestInvariantWithoutMutations):
def setUp(self):
self.it = repeat(None, n)
- def test_no_len_for_infinite_repeat(self):
- # The repeat() object can also be infinite
- self.assertRaises(TypeError, len, repeat(None))
-
class TestXrange(TestInvariantWithoutMutations):
def setUp(self):
@@ -167,14 +153,15 @@ class TestList(TestInvariantWithoutMutations):
it = iter(d)
next(it)
next(it)
- self.assertEqual(len(it), n-2)
+ self.assertEqual(length_hint(it), n - 2)
d.append(n)
- self.assertEqual(len(it), n-1) # grow with append
+ self.assertEqual(length_hint(it), n - 1) # grow with append
d[1:] = []
- self.assertEqual(len(it), 0)
+ self.assertEqual(length_hint(it), 0)
self.assertEqual(list(it), [])
d.extend(range(20))
- self.assertEqual(len(it), 0)
+ self.assertEqual(length_hint(it), 0)
+
class TestListReversed(TestInvariantWithoutMutations):
@@ -186,32 +173,41 @@ class TestListReversed(TestInvariantWithoutMutations):
it = reversed(d)
next(it)
next(it)
- self.assertEqual(len(it), n-2)
+ self.assertEqual(length_hint(it), n - 2)
d.append(n)
- self.assertEqual(len(it), n-2) # ignore append
+ self.assertEqual(length_hint(it), n - 2) # ignore append
d[1:] = []
- self.assertEqual(len(it), 0)
+ self.assertEqual(length_hint(it), 0)
self.assertEqual(list(it), []) # confirm invariant
d.extend(range(20))
- self.assertEqual(len(it), 0)
+ self.assertEqual(length_hint(it), 0)
## -- Check to make sure exceptions are not suppressed by __length_hint__()
class BadLen(object):
- def __iter__(self): return iter(range(10))
+ def __iter__(self):
+ return iter(range(10))
+
def __len__(self):
raise RuntimeError('hello')
+
class BadLengthHint(object):
- def __iter__(self): return iter(range(10))
+ def __iter__(self):
+ return iter(range(10))
+
def __length_hint__(self):
raise RuntimeError('hello')
+
class NoneLengthHint(object):
- def __iter__(self): return iter(range(10))
+ def __iter__(self):
+ return iter(range(10))
+
def __length_hint__(self):
- return None
+ return NotImplemented
+
class TestLengthHintExceptions(unittest.TestCase):
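The rewritten assertions above rely on operator.length_hint() (PEP 424): it prefers len(), falls back to __length_hint__(), and returns the supplied default when no usable hint exists or the hint is NotImplemented. A small sketch mirroring those cases; GivesNoHint is an illustrative class, not from the patch.

from itertools import repeat
from operator import length_hint

it = iter(range(10))
next(it)
assert length_hint(it) == 9                 # built-in iterators expose a hint

assert length_hint(repeat(None, 50)) == 50  # finite repeat knows its length
assert length_hint(repeat(None), 12) == 12  # infinite repeat: the default is returned

class GivesNoHint:
    def __iter__(self):
        return iter(range(10))

    def __length_hint__(self):
        return NotImplemented               # same value the rewritten NoneLengthHint returns

assert length_hint(GivesNoHint(), 7) == 7   # NotImplemented falls back to the default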
diff --git a/Lib/test/test_itertools.py b/Lib/test/test_itertools.py
index 90e85a9..ece7f99 100644
--- a/Lib/test/test_itertools.py
+++ b/Lib/test/test_itertools.py
@@ -1723,9 +1723,8 @@ class TestVariousIteratorArgs(unittest.TestCase):
class LengthTransparency(unittest.TestCase):
def test_repeat(self):
- from test.test_iterlen import len
- self.assertEqual(len(repeat(None, 50)), 50)
- self.assertRaises(TypeError, len, repeat(None))
+ self.assertEqual(operator.length_hint(repeat(None, 50)), 50)
+ self.assertEqual(operator.length_hint(repeat(None), 12), 12)
class RegressionTests(unittest.TestCase):
diff --git a/Lib/test/test_kqueue.py b/Lib/test/test_kqueue.py
index 4e93013..6312ddd 100644
--- a/Lib/test/test_kqueue.py
+++ b/Lib/test/test_kqueue.py
@@ -94,7 +94,7 @@ class TestKQueue(unittest.TestCase):
client.setblocking(False)
try:
client.connect(('127.0.0.1', serverSocket.getsockname()[1]))
- except socket.error as e:
+ except OSError as e:
self.assertEqual(e.args[0], errno.EINPROGRESS)
else:
#raise AssertionError("Connect should have raised EINPROGRESS")
diff --git a/Lib/test/test_logging.py b/Lib/test/test_logging.py
index cb908fb..d8ce315 100644
--- a/Lib/test/test_logging.py
+++ b/Lib/test/test_logging.py