path: root/Modules/CMakeTestFortranCompiler.cmake
blob: cc8c565e63b221ea3b37cf61429e652b6938f171 (plain)

# This file is used by EnableLanguage in cmGlobalGenerator to
# determine that the selected Fortran compiler can actually compile
# and link the most basic of programs.  If not, a fatal error
# is set and cmake stops processing commands and will not generate
# any makefiles or projects.
IF(NOT CMAKE_Fortran_COMPILER_WORKS)
  MESSAGE(STATUS "Check for working Fortran compiler: ${CMAKE_Fortran_COMPILER}")
  FILE(WRITE ${CMAKE_BINARY_DIR}${CMAKE_FILES_DIRECTORY}/CMakeTmp/testFortranCompiler.f "
        PROGRAM TESTFortran
        PRINT *, 'Hello'
        END
  ")
  TRY_COMPILE(CMAKE_Fortran_COMPILER_WORKS ${CMAKE_BINARY_DIR} 
    ${CMAKE_BINARY_DIR}${CMAKE_FILES_DIRECTORY}/CMakeTmp/testFortranCompiler.f
    OUTPUT_VARIABLE OUTPUT)
  SET(FORTRAN_TEST_WAS_RUN 1)
ENDIF(NOT CMAKE_Fortran_COMPILER_WORKS)

IF(NOT CMAKE_Fortran_COMPILER_WORKS)
  MESSAGE(STATUS "Check for working Fortran compiler: ${CMAKE_Fortran_COMPILER} -- broken")
  FILE(APPEND ${CMAKE_BINARY_DIR}${CMAKE_FILES_DIRECTORY}/CMakeError.log
    "Determining if the Fortran compiler works failed with "
    "the following output:\n${OUTPUT}\n\n")
  MESSAGE(FATAL_ERROR "The Fortran compiler \"${CMAKE_Fortran_COMPILER}\" "
    "is not able to compile a simple test program.\nIt fails "
    "with the following output:\n ${OUTPUT}\n\n"
    "CMake will not be able to correctly generate this project.")
ELSE(NOT CMAKE_Fortran_COMPILER_WORKS)
  IF(FORTRAN_TEST_WAS_RUN)
    MESSAGE(STATUS "Check for working Fortran compiler: ${CMAKE_Fortran_COMPILER} -- works")
    FILE(APPEND ${CMAKE_BINARY_DIR}${CMAKE_FILES_DIRECTORY}/CMakeOutput.log
      "Determining if the Fortran compiler works passed with "
      "the following output:\n${OUTPUT}\n\n")
  ENDIF(FORTRAN_TEST_WAS_RUN)
  SET(CMAKE_Fortran_COMPILER_WORKS 1 CACHE INTERNAL "")
ENDIF(NOT CMAKE_Fortran_COMPILER_WORKS)

IF(CMAKE_Fortran_COMPILER_WORKS)
  # Test for Fortran 90 support by using an f90-specific construct.
  IF(DEFINED CMAKE_Fortran_COMPILER_SUPPORTS_F90)
  ELSE(DEFINED CMAKE_Fortran_COMPILER_SUPPORTS_F90)
    MESSAGE(STATUS "Checking whether ${CMAKE_Fortran_COMPILER} supports Fortran 90")
    FILE(WRITE ${CMAKE_BINARY_DIR}${CMAKE_FILES_DIRECTORY}/CMakeTmp/testFortranCompilerF90.f90 "
      PROGRAM TESTFortran90
      stop = 1 ; do while ( stop .eq. 0 ) ; end do
      END PROGRAM TESTFortran90
  ")
    TRY_COMPILE(CMAKE_Fortran_COMPILER_SUPPORTS_F90 ${CMAKE_BINARY_DIR}
      ${CMAKE_BINARY_DIR}${CMAKE_FILES_DIRECTORY}/CMakeTmp/testFortranCompilerF90.f90
      OUTPUT_VARIABLE OUTPUT)
    IF(CMAKE_Fortran_COMPILER_SUPPORTS_F90)
      MESSAGE(STATUS "Checking whether ${CMAKE_Fortran_COMPILER} supports Fortran 90 -- yes")
      FILE(APPEND ${CMAKE_BINARY_DIR}${CMAKE_FILES_DIRECTORY}/CMakeOutput.log
        "Determining if the Fortran compiler supports Fortran 90 passed with "
        "the following output:\n${OUTPUT}\n\n")
      SET(CMAKE_Fortran_COMPILER_SUPPORTS_F90 1 CACHE INTERNAL "")
    ELSE(CMAKE_Fortran_COMPILER_SUPPORTS_F90)
      MESSAGE(STATUS "Checking whether ${CMAKE_Fortran_COMPILER} supports Fortran 90 -- no")
      FILE(APPEND ${CMAKE_BINARY_DIR}${CMAKE_FILES_DIRECTORY}/CMakeError.log
        "Determining if the Fortran compiler supports Fortran 90 failed with "
        "the following output:\n${OUTPUT}\n\n")
      SET(CMAKE_Fortran_COMPILER_SUPPORTS_F90 0 CACHE INTERNAL "")
    ENDIF(CMAKE_Fortran_COMPILER_SUPPORTS_F90)
  ENDIF(DEFINED CMAKE_Fortran_COMPILER_SUPPORTS_F90)
ENDIF(CMAKE_Fortran_COMPILER_WORKS)
-rw-r--r--Doc/library/os.rst2
-rw-r--r--Doc/library/pty.rst3
-rw-r--r--Doc/library/shutil.rst15
-rw-r--r--Doc/library/sys.rst3
-rw-r--r--Doc/library/sysconfig.rst2
-rw-r--r--Doc/library/timeit.rst2
-rw-r--r--Doc/library/tkinter.rst26
-rw-r--r--Doc/library/traceback.rst2
-rw-r--r--Doc/library/unicodedata.rst4
-rw-r--r--Doc/library/venv.rst5
-rw-r--r--Doc/license.rst21
-rw-r--r--Doc/reference/datamodel.rst9
-rw-r--r--Doc/tools/sphinxext/indexsidebar.html2
-rw-r--r--Doc/tools/sphinxext/pyspecific.py2
-rw-r--r--Doc/tutorial/interpreter.rst14
-rw-r--r--Doc/tutorial/stdlib.rst2
-rw-r--r--Doc/tutorial/stdlib2.rst2
-rw-r--r--Doc/using/venv-create.inc29
-rw-r--r--Doc/whatsnew/3.4.rst204
-rw-r--r--Doc/whatsnew/index.rst1
-rw-r--r--Include/abstract.h3
-rw-r--r--Include/grammar.h2
-rw-r--r--Include/object.h42
-rw-r--r--Include/osdefs.h8
-rw-r--r--Include/patchlevel.h6
-rw-r--r--Include/pyport.h17
-rw-r--r--Include/symtable.h1
-rw-r--r--Include/token.h2
-rw-r--r--Include/unicodeobject.h27
-rw-r--r--LICENSE1
-rw-r--r--Lib/_osx_support.py2
-rw-r--r--Lib/argparse.py3
-rw-r--r--Lib/bz2.py44
-rw-r--r--Lib/collections/abc.py3
-rw-r--r--Lib/concurrent/futures/_base.py3
-rw-r--r--Lib/concurrent/futures/process.py6
-rw-r--r--Lib/concurrent/futures/thread.py2
-rw-r--r--Lib/dbm/__init__.py6
-rw-r--r--Lib/decimal.py4
-rw-r--r--Lib/difflib.py21
-rw-r--r--Lib/distutils/__init__.py2
-rw-r--r--Lib/distutils/ccompiler.py5
-rw-r--r--Lib/distutils/command/bdist.py3
-rw-r--r--Lib/distutils/command/bdist_dumb.py8
-rw-r--r--Lib/distutils/command/build_ext.py26
-rw-r--r--Lib/distutils/command/install.py15
-rw-r--r--Lib/distutils/emxccompiler.py315
-rw-r--r--Lib/distutils/spawn.py24
-rw-r--r--Lib/distutils/sysconfig.py24
-rw-r--r--Lib/distutils/tests/test_bdist_dumb.py2
-rw-r--r--Lib/distutils/tests/test_install.py2
-rw-r--r--Lib/distutils/tests/test_util.py2
-rw-r--r--Lib/distutils/util.py6
-rw-r--r--Lib/email/_header_value_parser.py3
-rw-r--r--Lib/email/iterators.py6
-rw-r--r--Lib/glob.py3
-rw-r--r--Lib/hashlib.py15
-rw-r--r--Lib/http/cookiejar.py3
-rw-r--r--Lib/http/server.py4
-rw-r--r--Lib/idlelib/NEWS.txt2
-rw-r--r--Lib/idlelib/PyShell.py10
-rw-r--r--Lib/idlelib/help.txt2
-rw-r--r--Lib/idlelib/idlever.py2
-rw-r--r--Lib/importlib/_bootstrap.py11
-rw-r--r--Lib/importlib/abc.py177
-rw-r--r--Lib/json/encoder.py15
-rw-r--r--Lib/lib2to3/btm_utils.py6
-rw-r--r--Lib/lib2to3/pytree.py9
-rw-r--r--Lib/logging/__init__.py31
-rw-r--r--Lib/logging/config.py68
-rw-r--r--Lib/logging/handlers.py48
-rw-r--r--Lib/lzma.py130
-rw-r--r--Lib/mailbox.py12
-rw-r--r--Lib/mimetypes.py2
-rw-r--r--Lib/multiprocessing/__init__.py7
-rw-r--r--Lib/multiprocessing/forking.py22
-rw-r--r--Lib/multiprocessing/queues.py4
-rw-r--r--Lib/ntpath.py6
-rw-r--r--Lib/os.py32
-rw-r--r--Lib/os2emxpath.py158
-rw-r--r--Lib/pkgutil.py6
-rw-r--r--Lib/plat-os2emx/IN.py82
-rw-r--r--Lib/plat-os2emx/SOCKET.py106
-rw-r--r--Lib/plat-os2emx/_emx_link.py79
-rw-r--r--Lib/plat-os2emx/grp.py182
-rw-r--r--Lib/plat-os2emx/pwd.py208
-rwxr-xr-xLib/plat-os2emx/regen7
-rwxr-xr-xLib/platform.py10
-rw-r--r--Lib/pty.py1
-rwxr-xr-xLib/pydoc.py2
-rw-r--r--Lib/random.py7
-rw-r--r--Lib/shelve.py9
-rw-r--r--Lib/shutil.py8
-rw-r--r--Lib/site.py21
-rw-r--r--Lib/sysconfig.py23
-rw-r--r--Lib/tarfile.py3
-rwxr-xr-xLib/test/regrtest.py14
-rw-r--r--Lib/test/support.py63
-rw-r--r--Lib/test/test_bytes.py23
-rw-r--r--Lib/test/test_bz2.py136
-rw-r--r--Lib/test/test_cmd_line.py25
-rw-r--r--Lib/test/test_cmd_line_script.py19
-rw-r--r--Lib/test/test_concurrent_futures.py22
-rw-r--r--Lib/test/test_enumerate.py10
-rw-r--r--Lib/test/test_exceptions.py9
-rw-r--r--Lib/test/test_fcntl.py15
-rw-r--r--Lib/test/test_format.py16
-rw-r--r--Lib/test/test_gc.py26
-rw-r--r--Lib/test/test_generators.py21
-rw-r--r--Lib/test/test_genericpath.py33
-rw-r--r--Lib/test/test_hashlib.py127
-rw-r--r--Lib/test/test_httpservers.py11
-rw-r--r--Lib/test/test_importlib/import_/test_fromlist.py2
-rw-r--r--Lib/test/test_importlib/source/test_abc_loader.py505
-rw-r--r--Lib/test/test_importlib/test_abc.py18
-rw-r--r--Lib/test/test_io.py8
-rw-r--r--Lib/test/test_iterlen.py62
-rw-r--r--Lib/test/test_itertools.py5
-rw-r--r--Lib/test/test_logging.py128
-rw-r--r--Lib/test/test_mailbox.py2
-rw-r--r--Lib/test/test_mimetypes.py2
-rw-r--r--Lib/test/test_multiprocessing.py172
-rw-r--r--Lib/test/test_operator.py25
-rw-r--r--Lib/test/test_os.py103
-rw-r--r--Lib/test/test_pep277.py4
-rw-r--r--Lib/test/test_random.py33
-rw-r--r--Lib/test/test_range.py2
-rw-r--r--Lib/test/test_select.py2
-rw-r--r--Lib/test/test_set.py2
-rw-r--r--Lib/test/test_shelve.py13
-rw-r--r--Lib/test/test_shutil.py17
-rw-r--r--Lib/test/test_signal.py2
-rw-r--r--Lib/test/test_site.py2
-rw-r--r--Lib/test/test_socketserver.py16
-rw-r--r--Lib/test/test_sundry.py2
-rw-r--r--Lib/test/test_sys.py2
-rw-r--r--Lib/test/test_sysconfig.py44
-rw-r--r--Lib/test/test_tempfile.py6
-rw-r--r--Lib/test/test_thread.py2
-rw-r--r--Lib/test/test_threading.py6
-rw-r--r--Lib/test/test_threadsignals.py2
-rw-r--r--Lib/test/test_unicode.py30
-rw-r--r--Lib/test/test_unicodedata.py2
-rw-r--r--Lib/test/test_urllib.py18
-rw-r--r--Lib/test/test_urllib2.py1
-rw-r--r--Lib/test/test_venv.py59
-rw-r--r--Lib/tkinter/__init__.py39
-rw-r--r--Lib/traceback.py3
-rw-r--r--Lib/unittest/loader.py3
-rw-r--r--Lib/urllib/request.py8
-rw-r--r--Lib/venv/__init__.py23
-rw-r--r--Lib/venv/scripts/posix/activate.csh37
-rw-r--r--Lib/venv/scripts/posix/activate.fish74
-rw-r--r--Lib/weakref.py3
-rw-r--r--Lib/webbrowser.py11
-rw-r--r--Lib/xml/etree/ElementPath.py6
-rw-r--r--Lib/xml/etree/ElementTree.py6
-rw-r--r--Mac/Tools/bundlebuilder.py2
-rw-r--r--Makefile.pre.in1
-rw-r--r--Misc/ACKS3
-rw-r--r--Misc/HISTORY4101
-rw-r--r--Misc/NEWS128
-rw-r--r--Misc/RPM/python-3.4.spec (renamed from Misc/RPM/python-3.3.spec)7
-rw-r--r--Modules/_dbmmodule.c4
-rw-r--r--Modules/_hashopenssl.c22
-rw-r--r--Modules/_io/bufferedio.c5
-rw-r--r--Modules/_io/fileio.c7
-rw-r--r--Modules/_io/textio.c18
-rw-r--r--Modules/_json.c21
-rw-r--r--Modules/_lsprof.c4
-rw-r--r--Modules/_multiprocessing/multiprocessing.c21
-rw-r--r--Modules/_multiprocessing/multiprocessing.h6
-rw-r--r--Modules/_multiprocessing/semaphore.c14
-rwxr-xr-xModules/_sha3/cleanup.py49
-rw-r--r--Modules/_sha3/keccak/KeccakF-1600-32-rvk.macros555
-rw-r--r--Modules/_sha3/keccak/KeccakF-1600-32-s1.macros1187
-rw-r--r--Modules/_sha3/keccak/KeccakF-1600-32-s2.macros1187
-rw-r--r--Modules/_sha3/keccak/KeccakF-1600-32.macros26
-rw-r--r--Modules/_sha3/keccak/KeccakF-1600-64.macros728
-rw-r--r--Modules/_sha3/keccak/KeccakF-1600-int-set.h6
-rw-r--r--Modules/_sha3/keccak/KeccakF-1600-interface.h46
-rw-r--r--Modules/_sha3/keccak/KeccakF-1600-opt32-settings.h6
-rw-r--r--Modules/_sha3/keccak/KeccakF-1600-opt32.c524
-rw-r--r--Modules/_sha3/keccak/KeccakF-1600-opt64-settings.h9
-rw-r--r--Modules/_sha3/keccak/KeccakF-1600-opt64.c510
-rw-r--r--Modules/_sha3/keccak/KeccakF-1600-simd128.macros651
-rw-r--r--Modules/_sha3/keccak/KeccakF-1600-simd64.macros517
-rw-r--r--Modules/_sha3/keccak/KeccakF-1600-unrolling.macros124
-rw-r--r--Modules/_sha3/keccak/KeccakF-1600-xop.macros573
-rw-r--r--Modules/_sha3/keccak/KeccakNISTInterface.c83
-rw-r--r--Modules/_sha3/keccak/KeccakNISTInterface.h72
-rw-r--r--Modules/_sha3/keccak/KeccakSponge.c266
-rw-r--r--Modules/_sha3/keccak/KeccakSponge.h76
-rwxr-xr-xModules/_sha3/keccak/brg_endian.h142
-rw-r--r--Modules/_sha3/keccak/crypto_hash.h0
-rw-r--r--Modules/_sha3/sha3module.c583
-rw-r--r--Modules/_sre.c14
-rw-r--r--Modules/_struct.c21
-rw-r--r--Modules/_tkinter.c653
-rw-r--r--Modules/arraymodule.c7
-rw-r--r--Modules/cjkcodecs/cjkcodecs.h11
-rw-r--r--Modules/faulthandler.c9
-rw-r--r--Modules/fcntlmodule.c6
-rw-r--r--Modules/gcmodule.c64
-rw-r--r--Modules/getpath.c4
-rw-r--r--Modules/hashlib.h33
-rw-r--r--Modules/main.c6
-rw-r--r--Modules/md5module.c2
-rw-r--r--Modules/operator.c27
-rw-r--r--Modules/posixmodule.c868
-rw-r--r--Modules/pwdmodule.c4
-rw-r--r--Modules/readline.c4
-rw-r--r--Modules/selectmodule.c5
-rw-r--r--Modules/sha1module.c2
-rw-r--r--Modules/sha256module.c22
-rw-r--r--Modules/sha512module.c22
-rw-r--r--Modules/signalmodule.c5
-rw-r--r--Modules/socketmodule.c97
-rw-r--r--Modules/socketmodule.h2
-rw-r--r--Modules/timemodule.c55
-rw-r--r--Modules/unicodedata_db.h616
-rw-r--r--Modules/unicodename_db.h22399
-rw-r--r--Modules/zlibmodule.c4
-rw-r--r--Objects/abstract.c77
-rw-r--r--Objects/bytearrayobject.c74
-rw-r--r--Objects/bytesobject.c109
-rw-r--r--Objects/frameobject.c8
-rw-r--r--Objects/iterobject.c11
-rw-r--r--Objects/listobject.c2
-rw-r--r--Objects/longobject.c15
-rw-r--r--Objects/object.c20
-rw-r--r--Objects/stringlib/codecs.h6
-rw-r--r--Objects/stringlib/join.h132
-rw-r--r--Objects/typeobject.c2
-rw-r--r--Objects/unicodeobject.c2989
-rw-r--r--Objects/unicodetype_db.h13
-rw-r--r--PC/VC6/pythoncore.dsp4
-rw-r--r--PC/VC6/readme.txt4
-rw-r--r--PC/VS7.1/pythoncore.vcproj24
-rw-r--r--PC/VS7.1/readme.txt4
-rw-r--r--PC/VS8.0/build_ssl.bat4
-rw-r--r--PC/VS8.0/kill_python.c2
-rw-r--r--PC/VS8.0/pyproject.vsprops2
-rw-r--r--PC/VS9.0/_sha3.vcproj513
-rw-r--r--PC/VS9.0/kill_python.c2
-rw-r--r--PC/VS9.0/pcbuild.sln18
-rw-r--r--PC/VS9.0/pyproject.vsprops2
-rw-r--r--PC/example_nt/example.vcproj4
-rw-r--r--PC/getpathp.c2
-rw-r--r--PC/os2emx/Makefile672
-rw-r--r--PC/os2emx/README.os2emx663
-rw-r--r--PC/os2emx/config.c164
-rw-r--r--PC/os2emx/dlfcn.c223
-rw-r--r--PC/os2emx/dlfcn.h51
-rw-r--r--PC/os2emx/dllentry.c42
-rw-r--r--PC/os2emx/getpathp.c418
-rw-r--r--PC/os2emx/pyconfig.h332
-rw-r--r--PC/os2emx/python33.def1314
-rw-r--r--PC/os2emx/pythonpm.c124
-rw-r--r--PC/os2vacpp/_tkinter.def8
-rw-r--r--PC/os2vacpp/config.c99
-rw-r--r--PC/os2vacpp/getpathp.c482
-rw-r--r--PC/os2vacpp/makefile1549
-rw-r--r--PC/os2vacpp/makefile.omk1047
-rw-r--r--PC/os2vacpp/pyconfig.h212
-rw-r--r--PC/os2vacpp/python.def479
-rw-r--r--PC/os2vacpp/readme.txt119
-rw-r--r--PC/pyconfig.h4
-rw-r--r--PC/python.mk5
-rw-r--r--PC/python3.def1394
-rw-r--r--PC/python3.mak10
-rw-r--r--PC/python34gen.py (renamed from PC/python33gen.py)8
-rw-r--r--PC/python34stub.def (renamed from PC/python33stub.def)2
-rw-r--r--PC/readme.txt13
-rw-r--r--PCbuild/_sha3.vcxproj218
-rw-r--r--PCbuild/build_ssl.bat4
-rw-r--r--PCbuild/kill_python.c2
-rw-r--r--PCbuild/pcbuild.sln18
-rw-r--r--PCbuild/pyproject.props2
-rw-r--r--PCbuild/readme.txt2
-rw-r--r--Parser/grammar1.c2
-rw-r--r--Parser/tokenizer.c2
-rw-r--r--Python/bltinmodule.c6
-rw-r--r--Python/ceval.c2104
-rw-r--r--Python/codecs.c4
-rw-r--r--Python/dynload_os2.c42
-rw-r--r--Python/dynload_shlib.c36
-rw-r--r--Python/errors.c15
-rw-r--r--Python/fileutils.c4
-rw-r--r--Python/formatter_unicode.c3
-rw-r--r--Python/importdl.h5
-rw-r--r--Python/importlib.h7507
-rw-r--r--Python/symtable.c57
-rw-r--r--Python/sysmodule.c23
-rw-r--r--Python/thread.c4
-rw-r--r--Python/thread_os2.h267
-rw-r--r--README16
-rwxr-xr-xTools/freeze/freeze.py2
-rw-r--r--Tools/importbench/importbench.py31
-rw-r--r--Tools/msi/msi.py3
-rw-r--r--Tools/scripts/parse_html5_entities.py105
-rwxr-xr-xTools/stringbench/stringbench.py2
-rw-r--r--Tools/unicode/makeunicodedata.py2
-rwxr-xr-xconfigure20
-rw-r--r--configure.ac2
-rw-r--r--setup.py11
316 files changed, 34510 insertions, 31593 deletions
diff --git a/Doc/c-api/object.rst b/Doc/c-api/object.rst
index d895547..e4769b3 100644
--- a/Doc/c-api/object.rst
+++ b/Doc/c-api/object.rst
@@ -342,6 +342,15 @@ is considered sufficient for this determination.
returned. This is the equivalent to the Python expression ``len(o)``.
+.. c:function:: Py_ssize_t PyObject_LengthHint(PyObject *o, Py_ssize_t default)
+
+ Return an estimated length for the object *o*. First trying to return its
+ actual length, then an estimate using ``__length_hint__``, and finally
+ returning the default value. On error ``-1`` is returned. This is the
+ equivalent to the Python expression ``operator.length_hint(o, default)``.
+
+ .. versionadded:: 3.4
+
.. c:function:: PyObject* PyObject_GetItem(PyObject *o, PyObject *key)
Return element of *o* corresponding to the object *key* or *NULL* on failure.
diff --git a/Doc/howto/logging-cookbook.rst b/Doc/howto/logging-cookbook.rst
index 92af0ec..b336a4a 100644
--- a/Doc/howto/logging-cookbook.rst
+++ b/Doc/howto/logging-cookbook.rst
@@ -741,9 +741,7 @@ the basis for code meeting your own specific requirements::
break
logger = logging.getLogger(record.name)
logger.handle(record) # No level or filter logic applied - just do it!
- except (KeyboardInterrupt, SystemExit):
- raise
- except:
+ except Exception:
import sys, traceback
print('Whoops! Problem:', file=sys.stderr)
traceback.print_exc(file=sys.stderr)
diff --git a/Doc/library/exceptions.rst b/Doc/library/exceptions.rst
index 053ba56..ccc6005 100644
--- a/Doc/library/exceptions.rst
+++ b/Doc/library/exceptions.rst
@@ -246,6 +246,12 @@ The following exceptions are the exceptions that are usually raised.
:exc:`VMSError`, :exc:`socket.error`, :exc:`select.error` and
:exc:`mmap.error` have been merged into :exc:`OSError`.
+ .. versionchanged:: 3.4
+
+ The :attr:`filename` attribute is now the original file name passed to
+ the function, instead of the name encoded to or decoded from the
+ filesystem encoding.
+
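As an illustrative aside (not part of the patch), the new behaviour can be observed directly; the file name below is hypothetical::

    import errno

    try:
        open('no-such-file.txt')          # hypothetical missing file
    except OSError as exc:
        # Under 3.4, exc.filename is exactly the name passed to open(),
        # not a form re-encoded via the filesystem encoding.
        print(exc.errno == errno.ENOENT, exc.filename)
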
.. exception:: OverflowError
diff --git a/Doc/library/gc.rst b/Doc/library/gc.rst
index 41bda1e..95df2f8 100644
--- a/Doc/library/gc.rst
+++ b/Doc/library/gc.rst
@@ -67,6 +67,24 @@ The :mod:`gc` module provides the following functions:
returned.
+.. function:: get_stats()
+
+ Return a list of 3 per-generation dictionaries containing collection
+ statistics since interpreter start. At this moment, each dictionary will
+ contain the following items:
+
+ * ``collections`` is the number of times this generation was collected;
+
+ * ``collected`` is the total number of objects collected inside this
+ generation;
+
+ * ``uncollectable`` is the total number of objects which were found
+ to be uncollectable (and were therefore moved to the :data:`garbage`
+ list) inside this generation.
+
+ .. versionadded:: 3.4
+
+
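A minimal sketch (not from the patch) of reading the new per-generation statistics, assuming an interpreter that provides :func:`gc.get_stats`::

    import gc

    gc.collect()                          # force at least one collection
    for gen, stats in enumerate(gc.get_stats()):
        # Each entry is a dict with 'collections', 'collected' and
        # 'uncollectable' counters for that generation.
        print(gen, stats['collections'], stats['collected'],
              stats['uncollectable'])
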
.. function:: set_threshold(threshold0[, threshold1[, threshold2]])
Set the garbage collection thresholds (the collection frequency). Setting
diff --git a/Doc/library/hashlib.rst b/Doc/library/hashlib.rst
index bc8ab2c..4f5aac9 100644
--- a/Doc/library/hashlib.rst
+++ b/Doc/library/hashlib.rst
@@ -51,9 +51,13 @@ concatenation of the data fed to it so far using the :meth:`digest` or
.. index:: single: OpenSSL; (use in module hashlib)
Constructors for hash algorithms that are always present in this module are
-:func:`md5`, :func:`sha1`, :func:`sha224`, :func:`sha256`, :func:`sha384`, and
-:func:`sha512`. Additional algorithms may also be available depending upon the
-OpenSSL library that Python uses on your platform.
+:func:`md5`, :func:`sha1`, :func:`sha224`, :func:`sha256`, :func:`sha384`,
+:func:`sha512`, :func:`sha3_224`, :func:`sha3_256`, :func:`sha3_384`, and
+:func:`sha3_512`. Additional algorithms may also be available depending upon
+the OpenSSL library that Python uses on your platform.
+
+ .. versionchanged:: 3.4
+ Add sha3 family of hash algorithms.
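An illustrative sketch (not from the patch), assuming an interpreter where the sha3 constructors are available::

    import hashlib

    h = hashlib.sha3_256()
    h.update(b"Nobody inspects the spammish repetition")
    print(h.hexdigest())                  # 64 hex digits for the 256-bit digest

    # The generic constructor accepts the same algorithm names.
    print(hashlib.new('sha3_512', b'data').hexdigest())
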
For example, to obtain the digest of the byte string ``b'Nobody inspects the
spammish repetition'``::
diff --git a/Doc/library/http.server.rst b/Doc/library/http.server.rst
index cbad3ed..0577d5e 100644
--- a/Doc/library/http.server.rst
+++ b/Doc/library/http.server.rst
@@ -177,6 +177,9 @@ of which this module provides three different variants:
complete set of headers is sent, followed by text composed using the
:attr:`error_message_format` class variable.
+ .. versionchanged:: 3.4
+ The error response includes a Content-Length header.
+
.. method:: send_response(code, message=None)
Adds a response header to the headers buffer and logs the accepted
diff --git a/Doc/library/importlib.rst b/Doc/library/importlib.rst
index 681f1d9..55bd59e 100644
--- a/Doc/library/importlib.rst
+++ b/Doc/library/importlib.rst
@@ -133,8 +133,6 @@ ABC hierarchy::
+-- ExecutionLoader --+
+-- FileLoader
+-- SourceLoader
- +-- PyLoader (deprecated)
- +-- PyPycLoader (deprecated)
.. class:: Finder
@@ -431,142 +429,6 @@ ABC hierarchy::
itself does not end in ``__init__``.
-.. class:: PyLoader
-
- An abstract base class inheriting from
- :class:`ExecutionLoader` and
- :class:`ResourceLoader` designed to ease the loading of
- Python source modules (bytecode is not handled; see
- :class:`SourceLoader` for a source/bytecode ABC). A subclass
- implementing this ABC will only need to worry about exposing how the source
- code is stored; all other details for loading Python source code will be
- handled by the concrete implementations of key methods.
-
- .. deprecated:: 3.2
- This class has been deprecated in favor of :class:`SourceLoader` and is
- slated for removal in Python 3.4. See below for how to create a
- subclass that is compatible with Python 3.1 onwards.
-
- If compatibility with Python 3.1 is required, then use the following idiom
- to implement a subclass that will work with Python 3.1 onwards (make sure
- to implement :meth:`ExecutionLoader.get_filename`)::
-
- try:
- from importlib.abc import SourceLoader
- except ImportError:
- from importlib.abc import PyLoader as SourceLoader
-
-
- class CustomLoader(SourceLoader):
- def get_filename(self, fullname):
- """Return the path to the source file."""
- # Implement ...
-
- def source_path(self, fullname):
- """Implement source_path in terms of get_filename."""
- try:
- return self.get_filename(fullname)
- except ImportError:
- return None
-
- def is_package(self, fullname):
- """Implement is_package by looking for an __init__ file
- name as returned by get_filename."""
- filename = os.path.basename(self.get_filename(fullname))
- return os.path.splitext(filename)[0] == '__init__'
-
-
- .. method:: source_path(fullname)
-
- An abstract method that returns the path to the source code for a
- module. Should return ``None`` if there is no source code.
- Raises :exc:`ImportError` if the loader knows it cannot handle the
- module.
-
- .. method:: get_filename(fullname)
-
- A concrete implementation of
- :meth:`importlib.abc.ExecutionLoader.get_filename` that
- relies on :meth:`source_path`. If :meth:`source_path` returns
- ``None``, then :exc:`ImportError` is raised.
-
- .. method:: load_module(fullname)
-
- A concrete implementation of :meth:`importlib.abc.Loader.load_module`
- that loads Python source code. All needed information comes from the
- abstract methods required by this ABC. The only pertinent assumption
- made by this method is that when loading a package
- :attr:`__path__` is set to ``[os.path.dirname(__file__)]``.
-
- .. method:: get_code(fullname)
-
- A concrete implementation of
- :meth:`importlib.abc.InspectLoader.get_code` that creates code objects
- from Python source code, by requesting the source code (using
- :meth:`source_path` and :meth:`get_data`) and compiling it with the
- built-in :func:`compile` function.
-
- .. method:: get_source(fullname)
-
- A concrete implementation of
- :meth:`importlib.abc.InspectLoader.get_source`. Uses
- :meth:`importlib.abc.ResourceLoader.get_data` and :meth:`source_path`
- to get the source code. It tries to guess the source encoding using
- :func:`tokenize.detect_encoding`.
-
-
-.. class:: PyPycLoader
-
- An abstract base class inheriting from :class:`PyLoader`.
- This ABC is meant to help in creating loaders that support both Python
- source and bytecode.
-
- .. deprecated:: 3.2
- This class has been deprecated in favor of :class:`SourceLoader` and to
- properly support :pep:`3147`. If compatibility is required with
- Python 3.1, implement both :class:`SourceLoader` and :class:`PyLoader`;
- instructions on how to do so are included in the documentation for
- :class:`PyLoader`. Do note that this solution will not support
- sourceless/bytecode-only loading; only source *and* bytecode loading.
-
- .. versionchanged:: 3.3
- Updated to parse (but not use) the new source size field in bytecode
- files when reading and to write out the field properly when writing.
-
- .. method:: source_mtime(fullname)
-
- An abstract method which returns the modification time for the source
- code of the specified module. The modification time should be an
- integer. If there is no source code, return ``None``. If the
- module cannot be found then :exc:`ImportError` is raised.
-
- .. method:: bytecode_path(fullname)
-
- An abstract method which returns the path to the bytecode for the
- specified module, if it exists. It returns ``None``
- if no bytecode exists (yet).
- Raises :exc:`ImportError` if the loader knows it cannot handle the
- module.
-
- .. method:: get_filename(fullname)
-
- A concrete implementation of
- :meth:`ExecutionLoader.get_filename` that relies on
- :meth:`PyLoader.source_path` and :meth:`bytecode_path`.
- If :meth:`source_path` returns a path, then that value is returned.
- Else if :meth:`bytecode_path` returns a path, that path will be
- returned. If a path is not available from both methods,
- :exc:`ImportError` is raised.
-
- .. method:: write_bytecode(fullname, bytecode)
-
- An abstract method which has the loader write *bytecode* for future
- use. If the bytecode is written, return ``True``. Return
- ``False`` if the bytecode could not be written. This method
- should not be called if :data:`sys.dont_write_bytecode` is true.
- The *bytecode* argument should be a bytes string or bytes array.
-
-
:mod:`importlib.machinery` -- Importers and path hooks
------------------------------------------------------
diff --git a/Doc/library/logging.config.rst b/Doc/library/logging.config.rst
index 1391ed2..16d3294 100644
--- a/Doc/library/logging.config.rst
+++ b/Doc/library/logging.config.rst
@@ -76,11 +76,23 @@ in :mod:`logging` itself) and defining handlers which are declared either in
.. function:: fileConfig(fname, defaults=None, disable_existing_loggers=True)
- Reads the logging configuration from a :mod:`configparser`\-format file
- named *fname*. This function can be called several times from an
- application, allowing an end user to select from various pre-canned
- configurations (if the developer provides a mechanism to present the choices
- and load the chosen configuration).
+ Reads the logging configuration from a :mod:`configparser`\-format file.
+ This function can be called several times from an application, allowing an
+ end user to select from various pre-canned configurations (if the developer
+ provides a mechanism to present the choices and load the chosen
+ configuration).
+
+ :param fname: A filename, or a file-like object, or an instance derived
+ from :class:`~configparser.RawConfigParser`. If a
+ ``RawConfigParser``-derived instance is passed, it is used as
+ is. Otherwise, a :class:`~configparser.Configparser` is
+ instantiated, and the configuration read by it from the
+ object passed in ``fname``. If that has a :meth:`readline`
+ method, it is assumed to be a file-like object and read using
+ :meth:`~configparser.ConfigParser.read_file`; otherwise,
+ it is assumed to be a filename and passed to
+ :meth:`~configparser.ConfigParser.read`.
+
:param defaults: Defaults to be passed to the ConfigParser can be specified
in this argument.
@@ -94,8 +106,17 @@ in :mod:`logging` itself) and defining handlers which are declared either in
their ancestors are explicitly named in the
logging configuration.
+ .. versionchanged:: 3.4
+ An instance of a subclass of :class:`~configparser.RawConfigParser` is
+ now accepted as a value for ``fname``. This facilitates:
+
+ * Use of a configuration file where logging configuration is just part
+ of the overall application configuration.
+ * Use of a configuration read from a file, and then modified by the using
+ application (e.g. based on command-line parameters or other aspects
+ of the runtime environment) before being passed to ``fileConfig``.
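As an aside, a minimal sketch of the second use case above: read a combined application/logging configuration, adjust it, then hand the parser to ``fileConfig``. The file name and section name are hypothetical::

    import configparser
    import logging.config

    cp = configparser.ConfigParser()
    cp.read('app.ini')                    # hypothetical combined configuration file
    if cp.has_section('handler_console'):
        # Adjust the configuration before applying it, e.g. from a --verbose flag.
        cp['handler_console']['level'] = 'DEBUG'
    logging.config.fileConfig(cp, disable_existing_loggers=False)
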
-.. function:: listen(port=DEFAULT_LOGGING_CONFIG_PORT)
+.. function:: listen(port=DEFAULT_LOGGING_CONFIG_PORT, verify=None)
Starts up a socket server on the specified port, and listens for new
configurations. If no port is specified, the module's default
@@ -105,6 +126,17 @@ in :mod:`logging` itself) and defining handlers which are declared either in
server, and which you can :meth:`join` when appropriate. To stop the server,
call :func:`stopListening`.
+ The ``verify`` argument, if specified, should be a callable which should
+ verify whether bytes received across the socket are valid and should be
+ processed. This could be done by encrypting and/or signing what is sent
+ across the socket, such that the ``verify`` callable can perform
+ signature verification and/or decryption. The ``verify`` callable is called
+ with a single argument - the bytes received across the socket - and should
+ return the bytes to be processed, or None to indicate that the bytes should
+ be discarded. The returned bytes could be the same as the passed in bytes
+ (e.g. when only verification is done), or they could be completely different
+ (perhaps if decryption were performed).
+
To send a configuration to the socket, read in the configuration file and
send it to the socket as a string of bytes preceded by a four-byte length
string packed in binary using ``struct.pack('>L', n)``.
@@ -121,7 +153,12 @@ in :mod:`logging` itself) and defining handlers which are declared either in
:func:`listen` socket and sending a configuration which runs whatever
code the attacker wants to have executed in the victim's process. This is
especially easy to do if the default port is used, but not hard even if a
- different port is used).
+ different port is used). To avoid the risk of this happening, use the
+ ``verify`` argument to :func:`listen` to prevent unrecognised
+ configurations from being applied.
+
+ .. versionchanged:: 3.4.
+ The ``verify`` argument was added.
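For illustration only, a ``verify`` callable might check an HMAC that the sender is assumed to prepend to the configuration bytes; the shared key below is a placeholder::

    import hashlib
    import hmac
    import logging.config

    SECRET = b'not-a-real-key'            # hypothetical shared secret

    def verify(data):
        # Sender is assumed to prepend a 32-byte HMAC-SHA256 digest.
        digest, payload = data[:32], data[32:]
        expected = hmac.new(SECRET, payload, hashlib.sha256).digest()
        if hmac.compare_digest(digest, expected):
            return payload                # bytes to be processed
        return None                       # discard unverified configuration

    t = logging.config.listen(verify=verify)
    t.start()
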
.. function:: stopListening()
diff --git a/Doc/library/operator.rst b/Doc/library/operator.rst
index 3860880..877a790 100644
--- a/Doc/library/operator.rst
+++ b/Doc/library/operator.rst
@@ -235,6 +235,14 @@ their character equivalents.
.. XXX: find a better, readable, example
+.. function:: length_hint(obj, default=0)
+
+ Return an estimated length for the object *o*. First trying to return its
+ actual length, then an estimate using :meth:`object.__length_hint__`, and
+ finally returning the default value.
+
+ .. versionadded:: 3.4
+
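A small sketch (not from the patch) of how the hint differs from ``len``::

    import operator

    it = iter(range(10))
    next(it)                              # consume one item
    # len(it) would raise TypeError; length_hint falls back to __length_hint__.
    print(operator.length_hint(it))       # 9
    print(operator.length_hint(object(), 42))   # no hint available -> default
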
The :mod:`operator` module also defines tools for generalized attribute and item
lookups. These are useful for making fast field extractors as arguments for
:func:`map`, :func:`sorted`, :meth:`itertools.groupby`, or other functions that
diff --git a/Doc/library/os.path.rst b/Doc/library/os.path.rst
index 20a84b6..1053fa3 100644
--- a/Doc/library/os.path.rst
+++ b/Doc/library/os.path.rst
@@ -37,7 +37,6 @@ applications should use string objects to access all files.
* :mod:`posixpath` for UNIX-style paths
* :mod:`ntpath` for Windows paths
* :mod:`macpath` for old-style MacOS paths
- * :mod:`os2emxpath` for OS/2 EMX paths
.. function:: abspath(path)
diff --git a/Doc/library/os.rst b/Doc/library/os.rst
index e253aac..f4e4d6f 100644
--- a/Doc/library/os.rst
+++ b/Doc/library/os.rst
@@ -54,7 +54,7 @@ Notes on the availability of these functions:
The name of the operating system dependent module imported. The following
names have currently been registered: ``'posix'``, ``'nt'``, ``'mac'``,
- ``'os2'``, ``'ce'``, ``'java'``.
+ ``'ce'``, ``'java'``.
.. seealso::
:attr:`sys.platform` has a finer granularity. :func:`os.uname` gives
diff --git a/Doc/library/pty.rst b/Doc/library/pty.rst
index 2b9385b..90baec5 100644
--- a/Doc/library/pty.rst
+++ b/Doc/library/pty.rst
@@ -45,6 +45,9 @@ The :mod:`pty` module defines the following functions:
a file descriptor. The defaults try to read 1024 bytes each time they are
called.
+ .. versionchanged:: 3.4
+ :func:`spawn` now returns the status value from :func:`os.waitpid`
+ on the child process.
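A Unix-only sketch (not from the patch) of using the returned status; the spawned command is arbitrary::

    import os
    import pty

    status = pty.spawn(['true'])          # any short-lived command will do
    if os.WIFEXITED(status):
        print('exit code:', os.WEXITSTATUS(status))
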
Example
-------
diff --git a/Doc/library/shutil.rst b/Doc/library/shutil.rst
index 080c923..b2efcbd 100644
--- a/Doc/library/shutil.rst
+++ b/Doc/library/shutil.rst
@@ -53,7 +53,7 @@ Directory and files operations
*dst* and return *dst*. *src* and *dst* are path names given as strings.
*dst* must be the complete target file name; look at :func:`shutil.copy`
for a copy that accepts a target directory path. If *src* and *dst*
- specify the same file, :exc:`Error` is raised.
+ specify the same file, :exc:`SameFileError` is raised.
The destination location must be writable; otherwise, an :exc:`OSError`
exception will be raised. If *dst* already exists, it will be replaced.
@@ -69,6 +69,19 @@ Directory and files operations
Added *follow_symlinks* argument.
Now returns *dst*.
+ .. versionchanged:: 3.4
+ Raise :exc:`SameFileError` instead of :exc:`Error`. Since the former is
+ a subclass of the latter, this change is backward compatible.
+
+
+.. exception:: SameFileError
+
+ This exception is raised if source and destination in :func:`copyfile`
+ are the same file.
+
+ .. versionadded:: 3.4
+
+
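A short illustrative sketch of catching the new exception; the path is hypothetical::

    import shutil

    try:
        shutil.copyfile('settings.cfg', 'settings.cfg')   # source == destination
    except shutil.SameFileError as exc:
        print('copy skipped:', exc)
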
.. function:: copymode(src, dst, *, follow_symlinks=True)
Copy the permission bits from *src* to *dst*. The file contents, owner, and
diff --git a/Doc/library/sys.rst b/Doc/library/sys.rst
index cd6d4bf..0da2be8 100644
--- a/Doc/library/sys.rst
+++ b/Doc/library/sys.rst
@@ -837,8 +837,6 @@ always available.
Windows ``'win32'``
Windows/Cygwin ``'cygwin'``
Mac OS X ``'darwin'``
- OS/2 ``'os2'``
- OS/2 EMX ``'os2emx'``
================ ===========================
.. versionchanged:: 3.3
@@ -1117,7 +1115,6 @@ always available.
| :const:`name` | Name of the thread implementation: |
| | |
| | * ``'nt'``: Windows threads |
- | | * ``'os2'``: OS/2 threads |
| | * ``'pthread'``: POSIX threads |
| | * ``'solaris'``: Solaris threads |
+------------------+---------------------------------------------------------+
diff --git a/Doc/library/sysconfig.rst b/Doc/library/sysconfig.rst
index c47dcce..535ac54 100644
--- a/Doc/library/sysconfig.rst
+++ b/Doc/library/sysconfig.rst
@@ -83,8 +83,6 @@ Python currently supports seven schemes:
located under the user home directory.
- *nt*: scheme for NT platforms like Windows.
- *nt_user*: scheme for NT platforms, when the *user* option is used.
-- *os2*: scheme for OS/2 platforms.
-- *os2_home*: scheme for OS/2 patforms, when the *user* option is used.
Each scheme is itself composed of a series of paths and each path has a unique
identifier. Python currently uses eight paths:
diff --git a/Doc/library/timeit.rst b/Doc/library/timeit.rst
index a487917..0cc1586 100644
--- a/Doc/library/timeit.rst
+++ b/Doc/library/timeit.rst
@@ -151,7 +151,7 @@ The module defines three convenience functions and a public class:
t = Timer(...) # outside the try/except
try:
t.timeit(...) # or t.repeat(...)
- except:
+ except Exception:
t.print_exc()
The advantage over the standard traceback is that source lines in the
diff --git a/Doc/library/tkinter.rst b/Doc/library/tkinter.rst
index 83a5375..377694f 100644
--- a/Doc/library/tkinter.rst
+++ b/Doc/library/tkinter.rst
@@ -750,32 +750,6 @@ Entry widget indexes (index, view index, etc.)
displayed. You can use these :mod:`tkinter` functions to access these special
points in text widgets:
-.. function:: AtEnd()
- refers to the last position in the text
-
- .. deprecated:: 3.3
-
-.. function:: AtInsert()
- refers to the point where the text cursor is
-
- .. deprecated:: 3.3
-
-.. function:: AtSelFirst()
- indicates the beginning point of the selected text
-
- .. deprecated:: 3.3
-
-.. function:: AtSelLast()
- denotes the last point of the selected text and finally
-
- .. deprecated:: 3.3
-
-.. function:: At(x[, y])
- refers to the character at pixel location *x*, *y* (with *y* not used in the
- case of a text entry widget, which contains a single line of text).
-
- .. deprecated:: 3.3
-
Text widget indexes
The index notation for Text widgets is very rich and is best described in the Tk
man pages.
diff --git a/Doc/library/traceback.rst b/Doc/library/traceback.rst
index 32e5733..0533bea 100644
--- a/Doc/library/traceback.rst
+++ b/Doc/library/traceback.rst
@@ -146,7 +146,7 @@ module. ::
source = input(">>> ")
try:
exec(source, envdir)
- except:
+ except Exception:
print("Exception in user code:")
print("-"*60)
traceback.print_exc(file=sys.stdout)
diff --git a/Doc/library/unicodedata.rst b/Doc/library/unicodedata.rst
index 3787c36..d05142a 100644
--- a/Doc/library/unicodedata.rst
+++ b/Doc/library/unicodedata.rst
@@ -15,8 +15,8 @@
This module provides access to the Unicode Character Database (UCD) which
defines character properties for all Unicode characters. The data contained in
-this database is compiled from the `UCD version 6.1.0
-<http://www.unicode.org/Public/6.1.0/ucd>`_.
+this database is compiled from the `UCD version 6.2.0
+<http://www.unicode.org/Public/6.2.0/ucd>`_.
The module uses the same names and symbols as defined by Unicode
Standard Annex #44, `"Unicode Character Database"
diff --git a/Doc/library/venv.rst b/Doc/library/venv.rst
index 2499962..b2ecd93 100644
--- a/Doc/library/venv.rst
+++ b/Doc/library/venv.rst
@@ -75,8 +75,8 @@ creation according to their needs, the :class:`EnvBuilder` class.
* ``system_site_packages`` -- a Boolean value indicating that the system Python
site-packages should be available to the environment (defaults to ``False``).
- * ``clear`` -- a Boolean value which, if True, will delete any existing target
- directory instead of raising an exception (defaults to ``False``).
+ * ``clear`` -- a Boolean value which, if True, will delete the contents of
+ any existing target directory, before creating the environment.
* ``symlinks`` -- a Boolean value indicating whether to attempt to symlink the
Python binary (and any necessary DLLs or other binaries,
@@ -88,7 +88,6 @@ creation according to their needs, the :class:`EnvBuilder` class.
upgraded in-place (defaults to ``False``).
-
Creators of third-party virtual environment tools will be free to use the
provided ``EnvBuilder`` class as a base class.
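As an aside, a minimal sketch of driving the builder programmatically; the target directory is a made-up example::

    import venv

    builder = venv.EnvBuilder(system_site_packages=False,
                              clear=True,       # empty an existing target first
                              symlinks=True,
                              upgrade=False)
    builder.create('/tmp/demo-env')             # hypothetical target directory
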
diff --git a/Doc/license.rst b/Doc/license.rst
index 6326ce4..e56ca5b 100644
--- a/Doc/license.rst
+++ b/Doc/license.rst
@@ -122,6 +122,8 @@ been GPL-compatible; the table below summarizes the various releases.
+----------------+--------------+------------+------------+-----------------+
| 3.3.0 | 3.2 | 2012 | PSF | yes |
+----------------+--------------+------------+------------+-----------------+
+| 3.4.0 | 3.3.0 | 2014 | PSF | yes |
++----------------+--------------+------------+------------+-----------------+
.. note::
@@ -656,6 +658,25 @@ The :mod:`select` and contains the following notice for the kqueue interface::
SUCH DAMAGE.
+SHA-3
+-----
+
+The module :mod:`_sha3` and :mod:`hashlib` are using the reference
+implementation of Keccak. The files at :file:`Modules/_sha3/keccak/` contain
+the following note::
+
+ The Keccak sponge function, designed by Guido Bertoni, Joan Daemen,
+ Michaël Peeters and Gilles Van Assche. For more information, feedback or
+ questions, please refer to our website: http://keccak.noekeon.org/
+
+ Implementation by the designers,
+ hereby denoted as "the implementer".
+
+ To the extent possible under law, the implementer has waived all copyright
+ and related or neighboring rights to the source code in this file.
+ http://creativecommons.org/publicdomain/zero/1.0/
+
+
strtod and dtoa
---------------
diff --git a/Doc/reference/datamodel.rst b/Doc/reference/datamodel.rst
index d093383..beeaf83 100644
--- a/Doc/reference/datamodel.rst
+++ b/Doc/reference/datamodel.rst
@@ -1805,6 +1805,15 @@ through the container; for mappings, :meth:`__iter__` should be the same as
considered to be false in a Boolean context.
+.. method:: object.__length_hint__(self)
+
+ Called to implement :func:`operator.length_hint`. Should return an estimated
+ length for the object (which may be greater or less than the actual length).
+ The length must be an integer ``>=`` 0. This method is purely an
+ optimization and is never required for correctness.
+
+ .. versionadded:: 3.4
+
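A toy container (purely illustrative) showing how the hook is supplied::

    import operator

    class Reservoir:
        """Iterable whose exact size is only an estimate."""
        def __init__(self, estimate):
            self._estimate = estimate
        def __iter__(self):
            return iter(())
        def __length_hint__(self):
            # May over- or under-estimate; must be an integer >= 0.
            return self._estimate

    print(operator.length_hint(Reservoir(128)))   # 128
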
.. note::
Slicing is done exclusively with the following three methods. A call like ::
diff --git a/Doc/tools/sphinxext/indexsidebar.html b/Doc/tools/sphinxext/indexsidebar.html
index a0ec32f..ed5da0c 100644
--- a/Doc/tools/sphinxext/indexsidebar.html
+++ b/Doc/tools/sphinxext/indexsidebar.html
@@ -3,7 +3,7 @@
<h3>Docs for other versions</h3>
<ul>
<li><a href="http://docs.python.org/2.7/">Python 2.7 (stable)</a></li>
- <li><a href="http://docs.python.org/3.4/">Python 3.4 (in development)</a></li>
+ <li><a href="http://docs.python.org/3.3/">Python 3.3 (stable)</a></li>
<li><a href="http://www.python.org/doc/versions/">Old versions</a></li>
</ul>
diff --git a/Doc/tools/sphinxext/pyspecific.py b/Doc/tools/sphinxext/pyspecific.py
index e8eb703..df6e48a 100644
--- a/Doc/tools/sphinxext/pyspecific.py
+++ b/Doc/tools/sphinxext/pyspecific.py
@@ -10,7 +10,7 @@
"""
ISSUE_URI = 'http://bugs.python.org/issue%s'
-SOURCE_URI = 'http://hg.python.org/cpython/file/3.3/%s'
+SOURCE_URI = 'http://hg.python.org/cpython/file/default/%s'
from docutils import nodes, utils
from sphinx.util.nodes import split_explicit_title
diff --git a/Doc/tutorial/interpreter.rst b/Doc/tutorial/interpreter.rst
index cdc2bf2..c182511 100644
--- a/Doc/tutorial/interpreter.rst
+++ b/Doc/tutorial/interpreter.rst
@@ -10,13 +10,13 @@ Using the Python Interpreter
Invoking the Interpreter
========================
-The Python interpreter is usually installed as :file:`/usr/local/bin/python3.3`
+The Python interpreter is usually installed as :file:`/usr/local/bin/python3.4`
on those machines where it is available; putting :file:`/usr/local/bin` in your
Unix shell's search path makes it possible to start it by typing the command:
.. code-block:: text
- python3.3
+ python3.4
to the shell. [#]_ Since the choice of the directory where the interpreter lives
is an installation option, other places are possible; check with your local
@@ -24,11 +24,11 @@ Python guru or system administrator. (E.g., :file:`/usr/local/python` is a
popular alternative location.)
On Windows machines, the Python installation is usually placed in
-:file:`C:\\Python33`, though you can change this when you're running the
+:file:`C:\\Python34`, though you can change this when you're running the
installer. To add this directory to your path, you can type the following
command into the command prompt in a DOS box::
- set path=%path%;C:\python33
+ set path=%path%;C:\python34
Typing an end-of-file character (:kbd:`Control-D` on Unix, :kbd:`Control-Z` on
Windows) at the primary prompt causes the interpreter to exit with a zero exit
@@ -95,8 +95,8 @@ with the *secondary prompt*, by default three dots (``...``). The interpreter
prints a welcome message stating its version number and a copyright notice
before printing the first prompt::
- $ python3.3
- Python 3.3 (default, Sep 24 2012, 09:25:04)
+ $ python3.4
+ Python 3.4 (default, Sep 24 2012, 09:25:04)
[GCC 4.6.3] on linux2
Type "help", "copyright", "credits" or "license" for more information.
>>>
@@ -149,7 +149,7 @@ Executable Python Scripts
On BSD'ish Unix systems, Python scripts can be made directly executable, like
shell scripts, by putting the line ::
- #! /usr/bin/env python3.3
+ #! /usr/bin/env python3.4
(assuming that the interpreter is on the user's :envvar:`PATH`) at the beginning
of the script and giving the file an executable mode. The ``#!`` must be the
diff --git a/Doc/tutorial/stdlib.rst b/Doc/tutorial/stdlib.rst
index 1ebf792..cb892fd 100644
--- a/Doc/tutorial/stdlib.rst
+++ b/Doc/tutorial/stdlib.rst
@@ -15,7 +15,7 @@ operating system::
>>> import os
>>> os.getcwd() # Return the current working directory
- 'C:\\Python33'
+ 'C:\\Python34'
>>> os.chdir('/server/accesslogs') # Change current working directory
>>> os.system('mkdir today') # Run the command mkdir in the system shell
0
diff --git a/Doc/tutorial/stdlib2.rst b/Doc/tutorial/stdlib2.rst
index 4b6e036..3b9122f 100644
--- a/Doc/tutorial/stdlib2.rst
+++ b/Doc/tutorial/stdlib2.rst
@@ -275,7 +275,7 @@ applications include caching objects that are expensive to create::
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
d['primary'] # entry was automatically removed
- File "C:/python33/lib/weakref.py", line 46, in __getitem__
+ File "C:/python34/lib/weakref.py", line 46, in __getitem__
o = self.data[key]()
KeyError: 'primary'
diff --git a/Doc/using/venv-create.inc b/Doc/using/venv-create.inc
index 5fdbc9b..706ac5d 100644
--- a/Doc/using/venv-create.inc
+++ b/Doc/using/venv-create.inc
@@ -56,20 +56,21 @@ virtualenv will be created, according to the given options, at each
provided path.
Once a venv has been created, it can be "activated" using a script in the
-venv's binary directory. The invocation of the script is platform-specific: on
-a Posix platform, you would typically do::
-
- $ source <venv>/bin/activate
-
-whereas on Windows, you might do::
-
- C:\> <venv>/Scripts/activate
-
-if you are using the ``cmd.exe`` shell, or perhaps::
-
- PS C:\> <venv>/Scripts/Activate.ps1
-
-if you use PowerShell.
+venv's binary directory. The invocation of the script is platform-specific:
+
++-------------+-----------------+-----------------------------------------+
+| Platform | Shell | Command to activate virtual environment |
++=============+=================+=========================================+
+| Posix | bash/zsh | $ source <venv>/bin/activate |
++-------------+-----------------+-----------------------------------------+
+| | fish | $ . <venv>/bin/activate.fish |
++-------------+-----------------+-----------------------------------------+
+| | csh/tcsh | $ source <venv>/bin/activate.csh |
++-------------+-----------------+-----------------------------------------+
+| Windows | cmd.exe | C:\> <venv>/Scripts/activate.bat |
++-------------+-----------------+-----------------------------------------+
+| | PowerShell | PS C:\> <venv>/Scripts/Activate.ps1 |
++-------------+-----------------+-----------------------------------------+
You don't specifically *need* to activate an environment; activation just
prepends the venv's binary directory to your path, so that "python" invokes the
diff --git a/Doc/whatsnew/3.4.rst b/Doc/whatsnew/3.4.rst
new file mode 100644
index 0000000..d260838
--- /dev/null
+++ b/Doc/whatsnew/3.4.rst
@@ -0,0 +1,204 @@
+****************************
+ What's New In Python 3.4
+****************************
+
+.. :Author: Someone <email>
+ (uncomment if there is a principal author)
+
+.. Rules for maintenance:
+
+ * Anyone can add text to this document, but the maintainer reserves the
+ right to rewrite any additions. In particular, for obscure or esoteric
+ features, the maintainer may reduce any addition to a simple reference to
+ the new documentation rather than explaining the feature inline.
+
+ * While the maintainer will periodically go through Misc/NEWS
+ and add changes, it's best not to rely on this. We know from experience
+ that any changes that aren't in the What's New documentation around the
+ time of the original release will remain largely unknown to the community
+ for years, even if they're added later. We also know from experience that
+ other priorities can arise, and the maintainer will run out of time to do
+ updates - in such cases, end users will be much better served by partial
+ notifications that at least give a hint about new features to
+ investigate.
+
+ * This is not a complete list of every single change; completeness
+ is the purpose of Misc/NEWS. The What's New should focus on changes that
+ are visible to Python *users* and that *require* a feature release (i.e.
+ most bug fixes should only be recorded in Misc/NEWS)
+
+ * PEPs should not be marked Final until they have an entry in What's New.
+ A placeholder entry that is just a section header and a link to the PEP
+ (e.g ":pep:`397` has been implemented") is acceptable. If a PEP has been
+ implemented and noted in What's New, don't forget to mark it as Final!
+
+ * If you want to draw your new text to the attention of the
+ maintainer, add 'XXX' to the beginning of the paragraph or
+ section.
+
+ * It's OK to add just a very brief note about a change. For
+ example: "The :ref:`~socket.transmogrify()` function was added to the
+ :mod:`socket` module." The maintainer will research the change and
+ write the necessary text (if appropriate). The advantage of doing this
+ is that even if no more descriptive text is ever added, readers will at
+ least have a notification that the new feature exists and a link to the
+ relevant documentation.
+
+ * You can comment out your additions if you like, but it's not
+ necessary (especially when a final release is some months away).
+
+ * Credit the author of a patch or bugfix. Just the name is
+ sufficient; the e-mail address isn't necessary.
+
+ * It's helpful to add the bug/patch number as a comment:
+
+ The :ref:`~socket.transmogrify()` function was added to the
+ :mod:`socket` module. (Contributed by P.Y. Developer in :issue:`12345`.)
+
+ This saves the maintainer the effort of going through the Mercurial log
+ when researching a change.
+
+ * Cross referencing tip: :ref:`mod.attr` will display as ``mod.attr``,
+ while :ref:`~mod.attr` will display as ``attr``.
+
+This article explains the new features in Python 3.4, compared to 3.3.
+
+.. Python 3.4 was released on TBD.
+
+For full details, see the :source:`Misc/NEWS` file.
+
+.. note:: Prerelease users should be aware that this document is currently in
+ draft form. It will be updated substantially as Python 3.4 moves towards
+ release, so it's worth checking back even after reading earlier versions.
+
+
+.. seealso::
+
+ .. :pep:`4XX` - Python 3.4 Release Schedule
+
+
+Summary -- Release highlights
+=============================
+
+.. This section singles out the most important changes in Python 3.3.
+ Brevity is key.
+
+New syntax features:
+
+* None yet.
+
+New library modules:
+
+* None yet.
+
+New built-in features:
+
+* None yet.
+
+Implementation improvements:
+
+* None yet.
+
+Significantly Improved Library Modules:
+
+* SHA-3 (Keccak) support for :mod:`hashlib`.
+
+Security improvements:
+
+* None yet.
+
+Please read on for a comprehensive list of user-facing changes.
+
+
+.. PEP-sized items next.
+
+.. _pep-4XX:
+
+.. PEP 4XX: Example PEP
+.. ====================
+
+
+.. (Implemented by Foo Bar.)
+
+.. .. seealso::
+
+ :pep:`4XX` - Example PEP
+ PEP written by Example Author
+
+
+
+
+Other Language Changes
+======================
+
+Some smaller changes made to the core Python language are:
+
+* Unicode database updated to UCD version 6.2.
+
+
+
+New Modules
+===========
+
+.. module name
+.. -----------
+
+* None yet.
+
+
+Improved Modules
+================
+
+* None yet.
+
+
+Optimizations
+=============
+
+Major performance enhancements have been added:
+
+* The UTF-32 decoder is now 3x to 4x faster.
+
+
+Build and C API Changes
+=======================
+
+Changes to Python's build process and to the C API include:
+
+* None yet.
+
+
+Deprecated
+==========
+
+Unsupported Operating Systems
+-----------------------------
+
+* None yet.
+
+
+Deprecated Python modules, functions and methods
+------------------------------------------------
+
+* None yet.
+
+
+Deprecated functions and types of the C API
+-------------------------------------------
+
+* None yet.
+
+
+Deprecated features
+-------------------
+
+* None yet.
+
+
+Porting to Python 3.4
+=====================
+
+This section lists previously described changes and other bugfixes
+that may require changes to your code.
+
+* Nothing yet.
diff --git a/Doc/whatsnew/index.rst b/Doc/whatsnew/index.rst
index bc1206b..29902e4 100644
--- a/Doc/whatsnew/index.rst
+++ b/Doc/whatsnew/index.rst
@@ -11,6 +11,7 @@ anyone wishing to stay up-to-date after a new release.
.. toctree::
:maxdepth: 2
+ 3.4.rst
3.3.rst
3.2.rst
3.1.rst
diff --git a/Include/abstract.h b/Include/abstract.h
index 44b5af7..e2b0750 100644
--- a/Include/abstract.h
+++ b/Include/abstract.h
@@ -404,8 +404,9 @@ xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx*/
#define PyObject_Length PyObject_Size
#ifndef Py_LIMITED_API
- PyAPI_FUNC(Py_ssize_t) _PyObject_LengthHint(PyObject *o, Py_ssize_t);
+ PyAPI_FUNC(int) _PyObject_HasLen(PyObject *o);
#endif
+PyAPI_FUNC(Py_ssize_t) PyObject_LengthHint(PyObject *o, Py_ssize_t);
/*
Guess the size of object o using len(o) or o.__length_hint__().
diff --git a/Include/grammar.h b/Include/grammar.h
index 8426da3..862f6a8 100644
--- a/Include/grammar.h
+++ b/Include/grammar.h
@@ -76,7 +76,7 @@ dfa *PyGrammar_FindDFA(grammar *g, int type);
int addlabel(labellist *ll, int type, char *str);
int findlabel(labellist *ll, int type, char *str);
-char *PyGrammar_LabelRepr(label *lb);
+const char *PyGrammar_LabelRepr(label *lb);
void translatelabels(grammar *g);
void addfirstsets(grammar *g);
diff --git a/Include/object.h b/Include/object.h
index 387cadb..36d63af 100644
--- a/Include/object.h
+++ b/Include/object.h
@@ -362,7 +362,7 @@ typedef struct _typeobject {
PyBufferProcs *tp_as_buffer;
/* Flags to define presence of optional/expanded features */
- long tp_flags;
+ unsigned long tp_flags;
const char *tp_doc; /* Documentation string */
@@ -428,7 +428,7 @@ typedef struct{
const char* name;
int basicsize;
int itemsize;
- int flags;
+ unsigned int flags;
PyType_Slot *slots; /* terminated by slot==0. */
} PyType_Spec;
@@ -470,7 +470,7 @@ PyAPI_DATA(PyTypeObject) PyType_Type; /* built-in 'type' */
PyAPI_DATA(PyTypeObject) PyBaseObject_Type; /* built-in 'object' */
PyAPI_DATA(PyTypeObject) PySuper_Type; /* built-in 'super' */
-PyAPI_FUNC(long) PyType_GetFlags(PyTypeObject*);
+PyAPI_FUNC(unsigned long) PyType_GetFlags(PyTypeObject*);
#define PyType_Check(op) \
PyType_FastSubclass(Py_TYPE(op), Py_TPFLAGS_TYPE_SUBCLASS)
@@ -603,44 +603,44 @@ given type object has a specified feature.
*/
/* Set if the type object is dynamically allocated */
-#define Py_TPFLAGS_HEAPTYPE (1L<<9)
+#define Py_TPFLAGS_HEAPTYPE (1UL << 9)
/* Set if the type allows subclassing */
-#define Py_TPFLAGS_BASETYPE (1L<<10)
+#define Py_TPFLAGS_BASETYPE (1UL << 10)
/* Set if the type is 'ready' -- fully initialized */
-#define Py_TPFLAGS_READY (1L<<12)
+#define Py_TPFLAGS_READY (1UL << 12)
/* Set while the type is being 'readied', to prevent recursive ready calls */
-#define Py_TPFLAGS_READYING (1L<<13)
+#define Py_TPFLAGS_READYING (1UL << 13)
/* Objects support garbage collection (see objimp.h) */
-#define Py_TPFLAGS_HAVE_GC (1L<<14)
+#define Py_TPFLAGS_HAVE_GC (1UL << 14)
/* These two bits are preserved for Stackless Python, next after this is 17 */
#ifdef STACKLESS
-#define Py_TPFLAGS_HAVE_STACKLESS_EXTENSION (3L<<15)
+#define Py_TPFLAGS_HAVE_STACKLESS_EXTENSION (3UL << 15)
#else
#define Py_TPFLAGS_HAVE_STACKLESS_EXTENSION 0
#endif
/* Objects support type attribute cache */
-#define Py_TPFLAGS_HAVE_VERSION_TAG (1L<<18)
-#define Py_TPFLAGS_VALID_VERSION_TAG (1L<<19)
+#define Py_TPFLAGS_HAVE_VERSION_TAG (1UL << 18)
+#define Py_TPFLAGS_VALID_VERSION_TAG (1UL << 19)
/* Type is abstract and cannot be instantiated */
-#define Py_TPFLAGS_IS_ABSTRACT (1L<<20)
+#define Py_TPFLAGS_IS_ABSTRACT (1UL << 20)
/* These flags are used to determine if a type is a subclass. */
-#define Py_TPFLAGS_INT_SUBCLASS (1L<<23)
-#define Py_TPFLAGS_LONG_SUBCLASS (1L<<24)
-#define Py_TPFLAGS_LIST_SUBCLASS (1L<<25)
-#define Py_TPFLAGS_TUPLE_SUBCLASS (1L<<26)
-#define Py_TPFLAGS_BYTES_SUBCLASS (1L<<27)
-#define Py_TPFLAGS_UNICODE_SUBCLASS (1L<<28)
-#define Py_TPFLAGS_DICT_SUBCLASS (1L<<29)
-#define Py_TPFLAGS_BASE_EXC_SUBCLASS (1L<<30)
-#define Py_TPFLAGS_TYPE_SUBCLASS (1L<<31)
+#define Py_TPFLAGS_INT_SUBCLASS (1UL << 23)
+#define Py_TPFLAGS_LONG_SUBCLASS (1UL << 24)
+#define Py_TPFLAGS_LIST_SUBCLASS (1UL << 25)
+#define Py_TPFLAGS_TUPLE_SUBCLASS (1UL << 26)
+#define Py_TPFLAGS_BYTES_SUBCLASS (1UL << 27)
+#define Py_TPFLAGS_UNICODE_SUBCLASS (1UL << 28)
+#define Py_TPFLAGS_DICT_SUBCLASS (1UL << 29)
+#define Py_TPFLAGS_BASE_EXC_SUBCLASS (1UL << 30)
+#define Py_TPFLAGS_TYPE_SUBCLASS (1UL << 31)
#define Py_TPFLAGS_DEFAULT ( \
Py_TPFLAGS_HAVE_STACKLESS_EXTENSION | \
diff --git a/Include/osdefs.h b/Include/osdefs.h
index 90b430a..ed88da1 100644
--- a/Include/osdefs.h
+++ b/Include/osdefs.h
@@ -9,16 +9,10 @@ extern "C" {
/* Mod by chrish: QNX has WATCOM, but isn't DOS */
#if !defined(__QNX__)
-#if defined(MS_WINDOWS) || defined(__BORLANDC__) || defined(__WATCOMC__) || defined(__DJGPP__) || defined(PYOS_OS2)
-#if defined(PYOS_OS2) && defined(PYCC_GCC)
-#define MAXPATHLEN 260
-#define SEP L'/'
-#define ALTSEP L'\\'
-#else
+#if defined(MS_WINDOWS) || defined(__BORLANDC__) || defined(__WATCOMC__) || defined(__DJGPP__)
#define SEP L'\\'
#define ALTSEP L'/'
#define MAXPATHLEN 256
-#endif
#define DELIM L';'
#endif
#endif
diff --git a/Include/patchlevel.h b/Include/patchlevel.h
index b5919a4..a759045 100644
--- a/Include/patchlevel.h
+++ b/Include/patchlevel.h
@@ -17,13 +17,13 @@
/* Version parsed out into numeric values */
/*--start constants--*/
#define PY_MAJOR_VERSION 3
-#define PY_MINOR_VERSION 3
+#define PY_MINOR_VERSION 4
#define PY_MICRO_VERSION 0
-#define PY_RELEASE_LEVEL PY_RELEASE_LEVEL_FINAL
+#define PY_RELEASE_LEVEL PY_RELEASE_LEVEL_ALPHA
#define PY_RELEASE_SERIAL 0
/* Version as a string */
-#define PY_VERSION "3.3.0+"
+#define PY_VERSION "3.4.0a0"
/*--end constants--*/
/* Version as a single 4-byte hex number, e.g. 0x010502B2 == 1.5.2b2.
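A short sketch (not part of the patch) of how that 4-byte layout decodes from the Python side via sys.hexversion; with the values bumped above, 3.4.0a0 corresponds to 0x030400A0.

    import sys

    hv = sys.hexversion
    major  = (hv >> 24) & 0xFF
    minor  = (hv >> 16) & 0xFF
    micro  = (hv >> 8)  & 0xFF
    level  = (hv >> 4)  & 0xF    # 0xA alpha, 0xB beta, 0xC candidate, 0xF final
    serial = hv & 0xF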
diff --git a/Include/pyport.h b/Include/pyport.h
index eba34f9..944b83a 100644
--- a/Include/pyport.h
+++ b/Include/pyport.h
@@ -379,9 +379,6 @@ typedef size_t Py_uhash_t;
#endif
#ifdef HAVE_SYS_STAT_H
-#if defined(PYOS_OS2) && defined(PYCC_GCC)
-#include <sys/types.h>
-#endif
#include <sys/stat.h>
#elif defined(HAVE_STAT_H)
#include <stat.h>
@@ -868,4 +865,18 @@ extern pid_t forkpty(int *, char *, struct termios *, struct winsize *);
#endif
#endif
+/*
+ * Convenient macros to deal with endianness of the platform. WORDS_BIGENDIAN is
+ * detected by configure and defined in pyconfig.h. The code in pyconfig.h
+ * also takes care of Apple's universal builds.
+ */
+
+#ifdef WORDS_BIGENDIAN
+#define PY_BIG_ENDIAN 1
+#define PY_LITTLE_ENDIAN 0
+#else
+#define PY_BIG_ENDIAN 0
+#define PY_LITTLE_ENDIAN 1
+#endif
+
#endif /* Py_PYPORT_H */
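For reference, a minimal Python-level sketch (not part of the patch) of the same platform property that the new PY_BIG_ENDIAN/PY_LITTLE_ENDIAN macros expose, using sys.byteorder:

    import sys

    PY_LITTLE_ENDIAN = sys.byteorder == "little"
    PY_BIG_ENDIAN = not PY_LITTLE_ENDIAN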
diff --git a/Include/symtable.h b/Include/symtable.h
index 6ed3a2b..4d82f0c 100644
--- a/Include/symtable.h
+++ b/Include/symtable.h
@@ -41,6 +41,7 @@ typedef struct _symtable_entry {
PyObject *ste_name; /* string: name of current block */
PyObject *ste_varnames; /* list of function parameters */
PyObject *ste_children; /* list of child blocks */
+ PyObject *ste_directives;/* locations of global and nonlocal statements */
_Py_block_ty ste_type; /* module, class, or function */
int ste_unoptimized; /* false if namespace is optimized */
int ste_nested; /* true if block is nested */
diff --git a/Include/token.h b/Include/token.h
index f7f6504..905022b 100644
--- a/Include/token.h
+++ b/Include/token.h
@@ -75,7 +75,7 @@ extern "C" {
#define ISEOF(x) ((x) == ENDMARKER)
-PyAPI_DATA(char *) _PyParser_TokenNames[]; /* Token names */
+PyAPI_DATA(const char *) _PyParser_TokenNames[]; /* Token names */
PyAPI_FUNC(int) PyToken_OneChar(int);
PyAPI_FUNC(int) PyToken_TwoChars(int, int);
PyAPI_FUNC(int) PyToken_ThreeChars(int, int, int);
diff --git a/Include/unicodeobject.h b/Include/unicodeobject.h
index a8f5b5d..363776b 100644
--- a/Include/unicodeobject.h
+++ b/Include/unicodeobject.h
@@ -180,9 +180,9 @@ typedef unsigned char Py_UCS1;
} while (0)
/* macros to work with surrogates */
-#define Py_UNICODE_IS_SURROGATE(ch) (0xD800 <= ch && ch <= 0xDFFF)
-#define Py_UNICODE_IS_HIGH_SURROGATE(ch) (0xD800 <= ch && ch <= 0xDBFF)
-#define Py_UNICODE_IS_LOW_SURROGATE(ch) (0xDC00 <= ch && ch <= 0xDFFF)
+#define Py_UNICODE_IS_SURROGATE(ch) (0xD800 <= (ch) && (ch) <= 0xDFFF)
+#define Py_UNICODE_IS_HIGH_SURROGATE(ch) (0xD800 <= (ch) && (ch) <= 0xDBFF)
+#define Py_UNICODE_IS_LOW_SURROGATE(ch) (0xDC00 <= (ch) && (ch) <= 0xDFFF)
/* Join two surrogate characters and return a single Py_UCS4 value. */
#define Py_UNICODE_JOIN_SURROGATES(high, low) \
(((((Py_UCS4)(high) & 0x03FF) << 10) | \
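A small Python sketch (not part of the patch) of the surrogate range checks and the join arithmetic used by the macros above; the ranges and bit operations are taken directly from the C definitions.

    def is_high_surrogate(cp):
        return 0xD800 <= cp <= 0xDBFF

    def is_low_surrogate(cp):
        return 0xDC00 <= cp <= 0xDFFF

    def join_surrogates(high, low):
        # ((high & 0x03FF) << 10 | (low & 0x03FF)) + 0x10000, as in the macro
        return (((high & 0x03FF) << 10) | (low & 0x03FF)) + 0x10000

    assert join_surrogates(0xD83D, 0xDE00) == 0x1F600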
@@ -933,12 +933,28 @@ PyAPI_FUNC(int)
_PyUnicodeWriter_PrepareInternal(_PyUnicodeWriter *writer,
Py_ssize_t length, Py_UCS4 maxchar);
+/* Append a Unicode string.
+ Return 0 on success, raise an exception and return -1 on error. */
PyAPI_FUNC(int)
-_PyUnicodeWriter_WriteStr(_PyUnicodeWriter *writer, PyObject *str);
+_PyUnicodeWriter_WriteStr(_PyUnicodeWriter *writer,
+ PyObject *str /* Unicode string */
+ );
+/* Append a latin1-encoded byte string.
+ Return 0 on success, raise an exception and return -1 on error. */
+PyAPI_FUNC(int)
+_PyUnicodeWriter_WriteCstr(_PyUnicodeWriter *writer,
+ const char *str, /* latin1-encoded byte string */
+ Py_ssize_t len /* length in bytes */
+ );
+
+/* Get the value of the writer as a Unicode string. Clear the
+ buffer of the writer. Raise an exception and return NULL
+ on error. */
PyAPI_FUNC(PyObject *)
_PyUnicodeWriter_Finish(_PyUnicodeWriter *writer);
+/* Deallocate memory of a writer (clear its internal buffer). */
PyAPI_FUNC(void)
_PyUnicodeWriter_Dealloc(_PyUnicodeWriter *writer);
#endif
@@ -1950,7 +1966,8 @@ PyAPI_FUNC(PyObject *) PyUnicode_Replace(
);
/* Compare two strings and return -1, 0, 1 for less than, equal,
- greater than resp. */
+ greater than resp.
+ Raise an exception and return -1 on error. */
PyAPI_FUNC(int) PyUnicode_Compare(
PyObject *left, /* Left string */
diff --git a/LICENSE b/LICENSE
index 88eed1f..d98353f 100644
--- a/LICENSE
+++ b/LICENSE
@@ -75,6 +75,7 @@ the various releases.
3.2.2 3.2.1 2011 PSF yes
3.2.3 3.2.2 2012 PSF yes
3.3.0 3.2 2012 PSF yes
+ 3.4.0 3.3.0 2014 PSF yes
Footnotes:
diff --git a/Lib/_osx_support.py b/Lib/_osx_support.py
index b3aad56..1076778 100644
--- a/Lib/_osx_support.py
+++ b/Lib/_osx_support.py
@@ -38,7 +38,7 @@ def _find_executable(executable, path=None):
paths = path.split(os.pathsep)
base, ext = os.path.splitext(executable)
- if (sys.platform == 'win32' or os.name == 'os2') and (ext != '.exe'):
+ if (sys.platform == 'win32') and (ext != '.exe'):
executable = executable + '.exe'
if not os.path.isfile(executable):
diff --git a/Lib/argparse.py b/Lib/argparse.py
index f25b1b6..202f2cb 100644
--- a/Lib/argparse.py
+++ b/Lib/argparse.py
@@ -606,8 +606,7 @@ class HelpFormatter(object):
pass
else:
self._indent()
- for subaction in get_subactions():
- yield subaction
+ yield from get_subactions()
self._dedent()
def _split_lines(self, text, width):
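The pattern behind this and the many similar hunks below is PEP 380 generator delegation; a standalone sketch (not taken from the patch):

    def flatten(items):
        for item in items:
            if isinstance(item, list):
                yield from flatten(item)   # replaces: for x in flatten(item): yield x
            else:
                yield item

    assert list(flatten([1, [2, [3]], 4])) == [1, 2, 3, 4]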
diff --git a/Lib/bz2.py b/Lib/bz2.py
index c307507..6e6a2b9 100644
--- a/Lib/bz2.py
+++ b/Lib/bz2.py
@@ -9,7 +9,6 @@ __all__ = ["BZ2File", "BZ2Compressor", "BZ2Decompressor",
__author__ = "Nadeem Vawda <nadeem.vawda@gmail.com>"
-import builtins
import io
import warnings
@@ -28,6 +27,8 @@ _MODE_WRITE = 3
_BUFFER_SIZE = 8192
+_builtin_open = open
+
class BZ2File(io.BufferedIOBase):
@@ -43,12 +44,13 @@ class BZ2File(io.BufferedIOBase):
def __init__(self, filename, mode="r", buffering=None, compresslevel=9):
"""Open a bzip2-compressed file.
- If filename is a str or bytes object, is gives the name of the file to
- be opened. Otherwise, it should be a file object, which will be used to
- read or write the compressed data.
+ If filename is a str or bytes object, it gives the name
+ of the file to be opened. Otherwise, it should be a file object,
+ which will be used to read or write the compressed data.
- mode can be 'r' for reading (default), 'w' for (over)writing, or 'a' for
- appending. These can equivalently be given as 'rb', 'wb', and 'ab'.
+ mode can be 'r' for reading (default), 'w' for (over)writing,
+ or 'a' for appending. These can equivalently be given as 'rb',
+ 'wb', and 'ab'.
buffering is ignored. Its use is deprecated.
@@ -90,10 +92,10 @@ class BZ2File(io.BufferedIOBase):
mode_code = _MODE_WRITE
self._compressor = BZ2Compressor(compresslevel)
else:
- raise ValueError("Invalid mode: {!r}".format(mode))
+ raise ValueError("Invalid mode: %r" % (mode,))
if isinstance(filename, (str, bytes)):
- self._fp = builtins.open(filename, mode)
+ self._fp = _builtin_open(filename, mode)
self._closefp = True
self._mode = mode_code
elif hasattr(filename, "read") or hasattr(filename, "write"):
@@ -189,15 +191,17 @@ class BZ2File(io.BufferedIOBase):
if not rawblock:
if self._decompressor.eof:
+ # End-of-stream marker and end of file. We're good.
self._mode = _MODE_READ_EOF
self._size = self._pos
return False
else:
+ # Problem - we were expecting more compressed data.
raise EOFError("Compressed file ended before the "
"end-of-stream marker was reached")
- # Continue to next stream.
if self._decompressor.eof:
+ # Continue to next stream.
self._decompressor = BZ2Decompressor()
self._buffer = self._decompressor.decompress(rawblock)
@@ -412,7 +416,7 @@ class BZ2File(io.BufferedIOBase):
self._read_all(return_data=False)
offset = self._size + offset
else:
- raise ValueError("Invalid value for whence: {}".format(whence))
+ raise ValueError("Invalid value for whence: %s" % (whence,))
# Make it so that offset is the number of bytes to skip forward.
if offset < self._pos:
@@ -436,20 +440,20 @@ def open(filename, mode="rb", compresslevel=9,
encoding=None, errors=None, newline=None):
"""Open a bzip2-compressed file in binary or text mode.
- The filename argument can be an actual filename (a str or bytes object), or
- an existing file object to read from or write to.
+ The filename argument can be an actual filename (a str or bytes
+ object), or an existing file object to read from or write to.
- The mode argument can be "r", "rb", "w", "wb", "a" or "ab" for binary mode,
- or "rt", "wt" or "at" for text mode. The default mode is "rb", and the
- default compresslevel is 9.
+ The mode argument can be "r", "rb", "w", "wb", "a" or "ab" for
+ binary mode, or "rt", "wt" or "at" for text mode. The default mode
+ is "rb", and the default compresslevel is 9.
- For binary mode, this function is equivalent to the BZ2File constructor:
- BZ2File(filename, mode, compresslevel). In this case, the encoding, errors
- and newline arguments must not be provided.
+ For binary mode, this function is equivalent to the BZ2File
+ constructor: BZ2File(filename, mode, compresslevel). In this case,
+ the encoding, errors and newline arguments must not be provided.
For text mode, a BZ2File object is created, and wrapped in an
- io.TextIOWrapper instance with the specified encoding, error handling
- behavior, and line ending(s).
+ io.TextIOWrapper instance with the specified encoding, error
+ handling behavior, and line ending(s).
"""
if "t" in mode:
diff --git a/Lib/collections/abc.py b/Lib/collections/abc.py
index c23b7dd..18c07bf 100644
--- a/Lib/collections/abc.py
+++ b/Lib/collections/abc.py
@@ -430,8 +430,7 @@ class KeysView(MappingView, Set):
return key in self._mapping
def __iter__(self):
- for key in self._mapping:
- yield key
+ yield from self._mapping
KeysView.register(dict_keys)
diff --git a/Lib/concurrent/futures/_base.py b/Lib/concurrent/futures/_base.py
index 1e098be..883d993 100644
--- a/Lib/concurrent/futures/_base.py
+++ b/Lib/concurrent/futures/_base.py
@@ -198,8 +198,7 @@ def as_completed(fs, timeout=None):
waiter = _create_and_install_waiters(fs, _AS_COMPLETED)
try:
- for future in finished:
- yield future
+ yield from finished
while pending:
if timeout is None:
diff --git a/Lib/concurrent/futures/process.py b/Lib/concurrent/futures/process.py
index 04238a7..3c20b93 100644
--- a/Lib/concurrent/futures/process.py
+++ b/Lib/concurrent/futures/process.py
@@ -40,7 +40,7 @@ Local worker thread:
Process #1..n:
- reads _CallItems from "Call Q", executes the calls, and puts the resulting
- _ResultItems in "Request Q"
+ _ResultItems in "Result Q"
"""
__author__ = 'Brian Quinlan (brian@sweetapp.com)'
@@ -240,6 +240,8 @@ def _queue_management_worker(executor_reference,
"terminated abruptly while the future was "
"running or pending."
))
+ # Delete references to object. See issue16284
+ del work_item
pending_work_items.clear()
# Terminate remaining workers forcibly: the queues or their
# locks may be in a dirty state and block forever.
@@ -264,6 +266,8 @@ def _queue_management_worker(executor_reference,
work_item.future.set_exception(result_item.exception)
else:
work_item.future.set_result(result_item.result)
+ # Delete references to object. See issue16284
+ del work_item
# Check whether we should start shutting down.
executor = executor_reference()
# No more work items can be added if:
diff --git a/Lib/concurrent/futures/thread.py b/Lib/concurrent/futures/thread.py
index 95bb682..f9beb0f 100644
--- a/Lib/concurrent/futures/thread.py
+++ b/Lib/concurrent/futures/thread.py
@@ -63,6 +63,8 @@ def _worker(executor_reference, work_queue):
work_item = work_queue.get(block=True)
if work_item is not None:
work_item.run()
+ # Delete references to object. See issue16284
+ del work_item
continue
executor = executor_reference()
# Exit if:
diff --git a/Lib/dbm/__init__.py b/Lib/dbm/__init__.py
index 813a29d..3bff170 100644
--- a/Lib/dbm/__init__.py
+++ b/Lib/dbm/__init__.py
@@ -106,10 +106,8 @@ def whichdb(filename):
try:
f = io.open(filename + ".pag", "rb")
f.close()
- # dbm linked with gdbm on OS/2 doesn't have .dir file
- if not (ndbm.library == "GNU gdbm" and sys.platform == "os2emx"):
- f = io.open(filename + ".dir", "rb")
- f.close()
+ f = io.open(filename + ".dir", "rb")
+ f.close()
return "dbm.ndbm"
except IOError:
# some dbm emulations based on Berkeley DB generate a .db file
diff --git a/Lib/decimal.py b/Lib/decimal.py
index b74ab01..bb0c96e 100644
--- a/Lib/decimal.py
+++ b/Lib/decimal.py
@@ -703,8 +703,7 @@ class Decimal(object):
raise TypeError("Cannot convert %r to Decimal" % value)
- # @classmethod, but @decorator is not valid Python 2.3 syntax, so
- # don't use it (see notes on Py2.3 compatibility at top of file)
+ @classmethod
def from_float(cls, f):
"""Converts a float to a decimal number, exactly.
@@ -743,7 +742,6 @@ class Decimal(object):
return result
else:
return cls(result)
- from_float = classmethod(from_float)
def _isnan(self):
"""Returns whether the number is not actually one.
diff --git a/Lib/difflib.py b/Lib/difflib.py
index ae377d7..db5f82e 100644
--- a/Lib/difflib.py
+++ b/Lib/difflib.py
@@ -922,8 +922,7 @@ class Differ:
else:
raise ValueError('unknown tag %r' % (tag,))
- for line in g:
- yield line
+ yield from g
def _dump(self, tag, x, lo, hi):
"""Generate comparison results for a same-tagged range."""
@@ -942,8 +941,7 @@ class Differ:
second = self._dump('+', b, blo, bhi)
for g in first, second:
- for line in g:
- yield line
+ yield from g
def _fancy_replace(self, a, alo, ahi, b, blo, bhi):
r"""
@@ -997,8 +995,7 @@ class Differ:
# no non-identical "pretty close" pair
if eqi is None:
# no identical pair either -- treat it as a straight replace
- for line in self._plain_replace(a, alo, ahi, b, blo, bhi):
- yield line
+ yield from self._plain_replace(a, alo, ahi, b, blo, bhi)
return
# no close pair, but an identical pair -- synch up on that
best_i, best_j, best_ratio = eqi, eqj, 1.0
@@ -1010,8 +1007,7 @@ class Differ:
# identical
# pump out diffs from before the synch point
- for line in self._fancy_helper(a, alo, best_i, b, blo, best_j):
- yield line
+ yield from self._fancy_helper(a, alo, best_i, b, blo, best_j)
# do intraline marking on the synch pair
aelt, belt = a[best_i], b[best_j]
@@ -1033,15 +1029,13 @@ class Differ:
btags += ' ' * lb
else:
raise ValueError('unknown tag %r' % (tag,))
- for line in self._qformat(aelt, belt, atags, btags):
- yield line
+ yield from self._qformat(aelt, belt, atags, btags)
else:
# the synch pair is identical
yield ' ' + aelt
# pump out diffs from after the synch point
- for line in self._fancy_helper(a, best_i+1, ahi, b, best_j+1, bhi):
- yield line
+ yield from self._fancy_helper(a, best_i+1, ahi, b, best_j+1, bhi)
def _fancy_helper(self, a, alo, ahi, b, blo, bhi):
g = []
@@ -1053,8 +1047,7 @@ class Differ:
elif blo < bhi:
g = self._dump('+', b, blo, bhi)
- for line in g:
- yield line
+ yield from g
def _qformat(self, aline, bline, atags, btags):
r"""
diff --git a/Lib/distutils/__init__.py b/Lib/distutils/__init__.py
index 345ac4f..70a72b0 100644
--- a/Lib/distutils/__init__.py
+++ b/Lib/distutils/__init__.py
@@ -13,5 +13,5 @@ used from a setup script as
# Updated automatically by the Python release process.
#
#--start constants--
-__version__ = "3.3.0"
+__version__ = "3.4.0a0"
#--end constants--
diff --git a/Lib/distutils/ccompiler.py b/Lib/distutils/ccompiler.py
index c795c95..911e84d 100644
--- a/Lib/distutils/ccompiler.py
+++ b/Lib/distutils/ccompiler.py
@@ -351,7 +351,7 @@ class CCompiler:
return macros, objects, extra, pp_opts, build
def _get_cc_args(self, pp_opts, debug, before):
- # works for unixccompiler, emxccompiler, cygwinccompiler
+ # works for unixccompiler, cygwinccompiler
cc_args = pp_opts + ['-c']
if debug:
cc_args[:0] = ['-g']
@@ -926,7 +926,6 @@ _default_compilers = (
# on a cygwin built python we can use gcc like an ordinary UNIXish
# compiler
('cygwin.*', 'unix'),
- ('os2emx', 'emx'),
# OS name mappings
('posix', 'unix'),
@@ -968,8 +967,6 @@ compiler_class = { 'unix': ('unixccompiler', 'UnixCCompiler',
"Mingw32 port of GNU C Compiler for Win32"),
'bcpp': ('bcppcompiler', 'BCPPCompiler',
"Borland C++ Compiler"),
- 'emx': ('emxccompiler', 'EMXCCompiler',
- "EMX port of GNU C Compiler for OS/2"),
}
def show_compilers():
diff --git a/Lib/distutils/command/bdist.py b/Lib/distutils/command/bdist.py
index c5188eb..38b169a 100644
--- a/Lib/distutils/command/bdist.py
+++ b/Lib/distutils/command/bdist.py
@@ -52,8 +52,7 @@ class bdist(Command):
# This won't do in reality: will need to distinguish RPM-ish Linux,
# Debian-ish Linux, Solaris, FreeBSD, ..., Windows, Mac OS.
default_format = {'posix': 'gztar',
- 'nt': 'zip',
- 'os2': 'zip'}
+ 'nt': 'zip'}
# Establish the preferred order (for the --help-formats option).
format_commands = ['rpm', 'gztar', 'bztar', 'ztar', 'tar',
diff --git a/Lib/distutils/command/bdist_dumb.py b/Lib/distutils/command/bdist_dumb.py
index 1ab09d1..eefdfea 100644
--- a/Lib/distutils/command/bdist_dumb.py
+++ b/Lib/distutils/command/bdist_dumb.py
@@ -38,8 +38,7 @@ class bdist_dumb(Command):
boolean_options = ['keep-temp', 'skip-build', 'relative']
default_format = { 'posix': 'gztar',
- 'nt': 'zip',
- 'os2': 'zip' }
+ 'nt': 'zip' }
def initialize_options(self):
self.bdist_dir = None
@@ -85,11 +84,6 @@ class bdist_dumb(Command):
archive_basename = "%s.%s" % (self.distribution.get_fullname(),
self.plat_name)
- # OS/2 objects to any ":" characters in a filename (such as when
- # a timestamp is used in a version) so change them to hyphens.
- if os.name == "os2":
- archive_basename = archive_basename.replace(":", "-")
-
pseudoinstall_root = os.path.join(self.dist_dir, archive_basename)
if not self.relative:
archive_root = self.bdist_dir
diff --git a/Lib/distutils/command/build_ext.py b/Lib/distutils/command/build_ext.py
index b1d951e..f7c71b3 100644
--- a/Lib/distutils/command/build_ext.py
+++ b/Lib/distutils/command/build_ext.py
@@ -230,11 +230,6 @@ class build_ext(Command):
self.library_dirs.append(os.path.join(sys.exec_prefix,
'PC', 'VC6'))
- # OS/2 (EMX) doesn't support Debug vs Release builds, but has the
- # import libraries in its "Config" subdirectory
- if os.name == 'os2':
- self.library_dirs.append(os.path.join(sys.exec_prefix, 'Config'))
-
# for extensions under Cygwin and AtheOS Python's library directory must be
# appended to library_dirs
if sys.platform[:6] == 'cygwin' or sys.platform[:6] == 'atheos':
@@ -620,9 +615,6 @@ class build_ext(Command):
return fn
else:
return "swig.exe"
- elif os.name == "os2":
- # assume swig available in the PATH.
- return "swig.exe"
else:
raise DistutilsPlatformError(
"I don't know how to find (much less run) SWIG "
@@ -673,9 +665,6 @@ class build_ext(Command):
"""
from distutils.sysconfig import get_config_var
ext_path = ext_name.split('.')
- # OS/2 has an 8 character module (extension) limit :-(
- if os.name == "os2":
- ext_path[len(ext_path) - 1] = ext_path[len(ext_path) - 1][:8]
# extensions in debug_mode are named 'module_d.pyd' under windows
so_ext = get_config_var('SO')
if os.name == 'nt' and self.debug:
@@ -696,7 +685,7 @@ class build_ext(Command):
def get_libraries(self, ext):
"""Return the list of libraries to link against when building a
shared extension. On most platforms, this is just 'ext.libraries';
- on Windows and OS/2, we add the Python library (eg. python20.dll).
+ on Windows, we add the Python library (eg. python20.dll).
"""
# The python library is always needed on Windows. For MSVC, this
# is redundant, since the library is mentioned in a pragma in
@@ -716,19 +705,6 @@ class build_ext(Command):
return ext.libraries + [pythonlib]
else:
return ext.libraries
- elif sys.platform == "os2emx":
- # EMX/GCC requires the python library explicitly, and I
- # believe VACPP does as well (though not confirmed) - AIM Apr01
- template = "python%d%d"
- # debug versions of the main DLL aren't supported, at least
- # not at this time - AIM Apr01
- #if self.debug:
- # template = template + '_d'
- pythonlib = (template %
- (sys.hexversion >> 24, (sys.hexversion >> 16) & 0xff))
- # don't extend ext.libraries, it may be shared with other
- # extensions, it is a reference to the original list
- return ext.libraries + [pythonlib]
elif sys.platform[:6] == "cygwin":
template = "python%d.%d"
pythonlib = (template %
diff --git a/Lib/distutils/command/install.py b/Lib/distutils/command/install.py
index 0161898..04326a1 100644
--- a/Lib/distutils/command/install.py
+++ b/Lib/distutils/command/install.py
@@ -58,13 +58,6 @@ INSTALL_SCHEMES = {
'data' : '$base',
},
'nt': WINDOWS_SCHEME,
- 'os2': {
- 'purelib': '$base/Lib/site-packages',
- 'platlib': '$base/Lib/site-packages',
- 'headers': '$base/Include/$dist_name',
- 'scripts': '$base/Scripts',
- 'data' : '$base',
- },
}
# user site schemes
@@ -86,14 +79,6 @@ if HAS_USER_SITE:
'data' : '$userbase',
}
- INSTALL_SCHEMES['os2_home'] = {
- 'purelib': '$usersite',
- 'platlib': '$usersite',
- 'headers': '$userbase/include/python$py_version_short/$dist_name',
- 'scripts': '$userbase/bin',
- 'data' : '$userbase',
- }
-
# The keys to an installation scheme; if any new types of files are to be
# installed, be sure to add an entry to every installation scheme above,
# and to SCHEME_KEYS here.
diff --git a/Lib/distutils/emxccompiler.py b/Lib/distutils/emxccompiler.py
deleted file mode 100644
index 3675f8d..0000000
--- a/Lib/distutils/emxccompiler.py
+++ /dev/null
@@ -1,315 +0,0 @@
-"""distutils.emxccompiler
-
-Provides the EMXCCompiler class, a subclass of UnixCCompiler that
-handles the EMX port of the GNU C compiler to OS/2.
-"""
-
-# issues:
-#
-# * OS/2 insists that DLLs can have names no longer than 8 characters
-# We put export_symbols in a def-file, as though the DLL can have
-# an arbitrary length name, but truncate the output filename.
-#
-# * only use OMF objects and use LINK386 as the linker (-Zomf)
-#
-# * always build for multithreading (-Zmt) as the accompanying OS/2 port
-# of Python is only distributed with threads enabled.
-#
-# tested configurations:
-#
-# * EMX gcc 2.81/EMX 0.9d fix03
-
-import os,sys,copy
-from distutils.ccompiler import gen_preprocess_options, gen_lib_options
-from distutils.unixccompiler import UnixCCompiler
-from distutils.file_util import write_file
-from distutils.errors import DistutilsExecError, CompileError, UnknownFileError
-from distutils import log
-
-class EMXCCompiler (UnixCCompiler):
-
- compiler_type = 'emx'
- obj_extension = ".obj"
- static_lib_extension = ".lib"
- shared_lib_extension = ".dll"
- static_lib_format = "%s%s"
- shared_lib_format = "%s%s"
- res_extension = ".res" # compiled resource file
- exe_extension = ".exe"
-
- def __init__ (self,
- verbose=0,
- dry_run=0,
- force=0):
-
- UnixCCompiler.__init__ (self, verbose, dry_run, force)
-
- (status, details) = check_config_h()
- self.debug_print("Python's GCC status: %s (details: %s)" %
- (status, details))
- if status is not CONFIG_H_OK:
- self.warn(
- "Python's pyconfig.h doesn't seem to support your compiler. " +
- ("Reason: %s." % details) +
- "Compiling may fail because of undefined preprocessor macros.")
-
- (self.gcc_version, self.ld_version) = \
- get_versions()
- self.debug_print(self.compiler_type + ": gcc %s, ld %s\n" %
- (self.gcc_version,
- self.ld_version) )
-
- # Hard-code GCC because that's what this is all about.
- # XXX optimization, warnings etc. should be customizable.
- self.set_executables(compiler='gcc -Zomf -Zmt -O3 -fomit-frame-pointer -mprobe -Wall',
- compiler_so='gcc -Zomf -Zmt -O3 -fomit-frame-pointer -mprobe -Wall',
- linker_exe='gcc -Zomf -Zmt -Zcrtdll',
- linker_so='gcc -Zomf -Zmt -Zcrtdll -Zdll')
-
- # want the gcc library statically linked (so that we don't have
- # to distribute a version dependent on the compiler we have)
- self.dll_libraries=["gcc"]
-
- # __init__ ()
-
- def _compile(self, obj, src, ext, cc_args, extra_postargs, pp_opts):
- if ext == '.rc':
- # gcc requires '.rc' compiled to binary ('.res') files !!!
- try:
- self.spawn(["rc", "-r", src])
- except DistutilsExecError as msg:
- raise CompileError(msg)
- else: # for other files use the C-compiler
- try:
- self.spawn(self.compiler_so + cc_args + [src, '-o', obj] +
- extra_postargs)
- except DistutilsExecError as msg:
- raise CompileError(msg)
-
- def link (self,
- target_desc,
- objects,
- output_filename,
- output_dir=None,
- libraries=None,
- library_dirs=None,
- runtime_library_dirs=None,
- export_symbols=None,
- debug=0,
- extra_preargs=None,
- extra_postargs=None,
- build_temp=None,
- target_lang=None):
-
- # use separate copies, so we can modify the lists
- extra_preargs = copy.copy(extra_preargs or [])
- libraries = copy.copy(libraries or [])
- objects = copy.copy(objects or [])
-
- # Additional libraries
- libraries.extend(self.dll_libraries)
-
- # handle export symbols by creating a def-file
- # with executables this only works with gcc/ld as linker
- if ((export_symbols is not None) and
- (target_desc != self.EXECUTABLE)):
- # (The linker doesn't do anything if output is up-to-date.
- # So it would probably better to check if we really need this,
- # but for this we had to insert some unchanged parts of
- # UnixCCompiler, and this is not what we want.)
-
- # we want to put some files in the same directory as the
- # object files are, build_temp doesn't help much
- # where are the object files
- temp_dir = os.path.dirname(objects[0])
- # name of dll to give the helper files the same base name
- (dll_name, dll_extension) = os.path.splitext(
- os.path.basename(output_filename))
-
- # generate the filenames for these files
- def_file = os.path.join(temp_dir, dll_name + ".def")
-
- # Generate .def file
- contents = [
- "LIBRARY %s INITINSTANCE TERMINSTANCE" % \
- os.path.splitext(os.path.basename(output_filename))[0],
- "DATA MULTIPLE NONSHARED",
- "EXPORTS"]
- for sym in export_symbols:
- contents.append(' "%s"' % sym)
- self.execute(write_file, (def_file, contents),
- "writing %s" % def_file)
-
- # next add options for def-file and to creating import libraries
- # for gcc/ld the def-file is specified as any other object files
- objects.append(def_file)
-
- #end: if ((export_symbols is not None) and
- # (target_desc != self.EXECUTABLE or self.linker_dll == "gcc")):
-
- # who wants symbols and a many times larger output file
- # should explicitly switch the debug mode on
- # otherwise we let dllwrap/ld strip the output file
- # (On my machine: 10KB < stripped_file < ??100KB
- # unstripped_file = stripped_file + XXX KB
- # ( XXX=254 for a typical python extension))
- if not debug:
- extra_preargs.append("-s")
-
- UnixCCompiler.link(self,
- target_desc,
- objects,
- output_filename,
- output_dir,
- libraries,
- library_dirs,
- runtime_library_dirs,
- None, # export_symbols, we do this in our def-file
- debug,
- extra_preargs,
- extra_postargs,
- build_temp,
- target_lang)
-
- # link ()
-
- # -- Miscellaneous methods -----------------------------------------
-
- # override the object_filenames method from CCompiler to
- # support rc and res-files
- def object_filenames (self,
- source_filenames,
- strip_dir=0,
- output_dir=''):
- if output_dir is None: output_dir = ''
- obj_names = []
- for src_name in source_filenames:
- # use normcase to make sure '.rc' is really '.rc' and not '.RC'
- (base, ext) = os.path.splitext (os.path.normcase(src_name))
- if ext not in (self.src_extensions + ['.rc']):
- raise UnknownFileError("unknown file type '%s' (from '%s')" % \
- (ext, src_name))
- if strip_dir:
- base = os.path.basename (base)
- if ext == '.rc':
- # these need to be compiled to object files
- obj_names.append (os.path.join (output_dir,
- base + self.res_extension))
- else:
- obj_names.append (os.path.join (output_dir,
- base + self.obj_extension))
- return obj_names
-
- # object_filenames ()
-
- # override the find_library_file method from UnixCCompiler
- # to deal with file naming/searching differences
- def find_library_file(self, dirs, lib, debug=0):
- shortlib = '%s.lib' % lib
- longlib = 'lib%s.lib' % lib # this form very rare
-
- # get EMX's default library directory search path
- try:
- emx_dirs = os.environ['LIBRARY_PATH'].split(';')
- except KeyError:
- emx_dirs = []
-
- for dir in dirs + emx_dirs:
- shortlibp = os.path.join(dir, shortlib)
- longlibp = os.path.join(dir, longlib)
- if os.path.exists(shortlibp):
- return shortlibp
- elif os.path.exists(longlibp):
- return longlibp
-
- # Oops, didn't find it in *any* of 'dirs'
- return None
-
-# class EMXCCompiler
-
-
-# Because these compilers aren't configured in Python's pyconfig.h file by
-# default, we should at least warn the user if he is using a unmodified
-# version.
-
-CONFIG_H_OK = "ok"
-CONFIG_H_NOTOK = "not ok"
-CONFIG_H_UNCERTAIN = "uncertain"
-
-def check_config_h():
-
- """Check if the current Python installation (specifically, pyconfig.h)
- appears amenable to building extensions with GCC. Returns a tuple
- (status, details), where 'status' is one of the following constants:
- CONFIG_H_OK
- all is well, go ahead and compile
- CONFIG_H_NOTOK
- doesn't look good
- CONFIG_H_UNCERTAIN
- not sure -- unable to read pyconfig.h
- 'details' is a human-readable string explaining the situation.
-
- Note there are two ways to conclude "OK": either 'sys.version' contains
- the string "GCC" (implying that this Python was built with GCC), or the
- installed "pyconfig.h" contains the string "__GNUC__".
- """
-
- # XXX since this function also checks sys.version, it's not strictly a
- # "pyconfig.h" check -- should probably be renamed...
-
- from distutils import sysconfig
- # if sys.version contains GCC then python was compiled with
- # GCC, and the pyconfig.h file should be OK
- if sys.version.find("GCC") >= 0:
- return (CONFIG_H_OK, "sys.version mentions 'GCC'")
-
- fn = sysconfig.get_config_h_filename()
- try:
- # It would probably better to read single lines to search.
- # But we do this only once, and it is fast enough
- f = open(fn)
- try:
- s = f.read()
- finally:
- f.close()
-
- except IOError as exc:
- # if we can't read this file, we cannot say it is wrong
- # the compiler will complain later about this file as missing
- return (CONFIG_H_UNCERTAIN,
- "couldn't read '%s': %s" % (fn, exc.strerror))
-
- else:
- # "pyconfig.h" contains an "#ifdef __GNUC__" or something similar
- if s.find("__GNUC__") >= 0:
- return (CONFIG_H_OK, "'%s' mentions '__GNUC__'" % fn)
- else:
- return (CONFIG_H_NOTOK, "'%s' does not mention '__GNUC__'" % fn)
-
-
-def get_versions():
- """ Try to find out the versions of gcc and ld.
- If not possible it returns None for it.
- """
- from distutils.version import StrictVersion
- from distutils.spawn import find_executable
- import re
-
- gcc_exe = find_executable('gcc')
- if gcc_exe:
- out = os.popen(gcc_exe + ' -dumpversion','r')
- try:
- out_string = out.read()
- finally:
- out.close()
- result = re.search('(\d+\.\d+\.\d+)', out_string, re.ASCII)
- if result:
- gcc_version = StrictVersion(result.group(1))
- else:
- gcc_version = None
- else:
- gcc_version = None
- # EMX ld has no way of reporting version number, and we use GCC
- # anyway - so we can link OMF DLLs
- ld_version = None
- return (gcc_version, ld_version)
diff --git a/Lib/distutils/spawn.py b/Lib/distutils/spawn.py
index f58c55f..b1c5a44 100644
--- a/Lib/distutils/spawn.py
+++ b/Lib/distutils/spawn.py
@@ -32,8 +32,6 @@ def spawn(cmd, search_path=1, verbose=0, dry_run=0):
_spawn_posix(cmd, search_path, dry_run=dry_run)
elif os.name == 'nt':
_spawn_nt(cmd, search_path, dry_run=dry_run)
- elif os.name == 'os2':
- _spawn_os2(cmd, search_path, dry_run=dry_run)
else:
raise DistutilsPlatformError(
"don't know how to spawn programs on platform '%s'" % os.name)
@@ -74,26 +72,6 @@ def _spawn_nt(cmd, search_path=1, verbose=0, dry_run=0):
raise DistutilsExecError(
"command '%s' failed with exit status %d" % (cmd[0], rc))
-def _spawn_os2(cmd, search_path=1, verbose=0, dry_run=0):
- executable = cmd[0]
- if search_path:
- # either we find one or it stays the same
- executable = find_executable(executable) or executable
- log.info(' '.join([executable] + cmd[1:]))
- if not dry_run:
- # spawnv for OS/2 EMX requires a full path to the .exe
- try:
- rc = os.spawnv(os.P_WAIT, executable, cmd)
- except OSError as exc:
- # this seems to happen when the command isn't found
- raise DistutilsExecError(
- "command '%s' failed: %s" % (cmd[0], exc.args[-1]))
- if rc != 0:
- # and this reflects the command running but failing
- log.debug("command '%s' failed with exit status %d" % (cmd[0], rc))
- raise DistutilsExecError(
- "command '%s' failed with exit status %d" % (cmd[0], rc))
-
if sys.platform == 'darwin':
from distutils import sysconfig
_cfg_target = None
@@ -180,7 +158,7 @@ def find_executable(executable, path=None):
paths = path.split(os.pathsep)
base, ext = os.path.splitext(executable)
- if (sys.platform == 'win32' or os.name == 'os2') and (ext != '.exe'):
+ if (sys.platform == 'win32') and (ext != '.exe'):
executable = executable + '.exe'
if not os.path.isfile(executable):
diff --git a/Lib/distutils/sysconfig.py b/Lib/distutils/sysconfig.py
index 317640c..3b6549f 100644
--- a/Lib/distutils/sysconfig.py
+++ b/Lib/distutils/sysconfig.py
@@ -110,8 +110,6 @@ def get_python_inc(plat_specific=0, prefix=None):
return os.path.join(prefix, "include", python_dir)
elif os.name == "nt":
return os.path.join(prefix, "include")
- elif os.name == "os2":
- return os.path.join(prefix, "Include")
else:
raise DistutilsPlatformError(
"I don't know where Python installs its C header files "
@@ -153,11 +151,6 @@ def get_python_lib(plat_specific=0, standard_lib=0, prefix=None):
return prefix
else:
return os.path.join(prefix, "Lib", "site-packages")
- elif os.name == "os2":
- if standard_lib:
- return os.path.join(prefix, "Lib")
- else:
- return os.path.join(prefix, "Lib", "site-packages")
else:
raise DistutilsPlatformError(
"I don't know where Python installs its library "
@@ -492,23 +485,6 @@ def _init_nt():
_config_vars = g
-def _init_os2():
- """Initialize the module as appropriate for OS/2"""
- g = {}
- # set basic install directories
- g['LIBDEST'] = get_python_lib(plat_specific=0, standard_lib=1)
- g['BINLIBDEST'] = get_python_lib(plat_specific=1, standard_lib=1)
-
- # XXX hmmm.. a normal install puts include files here
- g['INCLUDEPY'] = get_python_inc(plat_specific=0)
-
- g['SO'] = '.pyd'
- g['EXE'] = ".exe"
-
- global _config_vars
- _config_vars = g
-
-
def get_config_vars(*args):
"""With no arguments, return a dictionary of all configuration
variables relevant for the current platform. Generally this includes
diff --git a/Lib/distutils/tests/test_bdist_dumb.py b/Lib/distutils/tests/test_bdist_dumb.py
index 1037d82..761051c 100644
--- a/Lib/distutils/tests/test_bdist_dumb.py
+++ b/Lib/distutils/tests/test_bdist_dumb.py
@@ -75,8 +75,6 @@ class BuildDumbTestCase(support.TempdirManager,
# see what we have
dist_created = os.listdir(os.path.join(pkg_dir, 'dist'))
base = "%s.%s.zip" % (dist.get_fullname(), cmd.plat_name)
- if os.name == 'os2':
- base = base.replace(':', '-')
self.assertEqual(dist_created, [base])
diff --git a/Lib/distutils/tests/test_install.py b/Lib/distutils/tests/test_install.py
index cb2e1f2..47d630c 100644
--- a/Lib/distutils/tests/test_install.py
+++ b/Lib/distutils/tests/test_install.py
@@ -94,7 +94,7 @@ class InstallTestCase(support.TempdirManager,
self.addCleanup(cleanup)
- for key in ('nt_user', 'unix_user', 'os2_home'):
+ for key in ('nt_user', 'unix_user'):
self.assertIn(key, INSTALL_SCHEMES)
dist = Distribution({'name': 'xx'})
diff --git a/Lib/distutils/tests/test_util.py b/Lib/distutils/tests/test_util.py
index eac9b51..b73cfe9 100644
--- a/Lib/distutils/tests/test_util.py
+++ b/Lib/distutils/tests/test_util.py
@@ -236,7 +236,7 @@ class UtilTestCase(support.EnvironGuard, unittest.TestCase):
self.assertRaises(DistutilsPlatformError,
change_root, 'c:\\root', 'its\\here')
- # XXX platforms to be covered: os2, mac
+ # XXX platforms to be covered: mac
def test_check_environ(self):
util._environ_checked = 0
diff --git a/Lib/distutils/util.py b/Lib/distutils/util.py
index 67d8166..ba09ac4 100644
--- a/Lib/distutils/util.py
+++ b/Lib/distutils/util.py
@@ -154,12 +154,6 @@ def change_root (new_root, pathname):
path = path[1:]
return os.path.join(new_root, path)
- elif os.name == 'os2':
- (drive, path) = os.path.splitdrive(pathname)
- if path[0] == os.sep:
- path = path[1:]
- return os.path.join(new_root, path)
-
else:
raise DistutilsPlatformError("nothing known about platform '%s'" % os.name)
diff --git a/Lib/email/_header_value_parser.py b/Lib/email/_header_value_parser.py
index 1924ed1..b596462 100644
--- a/Lib/email/_header_value_parser.py
+++ b/Lib/email/_header_value_parser.py
@@ -367,8 +367,7 @@ class TokenList(list):
yield (indent + ' !! invalid element in token '
'list: {!r}'.format(token))
else:
- for line in token._pp(indent+' '):
- yield line
+ yield from token._pp(indent+' ')
if self.defects:
extra = ' Defects: {}'.format(self.defects)
else:
diff --git a/Lib/email/iterators.py b/Lib/email/iterators.py
index 3adc4a0..b5502ee 100644
--- a/Lib/email/iterators.py
+++ b/Lib/email/iterators.py
@@ -26,8 +26,7 @@ def walk(self):
yield self
if self.is_multipart():
for subpart in self.get_payload():
- for subsubpart in subpart.walk():
- yield subsubpart
+ yield from subpart.walk()
@@ -40,8 +39,7 @@ def body_line_iterator(msg, decode=False):
for subpart in msg.walk():
payload = subpart.get_payload(decode=decode)
if isinstance(payload, str):
- for line in StringIO(payload):
- yield line
+ yield from StringIO(payload)
def typed_subpart_iterator(msg, maintype='text', subtype=None):
diff --git a/Lib/glob.py b/Lib/glob.py
index 36d493d..3431a69 100644
--- a/Lib/glob.py
+++ b/Lib/glob.py
@@ -26,8 +26,7 @@ def iglob(pathname):
return
dirname, basename = os.path.split(pathname)
if not dirname:
- for name in glob1(None, basename):
- yield name
+ yield from glob1(None, basename)
return
if has_magic(dirname):
dirs = iglob(dirname)
diff --git a/Lib/hashlib.py b/Lib/hashlib.py
index 21454c7..a1bd8b2 100644
--- a/Lib/hashlib.py
+++ b/Lib/hashlib.py
@@ -54,7 +54,8 @@ More condensed:
# This tuple and __get_builtin_constructor() must be modified if a new
# always available algorithm is added.
-__always_supported = ('md5', 'sha1', 'sha224', 'sha256', 'sha384', 'sha512')
+__always_supported = ('md5', 'sha1', 'sha224', 'sha256', 'sha384', 'sha512',
+ 'sha3_224', 'sha3_256', 'sha3_384', 'sha3_512')
algorithms_guaranteed = set(__always_supported)
algorithms_available = set(__always_supported)
@@ -85,6 +86,18 @@ def __get_builtin_constructor(name):
return _sha512.sha512
elif bs == '384':
return _sha512.sha384
+ elif name in {'sha3_224', 'sha3_256', 'sha3_384', 'sha3_512',
+ 'SHA3_224', 'SHA3_256', 'SHA3_384', 'SHA3_512'}:
+ import _sha3
+ bs = name[5:]
+ if bs == '224':
+ return _sha3.sha3_224
+ elif bs == '256':
+ return _sha3.sha3_256
+ elif bs == '384':
+ return _sha3.sha3_384
+ elif bs == '512':
+ return _sha3.sha3_512
except ImportError:
pass # no extension module, this hash is unsupported.
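A usage sketch (not part of the patch, and assuming the new _sha3 extension module is actually built): once registered, the SHA-3 variants are reachable by name or via hashlib.new().

    import hashlib

    h = hashlib.new("sha3_256")
    h.update(b"some data")
    assert len(h.hexdigest()) == 64   # 256-bit digest -> 64 hex characters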
diff --git a/Lib/http/cookiejar.py b/Lib/http/cookiejar.py
index 901e762..a77dc3f 100644
--- a/Lib/http/cookiejar.py
+++ b/Lib/http/cookiejar.py
@@ -1193,8 +1193,7 @@ def deepvalues(mapping):
pass
else:
mapping = True
- for subobj in deepvalues(obj):
- yield subobj
+ yield from deepvalues(obj)
if not mapping:
yield obj
diff --git a/Lib/http/server.py b/Lib/http/server.py
index c4ac703..7167142 100644
--- a/Lib/http/server.py
+++ b/Lib/http/server.py
@@ -425,12 +425,14 @@ class BaseHTTPRequestHandler(socketserver.StreamRequestHandler):
# using _quote_html to prevent Cross Site Scripting attacks (see bug #1100201)
content = (self.error_message_format %
{'code': code, 'message': _quote_html(message), 'explain': explain})
+ body = content.encode('UTF-8', 'replace')
self.send_response(code, message)
self.send_header("Content-Type", self.error_content_type)
self.send_header('Connection', 'close')
+ self.send_header('Content-Length', int(len(body)))
self.end_headers()
if self.command != 'HEAD' and code >= 200 and code not in (204, 304):
- self.wfile.write(content.encode('UTF-8', 'replace'))
+ self.wfile.write(body)
def send_response(self, code, message=None):
"""Add the response header to the headers buffer and log the
diff --git a/Lib/idlelib/NEWS.txt b/Lib/idlelib/NEWS.txt
index f6e8917d..d193b0a 100644
--- a/Lib/idlelib/NEWS.txt
+++ b/Lib/idlelib/NEWS.txt
@@ -1,4 +1,4 @@
-What's New in IDLE 3.3.1?
+What's New in IDLE 3.4.0?
=========================
- Issue #16226: Fix IDLE Path Browser crash.
diff --git a/Lib/idlelib/PyShell.py b/Lib/idlelib/PyShell.py
index 3b78f38..bff52a9 100644
--- a/Lib/idlelib/PyShell.py
+++ b/Lib/idlelib/PyShell.py
@@ -1014,7 +1014,10 @@ class PyShell(OutputWindow):
self.close()
return False
else:
- nosub = "==== No Subprocess ===="
+ nosub = ("==== No Subprocess ====\n\n" +
+ "WARNING: Running IDLE without a Subprocess is deprecated\n" +
+ "and will be removed in a later version. See Help/IDLE Help\n" +
+ "for details.\n\n")
sys.displayhook = rpc.displayhook
self.write("Python %s on %s\n%s\n%s" %
@@ -1314,7 +1317,8 @@ USAGE: idle [-deins] [-t title] [file]*
idle [-dns] [-t title] - [arg]*
-h print this help message and exit
- -n run IDLE without a subprocess (see Help/IDLE Help for details)
+ -n run IDLE without a subprocess (DEPRECATED,
+ see Help/IDLE Help for details)
The following options will override the IDLE 'settings' configuration:
@@ -1392,6 +1396,8 @@ def main():
if o == '-i':
enable_shell = True
if o == '-n':
+ print(" Warning: running IDLE without a subprocess is deprecated.",
+ file=sys.stderr)
use_subprocess = False
if o == '-r':
script = a
diff --git a/Lib/idlelib/help.txt b/Lib/idlelib/help.txt
index 815ee40..ef90a3a 100644
--- a/Lib/idlelib/help.txt
+++ b/Lib/idlelib/help.txt
@@ -274,7 +274,7 @@ Command line usage:
Enter idle -h at the command prompt to get a usage message.
-Running without a subprocess:
+Running without a subprocess: (DEPRECATED)
If IDLE is started with the -n command line switch it will run in a
single process and will not create the subprocess which runs the RPC
diff --git a/Lib/idlelib/idlever.py b/Lib/idlelib/idlever.py
index 8d8317d..efe96a4 100644
--- a/Lib/idlelib/idlever.py
+++ b/Lib/idlelib/idlever.py
@@ -1 +1 @@
-IDLE_VERSION = "3.3.0"
+IDLE_VERSION = "3.4.0a0"
diff --git a/Lib/importlib/_bootstrap.py b/Lib/importlib/_bootstrap.py
index 26d9250..36f1c8f 100644
--- a/Lib/importlib/_bootstrap.py
+++ b/Lib/importlib/_bootstrap.py
@@ -237,7 +237,7 @@ class _ModuleLock:
self.wakeup.release()
def __repr__(self):
- return "_ModuleLock(%r) at %d" % (self.name, id(self))
+ return "_ModuleLock({!r}) at {}".format(self.name, id(self))
class _DummyModuleLock:
@@ -258,7 +258,7 @@ class _DummyModuleLock:
self.count -= 1
def __repr__(self):
- return "_DummyModuleLock(%r) at %d" % (self.name, id(self))
+ return "_DummyModuleLock({!r}) at {}".format(self.name, id(self))
# The following two functions are for consumption by Python/import.c.
@@ -1437,7 +1437,7 @@ class FileFinder:
return path_hook_for_FileFinder
def __repr__(self):
- return "FileFinder(%r)" % (self.path,)
+ return "FileFinder({!r})".format(self.path)
# Import itself ###############################################################
@@ -1712,7 +1712,7 @@ def _setup(sys_module, _imp_module):
builtin_module = sys.modules[builtin_name]
setattr(self_module, builtin_name, builtin_module)
- os_details = ('posix', ['/']), ('nt', ['\\', '/']), ('os2', ['\\', '/'])
+ os_details = ('posix', ['/']), ('nt', ['\\', '/'])
for builtin_os, path_separators in os_details:
# Assumption made in _path_join()
assert all(len(sep) == 1 for sep in path_separators)
@@ -1723,9 +1723,6 @@ def _setup(sys_module, _imp_module):
else:
try:
os_module = BuiltinImporter.load_module(builtin_os)
- # TODO: rip out os2 code after 3.3 is released as per PEP 11
- if builtin_os == 'os2' and 'EMX GCC' in sys.version:
- path_sep = path_separators[1]
break
except ImportError:
continue
diff --git a/Lib/importlib/abc.py b/Lib/importlib/abc.py
index 387567a..11e45f4 100644
--- a/Lib/importlib/abc.py
+++ b/Lib/importlib/abc.py
@@ -225,180 +225,3 @@ class SourceLoader(_bootstrap.SourceLoader, ResourceLoader, ExecutionLoader):
raise NotImplementedError
_register(SourceLoader, machinery.SourceFileLoader)
-
-class PyLoader(SourceLoader):
-
- """Implement the deprecated PyLoader ABC in terms of SourceLoader.
-
- This class has been deprecated! It is slated for removal in Python 3.4.
- If compatibility with Python 3.1 is not needed then implement the
- SourceLoader ABC instead of this class. If Python 3.1 compatibility is
- needed, then use the following idiom to have a single class that is
- compatible with Python 3.1 onwards::
-
- try:
- from importlib.abc import SourceLoader
- except ImportError:
- from importlib.abc import PyLoader as SourceLoader
-
-
- class CustomLoader(SourceLoader):
- def get_filename(self, fullname):
- # Implement ...
-
- def source_path(self, fullname):
- '''Implement source_path in terms of get_filename.'''
- try:
- return self.get_filename(fullname)
- except ImportError:
- return None
-
- def is_package(self, fullname):
- filename = os.path.basename(self.get_filename(fullname))
- return os.path.splitext(filename)[0] == '__init__'
-
- """
-
- @abc.abstractmethod
- def is_package(self, fullname):
- raise NotImplementedError
-
- @abc.abstractmethod
- def source_path(self, fullname):
- """Abstract method. Accepts a str module name and returns the path to
- the source code for the module."""
- raise NotImplementedError
-
- def get_filename(self, fullname):
- """Implement get_filename in terms of source_path.
-
- As get_filename should only return a source file path there is no
- chance of the path not existing but loading still being possible, so
- ImportError should propagate instead of being turned into returning
- None.
-
- """
- warnings.warn("importlib.abc.PyLoader is deprecated and is "
- "slated for removal in Python 3.4; "
- "use SourceLoader instead. "
- "See the importlib documentation on how to be "
- "compatible with Python 3.1 onwards.",
- DeprecationWarning)
- path = self.source_path(fullname)
- if path is None:
- raise ImportError(name=fullname)
- else:
- return path
-
-
-class PyPycLoader(PyLoader):
-
- """Abstract base class to assist in loading source and bytecode by
- requiring only back-end storage methods to be implemented.
-
- This class has been deprecated! Removal is slated for Python 3.4. Implement
- the SourceLoader ABC instead. If Python 3.1 compatibility is needed, see
- PyLoader.
-
- The methods get_code, get_source, and load_module are implemented for the
- user.
-
- """
-
- def get_filename(self, fullname):
- """Return the source or bytecode file path."""
- path = self.source_path(fullname)
- if path is not None:
- return path
- path = self.bytecode_path(fullname)
- if path is not None:
- return path
- raise ImportError("no source or bytecode path available for "
- "{0!r}".format(fullname), name=fullname)
-
- def get_code(self, fullname):
- """Get a code object from source or bytecode."""
- warnings.warn("importlib.abc.PyPycLoader is deprecated and slated for "
- "removal in Python 3.4; use SourceLoader instead. "
- "If Python 3.1 compatibility is required, see the "
- "latest documentation for PyLoader.",
- DeprecationWarning)
- source_timestamp = self.source_mtime(fullname)
- # Try to use bytecode if it is available.
- bytecode_path = self.bytecode_path(fullname)
- if bytecode_path:
- data = self.get_data(bytecode_path)
- try:
- magic = data[:4]
- if len(magic) < 4:
- raise ImportError(
- "bad magic number in {}".format(fullname),
- name=fullname, path=bytecode_path)
- raw_timestamp = data[4:8]
- if len(raw_timestamp) < 4:
- raise EOFError("bad timestamp in {}".format(fullname))
- pyc_timestamp = _bootstrap._r_long(raw_timestamp)
- raw_source_size = data[8:12]
- if len(raw_source_size) != 4:
- raise EOFError("bad file size in {}".format(fullname))
- # Source size is unused as the ABC does not provide a way to
- # get the size of the source ahead of reading it.
- bytecode = data[12:]
- # Verify that the magic number is valid.
- if imp.get_magic() != magic:
- raise ImportError(
- "bad magic number in {}".format(fullname),
- name=fullname, path=bytecode_path)
- # Verify that the bytecode is not stale (only matters when
- # there is source to fall back on.
- if source_timestamp:
- if pyc_timestamp < source_timestamp:
- raise ImportError("bytecode is stale", name=fullname,
- path=bytecode_path)
- except (ImportError, EOFError):
- # If source is available give it a shot.
- if source_timestamp is not None:
- pass
- else:
- raise
- else:
- # Bytecode seems fine, so try to use it.
- return marshal.loads(bytecode)
- elif source_timestamp is None:
- raise ImportError("no source or bytecode available to create code "
- "object for {0!r}".format(fullname),
- name=fullname)
- # Use the source.
- source_path = self.source_path(fullname)
- if source_path is None:
- message = "a source path must exist to load {0}".format(fullname)
- raise ImportError(message, name=fullname)
- source = self.get_data(source_path)
- code_object = compile(source, source_path, 'exec', dont_inherit=True)
- # Generate bytecode and write it out.
- if not sys.dont_write_bytecode:
- data = bytearray(imp.get_magic())
- data.extend(_bootstrap._w_long(source_timestamp))
- data.extend(_bootstrap._w_long(len(source) & 0xFFFFFFFF))
- data.extend(marshal.dumps(code_object))
- self.write_bytecode(fullname, data)
- return code_object
-
- @abc.abstractmethod
- def source_mtime(self, fullname):
- """Abstract method. Accepts a str filename and returns an int
- modification time for the source of the module."""
- raise NotImplementedError
-
- @abc.abstractmethod
- def bytecode_path(self, fullname):
- """Abstract method. Accepts a str filename and returns the str pathname
- to the bytecode for the module."""
- raise NotImplementedError
-
- @abc.abstractmethod
- def write_bytecode(self, fullname, bytecode):
- """Abstract method. Accepts a str filename and bytes object
- representing the bytecode for the module. Returns a boolean
- representing whether the bytecode was written or not."""
- raise NotImplementedError
diff --git a/Lib/json/encoder.py b/Lib/json/encoder.py
index 75b7f49..ba57c2c 100644
--- a/Lib/json/encoder.py
+++ b/Lib/json/encoder.py
@@ -305,8 +305,7 @@ def _make_iterencode(markers, _default, _encoder, _indent, _floatstr,
chunks = _iterencode_dict(value, _current_indent_level)
else:
chunks = _iterencode(value, _current_indent_level)
- for chunk in chunks:
- yield chunk
+ yield from chunks
if newline_indent is not None:
_current_indent_level -= 1
yield '\n' + _indent * _current_indent_level
@@ -381,8 +380,7 @@ def _make_iterencode(markers, _default, _encoder, _indent, _floatstr,
chunks = _iterencode_dict(value, _current_indent_level)
else:
chunks = _iterencode(value, _current_indent_level)
- for chunk in chunks:
- yield chunk
+ yield from chunks
if newline_indent is not None:
_current_indent_level -= 1
yield '\n' + _indent * _current_indent_level
@@ -404,11 +402,9 @@ def _make_iterencode(markers, _default, _encoder, _indent, _floatstr,
elif isinstance(o, float):
yield _floatstr(o)
elif isinstance(o, (list, tuple)):
- for chunk in _iterencode_list(o, _current_indent_level):
- yield chunk
+ yield from _iterencode_list(o, _current_indent_level)
elif isinstance(o, dict):
- for chunk in _iterencode_dict(o, _current_indent_level):
- yield chunk
+ yield from _iterencode_dict(o, _current_indent_level)
else:
if markers is not None:
markerid = id(o)
@@ -416,8 +412,7 @@ def _make_iterencode(markers, _default, _encoder, _indent, _floatstr,
raise ValueError("Circular reference detected")
markers[markerid] = o
o = _default(o)
- for chunk in _iterencode(o, _current_indent_level):
- yield chunk
+ yield from _iterencode(o, _current_indent_level)
if markers is not None:
del markers[markerid]
return _iterencode
diff --git a/Lib/lib2to3/btm_utils.py b/Lib/lib2to3/btm_utils.py
index 2276dc9..339750e 100644
--- a/Lib/lib2to3/btm_utils.py
+++ b/Lib/lib2to3/btm_utils.py
@@ -96,8 +96,7 @@ class MinNode(object):
def leaves(self):
"Generator that returns the leaves of the tree"
for child in self.children:
- for x in child.leaves():
- yield x
+ yield from child.leaves()
if not self.children:
yield self
@@ -277,7 +276,6 @@ def rec_test(sequence, test_func):
sub-iterables"""
for x in sequence:
if isinstance(x, (list, tuple)):
- for y in rec_test(x, test_func):
- yield y
+ yield from rec_test(x, test_func)
else:
yield test_func(x)
diff --git a/Lib/lib2to3/pytree.py b/Lib/lib2to3/pytree.py
index 17cbf0a..c4a1be3 100644
--- a/Lib/lib2to3/pytree.py
+++ b/Lib/lib2to3/pytree.py
@@ -194,8 +194,7 @@ class Base(object):
def leaves(self):
for child in self.children:
- for x in child.leaves():
- yield x
+ yield from child.leaves()
def depth(self):
if self.parent is None:
@@ -274,16 +273,14 @@ class Node(Base):
def post_order(self):
"""Return a post-order iterator for the tree."""
for child in self.children:
- for node in child.post_order():
- yield node
+ yield from child.post_order()
yield self
def pre_order(self):
"""Return a pre-order iterator for the tree."""
yield self
for child in self.children:
- for node in child.pre_order():
- yield node
+ yield from child.pre_order()
def _prefix_getter(self):
"""
diff --git a/Lib/logging/__init__.py b/Lib/logging/__init__.py
index e79018f..9242023 100644
--- a/Lib/logging/__init__.py
+++ b/Lib/logging/__init__.py
@@ -67,7 +67,7 @@ else: #pragma: no cover
"""Return the frame object for the caller's stack frame."""
try:
raise Exception
- except:
+ except Exception:
return sys.exc_info()[2].tb_frame.f_back
# _srcfile is only used in conjunction with sys._getframe().
@@ -879,16 +879,27 @@ class Handler(Filterer):
The record which was being processed is passed in to this method.
"""
if raiseExceptions and sys.stderr: # see issue 13807
- ei = sys.exc_info()
+ t, v, tb = sys.exc_info()
try:
- traceback.print_exception(ei[0], ei[1], ei[2],
- None, sys.stderr)
- sys.stderr.write('Logged from file %s, line %s\n' % (
- record.filename, record.lineno))
+ sys.stderr.write('--- Logging error ---\n')
+ traceback.print_exception(t, v, tb, None, sys.stderr)
+ sys.stderr.write('Call stack:\n')
+ # Walk the stack frame up until we're out of logging,
+ # so as to print the calling context.
+ frame = tb.tb_frame
+ while (frame and os.path.dirname(frame.f_code.co_filename) ==
+ __path__[0]):
+ frame = frame.f_back
+ if frame:
+ traceback.print_stack(frame, file=sys.stderr)
+ else:
+ # couldn't find the right stack frame, for some reason
+ sys.stderr.write('Logged from file %s, line %s\n' % (
+ record.filename, record.lineno))
except IOError: #pragma: no cover
pass # see issue 5971
finally:
- del ei
+ del t, v, tb
class StreamHandler(Handler):
"""
@@ -938,9 +949,7 @@ class StreamHandler(Handler):
stream.write(msg)
stream.write(self.terminator)
self.flush()
- except (KeyboardInterrupt, SystemExit): #pragma: no cover
- raise
- except:
+ except Exception:
self.handleError(record)
class FileHandler(StreamHandler):
@@ -1837,7 +1846,7 @@ def shutdown(handlerList=_handlerList):
pass
finally:
h.release()
- except:
+ except: # ignore everything, as we're shutting down
if raiseExceptions:
raise
#else, swallow
diff --git a/Lib/logging/config.py b/Lib/logging/config.py
index 5ef5c91..0694d21 100644
--- a/Lib/logging/config.py
+++ b/Lib/logging/config.py
@@ -1,4 +1,4 @@
-# Copyright 2001-2010 by Vinay Sajip. All Rights Reserved.
+# Copyright 2001-2012 by Vinay Sajip. All Rights Reserved.
#
# Permission to use, copy, modify, and distribute this software and its
# documentation for any purpose and without fee is hereby granted,
@@ -19,7 +19,7 @@ Configuration functions for the logging package for Python. The core package
is based on PEP 282 and comments thereto in comp.lang.python, and influenced
by Apache's log4j system.
-Copyright (C) 2001-2010 Vinay Sajip. All Rights Reserved.
+Copyright (C) 2001-2012 Vinay Sajip. All Rights Reserved.
To use, simply 'import logging' and log away!
"""
@@ -61,11 +61,14 @@ def fileConfig(fname, defaults=None, disable_existing_loggers=True):
"""
import configparser
- cp = configparser.ConfigParser(defaults)
- if hasattr(fname, 'readline'):
- cp.read_file(fname)
+ if isinstance(fname, configparser.RawConfigParser):
+ cp = fname
else:
- cp.read(fname)
+ cp = configparser.ConfigParser(defaults)
+ if hasattr(fname, 'readline'):
+ cp.read_file(fname)
+ else:
+ cp.read(fname)
formatters = _create_formatters(cp)
@@ -773,7 +776,7 @@ def dictConfig(config):
dictConfigClass(config).configure()
-def listen(port=DEFAULT_LOGGING_CONFIG_PORT):
+def listen(port=DEFAULT_LOGGING_CONFIG_PORT, verify=None):
"""
Start up a socket server on the specified port, and listen for new
configurations.
@@ -782,6 +785,15 @@ def listen(port=DEFAULT_LOGGING_CONFIG_PORT):
Returns a Thread object on which you can call start() to start the server,
and which you can join() when appropriate. To stop the server, call
stopListening().
+
+ Use the ``verify`` argument to verify any bytes received across the wire
+ from a client. If specified, it should be a callable which receives a
+ single argument - the bytes of configuration data received across the
+ network - and it should return either ``None``, to indicate that the
+ passed in bytes could not be verified and should be discarded, or a
+ byte string which is then passed to the configuration machinery as
+ normal. Note that you can return transformed bytes, e.g. by decrypting
+ the bytes passed in.
"""
if not thread: #pragma: no cover
raise NotImplementedError("listen() needs threading to work")
@@ -809,22 +821,23 @@ def listen(port=DEFAULT_LOGGING_CONFIG_PORT):
chunk = self.connection.recv(slen)
while len(chunk) < slen:
chunk = chunk + conn.recv(slen - len(chunk))
- chunk = chunk.decode("utf-8")
- try:
- import json
- d =json.loads(chunk)
- assert isinstance(d, dict)
- dictConfig(d)
- except:
- #Apply new configuration.
-
- file = io.StringIO(chunk)
+ if self.server.verify is not None:
+ chunk = self.server.verify(chunk)
+ if chunk is not None: # verified, can process
+ chunk = chunk.decode("utf-8")
try:
- fileConfig(file)
- except (KeyboardInterrupt, SystemExit): #pragma: no cover
- raise
- except:
- traceback.print_exc()
+ import json
+ d = json.loads(chunk)
+ assert isinstance(d, dict)
+ dictConfig(d)
+ except Exception:
+ #Apply new configuration.
+
+ file = io.StringIO(chunk)
+ try:
+ fileConfig(file)
+ except Exception:
+ traceback.print_exc()
if self.server.ready:
self.server.ready.set()
except socket.error as e:
@@ -843,13 +856,14 @@ def listen(port=DEFAULT_LOGGING_CONFIG_PORT):
allow_reuse_address = 1
def __init__(self, host='localhost', port=DEFAULT_LOGGING_CONFIG_PORT,
- handler=None, ready=None):
+ handler=None, ready=None, verify=None):
ThreadingTCPServer.__init__(self, (host, port), handler)
logging._acquireLock()
self.abort = 0
logging._releaseLock()
self.timeout = 1
self.ready = ready
+ self.verify = verify
def serve_until_stopped(self):
import select
@@ -867,16 +881,18 @@ def listen(port=DEFAULT_LOGGING_CONFIG_PORT):
class Server(threading.Thread):
- def __init__(self, rcvr, hdlr, port):
+ def __init__(self, rcvr, hdlr, port, verify):
super(Server, self).__init__()
self.rcvr = rcvr
self.hdlr = hdlr
self.port = port
+ self.verify = verify
self.ready = threading.Event()
def run(self):
server = self.rcvr(port=self.port, handler=self.hdlr,
- ready=self.ready)
+ ready=self.ready,
+ verify=self.verify)
if self.port == 0:
self.port = server.server_address[1]
self.ready.set()
@@ -886,7 +902,7 @@ def listen(port=DEFAULT_LOGGING_CONFIG_PORT):
logging._releaseLock()
server.serve_until_stopped()
- return Server(ConfigSocketReceiver, ConfigStreamHandler, port)
+ return Server(ConfigSocketReceiver, ConfigStreamHandler, port, verify)
def stopListening():
"""
diff --git a/Lib/logging/handlers.py b/Lib/logging/handlers.py
index f286cd6..4cbc320 100644
--- a/Lib/logging/handlers.py
+++ b/Lib/logging/handlers.py
@@ -72,9 +72,7 @@ class BaseRotatingHandler(logging.FileHandler):
if self.shouldRollover(record):
self.doRollover()
logging.FileHandler.emit(self, record)
- except (KeyboardInterrupt, SystemExit): #pragma: no cover
- raise
- except:
+ except Exception:
self.handleError(record)
def rotation_filename(self, default_name):
@@ -496,15 +494,7 @@ class SocketHandler(logging.Handler):
A factory method which allows subclasses to define the precise
type of socket they want.
"""
- s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
- if hasattr(s, 'settimeout'):
- s.settimeout(timeout)
- try:
- s.connect((self.host, self.port))
- return s
- except socket.error:
- s.close()
- raise
+ return socket.create_connection((self.host, self.port), timeout=timeout)
def createSocket(self):
"""
@@ -548,15 +538,7 @@ class SocketHandler(logging.Handler):
#but are still unable to connect.
if self.sock:
try:
- if hasattr(self.sock, "sendall"):
- self.sock.sendall(s)
- else: #pragma: no cover
- sentsofar = 0
- left = len(s)
- while left > 0:
- sent = self.sock.send(s[sentsofar:])
- sentsofar = sentsofar + sent
- left = left - sent
+ self.sock.sendall(s)
except socket.error: #pragma: no cover
self.sock.close()
self.sock = None # so we can call createSocket next time
@@ -607,9 +589,7 @@ class SocketHandler(logging.Handler):
try:
s = self.makePickle(record)
self.send(s)
- except (KeyboardInterrupt, SystemExit): #pragma: no cover
- raise
- except:
+ except Exception:
self.handleError(record)
def close(self):
@@ -869,9 +849,7 @@ class SysLogHandler(logging.Handler):
self.socket.sendto(msg, self.address)
else:
self.socket.sendall(msg)
- except (KeyboardInterrupt, SystemExit): #pragma: no cover
- raise
- except:
+ except Exception:
self.handleError(record)
class SMTPHandler(logging.Handler):
@@ -949,9 +927,7 @@ class SMTPHandler(logging.Handler):
smtp.login(self.username, self.password)
smtp.sendmail(self.fromaddr, self.toaddrs, msg)
smtp.quit()
- except (KeyboardInterrupt, SystemExit): #pragma: no cover
- raise
- except:
+ except Exception:
self.handleError(record)
class NTEventLogHandler(logging.Handler):
@@ -1036,9 +1012,7 @@ class NTEventLogHandler(logging.Handler):
type = self.getEventType(record)
msg = self.format(record)
self._welu.ReportEvent(self.appname, id, cat, type, [msg])
- except (KeyboardInterrupt, SystemExit): #pragma: no cover
- raise
- except:
+ except Exception:
self.handleError(record)
def close(self):
@@ -1123,9 +1097,7 @@ class HTTPHandler(logging.Handler):
if self.method == "POST":
h.send(data.encode('utf-8'))
h.getresponse() #can't do anything with the result
- except (KeyboardInterrupt, SystemExit): #pragma: no cover
- raise
- except:
+ except Exception:
self.handleError(record)
class BufferingHandler(logging.Handler):
@@ -1305,9 +1277,7 @@ class QueueHandler(logging.Handler):
"""
try:
self.enqueue(self.prepare(record))
- except (KeyboardInterrupt, SystemExit): #pragma: no cover
- raise
- except:
+ except Exception:
self.handleError(record)
if threading:
diff --git a/Lib/lzma.py b/Lib/lzma.py
index 1a1b065..b2e2f7e 100644
--- a/Lib/lzma.py
+++ b/Lib/lzma.py
@@ -55,7 +55,7 @@ class LZMAFile(io.BufferedIOBase):
be an existing file object to read from or write to.
mode can be "r" for reading (default), "w" for (over)writing, or
- "a" for appending. These can equivalently be given as "rb", "wb",
+ "a" for appending. These can equivalently be given as "rb", "wb"
and "ab" respectively.
format specifies the container format to use for the file.
@@ -110,7 +110,8 @@ class LZMAFile(io.BufferedIOBase):
# stream will need a separate decompressor object.
self._init_args = {"format":format, "filters":filters}
self._decompressor = LZMADecompressor(**self._init_args)
- self._buffer = None
+ self._buffer = b""
+ self._buffer_offset = 0
elif mode in ("w", "wb", "a", "ab"):
if format is None:
format = FORMAT_XZ
@@ -143,7 +144,7 @@ class LZMAFile(io.BufferedIOBase):
try:
if self._mode in (_MODE_READ, _MODE_READ_EOF):
self._decompressor = None
- self._buffer = None
+ self._buffer = b""
elif self._mode == _MODE_WRITE:
self._fp.write(self._compressor.flush())
self._compressor = None
@@ -187,15 +188,18 @@ class LZMAFile(io.BufferedIOBase):
raise ValueError("I/O operation on closed file")
def _check_can_read(self):
- if not self.readable():
+ if self._mode not in (_MODE_READ, _MODE_READ_EOF):
+ self._check_not_closed()
raise io.UnsupportedOperation("File not open for reading")
def _check_can_write(self):
- if not self.writable():
+ if self._mode != _MODE_WRITE:
+ self._check_not_closed()
raise io.UnsupportedOperation("File not open for writing")
def _check_can_seek(self):
- if not self.readable():
+ if self._mode not in (_MODE_READ, _MODE_READ_EOF):
+ self._check_not_closed()
raise io.UnsupportedOperation("Seeking is only supported "
"on files open for reading")
if not self._fp.seekable():
@@ -204,16 +208,13 @@ class LZMAFile(io.BufferedIOBase):
# Fill the readahead buffer if it is empty. Returns False on EOF.
def _fill_buffer(self):
+ if self._mode == _MODE_READ_EOF:
+ return False
# Depending on the input data, our call to the decompressor may not
# return any data. In this case, try again after reading another block.
- while True:
- if self._buffer:
- return True
-
- if self._decompressor.unused_data:
- rawblock = self._decompressor.unused_data
- else:
- rawblock = self._fp.read(_BUFFER_SIZE)
+ while self._buffer_offset == len(self._buffer):
+ rawblock = (self._decompressor.unused_data or
+ self._fp.read(_BUFFER_SIZE))
if not rawblock:
if self._decompressor.eof:
@@ -229,30 +230,48 @@ class LZMAFile(io.BufferedIOBase):
self._decompressor = LZMADecompressor(**self._init_args)
self._buffer = self._decompressor.decompress(rawblock)
+ self._buffer_offset = 0
+ return True
# Read data until EOF.
# If return_data is false, consume the data without returning it.
def _read_all(self, return_data=True):
+ # The loop assumes that _buffer_offset is 0. Ensure that this is true.
+ self._buffer = self._buffer[self._buffer_offset:]
+ self._buffer_offset = 0
+
blocks = []
while self._fill_buffer():
if return_data:
blocks.append(self._buffer)
self._pos += len(self._buffer)
- self._buffer = None
+ self._buffer = b""
if return_data:
return b"".join(blocks)
# Read a block of up to n bytes.
# If return_data is false, consume the data without returning it.
def _read_block(self, n, return_data=True):
+ # If we have enough data buffered, return immediately.
+ end = self._buffer_offset + n
+ if end <= len(self._buffer):
+ data = self._buffer[self._buffer_offset : end]
+ self._buffer_offset = end
+ self._pos += len(data)
+ return data if return_data else None
+
+ # The loop assumes that _buffer_offset is 0. Ensure that this is true.
+ self._buffer = self._buffer[self._buffer_offset:]
+ self._buffer_offset = 0
+
blocks = []
while n > 0 and self._fill_buffer():
if n < len(self._buffer):
data = self._buffer[:n]
- self._buffer = self._buffer[n:]
+ self._buffer_offset = n
else:
data = self._buffer
- self._buffer = None
+ self._buffer = b""
if return_data:
blocks.append(data)
self._pos += len(data)
@@ -267,9 +286,9 @@ class LZMAFile(io.BufferedIOBase):
The exact number of bytes returned is unspecified.
"""
self._check_can_read()
- if self._mode == _MODE_READ_EOF or not self._fill_buffer():
+ if not self._fill_buffer():
return b""
- return self._buffer
+ return self._buffer[self._buffer_offset:]
def read(self, size=-1):
"""Read up to size uncompressed bytes from the file.
@@ -278,7 +297,7 @@ class LZMAFile(io.BufferedIOBase):
Returns b"" if the file is already at EOF.
"""
self._check_can_read()
- if self._mode == _MODE_READ_EOF or size == 0:
+ if size == 0:
return b""
elif size < 0:
return self._read_all()
@@ -295,18 +314,40 @@ class LZMAFile(io.BufferedIOBase):
# this does not give enough data for the decompressor to make progress.
# In this case we make multiple reads, to avoid returning b"".
self._check_can_read()
- if (size == 0 or self._mode == _MODE_READ_EOF or
- not self._fill_buffer()):
+ if (size == 0 or
+ # Only call _fill_buffer() if the buffer is actually empty.
+ # This gives a significant speedup if *size* is small.
+ (self._buffer_offset == len(self._buffer) and not self._fill_buffer())):
return b""
- if 0 < size < len(self._buffer):
- data = self._buffer[:size]
- self._buffer = self._buffer[size:]
+ if size > 0:
+ data = self._buffer[self._buffer_offset :
+ self._buffer_offset + size]
+ self._buffer_offset += len(data)
else:
- data = self._buffer
- self._buffer = None
+ data = self._buffer[self._buffer_offset:]
+ self._buffer = b""
+ self._buffer_offset = 0
self._pos += len(data)
return data
+ def readline(self, size=-1):
+ """Read a line of uncompressed bytes from the file.
+
+ The terminating newline (if present) is retained. If size is
+ non-negative, no more than size bytes will be read (in which
+ case the line may be incomplete). Returns b'' if already at EOF.
+ """
+ self._check_can_read()
+ # Shortcut for the common case - the whole line is in the buffer.
+ if size < 0:
+ end = self._buffer.find(b"\n", self._buffer_offset) + 1
+ if end > 0:
+ line = self._buffer[self._buffer_offset : end]
+ self._buffer_offset = end
+ self._pos += len(line)
+ return line
+ return io.BufferedIOBase.readline(self, size)
+
def write(self, data):
"""Write a bytes object to the file.
@@ -326,7 +367,8 @@ class LZMAFile(io.BufferedIOBase):
self._mode = _MODE_READ
self._pos = 0
self._decompressor = LZMADecompressor(**self._init_args)
- self._buffer = None
+ self._buffer = b""
+ self._buffer_offset = 0
def seek(self, offset, whence=0):
"""Change the file position.
@@ -365,8 +407,7 @@ class LZMAFile(io.BufferedIOBase):
offset -= self._pos
# Read and discard data until we reach the desired position.
- if self._mode != _MODE_READ_EOF:
- self._read_block(offset, return_data=False)
+ self._read_block(offset, return_data=False)
return self._pos
@@ -381,23 +422,24 @@ def open(filename, mode="rb", *,
encoding=None, errors=None, newline=None):
"""Open an LZMA-compressed file in binary or text mode.
- filename can be either an actual file name (given as a str or bytes object),
- in which case the named file is opened, or it can be an existing file object
- to read from or write to.
+ filename can be either an actual file name (given as a str or bytes
+ object), in which case the named file is opened, or it can be an
+ existing file object to read from or write to.
- The mode argument can be "r", "rb" (default), "w", "wb", "a", or "ab" for
- binary mode, or "rt", "wt" or "at" for text mode.
+ The mode argument can be "r", "rb" (default), "w", "wb", "a" or "ab"
+ for binary mode, or "rt", "wt" or "at" for text mode.
- The format, check, preset and filters arguments specify the compression
- settings, as for LZMACompressor, LZMADecompressor and LZMAFile.
+ The format, check, preset and filters arguments specify the
+ compression settings, as for LZMACompressor, LZMADecompressor and
+ LZMAFile.
- For binary mode, this function is equivalent to the LZMAFile constructor:
- LZMAFile(filename, mode, ...). In this case, the encoding, errors and
- newline arguments must not be provided.
+ For binary mode, this function is equivalent to the LZMAFile
+ constructor: LZMAFile(filename, mode, ...). In this case, the
+ encoding, errors and newline arguments must not be provided.
For text mode, a LZMAFile object is created, and wrapped in an
- io.TextIOWrapper instance with the specified encoding, error handling
- behavior, and line ending(s).
+ io.TextIOWrapper instance with the specified encoding, error
+ handling behavior, and line ending(s).
"""
if "t" in mode:
@@ -427,7 +469,7 @@ def compress(data, format=FORMAT_XZ, check=-1, preset=None, filters=None):
Refer to LZMACompressor's docstring for a description of the
optional arguments *format*, *check*, *preset* and *filters*.
- For incremental compression, use an LZMACompressor object instead.
+ For incremental compression, use an LZMACompressor instead.
"""
comp = LZMACompressor(format, check, preset, filters)
return comp.compress(data) + comp.flush()
@@ -439,7 +481,7 @@ def decompress(data, format=FORMAT_AUTO, memlimit=None, filters=None):
Refer to LZMADecompressor's docstring for a description of the
optional arguments *format*, *check* and *filters*.
- For incremental decompression, use a LZMADecompressor object instead.
+ For incremental decompression, use an LZMADecompressor instead.
"""
results = []
while True:
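
A quick usage sketch (the file name is arbitrary): the buffer-offset bookkeeping and the new readline() fast path are internal, so callers simply see line-oriented reads behaving as before, only without re-slicing the readahead buffer on every small request.

    import lzma

    # Round-trip a tiny file, then read it back line by line.
    with lzma.open("example.xz", "wt") as f:
        f.write("first line\nsecond line\n")

    with lzma.open("example.xz", "rb") as f:
        print(f.readline())    # b'first line\n', served from the buffer
        print(f.peek(1)[:11])  # peek() never advances the file position
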
diff --git a/Lib/mailbox.py b/Lib/mailbox.py
index d3bf3fd..01f3551 100644
--- a/Lib/mailbox.py
+++ b/Lib/mailbox.py
@@ -22,9 +22,6 @@ import email.generator
import io
import contextlib
try:
- if sys.platform == 'os2emx':
- # OS/2 EMX fcntl() not adequate
- raise ImportError
import fcntl
except ImportError:
fcntl = None
@@ -631,8 +628,7 @@ class _singlefileMailbox(Mailbox):
def iterkeys(self):
"""Return an iterator over keys."""
self._lookup()
- for key in self._toc.keys():
- yield key
+ yield from self._toc.keys()
def __contains__(self, key):
"""Return True if the keyed message exists, False otherwise."""
@@ -711,8 +707,7 @@ class _singlefileMailbox(Mailbox):
try:
os.rename(new_file.name, self._path)
except OSError as e:
- if e.errno == errno.EEXIST or \
- (os.name == 'os2' and e.errno == errno.EACCES):
+ if e.errno == errno.EEXIST:
os.remove(self._path)
os.rename(new_file.name, self._path)
else:
@@ -2097,8 +2092,7 @@ def _lock_file(f, dotlock=True):
os.rename(pre_lock.name, f.name + '.lock')
dotlock_done = True
except OSError as e:
- if e.errno == errno.EEXIST or \
- (os.name == 'os2' and e.errno == errno.EACCES):
+ if e.errno == errno.EEXIST:
os.remove(pre_lock.name)
raise ExternalClashError('dot lock unavailable: %s' %
f.name)
diff --git a/Lib/mimetypes.py b/Lib/mimetypes.py
index 3f0bd0e..2872ee4 100644
--- a/Lib/mimetypes.py
+++ b/Lib/mimetypes.py
@@ -378,12 +378,14 @@ def _default_mime_types():
'.taz': '.tar.gz',
'.tz': '.tar.gz',
'.tbz2': '.tar.bz2',
+ '.txz': '.tar.xz',
}
encodings_map = {
'.gz': 'gzip',
'.Z': 'compress',
'.bz2': 'bzip2',
+ '.xz': 'xz',
}
# Before adding new types, make sure they are either registered with IANA,
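
A quick check of the new mappings (file names are illustrative): guess_type() now reports the xz encoding and expands the .txz shorthand through the suffix map.

    import mimetypes

    print(mimetypes.guess_type("backup.tar.xz"))  # ('application/x-tar', 'xz')
    print(mimetypes.guess_type("backup.txz"))     # same result via the suffix map
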
diff --git a/Lib/multiprocessing/__init__.py b/Lib/multiprocessing/__init__.py
index 1f3e67c..efad532 100644
--- a/Lib/multiprocessing/__init__.py
+++ b/Lib/multiprocessing/__init__.py
@@ -40,6 +40,13 @@ from multiprocessing.process import Process, current_process, active_children
from multiprocessing.util import SUBDEBUG, SUBWARNING
#
+# Alias for main module -- will be reset by bootstrapping child processes
+#
+
+if '__main__' in sys.modules:
+ sys.modules['__mp_main__'] = sys.modules['__main__']
+
+#
# Exceptions
#
diff --git a/Lib/multiprocessing/forking.py b/Lib/multiprocessing/forking.py
index af6580d..fe4ee33 100644
--- a/Lib/multiprocessing/forking.py
+++ b/Lib/multiprocessing/forking.py
@@ -441,27 +441,17 @@ def prepare(data):
dirs = [os.path.dirname(main_path)]
assert main_name not in sys.modules, main_name
+ sys.modules.pop('__mp_main__', None)
file, path_name, etc = imp.find_module(main_name, dirs)
try:
- # We would like to do "imp.load_module('__main__', ...)"
- # here. However, that would cause 'if __name__ ==
- # "__main__"' clauses to be executed.
+ # We should not do 'imp.load_module("__main__", ...)'
+ # since that would execute 'if __name__ == "__main__"'
+ # clauses, potentially causing a pseudo fork bomb.
main_module = imp.load_module(
- '__parents_main__', file, path_name, etc
+ '__mp_main__', file, path_name, etc
)
finally:
if file:
file.close()
- sys.modules['__main__'] = main_module
- main_module.__name__ = '__main__'
-
- # Try to make the potentially picklable objects in
- # sys.modules['__main__'] realize they are in the main
- # module -- somewhat ugly.
- for obj in list(main_module.__dict__.values()):
- try:
- if obj.__module__ == '__parents_main__':
- obj.__module__ = '__main__'
- except Exception:
- pass
+ sys.modules['__main__'] = sys.modules['__mp_main__'] = main_module
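
For illustration only (the script name is hypothetical, and this is not part of the patch): after this change a child process can reach the parent's main module under the '__mp_main__' alias, so pickled references to objects defined in the main script resolve without re-running its 'if __name__ == "__main__"' block.

    # demo_mp_main.py
    import multiprocessing
    import sys

    def child():
        # The alias installed by multiprocessing/__init__.py (and re-created
        # by prepare() in spawned children) makes the main script reachable.
        print('__mp_main__' in sys.modules)

    if __name__ == '__main__':
        p = multiprocessing.Process(target=child)
        p.start()
        p.join()
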
diff --git a/Lib/multiprocessing/queues.py b/Lib/multiprocessing/queues.py
index 37271fb..f6f02b6 100644
--- a/Lib/multiprocessing/queues.py
+++ b/Lib/multiprocessing/queues.py
@@ -243,10 +243,14 @@ class Queue(object):
if wacquire is None:
send(obj)
+ # Delete references to object. See issue16284
+ del obj
else:
wacquire()
try:
send(obj)
+ # Delete references to object. See issue16284
+ del obj
finally:
wrelease()
except IndexError:
diff --git a/Lib/ntpath.py b/Lib/ntpath.py
index 826be87..f70193f 100644
--- a/Lib/ntpath.py
+++ b/Lib/ntpath.py
@@ -30,9 +30,6 @@ altsep = '/'
defpath = '.;C:\\bin'
if 'ce' in sys.builtin_module_names:
defpath = '\\Windows'
-elif 'os2' in sys.builtin_module_names:
- # OS/2 w/ VACPP
- altsep = '/'
devnull = 'nul'
def _get_empty(path):
@@ -320,8 +317,7 @@ def dirname(p):
def islink(path):
"""Test whether a path is a symbolic link.
- This will always return false for Windows prior to 6.0
- and for OS/2.
+ This will always return false for Windows prior to 6.0.
"""
try:
st = os.lstat(path)
diff --git a/Lib/os.py b/Lib/os.py
index 84eeaeb..8241f36 100644
--- a/Lib/os.py
+++ b/Lib/os.py
@@ -1,9 +1,9 @@
r"""OS routines for Mac, NT, or Posix depending on what system we're on.
This exports:
- - all functions from posix, nt, os2, or ce, e.g. unlink, stat, etc.
+ - all functions from posix, nt or ce, e.g. unlink, stat, etc.
- os.path is either posixpath or ntpath
- - os.name is either 'posix', 'nt', 'os2' or 'ce'.
+ - os.name is either 'posix', 'nt' or 'ce'.
- os.curdir is a string representing the current directory ('.' or ':')
- os.pardir is a string representing the parent directory ('..' or '::')
- os.sep is the (or a most common) pathname separator ('/' or ':' or '\\')
@@ -81,30 +81,6 @@ elif 'nt' in _names:
except ImportError:
pass
-elif 'os2' in _names:
- name = 'os2'
- linesep = '\r\n'
- from os2 import *
- try:
- from os2 import _exit
- __all__.append('_exit')
- except ImportError:
- pass
- if sys.version.find('EMX GCC') == -1:
- import ntpath as path
- else:
- import os2emxpath as path
- from _emx_link import link
-
- import os2
- __all__.extend(_get_exports_list(os2))
- del os2
-
- try:
- from os2 import _have_functions
- except ImportError:
- pass
-
elif 'ce' in _names:
name = 'ce'
linesep = '\r\n'
@@ -715,7 +691,7 @@ else:
__all__.append("unsetenv")
def _createenviron():
- if name in ('os2', 'nt'):
+ if name == 'nt':
# Where Env Var Names Must Be UPPERCASE
def check_str(value):
if not isinstance(value, str):
@@ -755,7 +731,7 @@ def getenv(key, default=None):
key, default and the result are str."""
return environ.get(key, default)
-supports_bytes_environ = name not in ('os2', 'nt')
+supports_bytes_environ = (name != 'nt')
__all__.extend(("getenv", "supports_bytes_environ"))
if supports_bytes_environ:
diff --git a/Lib/os2emxpath.py b/Lib/os2emxpath.py
deleted file mode 100644
index 0ccbf8a..0000000
--- a/Lib/os2emxpath.py
+++ /dev/null
@@ -1,158 +0,0 @@
-# Module 'os2emxpath' -- common operations on OS/2 pathnames
-"""Common pathname manipulations, OS/2 EMX version.
-
-Instead of importing this module directly, import os and refer to this
-module as os.path.
-"""
-
-import os
-import stat
-from genericpath import *
-from ntpath import (expanduser, expandvars, isabs, islink, splitdrive,
- splitext, split)
-
-__all__ = ["normcase","isabs","join","splitdrive","split","splitext",
- "basename","dirname","commonprefix","getsize","getmtime",
- "getatime","getctime", "islink","exists","lexists","isdir","isfile",
- "ismount","expanduser","expandvars","normpath","abspath",
- "splitunc","curdir","pardir","sep","pathsep","defpath","altsep",
- "extsep","devnull","realpath","supports_unicode_filenames"]
-
-# strings representing various path-related bits and pieces
-curdir = '.'
-pardir = '..'
-extsep = '.'
-sep = '/'
-altsep = '\\'
-pathsep = ';'
-defpath = '.;C:\\bin'
-devnull = 'nul'
-
-# Normalize the case of a pathname and map slashes to backslashes.
-# Other normalizations (such as optimizing '../' away) are not done
-# (this is done by normpath).
-
-def normcase(s):
- """Normalize case of pathname.
-
- Makes all characters lowercase and all altseps into seps."""
- if not isinstance(s, (bytes, str)):
- raise TypeError("normcase() argument must be str or bytes, "
- "not '{}'".format(s.__class__.__name__))
- return s.replace('\\', '/').lower()
-
-
-# Join two (or more) paths.
-
-def join(a, *p):
- """Join two or more pathname components, inserting sep as needed"""
- path = a
- for b in p:
- if isabs(b):
- path = b
- elif path == '' or path[-1:] in '/\\:':
- path = path + b
- else:
- path = path + '/' + b
- return path
-
-
-# Parse UNC paths
-def splitunc(p):
- """Split a pathname into UNC mount point and relative path specifiers.
-
- Return a 2-tuple (unc, rest); either part may be empty.
- If unc is not empty, it has the form '//host/mount' (or similar
- using backslashes). unc+rest is always the input path.
- Paths containing drive letters never have an UNC part.
- """
- if p[1:2] == ':':
- return '', p # Drive letter present
- firstTwo = p[0:2]
- if firstTwo == '/' * 2 or firstTwo == '\\' * 2:
- # is a UNC path:
- # vvvvvvvvvvvvvvvvvvvv equivalent to drive letter
- # \\machine\mountpoint\directories...
- # directory ^^^^^^^^^^^^^^^
- normp = normcase(p)
- index = normp.find('/', 2)
- if index == -1:
- ##raise RuntimeError, 'illegal UNC path: "' + p + '"'
- return ("", p)
- index = normp.find('/', index + 1)
- if index == -1:
- index = len(p)
- return p[:index], p[index:]
- return '', p
-
-
-# Return the tail (basename) part of a path.
-
-def basename(p):
- """Returns the final component of a pathname"""
- return split(p)[1]
-
-
-# Return the head (dirname) part of a path.
-
-def dirname(p):
- """Returns the directory component of a pathname"""
- return split(p)[0]
-
-
-# alias exists to lexists
-lexists = exists
-
-
-# Is a path a directory?
-
-# Is a path a mount point? Either a root (with or without drive letter)
-# or an UNC path with at most a / or \ after the mount point.
-
-def ismount(path):
- """Test whether a path is a mount point (defined as root of drive)"""
- unc, rest = splitunc(path)
- if unc:
- return rest in ("", "/", "\\")
- p = splitdrive(path)[1]
- return len(p) == 1 and p[0] in '/\\'
-
-
-# Normalize a path, e.g. A//B, A/./B and A/foo/../B all become A/B.
-
-def normpath(path):
- """Normalize path, eliminating double slashes, etc."""
- path = path.replace('\\', '/')
- prefix, path = splitdrive(path)
- while path[:1] == '/':
- prefix = prefix + '/'
- path = path[1:]
- comps = path.split('/')
- i = 0
- while i < len(comps):
- if comps[i] == '.':
- del comps[i]
- elif comps[i] == '..' and i > 0 and comps[i-1] not in ('', '..'):
- del comps[i-1:i+1]
- i = i - 1
- elif comps[i] == '' and i > 0 and comps[i-1] != '':
- del comps[i]
- else:
- i = i + 1
- # If the path is now empty, substitute '.'
- if not prefix and not comps:
- comps.append('.')
- return prefix + '/'.join(comps)
-
-
-# Return an absolute path.
-def abspath(path):
- """Return the absolute version of a path"""
- if not isabs(path):
- path = join(os.getcwd(), path)
- return normpath(path)
-
-# realpath is a no-op on systems without islink support
-realpath = abspath
-
-supports_unicode_filenames = False
diff --git a/Lib/pkgutil.py b/Lib/pkgutil.py
index 8bdeb32..c695cf1 100644
--- a/Lib/pkgutil.py
+++ b/Lib/pkgutil.py
@@ -121,8 +121,7 @@ def walk_packages(path=None, prefix='', onerror=None):
# don't traverse path items we've seen before
path = [p for p in path if not seen(p)]
- for item in walk_packages(path, name+'.', onerror):
- yield item
+ yield from walk_packages(path, name+'.', onerror)
def iter_modules(path=None, prefix=''):
@@ -456,8 +455,7 @@ def iter_importers(fullname=""):
if path is None:
return
else:
- for importer in sys.meta_path:
- yield importer
+ yield from sys.meta_path
path = sys.path
for item in path:
yield get_importer(item)
diff --git a/Lib/plat-os2emx/IN.py b/Lib/plat-os2emx/IN.py
deleted file mode 100644
index 753ae24..0000000
--- a/Lib/plat-os2emx/IN.py
+++ /dev/null
@@ -1,82 +0,0 @@
-# Generated by h2py from f:/emx/include/netinet/in.h
-
-# Included from sys/param.h
-PAGE_SIZE = 0x1000
-HZ = 100
-MAXNAMLEN = 260
-MAXPATHLEN = 260
-def htonl(X): return _swapl(X)
-
-def ntohl(X): return _swapl(X)
-
-def htons(X): return _swaps(X)
-
-def ntohs(X): return _swaps(X)
-
-IPPROTO_IP = 0
-IPPROTO_ICMP = 1
-IPPROTO_IGMP = 2
-IPPROTO_GGP = 3
-IPPROTO_TCP = 6
-IPPROTO_EGP = 8
-IPPROTO_PUP = 12
-IPPROTO_UDP = 17
-IPPROTO_IDP = 22
-IPPROTO_TP = 29
-IPPROTO_EON = 80
-IPPROTO_RAW = 255
-IPPROTO_MAX = 256
-IPPORT_RESERVED = 1024
-IPPORT_USERRESERVED = 5000
-def IN_CLASSA(i): return (((int)(i) & 0x80000000) == 0)
-
-IN_CLASSA_NET = 0xff000000
-IN_CLASSA_NSHIFT = 24
-IN_CLASSA_HOST = 0x00ffffff
-IN_CLASSA_MAX = 128
-def IN_CLASSB(i): return (((int)(i) & 0xc0000000) == 0x80000000)
-
-IN_CLASSB_NET = 0xffff0000
-IN_CLASSB_NSHIFT = 16
-IN_CLASSB_HOST = 0x0000ffff
-IN_CLASSB_MAX = 65536
-def IN_CLASSC(i): return (((int)(i) & 0xe0000000) == 0xc0000000)
-
-IN_CLASSC_NET = 0xffffff00
-IN_CLASSC_NSHIFT = 8
-IN_CLASSC_HOST = 0x000000ff
-def IN_CLASSD(i): return (((int)(i) & 0xf0000000) == 0xe0000000)
-
-IN_CLASSD_NET = 0xf0000000
-IN_CLASSD_NSHIFT = 28
-IN_CLASSD_HOST = 0x0fffffff
-def IN_MULTICAST(i): return IN_CLASSD(i)
-
-def IN_EXPERIMENTAL(i): return (((int)(i) & 0xe0000000) == 0xe0000000)
-
-def IN_BADCLASS(i): return (((int)(i) & 0xf0000000) == 0xf0000000)
-
-INADDR_ANY = 0x00000000
-INADDR_LOOPBACK = 0x7f000001
-INADDR_BROADCAST = 0xffffffff
-INADDR_NONE = 0xffffffff
-INADDR_UNSPEC_GROUP = 0xe0000000
-INADDR_ALLHOSTS_GROUP = 0xe0000001
-INADDR_MAX_LOCAL_GROUP = 0xe00000ff
-IN_LOOPBACKNET = 127
-IP_OPTIONS = 1
-IP_MULTICAST_IF = 2
-IP_MULTICAST_TTL = 3
-IP_MULTICAST_LOOP = 4
-IP_ADD_MEMBERSHIP = 5
-IP_DROP_MEMBERSHIP = 6
-IP_HDRINCL = 2
-IP_TOS = 3
-IP_TTL = 4
-IP_RECVOPTS = 5
-IP_RECVRETOPTS = 6
-IP_RECVDSTADDR = 7
-IP_RETOPTS = 8
-IP_DEFAULT_MULTICAST_TTL = 1
-IP_DEFAULT_MULTICAST_LOOP = 1
-IP_MAX_MEMBERSHIPS = 20
diff --git a/Lib/plat-os2emx/SOCKET.py b/Lib/plat-os2emx/SOCKET.py
deleted file mode 100644
index dac594a..0000000
--- a/Lib/plat-os2emx/SOCKET.py
+++ /dev/null
@@ -1,106 +0,0 @@
-# Generated by h2py from f:/emx/include/sys/socket.h
-
-# Included from sys/types.h
-FD_SETSIZE = 256
-
-# Included from sys/uio.h
-FREAD = 1
-FWRITE = 2
-SOCK_STREAM = 1
-SOCK_DGRAM = 2
-SOCK_RAW = 3
-SOCK_RDM = 4
-SOCK_SEQPACKET = 5
-SO_DEBUG = 0x0001
-SO_ACCEPTCONN = 0x0002
-SO_REUSEADDR = 0x0004
-SO_KEEPALIVE = 0x0008
-SO_DONTROUTE = 0x0010
-SO_BROADCAST = 0x0020
-SO_USELOOPBACK = 0x0040
-SO_LINGER = 0x0080
-SO_OOBINLINE = 0x0100
-SO_L_BROADCAST = 0x0200
-SO_RCV_SHUTDOWN = 0x0400
-SO_SND_SHUTDOWN = 0x0800
-SO_SNDBUF = 0x1001
-SO_RCVBUF = 0x1002
-SO_SNDLOWAT = 0x1003
-SO_RCVLOWAT = 0x1004
-SO_SNDTIMEO = 0x1005
-SO_RCVTIMEO = 0x1006
-SO_ERROR = 0x1007
-SO_TYPE = 0x1008
-SO_OPTIONS = 0x1010
-SOL_SOCKET = 0xffff
-AF_UNSPEC = 0
-AF_UNIX = 1
-AF_INET = 2
-AF_IMPLINK = 3
-AF_PUP = 4
-AF_CHAOS = 5
-AF_NS = 6
-AF_NBS = 7
-AF_ISO = 7
-AF_OSI = AF_ISO
-AF_ECMA = 8
-AF_DATAKIT = 9
-AF_CCITT = 10
-AF_SNA = 11
-AF_DECnet = 12
-AF_DLI = 13
-AF_LAT = 14
-AF_HYLINK = 15
-AF_APPLETALK = 16
-AF_NB = 17
-AF_NETBIOS = AF_NB
-AF_OS2 = AF_UNIX
-AF_MAX = 18
-PF_UNSPEC = AF_UNSPEC
-PF_UNIX = AF_UNIX
-PF_INET = AF_INET
-PF_IMPLINK = AF_IMPLINK
-PF_PUP = AF_PUP
-PF_CHAOS = AF_CHAOS
-PF_NS = AF_NS
-PF_NBS = AF_NBS
-PF_ISO = AF_ISO
-PF_OSI = AF_ISO
-PF_ECMA = AF_ECMA
-PF_DATAKIT = AF_DATAKIT
-PF_CCITT = AF_CCITT
-PF_SNA = AF_SNA
-PF_DECnet = AF_DECnet
-PF_DLI = AF_DLI
-PF_LAT = AF_LAT
-PF_HYLINK = AF_HYLINK
-PF_APPLETALK = AF_APPLETALK
-PF_NB = AF_NB
-PF_NETBIOS = AF_NB
-PF_OS2 = AF_UNIX
-PF_MAX = AF_MAX
-SOMAXCONN = 5
-MSG_OOB = 0x1
-MSG_PEEK = 0x2
-MSG_DONTROUTE = 0x4
-MSG_EOR = 0x8
-MSG_TRUNC = 0x10
-MSG_CTRUNC = 0x20
-MSG_WAITALL = 0x40
-MSG_MAXIOVLEN = 16
-SCM_RIGHTS = 0x01
-MT_FREE = 0
-MT_DATA = 1
-MT_HEADER = 2
-MT_SOCKET = 3
-MT_PCB = 4
-MT_RTABLE = 5
-MT_HTABLE = 6
-MT_ATABLE = 7
-MT_SONAME = 8
-MT_ZOMBIE = 9
-MT_SOOPTS = 10
-MT_FTABLE = 11
-MT_RIGHTS = 12
-MT_IFADDR = 13
-MAXSOCKETS = 2048
diff --git a/Lib/plat-os2emx/_emx_link.py b/Lib/plat-os2emx/_emx_link.py
deleted file mode 100644
index 01e6b54..0000000
--- a/Lib/plat-os2emx/_emx_link.py
+++ /dev/null
@@ -1,79 +0,0 @@
-# _emx_link.py
-
-# Written by Andrew I MacIntyre, December 2002.
-
-"""_emx_link.py is a simplistic emulation of the Unix link(2) library routine
-for creating so-called hard links. It is intended to be imported into
-the os module in place of the unimplemented (on OS/2) Posix link()
-function (os.link()).
-
-We do this on OS/2 by implementing a file copy, with link(2) semantics:-
- - the target cannot already exist;
- - we hope that the actual file open (if successful) is actually
- atomic...
-
-Limitations of this approach/implementation include:-
- - no support for correct link counts (EMX stat(target).st_nlink
- is always 1);
- - thread safety undefined;
- - default file permissions (r+w) used, can't be over-ridden;
- - implemented in Python so comparatively slow, especially for large
- source files;
- - need sufficient free disk space to store the copy.
-
-Behaviour:-
- - any exception should propagate to the caller;
- - want target to be an exact copy of the source, so use binary mode;
- - returns None, same as os.link() which is implemented in posixmodule.c;
- - target removed in the event of a failure where possible;
- - given the motivation to write this emulation came from trying to
- support a Unix resource lock implementation, where minimal overhead
- during creation of the target is desirable and the files are small,
- we read a source block before attempting to create the target so that
- we're ready to immediately write some data into it.
-"""
-
-import os
-import errno
-
-__all__ = ['link']
-
-def link(source, target):
- """link(source, target) -> None
-
- Attempt to hard link the source file to the target file name.
- On OS/2, this creates a complete copy of the source file.
- """
-
- s = os.open(source, os.O_RDONLY | os.O_BINARY)
- if os.isatty(s):
- raise OSError(errno.EXDEV, 'Cross-device link')
- data = os.read(s, 1024)
-
- try:
- t = os.open(target, os.O_WRONLY | os.O_BINARY | os.O_CREAT | os.O_EXCL)
- except OSError:
- os.close(s)
- raise
-
- try:
- while data:
- os.write(t, data)
- data = os.read(s, 1024)
- except OSError:
- os.close(s)
- os.close(t)
- os.unlink(target)
- raise
-
- os.close(s)
- os.close(t)
-
-if __name__ == '__main__':
- import sys
- try:
- link(sys.argv[1], sys.argv[2])
- except IndexError:
- print('Usage: emx_link <source> <target>')
- except OSError:
- print('emx_link: %s' % str(sys.exc_info()[1]))
diff --git a/Lib/plat-os2emx/grp.py b/Lib/plat-os2emx/grp.py
deleted file mode 100644
index ee63ef8..0000000
--- a/Lib/plat-os2emx/grp.py
+++ /dev/null
@@ -1,182 +0,0 @@
-# this module is an OS/2 oriented replacement for the grp standard
-# extension module.
-
-# written by Andrew MacIntyre, April 2001.
-# updated July 2003, adding field accessor support
-
-# note that this implementation checks whether ":" or ";" as used as
-# the field separator character.
-
-"""Replacement for grp standard extension module, intended for use on
-OS/2 and similar systems which don't normally have an /etc/group file.
-
-The standard Unix group database is an ASCII text file with 4 fields per
-record (line), separated by a colon:
- - group name (string)
- - group password (optional encrypted string)
- - group id (integer)
- - group members (comma delimited list of userids, with no spaces)
-
-Note that members are only included in the group file for groups that
-aren't their primary groups.
-(see the section 8.2 of the Python Library Reference)
-
-This implementation differs from the standard Unix implementation by
-allowing use of the platform's native path separator character - ';' on OS/2,
-DOS and MS-Windows - as the field separator in addition to the Unix
-standard ":".
-
-The module looks for the group database at the following locations
-(in order first to last):
- - ${ETC_GROUP} (or %ETC_GROUP%)
- - ${ETC}/group (or %ETC%/group)
- - ${PYTHONHOME}/Etc/group (or %PYTHONHOME%/Etc/group)
-
-Classes
--------
-
-None
-
-Functions
----------
-
-getgrgid(gid) - return the record for group-id gid as a 4-tuple
-
-getgrnam(name) - return the record for group 'name' as a 4-tuple
-
-getgrall() - return a list of 4-tuples, each tuple being one record
- (NOTE: the order is arbitrary)
-
-Attributes
-----------
-
-group_file - the path of the group database file
-
-"""
-
-import os
-
-# try and find the group file
-__group_path = []
-if 'ETC_GROUP' in os.environ:
- __group_path.append(os.environ['ETC_GROUP'])
-if 'ETC' in os.environ:
- __group_path.append('%s/group' % os.environ['ETC'])
-if 'PYTHONHOME' in os.environ:
- __group_path.append('%s/Etc/group' % os.environ['PYTHONHOME'])
-
-group_file = None
-for __i in __group_path:
- try:
- __f = open(__i, 'r')
- __f.close()
- group_file = __i
- break
- except:
- pass
-
-# decide what field separator we can try to use - Unix standard, with
-# the platform's path separator as an option. No special field conversion
-# handlers are required for the group file.
-__field_sep = [':']
-if os.pathsep:
- if os.pathsep != ':':
- __field_sep.append(os.pathsep)
-
-# helper routine to identify which separator character is in use
-def __get_field_sep(record):
- fs = None
- for c in __field_sep:
- # there should be 3 delimiter characters (for 4 fields)
- if record.count(c) == 3:
- fs = c
- break
- if fs:
- return fs
- else:
- raise KeyError('>> group database fields not delimited <<')
-
-# class to match the new record field name accessors.
-# the resulting object is intended to behave like a read-only tuple,
-# with each member also accessible by a field name.
-class Group:
- def __init__(self, name, passwd, gid, mem):
- self.__dict__['gr_name'] = name
- self.__dict__['gr_passwd'] = passwd
- self.__dict__['gr_gid'] = gid
- self.__dict__['gr_mem'] = mem
- self.__dict__['_record'] = (self.gr_name, self.gr_passwd,
- self.gr_gid, self.gr_mem)
-
- def __len__(self):
- return 4
-
- def __getitem__(self, key):
- return self._record[key]
-
- def __setattr__(self, name, value):
- raise AttributeError('attribute read-only: %s' % name)
-
- def __repr__(self):
- return str(self._record)
-
- def __cmp__(self, other):
- this = str(self._record)
- if this == other:
- return 0
- elif this < other:
- return -1
- else:
- return 1
-
-
-# read the whole file, parsing each entry into tuple form
-# with dictionaries to speed recall by GID or group name
-def __read_group_file():
- if group_file:
- group = open(group_file, 'r')
- else:
- raise KeyError('>> no group database <<')
- gidx = {}
- namx = {}
- sep = None
- while 1:
- entry = group.readline().strip()
- if len(entry) > 3:
- if sep is None:
- sep = __get_field_sep(entry)
- fields = entry.split(sep)
- fields[2] = int(fields[2])
- fields[3] = [f.strip() for f in fields[3].split(',')]
- record = Group(*fields)
- if fields[2] not in gidx:
- gidx[fields[2]] = record
- if fields[0] not in namx:
- namx[fields[0]] = record
- elif len(entry) > 0:
- pass # skip empty or malformed records
- else:
- break
- group.close()
- if len(gidx) == 0:
- raise KeyError
- return (gidx, namx)
-
-# return the group database entry by GID
-def getgrgid(gid):
- g, n = __read_group_file()
- return g[gid]
-
-# return the group database entry by group name
-def getgrnam(name):
- g, n = __read_group_file()
- return n[name]
-
-# return all the group database entries
-def getgrall():
- g, n = __read_group_file()
- return g.values()
-
-# test harness
-if __name__ == '__main__':
- getgrall()
diff --git a/Lib/plat-os2emx/pwd.py b/Lib/plat-os2emx/pwd.py
deleted file mode 100644
index 2cb077f..0000000
--- a/Lib/plat-os2emx/pwd.py
+++ /dev/null
@@ -1,208 +0,0 @@
-# this module is an OS/2 oriented replacement for the pwd standard
-# extension module.
-
-# written by Andrew MacIntyre, April 2001.
-# updated July 2003, adding field accessor support
-
-# note that this implementation checks whether ":" or ";" as used as
-# the field separator character. Path conversions are are applied when
-# the database uses ":" as the field separator character.
-
-"""Replacement for pwd standard extension module, intended for use on
-OS/2 and similar systems which don't normally have an /etc/passwd file.
-
-The standard Unix password database is an ASCII text file with 7 fields
-per record (line), separated by a colon:
- - user name (string)
- - password (encrypted string, or "*" or "")
- - user id (integer)
- - group id (integer)
- - description (usually user's name)
- - home directory (path to user's home directory)
- - shell (path to the user's login shell)
-
-(see the section 8.1 of the Python Library Reference)
-
-This implementation differs from the standard Unix implementation by
-allowing use of the platform's native path separator character - ';' on OS/2,
-DOS and MS-Windows - as the field separator in addition to the Unix
-standard ":". Additionally, when ":" is the separator path conversions
-are applied to deal with any munging of the drive letter reference.
-
-The module looks for the password database at the following locations
-(in order first to last):
- - ${ETC_PASSWD} (or %ETC_PASSWD%)
- - ${ETC}/passwd (or %ETC%/passwd)
- - ${PYTHONHOME}/Etc/passwd (or %PYTHONHOME%/Etc/passwd)
-
-Classes
--------
-
-None
-
-Functions
----------
-
-getpwuid(uid) - return the record for user-id uid as a 7-tuple
-
-getpwnam(name) - return the record for user 'name' as a 7-tuple
-
-getpwall() - return a list of 7-tuples, each tuple being one record
- (NOTE: the order is arbitrary)
-
-Attributes
-----------
-
-passwd_file - the path of the password database file
-
-"""
-
-import os
-
-# try and find the passwd file
-__passwd_path = []
-if 'ETC_PASSWD' in os.environ:
- __passwd_path.append(os.environ['ETC_PASSWD'])
-if 'ETC' in os.environ:
- __passwd_path.append('%s/passwd' % os.environ['ETC'])
-if 'PYTHONHOME' in os.environ:
- __passwd_path.append('%s/Etc/passwd' % os.environ['PYTHONHOME'])
-
-passwd_file = None
-for __i in __passwd_path:
- try:
- __f = open(__i, 'r')
- __f.close()
- passwd_file = __i
- break
- except:
- pass
-
-# path conversion handlers
-def __nullpathconv(path):
- return path.replace(os.altsep, os.sep)
-
-def __unixpathconv(path):
- # two known drive letter variations: "x;" and "$x"
- if path[0] == '$':
- conv = path[1] + ':' + path[2:]
- elif path[1] == ';':
- conv = path[0] + ':' + path[2:]
- else:
- conv = path
- return conv.replace(os.altsep, os.sep)
-
-# decide what field separator we can try to use - Unix standard, with
-# the platform's path separator as an option. No special field conversion
-# handler is required when using the platform's path separator as field
-# separator, but are required for the home directory and shell fields when
-# using the standard Unix (":") field separator.
-__field_sep = {':': __unixpathconv}
-if os.pathsep:
- if os.pathsep != ':':
- __field_sep[os.pathsep] = __nullpathconv
-
-# helper routine to identify which separator character is in use
-def __get_field_sep(record):
- fs = None
- for c in __field_sep.keys():
- # there should be 6 delimiter characters (for 7 fields)
- if record.count(c) == 6:
- fs = c
- break
- if fs:
- return fs
- else:
- raise KeyError('>> passwd database fields not delimited <<')
-
-# class to match the new record field name accessors.
-# the resulting object is intended to behave like a read-only tuple,
-# with each member also accessible by a field name.
-class Passwd:
- def __init__(self, name, passwd, uid, gid, gecos, dir, shell):
- self.__dict__['pw_name'] = name
- self.__dict__['pw_passwd'] = passwd
- self.__dict__['pw_uid'] = uid
- self.__dict__['pw_gid'] = gid
- self.__dict__['pw_gecos'] = gecos
- self.__dict__['pw_dir'] = dir
- self.__dict__['pw_shell'] = shell
- self.__dict__['_record'] = (self.pw_name, self.pw_passwd,
- self.pw_uid, self.pw_gid,
- self.pw_gecos, self.pw_dir,
- self.pw_shell)
-
- def __len__(self):
- return 7
-
- def __getitem__(self, key):
- return self._record[key]
-
- def __setattr__(self, name, value):
- raise AttributeError('attribute read-only: %s' % name)
-
- def __repr__(self):
- return str(self._record)
-
- def __cmp__(self, other):
- this = str(self._record)
- if this == other:
- return 0
- elif this < other:
- return -1
- else:
- return 1
-
-
-# read the whole file, parsing each entry into tuple form
-# with dictionaries to speed recall by UID or passwd name
-def __read_passwd_file():
- if passwd_file:
- passwd = open(passwd_file, 'r')
- else:
- raise KeyError('>> no password database <<')
- uidx = {}
- namx = {}
- sep = None
- while True:
- entry = passwd.readline().strip()
- if len(entry) > 6:
- if sep is None:
- sep = __get_field_sep(entry)
- fields = entry.split(sep)
- for i in (2, 3):
- fields[i] = int(fields[i])
- for i in (5, 6):
- fields[i] = __field_sep[sep](fields[i])
- record = Passwd(*fields)
- if fields[2] not in uidx:
- uidx[fields[2]] = record
- if fields[0] not in namx:
- namx[fields[0]] = record
- elif len(entry) > 0:
- pass # skip empty or malformed records
- else:
- break
- passwd.close()
- if len(uidx) == 0:
- raise KeyError
- return (uidx, namx)
-
-# return the passwd database entry by UID
-def getpwuid(uid):
- u, n = __read_passwd_file()
- return u[uid]
-
-# return the passwd database entry by passwd name
-def getpwnam(name):
- u, n = __read_passwd_file()
- return n[name]
-
-# return all the passwd database entries
-def getpwall():
- u, n = __read_passwd_file()
- return n.values()
-
-# test harness
-if __name__ == '__main__':
- getpwall()
diff --git a/Lib/plat-os2emx/regen b/Lib/plat-os2emx/regen
deleted file mode 100755
index 3ecd2a8..0000000
--- a/Lib/plat-os2emx/regen
+++ /dev/null
@@ -1,7 +0,0 @@
-#! /bin/sh
-export INCLUDE=$C_INCLUDE_PATH
-set -v
-python.exe ../../Tools/scripts/h2py.py $C_INCLUDE_PATH/fcntl.h
-python.exe ../../Tools/scripts/h2py.py $C_INCLUDE_PATH/sys/socket.h
-python.exe ../../Tools/scripts/h2py.py -i '(u_long)' $C_INCLUDE_PATH/netinet/in.h
-#python.exe ../../Tools/scripts/h2py.py $C_INCLUDE_PATH/termios.h
diff --git a/Lib/platform.py b/Lib/platform.py
index 2b8a24a..db08eab 100755
--- a/Lib/platform.py
+++ b/Lib/platform.py
@@ -122,7 +122,7 @@ try:
except AttributeError:
# os.devnull was added in Python 2.4, so emulate it for earlier
# Python versions
- if sys.platform in ('dos','win32','win16','os2'):
+ if sys.platform in ('dos','win32','win16'):
# Use the old CP/M NUL as device name
DEV_NULL = 'NUL'
else:
@@ -403,13 +403,13 @@ _ver_output = re.compile(r'(?:([\w ]+) ([\w.]+) '
def _syscmd_ver(system='', release='', version='',
- supported_platforms=('win32','win16','dos','os2')):
+ supported_platforms=('win32','win16','dos')):
""" Tries to figure out the OS version used and returns
a tuple (system,release,version).
It uses the "ver" shell command for this which is known
- to exists on Windows, DOS and OS/2. XXX Others too ?
+ to exists on Windows, DOS. XXX Others too ?
In case this fails, the given parameters are used as
defaults.
@@ -901,7 +901,7 @@ def _syscmd_uname(option,default=''):
""" Interface to the system's uname command.
"""
- if sys.platform in ('dos','win32','win16','os2'):
+ if sys.platform in ('dos','win32','win16'):
# XXX Others too ?
return default
try:
@@ -924,7 +924,7 @@ def _syscmd_file(target,default=''):
default in case the command should fail.
"""
- if sys.platform in ('dos','win32','win16','os2'):
+ if sys.platform in ('dos','win32','win16'):
# XXX Others too ?
return default
target = _follow_symlinks(target)
diff --git a/Lib/pty.py b/Lib/pty.py
index 3ccf619..3b79202 100644
--- a/Lib/pty.py
+++ b/Lib/pty.py
@@ -178,3 +178,4 @@ def spawn(argv, master_read=_read, stdin_read=_read):
tty.tcsetattr(STDIN_FILENO, tty.TCSAFLUSH, mode)
os.close(master_fd)
+ return os.waitpid(pid, 0)[1]
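
A sketch of what the new return value enables (POSIX only; the command is just an example): pty.spawn() now hands back the raw status word from os.waitpid(), which can be unpacked with the usual os helpers.

    import os
    import pty

    status = pty.spawn(["true"])            # run a trivial command under a pty
    if os.WIFEXITED(status):
        print("exit code:", os.WEXITSTATUS(status))
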
diff --git a/Lib/pydoc.py b/Lib/pydoc.py
index fa531e9..490eb21 100755
--- a/Lib/pydoc.py
+++ b/Lib/pydoc.py
@@ -1393,7 +1393,7 @@ def getpager():
return lambda text: pipepager(text, os.environ['PAGER'])
if os.environ.get('TERM') in ('dumb', 'emacs'):
return plainpager
- if sys.platform == 'win32' or sys.platform.startswith('os2'):
+ if sys.platform == 'win32':
return lambda text: tempfilepager(plain(text), 'more <')
if hasattr(os, 'system') and os.system('(less) 2>/dev/null') == 0:
return lambda text: pipepager(text, 'less')
diff --git a/Lib/random.py b/Lib/random.py
index 6388f29..2ad3809 100644
--- a/Lib/random.py
+++ b/Lib/random.py
@@ -252,10 +252,11 @@ class Random(_random.Random):
return seq[i]
def shuffle(self, x, random=None, int=int):
- """x, random=random.random -> shuffle list x in place; return None.
+ """Shuffle list x in place, and return None.
- Optional arg random is a 0-argument function returning a random
- float in [0.0, 1.0); by default, the standard random.random.
+ Optional argument random is a 0-argument function returning a
+ random float in [0.0, 1.0); if it is the default None, the
+ standard random.random will be used.
"""
randbelow = self._randbelow
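
A small usage sketch of the clarified signature: the optional argument is a 0-argument callable returning floats in [0.0, 1.0), which lets shuffle() draw from a source other than the module-level random.random.

    import random

    deck = list(range(10))
    rng = random.Random(12345)          # a seeded, reproducible generator
    random.shuffle(deck, rng.random)    # pass its .random as the optional argument
    print(deck)
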
diff --git a/Lib/shelve.py b/Lib/shelve.py
index cc1815e..cef580e 100644
--- a/Lib/shelve.py
+++ b/Lib/shelve.py
@@ -61,7 +61,7 @@ from io import BytesIO
import collections
-__all__ = ["Shelf","BsdDbShelf","DbfilenameShelf","open"]
+__all__ = ["Shelf", "BsdDbShelf", "DbfilenameShelf", "open"]
class _ClosedDict(collections.MutableMapping):
'Marker for a closed dict. Access attempts raise a ValueError.'
@@ -131,6 +131,12 @@ class Shelf(collections.MutableMapping):
except KeyError:
pass
+ def __enter__(self):
+ return self
+
+ def __exit__(self, type, value, traceback):
+ self.close()
+
def close(self):
self.sync()
try:
@@ -147,6 +153,7 @@ class Shelf(collections.MutableMapping):
def __del__(self):
if not hasattr(self, 'writeback'):
# __init__ didn't succeed, so don't bother closing
+ # see http://bugs.python.org/issue1339007 for details
return
self.close()
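
Usage sketch for the new context-manager support (the shelf name is arbitrary): Shelf.__exit__ calls close(), so a with-statement now flushes and closes the shelf automatically.

    import shelve

    with shelve.open("example_shelf") as db:   # closed automatically on exit
        db["answer"] = 42

    with shelve.open("example_shelf") as db:
        print(db["answer"])                    # 42
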
diff --git a/Lib/shutil.py b/Lib/shutil.py
index 5dc311e..27239f6 100644
--- a/Lib/shutil.py
+++ b/Lib/shutil.py
@@ -42,6 +42,9 @@ __all__ = ["copyfileobj", "copyfile", "copymode", "copystat", "copy", "copy2",
class Error(EnvironmentError):
pass
+class SameFileError(Error):
+ """Raised when source and destination are the same file."""
+
class SpecialFileError(EnvironmentError):
"""Raised when trying to do a kind of operation (e.g. copying) which is
not supported on a special file (e.g. a named pipe)"""
@@ -90,7 +93,7 @@ def copyfile(src, dst, *, follow_symlinks=True):
"""
if _samefile(src, dst):
- raise Error("`%s` and `%s` are the same file" % (src, dst))
+ raise SameFileError("{!r} and {!r} are the same file".format(src, dst))
for fn in [src, dst]:
try:
@@ -215,6 +218,9 @@ def copy(src, dst, *, follow_symlinks=True):
If follow_symlinks is false, symlinks won't be followed. This
resembles GNU's "cp -P src dst".
+ If source and destination are the same file, a SameFileError will be
+ raised.
+
"""
if os.path.isdir(dst):
dst = os.path.join(dst, os.path.basename(src))
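
A short sketch of the new exception (paths are illustrative): SameFileError subclasses shutil.Error, so existing handlers keep working while new code can catch the specific same-file case.

    import shutil

    with open("data.txt", "w") as f:           # create something to copy
        f.write("hello\n")

    try:
        shutil.copyfile("data.txt", "data.txt")
    except shutil.SameFileError as exc:        # subclass of shutil.Error
        print("skipped:", exc)
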
diff --git a/Lib/site.py b/Lib/site.py
index 0aaf46b..468d83e 100644
--- a/Lib/site.py
+++ b/Lib/site.py
@@ -300,7 +300,7 @@ def getsitepackages(prefixes=None):
continue
seen.add(prefix)
- if sys.platform in ('os2emx', 'riscos'):
+ if sys.platform == 'riscos':
sitepackages.append(os.path.join(prefix, "Lib", "site-packages"))
elif os.sep == '/':
sitepackages.append(os.path.join(prefix, "lib",
@@ -329,23 +329,6 @@ def addsitepackages(known_paths, prefixes=None):
return known_paths
-def setBEGINLIBPATH():
- """The OS/2 EMX port has optional extension modules that do double duty
- as DLLs (and must use the .DLL file extension) for other extensions.
- The library search path needs to be amended so these will be found
- during module import. Use BEGINLIBPATH so that these are at the start
- of the library search path.
-
- """
- dllpath = os.path.join(sys.prefix, "Lib", "lib-dynload")
- libpath = os.environ['BEGINLIBPATH'].split(';')
- if libpath[-1]:
- libpath.append(dllpath)
- else:
- libpath[-1] = dllpath
- os.environ['BEGINLIBPATH'] = ';'.join(libpath)
-
-
def setquit():
"""Define new builtins 'quit' and 'exit'.
@@ -595,8 +578,6 @@ def main():
ENABLE_USER_SITE = check_enableusersite()
known_paths = addusersitepackages(known_paths)
known_paths = addsitepackages(known_paths)
- if sys.platform == 'os2emx':
- setBEGINLIBPATH()
setquit()
setcopyright()
sethelper()
diff --git a/Lib/sysconfig.py b/Lib/sysconfig.py
index 71da1db..e06e82a 100644
--- a/Lib/sysconfig.py
+++ b/Lib/sysconfig.py
@@ -52,25 +52,6 @@ _INSTALL_SCHEMES = {
'scripts': '{base}/Scripts',
'data': '{base}',
},
- 'os2': {
- 'stdlib': '{installed_base}/Lib',
- 'platstdlib': '{base}/Lib',
- 'purelib': '{base}/Lib/site-packages',
- 'platlib': '{base}/Lib/site-packages',
- 'include': '{installed_base}/Include',
- 'platinclude': '{installed_base}/Include',
- 'scripts': '{base}/Scripts',
- 'data': '{base}',
- },
- 'os2_home': {
- 'stdlib': '{userbase}/lib/python{py_version_short}',
- 'platstdlib': '{userbase}/lib/python{py_version_short}',
- 'purelib': '{userbase}/lib/python{py_version_short}/site-packages',
- 'platlib': '{userbase}/lib/python{py_version_short}/site-packages',
- 'include': '{userbase}/include/python{py_version_short}',
- 'scripts': '{userbase}/bin',
- 'data': '{userbase}',
- },
'nt_user': {
'stdlib': '{userbase}/Python{py_version_nodot}',
'platstdlib': '{userbase}/Python{py_version_nodot}',
@@ -210,7 +191,7 @@ def _getuserbase():
def joinuser(*args):
return os.path.expanduser(os.path.join(*args))
- # what about 'os2emx', 'riscos' ?
+ # what about 'riscos' ?
if os.name == "nt":
base = os.environ.get("APPDATA") or "~"
if env_base:
@@ -551,7 +532,7 @@ def get_config_vars(*args):
# sys.abiflags may not be defined on all platforms.
_CONFIG_VARS['abiflags'] = ''
- if os.name in ('nt', 'os2'):
+ if os.name == 'nt':
_init_non_posix(_CONFIG_VARS)
if os.name == 'posix':
_init_posix(_CONFIG_VARS)
diff --git a/Lib/tarfile.py b/Lib/tarfile.py
index 7b9f407..a88224d 100644
--- a/Lib/tarfile.py
+++ b/Lib/tarfile.py
@@ -2213,8 +2213,7 @@ class TarFile(object):
if tarinfo.issym() and hasattr(os, "lchown"):
os.lchown(targetpath, u, g)
else:
- if sys.platform != "os2emx":
- os.chown(targetpath, u, g)
+ os.chown(targetpath, u, g)
except EnvironmentError as e:
raise ExtractError("could not change owner")
diff --git a/Lib/test/regrtest.py b/Lib/test/regrtest.py
index 8f3b689..c342d43 100755
--- a/Lib/test/regrtest.py
+++ b/Lib/test/regrtest.py
@@ -1621,20 +1621,6 @@ _expectations = (
test_ossaudiodev
test_socketserver
"""),
- ('os2emx',
- """
- test_audioop
- test_curses
- test_epoll
- test_kqueue
- test_largefile
- test_mmap
- test_openpty
- test_ossaudiodev
- test_pty
- test_resource
- test_signal
- """),
('freebsd',
"""
test_devpoll
diff --git a/Lib/test/support.py b/Lib/test/support.py
index c5640e0..ec4b47d 100644
--- a/Lib/test/support.py
+++ b/Lib/test/support.py
@@ -603,6 +603,49 @@ else:
# module name.
TESTFN = "{}_{}_tmp".format(TESTFN, os.getpid())
+# FS_NONASCII: non-ASCII character encodable by os.fsencode(),
+# or None if there is no such character.
+FS_NONASCII = None
+for character in (
+ # First try printable and common characters to have a readable filename.
+ # For each character, the encoding list are just example of encodings able
+ # to encode the character (the list is not exhaustive).
+
+ # U+00E6 (Latin Small Letter Ae): cp1252, iso-8859-1
+ '\u00E6',
+ # U+0130 (Latin Capital Letter I With Dot Above): cp1254, iso8859_3
+ '\u0130',
+ # U+0141 (Latin Capital Letter L With Stroke): cp1250, cp1257
+ '\u0141',
+ # U+03C6 (Greek Small Letter Phi): cp1253
+ '\u03C6',
+ # U+041A (Cyrillic Capital Letter Ka): cp1251
+ '\u041A',
+ # U+05D0 (Hebrew Letter Alef): Encodable to cp424
+ '\u05D0',
+ # U+060C (Arabic Comma): cp864, cp1006, iso8859_6, mac_arabic
+ '\u060C',
+ # U+062A (Arabic Letter Teh): cp720
+ '\u062A',
+ # U+0E01 (Thai Character Ko Kai): cp874
+ '\u0E01',
+
+ # Then try more "special" characters. "special" because they may be
+ # interpreted or displayed differently depending on the exact locale
+ # encoding and the font.
+
+ # U+00A0 (No-Break Space)
+ '\u00A0',
+ # U+20AC (Euro Sign)
+ '\u20AC',
+):
+ try:
+ os.fsdecode(os.fsencode(character))
+ except UnicodeError:
+ pass
+ else:
+ FS_NONASCII = character
+ break
# TESTFN_UNICODE is a non-ascii filename
TESTFN_UNICODE = TESTFN + "-\xe0\xf2\u0258\u0141\u011f"
@@ -647,6 +690,26 @@ elif sys.platform != 'darwin':
# the byte 0xff. Skip some unicode filename tests.
pass
+# TESTFN_UNDECODABLE is a filename (bytes type) that should *not* be able to be
+# decoded from the filesystem encoding (in strict mode). It can be None if we
+# cannot generate such filename.
+TESTFN_UNDECODABLE = None
+# b'\xff' is not decodable by os.fsdecode() with code page 932. Windows
+# accepts it when creating a file or a directory, but may refuse to enter
+# such a directory (when the bytes name is used). So test b'\xe7' first:
+# it is not decodable from cp932.
+for name in (b'\xe7w\xf0', b'abc\xff'):
+ try:
+ os.fsdecode(name)
+ except UnicodeDecodeError:
+ TESTFN_UNDECODABLE = name
+ break
+
+if FS_NONASCII:
+ TESTFN_NONASCII = TESTFN + '-' + FS_NONASCII
+else:
+ TESTFN_NONASCII = None
+
# Save the initial cwd
SAVEDCWD = os.getcwd()
diff --git a/Lib/test/test_bytes.py b/Lib/test/test_bytes.py
index fe6e939..8ae90e4 100644
--- a/Lib/test/test_bytes.py
+++ b/Lib/test/test_bytes.py
@@ -288,8 +288,22 @@ class BaseBytesTest(unittest.TestCase):
self.assertEqual(self.type2test(b"").join(lst), b"abc")
self.assertEqual(self.type2test(b"").join(tuple(lst)), b"abc")
self.assertEqual(self.type2test(b"").join(iter(lst)), b"abc")
- self.assertEqual(self.type2test(b".").join([b"ab", b"cd"]), b"ab.cd")
- # XXX more...
+ dot_join = self.type2test(b".:").join
+ self.assertEqual(dot_join([b"ab", b"cd"]), b"ab.:cd")
+ self.assertEqual(dot_join([memoryview(b"ab"), b"cd"]), b"ab.:cd")
+ self.assertEqual(dot_join([b"ab", memoryview(b"cd")]), b"ab.:cd")
+ self.assertEqual(dot_join([bytearray(b"ab"), b"cd"]), b"ab.:cd")
+ self.assertEqual(dot_join([b"ab", bytearray(b"cd")]), b"ab.:cd")
+ # Stress it with many items
+ seq = [b"abc"] * 1000
+ expected = b"abc" + b".:abc" * 999
+ self.assertEqual(dot_join(seq), expected)
+ # Error handling and cleanup when some item in the middle of the
+ # sequence has the wrong type.
+ with self.assertRaises(TypeError):
+ dot_join([bytearray(b"ab"), "cd", b"ef"])
+ with self.assertRaises(TypeError):
+ dot_join([memoryview(b"ab"), "cd", b"ef"])
def test_count(self):
b = self.type2test(b'mississippi')
@@ -1267,6 +1281,11 @@ class BytearrayPEP3137Test(unittest.TestCase,
self.assertEqual(val, newval)
self.assertTrue(val is not newval,
expr+' returned val on a mutable object')
+ sep = self.marshal(b'')
+ newval = sep.join([val])
+ self.assertEqual(val, newval)
+ self.assertIsNot(val, newval)
+
class FixedStringTest(test.string_tests.BaseTest):
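For reference, a minimal illustration of the join() behaviour these new assertions pin down (independent of the test classes above): bytes.join() accepts any bytes-like items, joining a single item still returns a fresh object for mutable types, and a str item is rejected.

parts = [b"ab", bytearray(b"cd"), memoryview(b"ef")]
assert b".:".join(parts) == b"ab.:cd.:ef"       # mixed bytes-like items are fine

val = bytearray(b"abc")
joined = bytearray(b"").join([val])
assert joined == val and joined is not val      # equal value, but a new object

try:
    b".".join([b"ab", "cd", b"ef"])             # a str item raises TypeError
except TypeError:
    pass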
diff --git a/Lib/test/test_bz2.py b/Lib/test/test_bz2.py
index f4e81db..24a1813 100644
--- a/Lib/test/test_bz2.py
+++ b/Lib/test/test_bz2.py
@@ -1,6 +1,6 @@
#!/usr/bin/env python3
from test import support
-from test.support import TESTFN, bigmemtest, _4G
+from test.support import bigmemtest, _4G
import unittest
from io import BytesIO
@@ -18,10 +18,10 @@ except ImportError:
bz2 = support.import_module('bz2')
from bz2 import BZ2File, BZ2Compressor, BZ2Decompressor
-has_cmdline_bunzip2 = sys.platform not in ("win32", "os2emx")
class BaseTest(unittest.TestCase):
"Base for other testcases."
+
TEXT_LINES = [
b'root:x:0:0:root:/root:/bin/bash\n',
b'bin:x:1:1:bin:/bin:\n',
@@ -49,13 +49,17 @@ class BaseTest(unittest.TestCase):
DATA = b'BZh91AY&SY.\xc8N\x18\x00\x01>_\x80\x00\x10@\x02\xff\xf0\x01\x07n\x00?\xe7\xff\xe00\x01\x99\xaa\x00\xc0\x03F\x86\x8c#&\x83F\x9a\x03\x06\xa6\xd0\xa6\x93M\x0fQ\xa7\xa8\x06\x804hh\x12$\x11\xa4i4\xf14S\xd2<Q\xb5\x0fH\xd3\xd4\xdd\xd5\x87\xbb\xf8\x94\r\x8f\xafI\x12\xe1\xc9\xf8/E\x00pu\x89\x12]\xc9\xbbDL\nQ\x0e\t1\x12\xdf\xa0\xc0\x97\xac2O9\x89\x13\x94\x0e\x1c7\x0ed\x95I\x0c\xaaJ\xa4\x18L\x10\x05#\x9c\xaf\xba\xbc/\x97\x8a#C\xc8\xe1\x8cW\xf9\xe2\xd0\xd6M\xa7\x8bXa<e\x84t\xcbL\xb3\xa7\xd9\xcd\xd1\xcb\x84.\xaf\xb3\xab\xab\xad`n}\xa0lh\tE,\x8eZ\x15\x17VH>\x88\xe5\xcd9gd6\x0b\n\xe9\x9b\xd5\x8a\x99\xf7\x08.K\x8ev\xfb\xf7xw\xbb\xdf\xa1\x92\xf1\xdd|/";\xa2\xba\x9f\xd5\xb1#A\xb6\xf6\xb3o\xc9\xc5y\\\xebO\xe7\x85\x9a\xbc\xb6f8\x952\xd5\xd7"%\x89>V,\xf7\xa6z\xe2\x9f\xa3\xdf\x11\x11"\xd6E)I\xa9\x13^\xca\xf3r\xd0\x03U\x922\xf26\xec\xb6\xed\x8b\xc3U\x13\x9d\xc5\x170\xa4\xfa^\x92\xacDF\x8a\x97\xd6\x19\xfe\xdd\xb8\xbd\x1a\x9a\x19\xa3\x80ankR\x8b\xe5\xd83]\xa9\xc6\x08\x82f\xf6\xb9"6l$\xb8j@\xc0\x8a\xb0l1..\xbak\x83ls\x15\xbc\xf4\xc1\x13\xbe\xf8E\xb8\x9d\r\xa8\x9dk\x84\xd3n\xfa\xacQ\x07\xb1%y\xaav\xb4\x08\xe0z\x1b\x16\xf5\x04\xe9\xcc\xb9\x08z\x1en7.G\xfc]\xc9\x14\xe1B@\xbb!8`'
def setUp(self):
- self.filename = TESTFN
+ self.filename = support.TESTFN
def tearDown(self):
if os.path.isfile(self.filename):
os.unlink(self.filename)
- if has_cmdline_bunzip2:
+ if sys.platform == "win32":
+ # bunzip2 isn't available to run on Windows.
+ def decompress(self, data):
+ return bz2.decompress(data)
+ else:
def decompress(self, data):
pop = subprocess.Popen("bunzip2", shell=True,
stdin=subprocess.PIPE,
@@ -69,31 +73,21 @@ class BaseTest(unittest.TestCase):
ret = bz2.decompress(data)
return ret
- else:
- # bunzip2 isn't available to run on Windows.
- def decompress(self, data):
- return bz2.decompress(data)
class BZ2FileTest(BaseTest):
- "Test BZ2File type miscellaneous methods."
+ "Test the BZ2File class."
def createTempFile(self, streams=1):
with open(self.filename, "wb") as f:
f.write(self.DATA * streams)
def testBadArgs(self):
- with self.assertRaises(TypeError):
- BZ2File(123.456)
- with self.assertRaises(ValueError):
- BZ2File("/dev/null", "z")
- with self.assertRaises(ValueError):
- BZ2File("/dev/null", "rx")
- with self.assertRaises(ValueError):
- BZ2File("/dev/null", "rbt")
- with self.assertRaises(ValueError):
- BZ2File("/dev/null", compresslevel=0)
- with self.assertRaises(ValueError):
- BZ2File("/dev/null", compresslevel=10)
+ self.assertRaises(TypeError, BZ2File, 123.456)
+ self.assertRaises(ValueError, BZ2File, "/dev/null", "z")
+ self.assertRaises(ValueError, BZ2File, "/dev/null", "rx")
+ self.assertRaises(ValueError, BZ2File, "/dev/null", "rbt")
+ self.assertRaises(ValueError, BZ2File, "/dev/null", compresslevel=0)
+ self.assertRaises(ValueError, BZ2File, "/dev/null", compresslevel=10)
def testRead(self):
self.createTempFile()
@@ -214,9 +208,8 @@ class BZ2FileTest(BaseTest):
self.createTempFile()
bz2f = BZ2File(self.filename)
bz2f.close()
- self.assertRaises(ValueError, bz2f.__next__)
- # This call will deadlock if the above .__next__ call failed to
- # release the lock.
+ self.assertRaises(ValueError, next, bz2f)
+ # This call will deadlock if the above call failed to release the lock.
self.assertRaises(ValueError, bz2f.readlines)
def testWrite(self):
@@ -379,7 +372,7 @@ class BZ2FileTest(BaseTest):
bz2f.close()
self.assertRaises(ValueError, bz2f.seekable)
- bz2f = BZ2File(BytesIO(), mode="w")
+ bz2f = BZ2File(BytesIO(), "w")
try:
self.assertFalse(bz2f.seekable())
finally:
@@ -405,7 +398,7 @@ class BZ2FileTest(BaseTest):
bz2f.close()
self.assertRaises(ValueError, bz2f.readable)
- bz2f = BZ2File(BytesIO(), mode="w")
+ bz2f = BZ2File(BytesIO(), "w")
try:
self.assertFalse(bz2f.readable())
finally:
@@ -422,7 +415,7 @@ class BZ2FileTest(BaseTest):
bz2f.close()
self.assertRaises(ValueError, bz2f.writable)
- bz2f = BZ2File(BytesIO(), mode="w")
+ bz2f = BZ2File(BytesIO(), "w")
try:
self.assertTrue(bz2f.writable())
finally:
@@ -476,7 +469,7 @@ class BZ2FileTest(BaseTest):
# Issue #7205: Using a BZ2File from several threads shouldn't deadlock.
data = b"1" * 2**20
nthreads = 10
- with bz2.BZ2File(self.filename, 'wb') as f:
+ with BZ2File(self.filename, 'wb') as f:
def comp():
for i in range(5):
f.write(data)
@@ -487,28 +480,27 @@ class BZ2FileTest(BaseTest):
t.join()
def testWithoutThreading(self):
- bz2 = support.import_fresh_module("bz2", blocked=("threading",))
- with bz2.BZ2File(self.filename, "wb") as f:
+ module = support.import_fresh_module("bz2", blocked=("threading",))
+ with module.BZ2File(self.filename, "wb") as f:
f.write(b"abc")
- with bz2.BZ2File(self.filename, "rb") as f:
+ with module.BZ2File(self.filename, "rb") as f:
self.assertEqual(f.read(), b"abc")
def testMixedIterationAndReads(self):
self.createTempFile()
linelen = len(self.TEXT_LINES[0])
halflen = linelen // 2
- with bz2.BZ2File(self.filename) as bz2f:
+ with BZ2File(self.filename) as bz2f:
bz2f.read(halflen)
self.assertEqual(next(bz2f), self.TEXT_LINES[0][halflen:])
self.assertEqual(bz2f.read(), self.TEXT[linelen:])
- with bz2.BZ2File(self.filename) as bz2f:
+ with BZ2File(self.filename) as bz2f:
bz2f.readline()
self.assertEqual(next(bz2f), self.TEXT_LINES[1])
self.assertEqual(bz2f.readline(), self.TEXT_LINES[2])
- with bz2.BZ2File(self.filename) as bz2f:
+ with BZ2File(self.filename) as bz2f:
bz2f.readlines()
- with self.assertRaises(StopIteration):
- next(bz2f)
+ self.assertRaises(StopIteration, next, bz2f)
self.assertEqual(bz2f.readlines(), [])
def testMultiStreamOrdering(self):
@@ -576,6 +568,7 @@ class BZ2FileTest(BaseTest):
bz2f.seek(-150, 1)
self.assertEqual(bz2f.read(), self.TEXT[500-150:])
+
class BZ2CompressorTest(BaseTest):
def testCompress(self):
bz2c = BZ2Compressor()
@@ -688,97 +681,102 @@ class CompressDecompressTest(BaseTest):
class OpenTest(BaseTest):
+ "Test the open function."
+
+ def open(self, *args, **kwargs):
+ return bz2.open(*args, **kwargs)
+
def test_binary_modes(self):
- with bz2.open(self.filename, "wb") as f:
+ with self.open(self.filename, "wb") as f:
f.write(self.TEXT)
with open(self.filename, "rb") as f:
- file_data = bz2.decompress(f.read())
+ file_data = self.decompress(f.read())
self.assertEqual(file_data, self.TEXT)
- with bz2.open(self.filename, "rb") as f:
+ with self.open(self.filename, "rb") as f:
self.assertEqual(f.read(), self.TEXT)
- with bz2.open(self.filename, "ab") as f:
+ with self.open(self.filename, "ab") as f:
f.write(self.TEXT)
with open(self.filename, "rb") as f:
- file_data = bz2.decompress(f.read())
+ file_data = self.decompress(f.read())
self.assertEqual(file_data, self.TEXT * 2)
def test_implicit_binary_modes(self):
# Test implicit binary modes (no "b" or "t" in mode string).
- with bz2.open(self.filename, "w") as f:
+ with self.open(self.filename, "w") as f:
f.write(self.TEXT)
with open(self.filename, "rb") as f:
- file_data = bz2.decompress(f.read())
+ file_data = self.decompress(f.read())
self.assertEqual(file_data, self.TEXT)
- with bz2.open(self.filename, "r") as f:
+ with self.open(self.filename, "r") as f:
self.assertEqual(f.read(), self.TEXT)
- with bz2.open(self.filename, "a") as f:
+ with self.open(self.filename, "a") as f:
f.write(self.TEXT)
with open(self.filename, "rb") as f:
- file_data = bz2.decompress(f.read())
+ file_data = self.decompress(f.read())
self.assertEqual(file_data, self.TEXT * 2)
def test_text_modes(self):
text = self.TEXT.decode("ascii")
text_native_eol = text.replace("\n", os.linesep)
- with bz2.open(self.filename, "wt") as f:
+ with self.open(self.filename, "wt") as f:
f.write(text)
with open(self.filename, "rb") as f:
- file_data = bz2.decompress(f.read()).decode("ascii")
+ file_data = self.decompress(f.read()).decode("ascii")
self.assertEqual(file_data, text_native_eol)
- with bz2.open(self.filename, "rt") as f:
+ with self.open(self.filename, "rt") as f:
self.assertEqual(f.read(), text)
- with bz2.open(self.filename, "at") as f:
+ with self.open(self.filename, "at") as f:
f.write(text)
with open(self.filename, "rb") as f:
- file_data = bz2.decompress(f.read()).decode("ascii")
+ file_data = self.decompress(f.read()).decode("ascii")
self.assertEqual(file_data, text_native_eol * 2)
def test_fileobj(self):
- with bz2.open(BytesIO(self.DATA), "r") as f:
+ with self.open(BytesIO(self.DATA), "r") as f:
self.assertEqual(f.read(), self.TEXT)
- with bz2.open(BytesIO(self.DATA), "rb") as f:
+ with self.open(BytesIO(self.DATA), "rb") as f:
self.assertEqual(f.read(), self.TEXT)
text = self.TEXT.decode("ascii")
- with bz2.open(BytesIO(self.DATA), "rt") as f:
+ with self.open(BytesIO(self.DATA), "rt") as f:
self.assertEqual(f.read(), text)
def test_bad_params(self):
# Test invalid parameter combinations.
- with self.assertRaises(ValueError):
- bz2.open(self.filename, "wbt")
- with self.assertRaises(ValueError):
- bz2.open(self.filename, "rb", encoding="utf-8")
- with self.assertRaises(ValueError):
- bz2.open(self.filename, "rb", errors="ignore")
- with self.assertRaises(ValueError):
- bz2.open(self.filename, "rb", newline="\n")
+ self.assertRaises(ValueError,
+ self.open, self.filename, "wbt")
+ self.assertRaises(ValueError,
+ self.open, self.filename, "rb", encoding="utf-8")
+ self.assertRaises(ValueError,
+ self.open, self.filename, "rb", errors="ignore")
+ self.assertRaises(ValueError,
+ self.open, self.filename, "rb", newline="\n")
def test_encoding(self):
# Test non-default encoding.
text = self.TEXT.decode("ascii")
text_native_eol = text.replace("\n", os.linesep)
- with bz2.open(self.filename, "wt", encoding="utf-16-le") as f:
+ with self.open(self.filename, "wt", encoding="utf-16-le") as f:
f.write(text)
with open(self.filename, "rb") as f:
- file_data = bz2.decompress(f.read()).decode("utf-16-le")
+ file_data = self.decompress(f.read()).decode("utf-16-le")
self.assertEqual(file_data, text_native_eol)
- with bz2.open(self.filename, "rt", encoding="utf-16-le") as f:
+ with self.open(self.filename, "rt", encoding="utf-16-le") as f:
self.assertEqual(f.read(), text)
def test_encoding_error_handler(self):
# Test with non-default encoding error handler.
- with bz2.open(self.filename, "wb") as f:
+ with self.open(self.filename, "wb") as f:
f.write(b"foo\xffbar")
- with bz2.open(self.filename, "rt", encoding="ascii", errors="ignore") \
+ with self.open(self.filename, "rt", encoding="ascii", errors="ignore") \
as f:
self.assertEqual(f.read(), "foobar")
def test_newline(self):
# Test with explicit newline (universal newline mode disabled).
text = self.TEXT.decode("ascii")
- with bz2.open(self.filename, "wt", newline="\n") as f:
+ with self.open(self.filename, "wt", newline="\n") as f:
f.write(text)
- with bz2.open(self.filename, "rt", newline="\r") as f:
+ with self.open(self.filename, "rt", newline="\r") as f:
self.assertEqual(f.readlines(), [text])
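The OpenTest class above exercises the bz2.open() convenience function: binary versus text modes, transparent compression, and the parameter checks. A self-contained sketch of that API (not part of the patch; the temporary file name is illustrative):

import bz2
import os
import tempfile

with tempfile.TemporaryDirectory() as tmpdir:
    path = os.path.join(tmpdir, 'example.bz2')

    with bz2.open(path, 'wt', encoding='utf-8') as f:   # text mode, compressed on the fly
        f.write('hello\nworld\n')
    with bz2.open(path, 'rt', encoding='utf-8') as f:   # transparent decompression
        assert f.read() == 'hello\nworld\n'
    with open(path, 'rb') as f:                         # the bytes on disk really are bz2 data
        assert bz2.decompress(f.read()).startswith(b'hello')

    try:
        bz2.open(path, 'wbt')                           # "b" and "t" are mutually exclusive
    except ValueError:
        pass
    try:
        bz2.open(path, 'rb', encoding='utf-8')          # encoding only applies to text mode
    except ValueError:
        pass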
diff --git a/Lib/test/test_cmd_line.py b/Lib/test/test_cmd_line.py
index 2b0c6e2..7a10288 100644
--- a/Lib/test/test_cmd_line.py
+++ b/Lib/test/test_cmd_line.py
@@ -93,15 +93,15 @@ class CmdLineTest(unittest.TestCase):
# All good if execution is successful
assert_python_ok('-c', 'pass')
- @unittest.skipIf(sys.getfilesystemencoding() == 'ascii',
- 'need a filesystem encoding different than ASCII')
+ @unittest.skipUnless(test.support.FS_NONASCII, 'need support.FS_NONASCII')
def test_non_ascii(self):
# Test handling of non-ascii data
if test.support.verbose:
import locale
print('locale encoding = %s, filesystem encoding = %s'
% (locale.getpreferredencoding(), sys.getfilesystemencoding()))
- command = "assert(ord('\xe9') == 0xe9)"
+ command = ("assert(ord(%r) == %s)"
+ % (test.support.FS_NONASCII, ord(test.support.FS_NONASCII)))
assert_python_ok('-c', command)
# On Windows, pass bytes to subprocess doesn't test how Python decodes the
@@ -216,6 +216,23 @@ class CmdLineTest(unittest.TestCase):
self.assertIn(path1.encode('ascii'), out)
self.assertIn(path2.encode('ascii'), out)
+ def test_empty_PYTHONPATH_issue16309(self):
+ """On Posix, it is documented that setting PATH to the
+ empty string is equivalent to not setting PATH at all,
+ which is an exception to the rule that in a string like
+ "/bin::/usr/bin" the empty string in the middle gets
+ interpreted as '.'"""
+ code = """if 1:
+ import sys
+ path = ":".join(sys.path)
+ path = path.encode("ascii", "backslashreplace")
+ sys.stdout.buffer.write(path)"""
+ rc1, out1, err1 = assert_python_ok('-c', code, PYTHONPATH="")
+ rc2, out2, err2 = assert_python_ok('-c', code)
+ # Per the POSIX convention described above, the output should be the
+ # same for an empty and an unset PYTHONPATH
+ self.assertEqual(out1, out2)
+
def test_displayhook_unencodable(self):
for encoding in ('ascii', 'latin-1', 'utf-8'):
env = os.environ.copy()
@@ -293,7 +310,7 @@ class CmdLineTest(unittest.TestCase):
rc, out, err = assert_python_ok('-c', code)
self.assertEqual(b'', out)
self.assertRegex(err.decode('ascii', 'ignore'),
- 'Exception OSError: .* ignored')
+ 'Exception ignored in.*\nOSError: .*')
def test_closed_stdout(self):
# Issue #13444: if stdout has been explicitly closed, we should
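The new PYTHONPATH check can be reproduced outside the test suite with plain subprocess calls; the sketch below is an illustration under that assumption, not the patch's assert_python_ok helper. It compares sys.path with PYTHONPATH set to the empty string and with the variable removed.

import os
import subprocess
import sys

code = 'import sys; print(":".join(sys.path))'

env_empty = dict(os.environ, PYTHONPATH='')                     # PYTHONPATH set to ""
env_unset = {k: v for k, v in os.environ.items()
             if k != 'PYTHONPATH'}                              # PYTHONPATH not set at all

out_empty = subprocess.check_output([sys.executable, '-c', code], env=env_empty)
out_unset = subprocess.check_output([sys.executable, '-c', code], env=env_unset)
assert out_empty == out_unset   # empty and unset PYTHONPATH should behave the same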
diff --git a/Lib/test/test_cmd_line_script.py b/Lib/test/test_cmd_line_script.py
index 6e097e3..77152b3 100644
--- a/Lib/test/test_cmd_line_script.py
+++ b/Lib/test/test_cmd_line_script.py
@@ -363,14 +363,19 @@ class CmdLineTest(unittest.TestCase):
self.assertTrue(text[1].startswith(' File '))
self.assertTrue(text[3].startswith('NameError'))
- def test_non_utf8(self):
+ @unittest.skipUnless(support.TESTFN_NONASCII, 'need support.TESTFN_NONASCII')
+ def test_non_ascii(self):
# Issue #16218
- with temp_dir() as script_dir:
- script_name = _make_test_script(script_dir,
- '\udcf1\udcea\udcf0\udce8\udcef\udcf2')
- self._check_script(script_name, script_name, script_name,
- script_dir, None,
- importlib.machinery.SourceFileLoader)
+ source = 'print(ascii(__file__))\n'
+ script_name = _make_test_script(os.curdir, support.TESTFN_NONASCII, source)
+ self.addCleanup(support.unlink, script_name)
+ rc, stdout, stderr = assert_python_ok(script_name)
+ self.assertEqual(
+ ascii(script_name),
+ stdout.rstrip().decode('ascii'),
+ 'stdout=%r stderr=%r' % (stdout, stderr))
+ self.assertEqual(0, rc)
+
def test_main():
support.run_unittest(CmdLineTest)
diff --git a/Lib/test/test_concurrent_futures.py b/Lib/test/test_concurrent_futures.py
index 6ae450d..4ad3309 100644
--- a/Lib/test/test_concurrent_futures.py
+++ b/Lib/test/test_concurrent_futures.py
@@ -15,6 +15,7 @@ import sys
import threading
import time
import unittest
+import weakref
from concurrent import futures
from concurrent.futures._base import (
@@ -52,6 +53,11 @@ def sleep_and_print(t, msg):
sys.stdout.flush()
+class MyObject(object):
+ def my_method(self):
+ pass
+
+
class ExecutorMixin:
worker_count = 5
@@ -396,6 +402,22 @@ class ExecutorTest(unittest.TestCase):
self.executor.map(str, [2] * (self.worker_count + 1))
self.executor.shutdown()
+ @test.support.cpython_only
+ def test_no_stale_references(self):
+ # Issue #16284: check that the executors don't unnecessarily hang onto
+ # references.
+ my_object = MyObject()
+ my_object_collected = threading.Event()
+ my_object_callback = weakref.ref(
+ my_object, lambda obj: my_object_collected.set())
+ # Deliberately discarding the future.
+ self.executor.submit(my_object.my_method)
+ del my_object
+
+ collected = my_object_collected.wait(timeout=5.0)
+ self.assertTrue(collected,
+ "Stale reference not collected within timeout.")
+
class ThreadPoolExecutorTest(ThreadPoolMixin, ExecutorTest):
def test_map_submits_without_iteration(self):
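The stale-reference test relies on a standard weakref pattern: a callback attached to a weak reference sets an Event once the referent has been collected. A self-contained sketch of that pattern with a ThreadPoolExecutor (not part of the patch; the class name and timeout are illustrative):

import threading
import weakref
from concurrent.futures import ThreadPoolExecutor

class MyObject:
    def my_method(self):
        pass

collected = threading.Event()
obj = MyObject()
ref = weakref.ref(obj, lambda r: collected.set())   # fires when obj is collected

with ThreadPoolExecutor(max_workers=1) as executor:
    executor.submit(obj.my_method)   # the returned future is deliberately discarded
    del obj                          # drop the last strong reference held by us

# On CPython this should report True promptly; implementations without
# refcount-driven collection may need an explicit gc.collect() first.
print('collected within 5s:', collected.wait(timeout=5.0))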
diff --git a/Lib/test/test_enumerate.py b/Lib/test/test_enumerate.py
index 2e904cf..a2d18d0 100644
--- a/Lib/test/test_enumerate.py
+++ b/Lib/test/test_enumerate.py
@@ -1,4 +1,5 @@
import unittest
+import operator
import sys
import pickle
@@ -168,15 +169,12 @@ class TestReversed(unittest.TestCase, PickleTest):
x = range(1)
self.assertEqual(type(reversed(x)), type(iter(x)))
- @support.cpython_only
def test_len(self):
- # This is an implementation detail, not an interface requirement
- from test.test_iterlen import len
for s in ('hello', tuple('hello'), list('hello'), range(5)):
- self.assertEqual(len(reversed(s)), len(s))
+ self.assertEqual(operator.length_hint(reversed(s)), len(s))
r = reversed(s)
list(r)
- self.assertEqual(len(r), 0)
+ self.assertEqual(operator.length_hint(r), 0)
class SeqWithWeirdLen:
called = False
def __len__(self):
@@ -187,7 +185,7 @@ class TestReversed(unittest.TestCase, PickleTest):
def __getitem__(self, index):
return index
r = reversed(SeqWithWeirdLen())
- self.assertRaises(ZeroDivisionError, len, r)
+ self.assertRaises(ZeroDivisionError, operator.length_hint, r)
def test_gc(self):
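operator.length_hint() (added by PEP 424) exposes the same hint that calling len() on a reversed object used to reveal as a CPython implementation detail, which is why the test above switches to it. A quick illustration (not part of the patch):

import operator

r = reversed('hello')
assert operator.length_hint(r) == 5   # hint before consuming the iterator
list(r)                               # exhaust it
assert operator.length_hint(r) == 0   # the hint drops to zero afterwards

def gen():
    yield 1

# When no hint is available, the supplied default is returned.
assert operator.length_hint(gen(), 42) == 42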
diff --git a/Lib/test/test_exceptions.py b/Lib/test/test_exceptions.py
index 413f4dd..8e5a767 100644
--- a/Lib/test/test_exceptions.py
+++ b/Lib/test/test_exceptions.py
@@ -8,7 +8,7 @@ import weakref
import errno
from test.support import (TESTFN, unlink, run_unittest, captured_output,
- gc_collect, cpython_only, no_tracing)
+ check_warnings, gc_collect, cpython_only, no_tracing)
class NaiveException(Exception):
def __init__(self, x):
@@ -939,9 +939,10 @@ class ImportErrorTests(unittest.TestCase):
def test_non_str_argument(self):
# Issue #15778
- arg = b'abc'
- exc = ImportError(arg)
- self.assertEqual(str(arg), str(exc))
+ with check_warnings(('', BytesWarning), quiet=True):
+ arg = b'abc'
+ exc = ImportError(arg)
+ self.assertEqual(str(arg), str(exc))
def test_main():
diff --git a/Lib/test/test_fcntl.py b/Lib/test/test_fcntl.py
index 29af99c..6d9ee0b 100644
--- a/Lib/test/test_fcntl.py
+++ b/Lib/test/test_fcntl.py
@@ -1,7 +1,4 @@
"""Test program for the fcntl C module.
-
-OS/2+EMX doesn't support the file locking operations.
-
"""
import os
import struct
@@ -35,8 +32,6 @@ def get_lockdata():
fcntl.F_WRLCK, 0)
elif sys.platform in ['aix3', 'aix4', 'hp-uxB', 'unixware7']:
lockdata = struct.pack('hhlllii', fcntl.F_WRLCK, 0, 0, 0, 0, 0, 0)
- elif sys.platform in ['os2emx']:
- lockdata = None
else:
lockdata = struct.pack('hh'+start_len+'hh', fcntl.F_WRLCK, 0, 0, 0, 0, 0)
if lockdata:
@@ -62,18 +57,16 @@ class TestFcntl(unittest.TestCase):
rv = fcntl.fcntl(self.f.fileno(), fcntl.F_SETFL, os.O_NONBLOCK)
if verbose:
print('Status from fcntl with O_NONBLOCK: ', rv)
- if sys.platform not in ['os2emx']:
- rv = fcntl.fcntl(self.f.fileno(), fcntl.F_SETLKW, lockdata)
- if verbose:
- print('String from fcntl with F_SETLKW: ', repr(rv))
+ rv = fcntl.fcntl(self.f.fileno(), fcntl.F_SETLKW, lockdata)
+ if verbose:
+ print('String from fcntl with F_SETLKW: ', repr(rv))
self.f.close()
def test_fcntl_file_descriptor(self):
# again, but pass the file rather than numeric descriptor
self.f = open(TESTFN, 'wb')
rv = fcntl.fcntl(self.f, fcntl.F_SETFL, os.O_NONBLOCK)
- if sys.platform not in ['os2emx']:
- rv = fcntl.fcntl(self.f, fcntl.F_SETLKW, lockdata)
+ rv = fcntl.fcntl(self.f, fcntl.F_SETLKW, lockdata)
self.f.close()
def test_fcntl_64_bit(self):
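The locking call kept unconditional here (F_SETLKW with a struct-packed flock record) has a simpler stdlib wrapper, fcntl.lockf(), which avoids the platform-specific packing. A POSIX-only sketch of the same kind of lock (not part of the patch; the temporary file is illustrative):

import fcntl
import os
import tempfile

fd, path = tempfile.mkstemp()
try:
    os.write(fd, b'hello')
    fcntl.lockf(fd, fcntl.LOCK_EX)   # take an exclusive lock (blocks if already held)
    fcntl.lockf(fd, fcntl.LOCK_UN)   # release it again
finally:
    os.close(fd)
    os.unlink(path)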
diff --git a/Lib/test/test_format.py b/Lib/test/test_format.py
index b6e2540..e6b0d20 100644
--- a/Lib/test/test_format.py
+++ b/Lib/test/test_format.py
@@ -307,6 +307,22 @@ class FormatTest(unittest.TestCase):
finally:
locale.setlocale(locale.LC_ALL, oldloc)
+ @support.cpython_only
+ def test_optimisations(self):
+ text = "abcde" # 5 characters
+
+ self.assertIs("%s" % text, text)
+ self.assertIs("%.5s" % text, text)
+ self.assertIs("%.10s" % text, text)
+ self.assertIs("%1s" % text, text)
+ self.assertIs("%5s" % text, text)
+
+ self.assertIs("{0}".format(text), text)
+ self.assertIs("{0:s}".format(text), text)
+ self.assertIs("{0:.5s}".format(text), text)
+ self.assertIs("{0:.10s}".format(text), text)
+ self.assertIs("{0:1s}".format(text), text)
+ self.assertIs("{0:5s}".format(text), text)
def test_main():
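The new test_optimisations case pins down a CPython implementation detail: formatting a str with a format that cannot change it may return the original object unchanged, which is why it is guarded with @support.cpython_only. A hedged illustration (prints rather than asserts, since other implementations are free to return a copy):

text = "abcde"

print("%s" % text is text)           # True on CPython: nothing to change
print("%.10s" % text is text)        # precision wider than the string: unchanged
print("{0:s}".format(text) is text)  # same for str.format with a plain "s" spec

print("%6s" % text is text)          # False: padding has to build a new string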
diff --git a/Lib/test/test_gc.py b/Lib/test/test_gc.py
index c59b72e..85dbc97 100644
--- a/Lib/test/test_gc.py
+++ b/Lib/test/test_gc.py
@@ -610,6 +610,32 @@ class GCTests(unittest.TestCase):
stderr = run_command(code % "gc.DEBUG_SAVEALL")
self.assertNotIn(b"uncollectable objects at shutdown", stderr)
+ def test_get_stats(self):
+ stats = gc.get_stats()
+ self.assertEqual(len(stats), 3)
+ for st in stats:
+ self.assertIsInstance(st, dict)
+ self.assertEqual(set(st),
+ {"collected", "collections", "uncollectable"})
+ self.assertGreaterEqual(st["collected"], 0)
+ self.assertGreaterEqual(st["collections"], 0)
+ self.assertGreaterEqual(st["uncollectable"], 0)
+ # Check that collection counts are incremented correctly
+ if gc.isenabled():
+ self.addCleanup(gc.enable)
+ gc.disable()
+ old = gc.get_stats()
+ gc.collect(0)
+ new = gc.get_stats()
+ self.assertEqual(new[0]["collections"], old[0]["collections"] + 1)
+ self.assertEqual(new[1]["collections"], old[1]["collections"])
+ self.assertEqual(new[2]["collections"], old[2]["collections"])
+ gc.collect(2)
+ new = gc.get_stats()
+ self.assertEqual(new[0]["collections"], old[0]["collections"] + 1)
+ self.assertEqual(new[1]["collections"], old[1]["collections"])
+ self.assertEqual(new[2]["collections"], old[2]["collections"] + 1)
+
class GCCallbackTests(unittest.TestCase):
def setUp(self):
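gc.get_stats() returns one dict per collection generation with "collections", "collected" and "uncollectable" counters, which is exactly what the new test checks. A small sketch (not part of the patch; assumes the usual three-generation CPython collector):

import gc

stats = gc.get_stats()
assert len(stats) == 3                       # one entry per generation
for st in stats:
    assert set(st) == {"collections", "collected", "uncollectable"}

gc.disable()                                 # keep automatic collections out of the way
try:
    before = gc.get_stats()[0]["collections"]
    gc.collect(0)                            # run a generation-0 collection
    after = gc.get_stats()[0]["collections"]
    assert after == before + 1
finally:
    gc.enable()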
diff --git a/Lib/test/test_generators.py b/Lib/test/test_generators.py
index 06f67c2..f5c1a7d 100644
--- a/Lib/test/test_generators.py
+++ b/Lib/test/test_generators.py
@@ -1728,9 +1728,7 @@ Our ill-behaved code should be invoked during GC:
>>> g = f()
>>> next(g)
>>> del g
->>> sys.stderr.getvalue().startswith(
-... "Exception RuntimeError: 'generator ignored GeneratorExit' in "
-... )
+>>> "RuntimeError: generator ignored GeneratorExit" in sys.stderr.getvalue()
True
>>> sys.stderr = old
@@ -1840,22 +1838,23 @@ to test.
... sys.stderr = io.StringIO()
... class Leaker:
... def __del__(self):
-... raise RuntimeError
+... def invoke(message):
+... raise RuntimeError(message)
+... invoke("test")
...
... l = Leaker()
... del l
... err = sys.stderr.getvalue().strip()
-... err.startswith(
-... "Exception RuntimeError: RuntimeError() in <"
-... )
-... err.endswith("> ignored")
-... len(err.splitlines())
+... "Exception ignored in" in err
+... "RuntimeError: test" in err
+... "Traceback" in err
+... "in invoke" in err
... finally:
... sys.stderr = old
True
True
-1
-
+True
+True
These refleak tests should perhaps be in a testfile of their own,
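The updated doctests reflect the newer "Exception ignored in ..." report format for exceptions that escape __del__. A standalone sketch of that behaviour (not part of the patch; it relies on CPython's prompt, refcount-driven finalization):

import io
import sys

class Leaker:
    def __del__(self):
        raise RuntimeError("test")

old_stderr = sys.stderr
sys.stderr = io.StringIO()                   # capture the unraisable-error report
try:
    l = Leaker()
    del l                                    # __del__ raises; the error cannot propagate
    report = sys.stderr.getvalue()
finally:
    sys.stderr = old_stderr

print("Exception ignored in" in report)      # True
print("Traceback" in report)                 # True: a full traceback is included
print("RuntimeError: test" in report)        # True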
diff --git a/Lib/test/test_genericpath.py b/Lib/test/test_genericpath.py
index 3eadd58..9d1d2bc 100644
--- a/Lib/test/test_genericpath.py
+++ b/Lib/test/test_genericpath.py